/*
 * TCC - Tiny C Compiler
 *
 * Copyright (c) 2001-2004 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   anon_sym: anonymous symbol index */
31 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Sym
*sym_free_first
;
34 ST_DATA
void **sym_pools
;
35 ST_DATA
int nb_sym_pools
;
37 ST_DATA Sym
*global_stack
;
38 ST_DATA Sym
*local_stack
;
39 ST_DATA Sym
*define_stack
;
40 ST_DATA Sym
*global_label_stack
;
41 ST_DATA Sym
*local_label_stack
;
42 static int local_scope
;
44 static int section_sym
;
46 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
47 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
52 ST_DATA
int const_wanted
; /* true if constant wanted */
53 ST_DATA
int nocode_wanted
; /* true if no code generation wanted for an expression */
54 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
55 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
56 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
58 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
59 ST_DATA
const char *funcname
;
61 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
;
63 ST_DATA
struct switch_t
{
67 } **p
; int n
; /* list of case ranges */
68 int def_sym
; /* default symbol */
69 } *cur_switch
; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType
*type
);
74 static inline CType
*pointed_type(CType
*type
);
75 static int is_compatible_types(CType
*type1
, CType
*type2
);
76 static int parse_btype(CType
*type
, AttributeDef
*ad
);
77 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
78 static void parse_expr_type(CType
*type
);
79 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
80 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
81 static void block(int *bsym
, int *csym
, int is_expr
);
82 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
83 static int decl0(int l
, int is_for_loop_init
, Sym
*);
84 static void expr_eq(void);
85 static void vla_runtime_type_size(CType
*type
, int *a
);
86 static void vla_sp_restore(void);
87 static void vla_sp_restore_root(void);
88 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
);
89 static inline int64_t expr_const64(void);
90 ST_FUNC
void vpush64(int ty
, unsigned long long v
);
91 ST_FUNC
void vpush(CType
*type
);
92 ST_FUNC
int gvtst(int inv
, int t
);
93 ST_FUNC
int is_btype_size(int bt
);
94 static void gen_inline_functions(TCCState
*s
);
96 ST_INLN
int is_float(int t
)
100 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
103 /* we use our own 'finite' function to avoid potential problems with
104 non standard math libs */
105 /* XXX: endianness dependent */
106 ST_FUNC
int ieee_finite(double d
)
109 memcpy(p
, &d
, sizeof(double));
110 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
113 ST_FUNC
void test_lvalue(void)
115 if (!(vtop
->r
& VT_LVAL
))
119 ST_FUNC
void check_vstack(void)
122 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
125 /* ------------------------------------------------------------------------- */
126 /* vstack debugging aid */
129 void pv (const char *lbl
, int a
, int b
)
132 for (i
= a
; i
< a
+ b
; ++i
) {
133 SValue
*p
= &vtop
[-i
];
134 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
135 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
140 /* ------------------------------------------------------------------------- */
141 /* start of translation unit info */
142 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
147 /* file info: full path + filename */
148 section_sym
= put_elf_sym(symtab_section
, 0, 0,
149 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
150 text_section
->sh_num
, NULL
);
151 getcwd(buf
, sizeof(buf
));
153 normalize_slashes(buf
);
155 pstrcat(buf
, sizeof(buf
), "/");
156 put_stabs_r(buf
, N_SO
, 0, 0,
157 text_section
->data_offset
, text_section
, section_sym
);
158 put_stabs_r(file
->filename
, N_SO
, 0, 0,
159 text_section
->data_offset
, text_section
, section_sym
);
164 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
165 symbols can be safely used */
166 put_elf_sym(symtab_section
, 0, 0,
167 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
168 SHN_ABS
, file
->filename
);
171 /* put end of translation unit info */
172 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
176 put_stabs_r(NULL
, N_SO
, 0, 0,
177 text_section
->data_offset
, text_section
, section_sym
);
181 /* generate line number info */
182 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
186 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
187 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
189 last_line_num
= file
->line_num
;
193 /* put function symbol */
194 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
202 /* XXX: we put here a dummy type */
203 snprintf(buf
, sizeof(buf
), "%s:%c1",
204 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
205 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
206 cur_text_section
, sym
->c
);
207 /* //gr gdb wants a line at the function */
208 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
214 /* put function size */
215 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
219 put_stabn(N_FUN
, 0, 0, size
);
222 /* ------------------------------------------------------------------------- */
223 ST_FUNC
void tccgen_start(TCCState
*s1
)
225 cur_text_section
= NULL
;
227 anon_sym
= SYM_FIRST_ANOM
;
232 /* define some often used types */
234 char_pointer_type
.t
= VT_BYTE
;
235 mk_pointer(&char_pointer_type
);
237 size_type
.t
= VT_INT
;
239 size_type
.t
= VT_LLONG
;
241 func_old_type
.t
= VT_FUNC
;
242 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, FUNC_CDECL
, FUNC_OLD
);
246 #ifdef TCC_TARGET_ARM
251 ST_FUNC
void tccgen_end(TCCState
*s1
)
253 gen_inline_functions(s1
);
255 /* end of translation unit info */
259 /* ------------------------------------------------------------------------- */
260 /* apply storage attributes to Elf symbol */
262 static void update_storage(Sym
*sym
)
271 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
274 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
275 | ((t
& VT_VIS_MASK
) >> VT_VIS_SHIFT
);
278 esym
->st_info
= ELFW(ST_INFO
)(STB_WEAK
, ELFW(ST_TYPE
)(esym
->st_info
));
282 esym
->st_other
|= ST_PE_EXPORT
;
286 /* ------------------------------------------------------------------------- */
287 /* update sym->c so that it points to an external symbol in section
288 'section' with value 'value' */
290 ST_FUNC
void put_extern_sym2(Sym
*sym
, Section
*section
,
291 addr_t value
, unsigned long size
,
292 int can_add_underscore
)
294 int sym_type
, sym_bind
, sh_num
, info
, other
, t
;
298 #ifdef CONFIG_TCC_BCHECK
304 else if (section
== SECTION_ABS
)
307 sh_num
= section
->sh_num
;
310 name
= get_tok_str(sym
->v
, NULL
);
311 #ifdef CONFIG_TCC_BCHECK
312 if (tcc_state
->do_bounds_check
) {
313 /* XXX: avoid doing that for statics ? */
314 /* if bound checking is activated, we change some function
315 names by adding the "__bound" prefix */
318 /* XXX: we rely only on malloc hooks */
331 strcpy(buf
, "__bound_");
339 if ((t
& VT_BTYPE
) == VT_FUNC
) {
341 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
342 sym_type
= STT_NOTYPE
;
344 sym_type
= STT_OBJECT
;
347 sym_bind
= STB_LOCAL
;
349 sym_bind
= STB_GLOBAL
;
352 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
353 Sym
*ref
= sym
->type
.ref
;
354 if (ref
->a
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
355 sprintf(buf1
, "_%s@%d", name
, ref
->a
.func_args
* PTR_SIZE
);
357 other
|= ST_PE_STDCALL
;
358 can_add_underscore
= 0;
362 other
|= ST_PE_IMPORT
;
364 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
366 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
370 name
= get_tok_str(sym
->asm_label
, NULL
);
371 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
372 sym
->c
= set_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
374 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
375 esym
->st_value
= value
;
376 esym
->st_size
= size
;
377 esym
->st_shndx
= sh_num
;
382 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
383 addr_t value
, unsigned long size
)
385 put_extern_sym2(sym
, section
, value
, size
, 1);
388 /* add a new relocation entry to symbol 'sym' in section 's' */
389 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
394 if (nocode_wanted
&& s
== cur_text_section
)
399 put_extern_sym(sym
, NULL
, 0, 0);
403 /* now we can add ELF relocation info */
404 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
408 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
410 greloca(s
, sym
, offset
, type
, 0);
414 /* ------------------------------------------------------------------------- */
415 /* symbol allocator */
416 static Sym
*__sym_malloc(void)
418 Sym
*sym_pool
, *sym
, *last_sym
;
421 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
422 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
424 last_sym
= sym_free_first
;
426 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
427 sym
->next
= last_sym
;
431 sym_free_first
= last_sym
;
435 static inline Sym
*sym_malloc(void)
439 sym
= sym_free_first
;
441 sym
= __sym_malloc();
442 sym_free_first
= sym
->next
;
445 sym
= tcc_malloc(sizeof(Sym
));
450 ST_INLN
void sym_free(Sym
*sym
)
453 sym
->next
= sym_free_first
;
454 sym_free_first
= sym
;
460 /* push, without hashing */
461 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, long c
)
481 /* find a symbol and return its associated structure. 's' is the top
482 of the symbol stack */
483 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
495 /* structure lookup */
496 ST_INLN Sym
*struct_find(int v
)
499 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
501 return table_ident
[v
]->sym_struct
;
504 /* find an identifier */
505 ST_INLN Sym
*sym_find(int v
)
508 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
510 return table_ident
[v
]->sym_identifier
;
513 /* push a given symbol on the symbol stack */
514 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, long c
)
523 s
= sym_push2(ps
, v
, type
->t
, c
);
524 s
->type
.ref
= type
->ref
;
526 /* don't record fields or anonymous symbols */
528 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
529 /* record symbol in token array */
530 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
532 ps
= &ts
->sym_struct
;
534 ps
= &ts
->sym_identifier
;
537 s
->scope
= local_scope
;
538 if (s
->prev_tok
&& s
->prev_tok
->scope
== s
->scope
)
539 tcc_error("redeclaration of '%s'",
540 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
545 /* push a global identifier */
546 ST_FUNC Sym
*global_identifier_push(int v
, int t
, long c
)
549 s
= sym_push2(&global_stack
, v
, t
, c
);
550 /* don't record anonymous symbol */
551 if (v
< SYM_FIRST_ANOM
) {
552 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
553 /* modify the top most local identifier, so that
554 sym_identifier will point to 's' when popped */
556 ps
= &(*ps
)->prev_tok
;
563 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
564 pop them yet from the list, but do remove them from the token array. */
565 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
575 /* remove symbol in token array */
577 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
578 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
580 ps
= &ts
->sym_struct
;
582 ps
= &ts
->sym_identifier
;
593 /* ------------------------------------------------------------------------- */
595 static void vsetc(CType
*type
, int r
, CValue
*vc
)
599 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
600 tcc_error("memory full (vstack)");
601 /* cannot let cpu flags if other instruction are generated. Also
602 avoid leaving VT_JMP anywhere except on the top of the stack
603 because it would complicate the code generator.
605 Don't do this when nocode_wanted. vtop might come from
606 !nocode_wanted regions (see 88_codeopt.c) and transforming
607 it to a register without actually generating code is wrong
608 as their value might still be used for real. All values
609 we push under nocode_wanted will eventually be popped
610 again, so that the VT_CMP/VT_JMP value will be in vtop
611 when code is unsuppressed again.
613 Same logic below in vswap(); */
614 if (vtop
>= vstack
&& !nocode_wanted
) {
615 v
= vtop
->r
& VT_VALMASK
;
616 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
628 ST_FUNC
void vswap(void)
631 /* cannot vswap cpu flags. See comment at vsetc() above */
632 if (vtop
>= vstack
&& !nocode_wanted
) {
633 int v
= vtop
->r
& VT_VALMASK
;
634 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
642 /* pop stack value */
643 ST_FUNC
void vpop(void)
646 v
= vtop
->r
& VT_VALMASK
;
647 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
648 /* for x86, we need to pop the FP stack */
650 o(0xd8dd); /* fstp %st(0) */
653 if (v
== VT_JMP
|| v
== VT_JMPI
) {
654 /* need to put correct jump if && or || without test */
660 /* push constant of type "type" with useless value */
661 ST_FUNC
void vpush(CType
*type
)
664 vsetc(type
, VT_CONST
, &cval
);
667 /* push integer constant */
668 ST_FUNC
void vpushi(int v
)
672 vsetc(&int_type
, VT_CONST
, &cval
);
675 /* push a pointer sized constant */
676 static void vpushs(addr_t v
)
680 vsetc(&size_type
, VT_CONST
, &cval
);
683 /* push arbitrary 64bit constant */
684 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
691 vsetc(&ctype
, VT_CONST
, &cval
);
694 /* push long long constant */
695 static inline void vpushll(long long v
)
697 vpush64(VT_LLONG
, v
);
700 ST_FUNC
void vset(CType
*type
, int r
, long v
)
705 vsetc(type
, r
, &cval
);
708 static void vseti(int r
, int v
)
716 ST_FUNC
void vpushv(SValue
*v
)
718 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
719 tcc_error("memory full (vstack)");
724 static void vdup(void)
729 /* rotate n first stack elements to the bottom
730 I1 ... In -> I2 ... In I1 [top is right]
732 ST_FUNC
void vrotb(int n
)
743 /* rotate the n elements before entry e towards the top
744 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
746 ST_FUNC
void vrote(SValue
*e
, int n
)
752 for(i
= 0;i
< n
- 1; i
++)
757 /* rotate n first stack elements to the top
758 I1 ... In -> In I1 ... I(n-1) [top is right]
760 ST_FUNC
void vrott(int n
)
765 /* push a symbol value of TYPE */
766 static inline void vpushsym(CType
*type
, Sym
*sym
)
770 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
774 /* Return a static symbol pointing to a section */
775 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
781 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
782 sym
->type
.ref
= type
->ref
;
783 sym
->r
= VT_CONST
| VT_SYM
;
784 put_extern_sym(sym
, sec
, offset
, size
);
788 /* push a reference to a section offset by adding a dummy symbol */
789 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
791 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
794 /* define a new external reference to a symbol 'v' of type 'u' */
795 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
801 /* push forward reference */
802 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
803 s
->type
.ref
= type
->ref
;
804 s
->r
= r
| VT_CONST
| VT_SYM
;
809 /* Merge some storage attributes. */
810 static void patch_storage(Sym
*sym
, CType
*type
)
813 if (!is_compatible_types(&sym
->type
, type
))
814 tcc_error("incompatible types for redefinition of '%s'",
815 get_tok_str(sym
->v
, NULL
));
818 if ((sym
->type
.t
^ t
) & VT_IMPORT
)
819 tcc_error("incompatible dll linkage for redefinition of '%s'",
820 get_tok_str(sym
->v
, NULL
));
822 sym
->type
.t
|= t
& (VT_EXPORT
|VT_WEAK
);
823 if (t
& VT_VIS_MASK
) {
824 int vis
= sym
->type
.t
& VT_VIS_MASK
;
825 int vis2
= t
& VT_VIS_MASK
;
826 if (vis
== (STV_DEFAULT
<< VT_VIS_SHIFT
))
828 else if (vis2
!= (STV_DEFAULT
<< VT_VIS_SHIFT
))
829 vis
= (vis
< vis2
) ? vis
: vis2
;
830 sym
->type
.t
= (sym
->type
.t
& ~VT_VIS_MASK
) | vis
;
834 /* define a new external reference to a symbol 'v' */
835 static Sym
*external_sym(int v
, CType
*type
, int r
)
840 /* push forward reference */
841 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
842 s
->type
.t
|= VT_EXTERN
;
844 if (s
->type
.ref
== func_old_type
.ref
) {
845 s
->type
.ref
= type
->ref
;
846 s
->r
= r
| VT_CONST
| VT_SYM
;
847 s
->type
.t
|= VT_EXTERN
;
849 patch_storage(s
, type
);
855 /* push a reference to global symbol v */
856 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
858 vpushsym(type
, external_global_sym(v
, type
, 0));
861 /* save registers up to (vtop - n) stack entry */
862 ST_FUNC
void save_regs(int n
)
865 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
869 /* save r to the memory stack, and mark it as being free */
870 ST_FUNC
void save_reg(int r
)
872 save_reg_upstack(r
, 0);
875 /* save r to the memory stack, and mark it as being free,
876 if seen up to (vtop - n) stack entry */
877 ST_FUNC
void save_reg_upstack(int r
, int n
)
879 int l
, saved
, size
, align
;
883 if ((r
&= VT_VALMASK
) >= VT_CONST
)
888 /* modify all stack values */
891 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
892 if ((p
->r
& VT_VALMASK
) == r
||
893 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
894 /* must save value on stack if not already done */
896 /* NOTE: must reload 'r' because r might be equal to r2 */
897 r
= p
->r
& VT_VALMASK
;
898 /* store register in the stack */
900 if ((p
->r
& VT_LVAL
) ||
901 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
903 type
= &char_pointer_type
;
907 size
= type_size(type
, &align
);
908 loc
= (loc
- size
) & -align
;
910 sv
.r
= VT_LOCAL
| VT_LVAL
;
913 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
914 /* x86 specific: need to pop fp register ST0 if saved */
916 o(0xd8dd); /* fstp %st(0) */
920 /* special long long case */
921 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
929 /* mark that stack entry as being saved on the stack */
930 if (p
->r
& VT_LVAL
) {
931 /* also clear the bounded flag because the
932 relocation address of the function was stored in
934 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
936 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
944 #ifdef TCC_TARGET_ARM
945 /* find a register of class 'rc2' with at most one reference on stack.
946 * If none, call get_reg(rc) */
947 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
952 for(r
=0;r
<NB_REGS
;r
++) {
953 if (reg_classes
[r
] & rc2
) {
956 for(p
= vstack
; p
<= vtop
; p
++) {
957 if ((p
->r
& VT_VALMASK
) == r
||
958 (p
->r2
& VT_VALMASK
) == r
)
969 /* find a free register of class 'rc'. If none, save one register */
970 ST_FUNC
int get_reg(int rc
)
975 /* find a free register */
976 for(r
=0;r
<NB_REGS
;r
++) {
977 if (reg_classes
[r
] & rc
) {
980 for(p
=vstack
;p
<=vtop
;p
++) {
981 if ((p
->r
& VT_VALMASK
) == r
||
982 (p
->r2
& VT_VALMASK
) == r
)
990 /* no register left : free the first one on the stack (VERY
991 IMPORTANT to start from the bottom to ensure that we don't
992 spill registers used in gen_opi()) */
993 for(p
=vstack
;p
<=vtop
;p
++) {
994 /* look at second register (if long long) */
995 r
= p
->r2
& VT_VALMASK
;
996 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
998 r
= p
->r
& VT_VALMASK
;
999 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1005 /* Should never comes here */
1009 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1011 static void move_reg(int r
, int s
, int t
)
1025 /* get address of vtop (vtop MUST BE an lvalue) */
1026 ST_FUNC
void gaddrof(void)
1028 vtop
->r
&= ~VT_LVAL
;
1029 /* tricky: if saved lvalue, then we can go back to lvalue */
1030 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1031 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1036 #ifdef CONFIG_TCC_BCHECK
1037 /* generate lvalue bound code */
1038 static void gbound(void)
1043 vtop
->r
&= ~VT_MUSTBOUND
;
1044 /* if lvalue, then use checking code before dereferencing */
1045 if (vtop
->r
& VT_LVAL
) {
1046 /* if not VT_BOUNDED value, then make one */
1047 if (!(vtop
->r
& VT_BOUNDED
)) {
1048 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1049 /* must save type because we must set it to int to get pointer */
1051 vtop
->type
.t
= VT_PTR
;
1054 gen_bounded_ptr_add();
1055 vtop
->r
|= lval_type
;
1058 /* then check for dereferencing */
1059 gen_bounded_ptr_deref();
1064 /* store vtop a register belonging to class 'rc'. lvalues are
1065 converted to values. Cannot be used if cannot be converted to
1066 register value (such as structures). */
1067 ST_FUNC
int gv(int rc
)
1069 int r
, bit_pos
, bit_size
, size
, align
;
1072 /* NOTE: get_reg can modify vstack[] */
1073 if (vtop
->type
.t
& VT_BITFIELD
) {
1076 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
1077 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
1078 /* remove bit field info to avoid loops */
1079 vtop
->type
.t
&= ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
1080 /* cast to int to propagate signedness in following ops */
1081 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1086 if((vtop
->type
.t
& VT_UNSIGNED
) ||
1087 (vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
||
1088 (((vtop
->type
.t
& VT_BTYPE
) == VT_ENUM
) &&
1089 vtop
->type
.ref
->a
.unsigned_enum
))
1090 type
.t
|= VT_UNSIGNED
;
1092 /* generate shifts */
1093 vpushi(bits
- (bit_pos
+ bit_size
));
1095 vpushi(bits
- bit_size
);
1096 /* NOTE: transformed to SHR if unsigned */
1100 if (is_float(vtop
->type
.t
) &&
1101 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1102 unsigned long offset
;
1103 /* CPUs usually cannot use float constants, so we store them
1104 generically in data segment */
1105 size
= type_size(&vtop
->type
, &align
);
1106 offset
= section_add(data_section
, size
, align
);
1107 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1109 init_putv(&vtop
->type
, data_section
, offset
);
1112 #ifdef CONFIG_TCC_BCHECK
1113 if (vtop
->r
& VT_MUSTBOUND
)
1117 r
= vtop
->r
& VT_VALMASK
;
1118 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1119 #ifndef TCC_TARGET_ARM64
1122 #ifdef TCC_TARGET_X86_64
1123 else if (rc
== RC_FRET
)
1127 /* need to reload if:
1129 - lvalue (need to dereference pointer)
1130 - already a register, but not in the right class */
1132 || (vtop
->r
& VT_LVAL
)
1133 || !(reg_classes
[r
] & rc
)
1135 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1136 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1138 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1144 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1145 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1147 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1148 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1149 unsigned long long ll
;
1151 int r2
, original_type
;
1152 original_type
= vtop
->type
.t
;
1153 /* two register type load : expand to two words
1156 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1159 vtop
->c
.i
= ll
; /* first word */
1161 vtop
->r
= r
; /* save register value */
1162 vpushi(ll
>> 32); /* second word */
1165 if (vtop
->r
& VT_LVAL
) {
1166 /* We do not want to modifier the long long
1167 pointer here, so the safest (and less
1168 efficient) is to save all the other registers
1169 in the stack. XXX: totally inefficient. */
1173 /* lvalue_save: save only if used further down the stack */
1174 save_reg_upstack(vtop
->r
, 1);
1176 /* load from memory */
1177 vtop
->type
.t
= load_type
;
1180 vtop
[-1].r
= r
; /* save register value */
1181 /* increment pointer to get second word */
1182 vtop
->type
.t
= addr_type
;
1187 vtop
->type
.t
= load_type
;
1189 /* move registers */
1192 vtop
[-1].r
= r
; /* save register value */
1193 vtop
->r
= vtop
[-1].r2
;
1195 /* Allocate second register. Here we rely on the fact that
1196 get_reg() tries first to free r2 of an SValue. */
1200 /* write second register */
1202 vtop
->type
.t
= original_type
;
1203 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1205 /* lvalue of scalar type : need to use lvalue type
1206 because of possible cast */
1209 /* compute memory access type */
1210 if (vtop
->r
& VT_LVAL_BYTE
)
1212 else if (vtop
->r
& VT_LVAL_SHORT
)
1214 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1218 /* restore wanted type */
1221 /* one register type load */
1226 #ifdef TCC_TARGET_C67
1227 /* uses register pairs for doubles */
1228 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1235 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1236 ST_FUNC
void gv2(int rc1
, int rc2
)
1240 /* generate more generic register first. But VT_JMP or VT_CMP
1241 values must be generated first in all cases to avoid possible
1243 v
= vtop
[0].r
& VT_VALMASK
;
1244 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1249 /* test if reload is needed for first register */
1250 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1260 /* test if reload is needed for first register */
1261 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1267 #ifndef TCC_TARGET_ARM64
1268 /* wrapper around RC_FRET to return a register by type */
1269 static int rc_fret(int t
)
1271 #ifdef TCC_TARGET_X86_64
1272 if (t
== VT_LDOUBLE
) {
1280 /* wrapper around REG_FRET to return a register by type */
1281 static int reg_fret(int t
)
1283 #ifdef TCC_TARGET_X86_64
1284 if (t
== VT_LDOUBLE
) {
1292 /* expand 64bit on stack in two ints */
1293 static void lexpand(void)
1296 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1297 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1298 if (v
== VT_CONST
) {
1301 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1307 vtop
[0].r
= vtop
[-1].r2
;
1308 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1310 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1314 #ifdef TCC_TARGET_ARM
1315 /* expand long long on stack */
1316 ST_FUNC
void lexpand_nr(void)
1320 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1322 vtop
->r2
= VT_CONST
;
1323 vtop
->type
.t
= VT_INT
| u
;
1324 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1325 if (v
== VT_CONST
) {
1326 vtop
[-1].c
.i
= vtop
->c
.i
;
1327 vtop
->c
.i
= vtop
->c
.i
>> 32;
1329 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1331 vtop
->r
= vtop
[-1].r
;
1332 } else if (v
> VT_CONST
) {
1336 vtop
->r
= vtop
[-1].r2
;
1337 vtop
[-1].r2
= VT_CONST
;
1338 vtop
[-1].type
.t
= VT_INT
| u
;
1343 /* build a long long from two ints */
1344 static void lbuild(int t
)
1346 gv2(RC_INT
, RC_INT
);
1347 vtop
[-1].r2
= vtop
[0].r
;
1348 vtop
[-1].type
.t
= t
;
1353 /* convert stack entry to register and duplicate its value in another
1355 static void gv_dup(void)
1362 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1369 /* stack: H L L1 H1 */
1379 /* duplicate value */
1384 #ifdef TCC_TARGET_X86_64
1385 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1395 load(r1
, &sv
); /* move r to r1 */
1397 /* duplicates value */
1403 /* Generate value test
1405 * Generate a test for any value (jump, comparison and integers) */
1406 ST_FUNC
int gvtst(int inv
, int t
)
1408 int v
= vtop
->r
& VT_VALMASK
;
1409 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1413 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1414 /* constant jmp optimization */
1415 if ((vtop
->c
.i
!= 0) != inv
)
1420 return gtst(inv
, t
);
1424 /* generate CPU independent (unsigned) long long operations */
1425 static void gen_opl(int op
)
1427 int t
, a
, b
, op1
, c
, i
;
1429 unsigned short reg_iret
= REG_IRET
;
1430 unsigned short reg_lret
= REG_LRET
;
1436 func
= TOK___divdi3
;
1439 func
= TOK___udivdi3
;
1442 func
= TOK___moddi3
;
1445 func
= TOK___umoddi3
;
1452 /* call generic long long function */
1453 vpush_global_sym(&func_old_type
, func
);
1458 vtop
->r2
= reg_lret
;
1466 //pv("gen_opl A",0,2);
1472 /* stack: L1 H1 L2 H2 */
1477 vtop
[-2] = vtop
[-3];
1480 /* stack: H1 H2 L1 L2 */
1481 //pv("gen_opl B",0,4);
1487 /* stack: H1 H2 L1 L2 ML MH */
1490 /* stack: ML MH H1 H2 L1 L2 */
1494 /* stack: ML MH H1 L2 H2 L1 */
1499 /* stack: ML MH M1 M2 */
1502 } else if (op
== '+' || op
== '-') {
1503 /* XXX: add non carry method too (for MIPS or alpha) */
1509 /* stack: H1 H2 (L1 op L2) */
1512 gen_op(op1
+ 1); /* TOK_xxxC2 */
1515 /* stack: H1 H2 (L1 op L2) */
1518 /* stack: (L1 op L2) H1 H2 */
1520 /* stack: (L1 op L2) (H1 op H2) */
1528 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1529 t
= vtop
[-1].type
.t
;
1533 /* stack: L H shift */
1535 /* constant: simpler */
1536 /* NOTE: all comments are for SHL. the other cases are
1537 done by swapping words */
1548 if (op
!= TOK_SAR
) {
1581 /* XXX: should provide a faster fallback on x86 ? */
1584 func
= TOK___ashrdi3
;
1587 func
= TOK___lshrdi3
;
1590 func
= TOK___ashldi3
;
1596 /* compare operations */
1602 /* stack: L1 H1 L2 H2 */
1604 vtop
[-1] = vtop
[-2];
1606 /* stack: L1 L2 H1 H2 */
1609 /* when values are equal, we need to compare low words. since
1610 the jump is inverted, we invert the test too. */
1613 else if (op1
== TOK_GT
)
1615 else if (op1
== TOK_ULT
)
1617 else if (op1
== TOK_UGT
)
1627 /* generate non equal test */
1633 /* compare low. Always unsigned */
1637 else if (op1
== TOK_LE
)
1639 else if (op1
== TOK_GT
)
1641 else if (op1
== TOK_GE
)
/* Constant-fold a signed 64-bit division entirely in unsigned
   arithmetic, so the compiler's own fold never triggers signed
   overflow UB (e.g. INT64_MIN / -1 simply wraps). */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t abs_a = (a >> 63) ? -a : a;   /* magnitude of a */
    uint64_t abs_b = (b >> 63) ? -b : b;   /* magnitude of b */
    uint64_t q = abs_a / abs_b;            /* unsigned quotient */
    /* the quotient is negative exactly when the operand signs differ */
    return ((a ^ b) >> 63) ? -q : q;
}
/* Signed "a < b" on 64-bit values held in unsigned variables.
   Flipping the sign bit maps the signed range order-preservingly
   onto the unsigned range, so a plain unsigned compare suffices. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
1663 /* handle integer constant optimizations and various machine
1665 static void gen_opic(int op
)
1667 SValue
*v1
= vtop
- 1;
1669 int t1
= v1
->type
.t
& VT_BTYPE
;
1670 int t2
= v2
->type
.t
& VT_BTYPE
;
1671 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1672 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1673 uint64_t l1
= c1
? v1
->c
.i
: 0;
1674 uint64_t l2
= c2
? v2
->c
.i
: 0;
1675 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1677 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1678 l1
= ((uint32_t)l1
|
1679 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1680 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1681 l2
= ((uint32_t)l2
|
1682 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1686 case '+': l1
+= l2
; break;
1687 case '-': l1
-= l2
; break;
1688 case '&': l1
&= l2
; break;
1689 case '^': l1
^= l2
; break;
1690 case '|': l1
|= l2
; break;
1691 case '*': l1
*= l2
; break;
1698 /* if division by zero, generate explicit division */
1701 tcc_error("division by zero in constant");
1705 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1706 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1707 case TOK_UDIV
: l1
= l1
/ l2
; break;
1708 case TOK_UMOD
: l1
= l1
% l2
; break;
1711 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1712 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1714 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1717 case TOK_ULT
: l1
= l1
< l2
; break;
1718 case TOK_UGE
: l1
= l1
>= l2
; break;
1719 case TOK_EQ
: l1
= l1
== l2
; break;
1720 case TOK_NE
: l1
= l1
!= l2
; break;
1721 case TOK_ULE
: l1
= l1
<= l2
; break;
1722 case TOK_UGT
: l1
= l1
> l2
; break;
1723 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1724 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1725 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1726 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1728 case TOK_LAND
: l1
= l1
&& l2
; break;
1729 case TOK_LOR
: l1
= l1
|| l2
; break;
1733 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1734 l1
= ((uint32_t)l1
|
1735 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1739 /* if commutative ops, put c2 as constant */
1740 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1741 op
== '|' || op
== '*')) {
1743 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1744 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1746 if (!const_wanted
&&
1748 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1749 (l1
== -1 && op
== TOK_SAR
))) {
1750 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1752 } else if (!const_wanted
&&
1753 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1754 (l2
== -1 && op
== '|') ||
1755 (l2
== 0xffffffff && t2
!= VT_LLONG
&& op
== '|') ||
1756 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1757 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1762 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1765 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1766 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1770 /* filter out NOP operations like x*1, x-0, x&-1... */
1772 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1773 /* try to use shifts instead of muls or divs */
1774 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1783 else if (op
== TOK_PDIV
)
1789 } else if (c2
&& (op
== '+' || op
== '-') &&
1790 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1791 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1792 /* symbol + constant case */
1796 /* The backends can't always deal with addends to symbols
1797 larger than +-1<<31. Don't construct such. */
1804 /* call low level op generator */
1805 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
1806 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
1814 /* generate a floating point operation with constant propagation */
1815 static void gen_opif(int op
)
1823 /* currently, we cannot do computations with forward symbols */
1824 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1825 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1827 if (v1
->type
.t
== VT_FLOAT
) {
1830 } else if (v1
->type
.t
== VT_DOUBLE
) {
1838 /* NOTE: we only do constant propagation if finite number (not
1839 NaN or infinity) (ANSI spec) */
1840 if (!ieee_finite(f1
) || !ieee_finite(f2
))
1844 case '+': f1
+= f2
; break;
1845 case '-': f1
-= f2
; break;
1846 case '*': f1
*= f2
; break;
1850 tcc_error("division by zero in constant");
1855 /* XXX: also handles tests ? */
1859 /* XXX: overflow test ? */
1860 if (v1
->type
.t
== VT_FLOAT
) {
1862 } else if (v1
->type
.t
== VT_DOUBLE
) {
1874 static int pointed_size(CType
*type
)
1877 return type_size(pointed_type(type
), &align
);
1880 static void vla_runtime_pointed_size(CType
*type
)
1883 vla_runtime_type_size(pointed_type(type
), &align
);
1886 static inline int is_null_pointer(SValue
*p
)
1888 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
1890 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
1891 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
1892 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
1893 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
1896 static inline int is_integer_btype(int bt
)
1898 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
1899 bt
== VT_INT
|| bt
== VT_LLONG
);
1902 /* check types for comparison or subtraction of pointers */
1903 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
1905 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
1908 /* null pointers are accepted for all comparisons as gcc */
1909 if (is_null_pointer(p1
) || is_null_pointer(p2
))
1913 bt1
= type1
->t
& VT_BTYPE
;
1914 bt2
= type2
->t
& VT_BTYPE
;
1915 /* accept comparison between pointer and integer with a warning */
1916 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
1917 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
1918 tcc_warning("comparison between pointer and integer");
1922 /* both must be pointers or implicit function pointers */
1923 if (bt1
== VT_PTR
) {
1924 type1
= pointed_type(type1
);
1925 } else if (bt1
!= VT_FUNC
)
1926 goto invalid_operands
;
1928 if (bt2
== VT_PTR
) {
1929 type2
= pointed_type(type2
);
1930 } else if (bt2
!= VT_FUNC
) {
1932 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
1934 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
1935 (type2
->t
& VT_BTYPE
) == VT_VOID
)
1939 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1940 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1941 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
1942 /* gcc-like error if '-' is used */
1944 goto invalid_operands
;
1946 tcc_warning("comparison of distinct pointer types lacks a cast");
1950 /* generic gen_op: handles types problems */
1951 ST_FUNC
void gen_op(int op
)
1953 int u
, t1
, t2
, bt1
, bt2
, t
;
1957 t1
= vtop
[-1].type
.t
;
1958 t2
= vtop
[0].type
.t
;
1959 bt1
= t1
& VT_BTYPE
;
1960 bt2
= t2
& VT_BTYPE
;
1962 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
1963 tcc_error("operation on a struct");
1964 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
1965 if (bt2
== VT_FUNC
) {
1966 mk_pointer(&vtop
->type
);
1969 if (bt1
== VT_FUNC
) {
1971 mk_pointer(&vtop
->type
);
1976 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
1977 /* at least one operand is a pointer */
1978 /* relational op: must be both pointers */
1979 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
1980 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
1981 /* pointers are handled are unsigned */
1983 t
= VT_LLONG
| VT_UNSIGNED
;
1985 t
= VT_INT
| VT_UNSIGNED
;
1989 /* if both pointers, then it must be the '-' op */
1990 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
1992 tcc_error("cannot use pointers here");
1993 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
1994 /* XXX: check that types are compatible */
1995 if (vtop
[-1].type
.t
& VT_VLA
) {
1996 vla_runtime_pointed_size(&vtop
[-1].type
);
1998 vpushi(pointed_size(&vtop
[-1].type
));
2002 /* set to integer type */
2004 vtop
->type
.t
= VT_LLONG
;
2006 vtop
->type
.t
= VT_INT
;
2011 /* exactly one pointer : must be '+' or '-'. */
2012 if (op
!= '-' && op
!= '+')
2013 tcc_error("cannot use pointers here");
2014 /* Put pointer as first operand */
2015 if (bt2
== VT_PTR
) {
2017 t
= t1
, t1
= t2
, t2
= t
;
2020 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2021 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2022 gen_cast(&int_type
);
2024 type1
= vtop
[-1].type
;
2025 type1
.t
&= ~VT_ARRAY
;
2026 if (vtop
[-1].type
.t
& VT_VLA
)
2027 vla_runtime_pointed_size(&vtop
[-1].type
);
2029 u
= pointed_size(&vtop
[-1].type
);
2031 tcc_error("unknown array element size");
2035 /* XXX: cast to int ? (long long case) */
2041 /* #ifdef CONFIG_TCC_BCHECK
2042 The main reason to removing this code:
2049 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2050 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2052 When this code is on. then the output looks like
2054 v+(i-j) = 0xbff84000
2056 /* if evaluating constant expression, no code should be
2057 generated, so no bound check */
2058 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2059 /* if bounded pointers, we generate a special code to
2066 gen_bounded_ptr_add();
2072 /* put again type if gen_opic() swaped operands */
2075 } else if (is_float(bt1
) || is_float(bt2
)) {
2076 /* compute bigger type and do implicit casts */
2077 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2079 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2084 /* floats can only be used for a few operations */
2085 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2086 (op
< TOK_ULT
|| op
> TOK_GT
))
2087 tcc_error("invalid operands for binary operation");
2089 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2090 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2091 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2094 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2095 /* cast to biggest op */
2097 /* convert to unsigned if it does not fit in a long long */
2098 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2099 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2103 /* integer operations */
2105 /* convert to unsigned if it does not fit in an integer */
2106 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2107 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2110 /* XXX: currently, some unsigned operations are explicit, so
2111 we modify them here */
2112 if (t
& VT_UNSIGNED
) {
2119 else if (op
== TOK_LT
)
2121 else if (op
== TOK_GT
)
2123 else if (op
== TOK_LE
)
2125 else if (op
== TOK_GE
)
2132 /* special case for shifts and long long: we keep the shift as
2134 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2141 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2142 /* relational op: the result is an int */
2143 vtop
->type
.t
= VT_INT
;
2148 // Make sure that we have converted to an rvalue:
2149 if (vtop
->r
& VT_LVAL
)
2150 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2153 #ifndef TCC_TARGET_ARM
2154 /* generic itof for unsigned long long case */
2155 static void gen_cvt_itof1(int t
)
2157 #ifdef TCC_TARGET_ARM64
2160 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2161 (VT_LLONG
| VT_UNSIGNED
)) {
2164 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2165 #if LDOUBLE_SIZE != 8
2166 else if (t
== VT_LDOUBLE
)
2167 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2170 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2174 vtop
->r
= reg_fret(t
);
2182 /* generic ftoi for unsigned long long case */
2183 static void gen_cvt_ftoi1(int t
)
2185 #ifdef TCC_TARGET_ARM64
2190 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2191 /* not handled natively */
2192 st
= vtop
->type
.t
& VT_BTYPE
;
2194 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2195 #if LDOUBLE_SIZE != 8
2196 else if (st
== VT_LDOUBLE
)
2197 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2200 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2205 vtop
->r2
= REG_LRET
;
2212 /* force char or short cast */
2213 static void force_charshort_cast(int t
)
2217 /* XXX: add optimization if lvalue : just change type and offset */
2222 if (t
& VT_UNSIGNED
) {
2223 vpushi((1 << bits
) - 1);
2226 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2232 /* result must be signed or the SAR is converted to an SHL
2233 This was not the case when "t" was a signed short
2234 and the last value on the stack was an unsigned int */
2235 vtop
->type
.t
&= ~VT_UNSIGNED
;
2241 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2242 static void gen_cast(CType
*type
)
2244 int sbt
, dbt
, sf
, df
, c
, p
;
2246 /* special delayed cast for char/short */
2247 /* XXX: in some cases (multiple cascaded casts), it may still
2249 if (vtop
->r
& VT_MUSTCAST
) {
2250 vtop
->r
&= ~VT_MUSTCAST
;
2251 force_charshort_cast(vtop
->type
.t
);
2254 /* bitfields first get cast to ints */
2255 if (vtop
->type
.t
& VT_BITFIELD
) {
2259 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2260 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2265 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2266 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2268 /* constant case: we can do it now */
2269 /* XXX: in ISOC, cannot do it if error in convert */
2270 if (sbt
== VT_FLOAT
)
2271 vtop
->c
.ld
= vtop
->c
.f
;
2272 else if (sbt
== VT_DOUBLE
)
2273 vtop
->c
.ld
= vtop
->c
.d
;
2276 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2277 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2278 vtop
->c
.ld
= vtop
->c
.i
;
2280 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2282 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2283 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2285 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2288 if (dbt
== VT_FLOAT
)
2289 vtop
->c
.f
= (float)vtop
->c
.ld
;
2290 else if (dbt
== VT_DOUBLE
)
2291 vtop
->c
.d
= (double)vtop
->c
.ld
;
2292 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2293 vtop
->c
.i
= vtop
->c
.ld
;
2294 } else if (sf
&& dbt
== VT_BOOL
) {
2295 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2298 vtop
->c
.i
= vtop
->c
.ld
;
2299 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2301 else if (sbt
& VT_UNSIGNED
)
2302 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2304 else if (sbt
== VT_PTR
)
2307 else if (sbt
!= VT_LLONG
)
2308 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2309 -(vtop
->c
.i
& 0x80000000));
2311 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2313 else if (dbt
== VT_BOOL
)
2314 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2316 else if (dbt
== VT_PTR
)
2319 else if (dbt
!= VT_LLONG
) {
2320 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2321 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2324 if (!(dbt
& VT_UNSIGNED
))
2325 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2328 } else if (p
&& dbt
== VT_BOOL
) {
2332 /* non constant case: generate code */
2334 /* convert from fp to fp */
2337 /* convert int to fp */
2340 /* convert fp to int */
2341 if (dbt
== VT_BOOL
) {
2345 /* we handle char/short/etc... with generic code */
2346 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2347 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2351 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2352 /* additional cast for char/short... */
2358 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2359 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2360 /* scalar to long long */
2361 /* machine independent conversion */
2363 /* generate high word */
2364 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2368 if (sbt
== VT_PTR
) {
2369 /* cast from pointer to int before we apply
2370 shift operation, which pointers don't support*/
2371 gen_cast(&int_type
);
2377 /* patch second register */
2378 vtop
[-1].r2
= vtop
->r
;
2382 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2383 (dbt
& VT_BTYPE
) == VT_PTR
||
2384 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2385 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2386 (sbt
& VT_BTYPE
) != VT_PTR
&&
2387 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2388 /* need to convert from 32bit to 64bit */
2390 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2391 #if defined(TCC_TARGET_ARM64)
2393 #elif defined(TCC_TARGET_X86_64)
2395 /* x86_64 specific: movslq */
2397 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2404 } else if (dbt
== VT_BOOL
) {
2405 /* scalar to bool */
2408 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2409 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2410 if (sbt
== VT_PTR
) {
2411 vtop
->type
.t
= VT_INT
;
2412 tcc_warning("nonportable conversion from pointer to char/short");
2414 force_charshort_cast(dbt
);
2416 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2418 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2419 /* from long long: just take low order word */
2423 /* if lvalue and single word type, nothing to do because
2424 the lvalue already contains the real type size (see
2425 VT_LVAL_xxx constants) */
2429 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2430 /* if we are casting between pointer types,
2431 we must update the VT_LVAL_xxx size */
2432 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2433 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2438 /* return type size as known at compile time. Put alignment at 'a' */
2439 ST_FUNC
int type_size(CType
*type
, int *a
)
2444 bt
= type
->t
& VT_BTYPE
;
2445 if (bt
== VT_STRUCT
) {
2450 } else if (bt
== VT_PTR
) {
2451 if (type
->t
& VT_ARRAY
) {
2455 ts
= type_size(&s
->type
, a
);
2457 if (ts
< 0 && s
->c
< 0)
2465 } else if (bt
== VT_LDOUBLE
) {
2467 return LDOUBLE_SIZE
;
2468 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2469 #ifdef TCC_TARGET_I386
2470 #ifdef TCC_TARGET_PE
2475 #elif defined(TCC_TARGET_ARM)
2485 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2488 } else if (bt
== VT_SHORT
) {
2491 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2494 } else if (bt
== VT_ENUM
) {
2496 /* Enums might be incomplete, so don't just return '4' here. */
2497 return type
->ref
->c
;
2499 /* char, void, function, _Bool */
2505 /* push type size as known at runtime time on top of value stack. Put
2507 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2509 if (type
->t
& VT_VLA
) {
2510 type_size(&type
->ref
->type
, a
);
2511 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2513 vpushi(type_size(type
, a
));
2517 static void vla_sp_restore(void) {
2518 if (vlas_in_scope
) {
2519 gen_vla_sp_restore(vla_sp_loc
);
2523 static void vla_sp_restore_root(void) {
2524 if (vlas_in_scope
) {
2525 gen_vla_sp_restore(vla_sp_root_loc
);
2529 /* return the pointed type of t */
2530 static inline CType
*pointed_type(CType
*type
)
2532 return &type
->ref
->type
;
2535 /* modify type so that its it is a pointer to type. */
2536 ST_FUNC
void mk_pointer(CType
*type
)
2539 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2540 type
->t
= VT_PTR
| (type
->t
& ~VT_TYPE
);
2544 /* compare function types. OLD functions match any new functions */
2545 static int is_compatible_func(CType
*type1
, CType
*type2
)
2551 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2553 /* check func_call */
2554 if (s1
->a
.func_call
!= s2
->a
.func_call
)
2556 /* XXX: not complete */
2557 if (s1
->c
== FUNC_OLD
|| s2
->c
== FUNC_OLD
)
2561 while (s1
!= NULL
) {
2564 if (!is_compatible_parameter_types(&s1
->type
, &s2
->type
))
2574 /* return true if type1 and type2 are the same. If unqualified is
2575 true, qualifiers on the types are ignored.
2577 - enums are not checked as gcc __builtin_types_compatible_p ()
2579 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2583 t1
= type1
->t
& VT_TYPE
;
2584 t2
= type2
->t
& VT_TYPE
;
2586 /* strip qualifiers before comparing */
2587 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2588 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2590 /* Default Vs explicit signedness only matters for char */
2591 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2595 /* An enum is compatible with (unsigned) int. Ideally we would
2596 store the enums signedness in type->ref.a.<some_bit> and
2597 only accept unsigned enums with unsigned int and vice versa.
2598 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2599 from pointer target types, so we can't add it here either. */
2600 if ((t1
& VT_BTYPE
) == VT_ENUM
) {
2602 if (type1
->ref
->a
.unsigned_enum
)
2605 if ((t2
& VT_BTYPE
) == VT_ENUM
) {
2607 if (type2
->ref
->a
.unsigned_enum
)
2610 /* XXX: bitfields ? */
2613 /* test more complicated cases */
2614 bt1
= t1
& VT_BTYPE
;
2615 if (bt1
== VT_PTR
) {
2616 type1
= pointed_type(type1
);
2617 type2
= pointed_type(type2
);
2618 return is_compatible_types(type1
, type2
);
2619 } else if (bt1
== VT_STRUCT
) {
2620 return (type1
->ref
== type2
->ref
);
2621 } else if (bt1
== VT_FUNC
) {
2622 return is_compatible_func(type1
, type2
);
2628 /* return true if type1 and type2 are exactly the same (including
2631 static int is_compatible_types(CType
*type1
, CType
*type2
)
2633 return compare_types(type1
,type2
,0);
2636 /* return true if type1 and type2 are the same (ignoring qualifiers).
2638 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
)
2640 return compare_types(type1
,type2
,1);
2643 /* print a type. If 'varstr' is not NULL, then the variable is also
2644 printed in the type */
2646 /* XXX: add array and function pointers */
2647 static void type_to_str(char *buf
, int buf_size
,
2648 CType
*type
, const char *varstr
)
2658 if (t
& VT_CONSTANT
)
2659 pstrcat(buf
, buf_size
, "const ");
2660 if (t
& VT_VOLATILE
)
2661 pstrcat(buf
, buf_size
, "volatile ");
2662 if ((t
& (VT_DEFSIGN
| VT_UNSIGNED
)) == (VT_DEFSIGN
| VT_UNSIGNED
))
2663 pstrcat(buf
, buf_size
, "unsigned ");
2664 else if (t
& VT_DEFSIGN
)
2665 pstrcat(buf
, buf_size
, "signed ");
2667 pstrcat(buf
, buf_size
, "extern ");
2669 pstrcat(buf
, buf_size
, "static ");
2671 pstrcat(buf
, buf_size
, "typedef ");
2673 pstrcat(buf
, buf_size
, "inline ");
2674 buf_size
-= strlen(buf
);
2705 tstr
= "long double";
2707 pstrcat(buf
, buf_size
, tstr
);
2711 if (bt
== VT_STRUCT
)
2715 pstrcat(buf
, buf_size
, tstr
);
2716 v
= type
->ref
->v
& ~SYM_STRUCT
;
2717 if (v
>= SYM_FIRST_ANOM
)
2718 pstrcat(buf
, buf_size
, "<anonymous>");
2720 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2724 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2725 pstrcat(buf
, buf_size
, "(");
2727 while (sa
!= NULL
) {
2728 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2729 pstrcat(buf
, buf_size
, buf1
);
2732 pstrcat(buf
, buf_size
, ", ");
2734 pstrcat(buf
, buf_size
, ")");
2739 snprintf(buf1
, sizeof(buf1
), "%s[%ld]", varstr
? varstr
: "", s
->c
);
2740 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2743 pstrcpy(buf1
, sizeof(buf1
), "*");
2744 if (t
& VT_CONSTANT
)
2745 pstrcat(buf1
, buf_size
, "const ");
2746 if (t
& VT_VOLATILE
)
2747 pstrcat(buf1
, buf_size
, "volatile ");
2749 pstrcat(buf1
, sizeof(buf1
), varstr
);
2750 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2754 pstrcat(buf
, buf_size
, " ");
2755 pstrcat(buf
, buf_size
, varstr
);
2760 /* verify type compatibility to store vtop in 'dt' type, and generate
2762 static void gen_assign_cast(CType
*dt
)
2764 CType
*st
, *type1
, *type2
, tmp_type1
, tmp_type2
;
2765 char buf1
[256], buf2
[256];
2768 st
= &vtop
->type
; /* source type */
2769 dbt
= dt
->t
& VT_BTYPE
;
2770 sbt
= st
->t
& VT_BTYPE
;
2771 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
2772 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
2774 It is Ok if both are void
2780 gcc accepts this program
2783 tcc_error("cannot cast from/to void");
2785 if (dt
->t
& VT_CONSTANT
)
2786 tcc_warning("assignment of read-only location");
2789 /* special cases for pointers */
2790 /* '0' can also be a pointer */
2791 if (is_null_pointer(vtop
))
2793 /* accept implicit pointer to integer cast with warning */
2794 if (is_integer_btype(sbt
)) {
2795 tcc_warning("assignment makes pointer from integer without a cast");
2798 type1
= pointed_type(dt
);
2799 /* a function is implicitly a function pointer */
2800 if (sbt
== VT_FUNC
) {
2801 if ((type1
->t
& VT_BTYPE
) != VT_VOID
&&
2802 !is_compatible_types(pointed_type(dt
), st
))
2803 tcc_warning("assignment from incompatible pointer type");
2808 type2
= pointed_type(st
);
2809 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2810 (type2
->t
& VT_BTYPE
) == VT_VOID
) {
2811 /* void * can match anything */
2813 /* exact type match, except for qualifiers */
2816 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2817 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2818 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2819 /* Like GCC don't warn by default for merely changes
2820 in pointer target signedness. Do warn for different
2821 base types, though, in particular for unsigned enums
2822 and signed int targets. */
2823 if ((tmp_type1
.t
& (VT_DEFSIGN
| VT_UNSIGNED
)) !=
2824 (tmp_type2
.t
& (VT_DEFSIGN
| VT_UNSIGNED
)) &&
2825 (tmp_type1
.t
& VT_BTYPE
) == (tmp_type2
.t
& VT_BTYPE
))
2828 tcc_warning("assignment from incompatible pointer type");
2831 /* check const and volatile */
2832 if ((!(type1
->t
& VT_CONSTANT
) && (type2
->t
& VT_CONSTANT
) &&
2833 ((type2
->t
& VT_BTYPE
) != VT_BYTE
|| tcc_state
->warn_write_strings
)) ||
2834 (!(type1
->t
& VT_VOLATILE
) && (type2
->t
& VT_VOLATILE
)))
2835 tcc_warning("assignment discards qualifiers from pointer target type");
2841 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
2842 tcc_warning("assignment makes integer from pointer without a cast");
2843 } else if (sbt
== VT_STRUCT
) {
2844 goto case_VT_STRUCT
;
2846 /* XXX: more tests */
2852 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2853 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2854 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2856 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2857 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2858 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
2866 /* store vtop in lvalue pushed on stack */
2867 ST_FUNC
void vstore(void)
2869 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
2871 ft
= vtop
[-1].type
.t
;
2872 sbt
= vtop
->type
.t
& VT_BTYPE
;
2873 dbt
= ft
& VT_BTYPE
;
2874 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
2875 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
2876 && !(vtop
->type
.t
& VT_BITFIELD
)) {
2877 /* optimize char/short casts */
2878 delayed_cast
= VT_MUSTCAST
;
2879 vtop
->type
.t
= (ft
& VT_TYPE
& ~VT_BITFIELD
&
2880 ((1 << VT_STRUCT_SHIFT
) - 1));
2881 /* XXX: factorize */
2882 if (ft
& VT_CONSTANT
)
2883 tcc_warning("assignment of read-only location");
2886 if (!(ft
& VT_BITFIELD
))
2887 gen_assign_cast(&vtop
[-1].type
);
2890 if (sbt
== VT_STRUCT
) {
2891 /* if structure, only generate pointer */
2892 /* structure assignment : generate memcpy */
2893 /* XXX: optimize if small size */
2894 size
= type_size(&vtop
->type
, &align
);
2898 vtop
->type
.t
= VT_PTR
;
2901 /* address of memcpy() */
2904 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
2905 else if(!(align
& 3))
2906 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
2909 /* Use memmove, rather than memcpy, as dest and src may be same: */
2910 vpush_global_sym(&func_old_type
, TOK_memmove
);
2915 vtop
->type
.t
= VT_PTR
;
2921 /* leave source on stack */
2922 } else if (ft
& VT_BITFIELD
) {
2923 /* bitfield store handling */
2925 /* save lvalue as expression result (example: s.b = s.a = n;) */
2926 vdup(), vtop
[-1] = vtop
[-2];
2928 bit_pos
= (ft
>> VT_STRUCT_SHIFT
) & 0x3f;
2929 bit_size
= (ft
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
2930 /* remove bit field info to avoid loops */
2931 vtop
[-1].type
.t
= ft
& ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
2933 if((ft
& VT_BTYPE
) == VT_BOOL
) {
2934 gen_cast(&vtop
[-1].type
);
2935 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
2938 /* duplicate destination */
2940 vtop
[-1] = vtop
[-2];
2942 /* mask and shift source */
2943 if((ft
& VT_BTYPE
) != VT_BOOL
) {
2944 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2945 vpushll((1ULL << bit_size
) - 1ULL);
2947 vpushi((1 << bit_size
) - 1);
2953 /* load destination, mask and or with source */
2955 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2956 vpushll(~(((1ULL << bit_size
) - 1ULL) << bit_pos
));
2958 vpushi(~(((1 << bit_size
) - 1) << bit_pos
));
2964 /* ... and discard */
2968 #ifdef CONFIG_TCC_BCHECK
2969 /* bound check case */
2970 if (vtop
[-1].r
& VT_MUSTBOUND
) {
2979 #ifdef TCC_TARGET_X86_64
2980 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
2982 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
2987 r
= gv(rc
); /* generate value */
2988 /* if lvalue was saved on stack, must read it */
2989 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
2991 t
= get_reg(RC_INT
);
2997 sv
.r
= VT_LOCAL
| VT_LVAL
;
2998 sv
.c
.i
= vtop
[-1].c
.i
;
3000 vtop
[-1].r
= t
| VT_LVAL
;
3002 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3004 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3005 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3007 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3008 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3010 vtop
[-1].type
.t
= load_type
;
3013 /* convert to int to increment easily */
3014 vtop
->type
.t
= addr_type
;
3020 vtop
[-1].type
.t
= load_type
;
3021 /* XXX: it works because r2 is spilled last ! */
3022 store(vtop
->r2
, vtop
- 1);
3028 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3029 vtop
->r
|= delayed_cast
;
3033 /* post defines POST/PRE add. c is the token ++ or -- */
3034 ST_FUNC
void inc(int post
, int c
)
3037 vdup(); /* save lvalue */
3039 gv_dup(); /* duplicate value */
3044 vpushi(c
- TOK_MID
);
3046 vstore(); /* store value */
3048 vpop(); /* if post op, return saved value */
3051 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3053 /* read the string */
3057 while (tok
== TOK_STR
) {
3058 /* XXX: add \0 handling too ? */
3059 cstr_cat(astr
, tokc
.str
.data
, -1);
3062 cstr_ccat(astr
, '\0');
3065 /* If I is >= 1 and a power of two, returns log2(i)+1.
3066 If I is 0 returns 0. */
3067 static int exact_log2p1(int i
)
3072 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3083 /* Parse GNUC __attribute__ extension. Currently, the following
3084 extensions are recognized:
3085 - aligned(n) : set data/function alignment.
3086 - packed : force data alignment to 1
3087 - section(x) : generate data/code in this section.
3088 - unused : currently ignored, but may be used someday.
3089 - regparm(n) : pass function parameters in registers (i386 only)
3091 static void parse_attribute(AttributeDef
*ad
)
3096 while (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
) {
3100 while (tok
!= ')') {
3101 if (tok
< TOK_IDENT
)
3102 expect("attribute name");
3109 parse_mult_str(&astr
, "section name");
3110 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3117 parse_mult_str(&astr
, "alias(\"target\")");
3118 ad
->alias_target
= /* save string as token, for later */
3119 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3123 case TOK_VISIBILITY1
:
3124 case TOK_VISIBILITY2
:
3126 parse_mult_str(&astr
,
3127 "visibility(\"default|hidden|internal|protected\")");
3128 if (!strcmp (astr
.data
, "default"))
3129 ad
->a
.visibility
= STV_DEFAULT
;
3130 else if (!strcmp (astr
.data
, "hidden"))
3131 ad
->a
.visibility
= STV_HIDDEN
;
3132 else if (!strcmp (astr
.data
, "internal"))
3133 ad
->a
.visibility
= STV_INTERNAL
;
3134 else if (!strcmp (astr
.data
, "protected"))
3135 ad
->a
.visibility
= STV_PROTECTED
;
3137 expect("visibility(\"default|hidden|internal|protected\")");
3146 if (n
<= 0 || (n
& (n
- 1)) != 0)
3147 tcc_error("alignment must be a positive power of two");
3152 ad
->a
.aligned
= exact_log2p1(n
);
3153 if (n
!= 1 << (ad
->a
.aligned
- 1))
3154 tcc_error("alignment of %d is larger than implemented", n
);
3166 /* currently, no need to handle it because tcc does not
3167 track unused objects */
3171 /* currently, no need to handle it because tcc does not
3172 track unused objects */
3177 ad
->a
.func_call
= FUNC_CDECL
;
3182 ad
->a
.func_call
= FUNC_STDCALL
;
3184 #ifdef TCC_TARGET_I386
3194 ad
->a
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3200 ad
->a
.func_call
= FUNC_FASTCALLW
;
3207 ad
->a
.mode
= VT_LLONG
+ 1;
3210 ad
->a
.mode
= VT_BYTE
+ 1;
3213 ad
->a
.mode
= VT_SHORT
+ 1;
3217 ad
->a
.mode
= VT_INT
+ 1;
3220 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3227 ad
->a
.func_export
= 1;
3230 ad
->a
.func_import
= 1;
3233 if (tcc_state
->warn_unsupported
)
3234 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3235 /* skip parameters */
3237 int parenthesis
= 0;
3241 else if (tok
== ')')
3244 } while (parenthesis
&& tok
!= -1);
3257 static Sym
* find_field (CType
*type
, int v
)
3261 while ((s
= s
->next
) != NULL
) {
3262 if ((s
->v
& SYM_FIELD
) &&
3263 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3264 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3265 Sym
*ret
= find_field (&s
->type
, v
);
3275 static void struct_add_offset (Sym
*s
, int offset
)
3277 while ((s
= s
->next
) != NULL
) {
3278 if ((s
->v
& SYM_FIELD
) &&
3279 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3280 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3281 struct_add_offset(s
->type
.ref
, offset
);
3287 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3289 int align
, maxalign
, offset
, c
, bit_pos
, bt
, prevbt
, prev_bit_size
;
3290 int pcc
= !tcc_state
->ms_bitfields
;
3291 int packwarn
= tcc_state
->warn_gcc_compat
;
3292 int typealign
, bit_size
, size
;
3296 maxalign
= 1 << (ad
->a
.aligned
- 1);
3302 prevbt
= VT_STRUCT
; /* make it never match */
3306 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3307 size
= type_size(&f
->type
, &typealign
);
3308 if (f
->type
.t
& VT_BITFIELD
)
3309 bit_size
= (f
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
3312 if (bit_size
== 0 && pcc
) {
3313 /* Zero-width bit-fields in PCC mode aren't affected
3314 by any packing (attribute or pragma). */
3316 } else if (f
->r
> 1) {
3318 } else if (ad
->a
.packed
|| f
->r
== 1) {
3320 /* Packed fields or packed records don't let the base type
3321 influence the records type alignment. */
3326 if (type
->ref
->type
.t
!= TOK_STRUCT
) {
3327 if (pcc
&& bit_size
>= 0)
3328 size
= (bit_size
+ 7) >> 3;
3329 /* Bit position is already zero from our caller. */
3333 } else if (bit_size
< 0) {
3334 int addbytes
= pcc
? (bit_pos
+ 7) >> 3 : 0;
3337 c
= (c
+ addbytes
+ align
- 1) & -align
;
3343 /* A bit-field. Layout is more complicated. There are two
3344 options TCC implements: PCC compatible and MS compatible
3345 (PCC compatible is what GCC uses for almost all targets).
3346 In PCC layout the overall size of the struct (in c) is
3347 _excluding_ the current run of bit-fields (that is,
3348 there's at least additional bit_pos bits after c). In
3349 MS layout c does include the current run of bit-fields.
3351 This matters for calculating the natural alignment buckets
3354 /* 'align' will be used to influence records alignment,
3355 so it's the max of specified and type alignment, except
3356 in certain cases that depend on the mode. */
3357 if (align
< typealign
)
3360 /* In PCC layout a non-packed bit-field is placed adjacent
3361 to the preceding bit-fields, except if it would overflow
3362 its container (depending on base type) or it's a zero-width
3363 bit-field. Packed non-zero-width bit-fields always are
3365 int ofs
= (c
* 8 + bit_pos
) % (typealign
* 8);
3366 int ofs2
= ofs
+ bit_size
+ (typealign
* 8) - 1;
3367 if (bit_size
== 0 ||
3369 (ofs2
/ (typealign
* 8)) > (size
/typealign
))) {
3370 c
= (c
+ ((bit_pos
+ 7) >> 3) + typealign
- 1) & -typealign
;
3372 } else if (bit_pos
+ bit_size
> size
* 8) {
3375 if (bit_pos
+ bit_size
> size
* 8) {
3376 c
+= 1, bit_pos
= 0;
3377 if ((ad
->a
.packed
|| f
->r
) && packwarn
) {
3378 tcc_warning("struct layout not compatible with GCC (internal limitation)");
3384 /* In PCC layout named bit-fields influence the alignment
3385 of the containing struct using the base types alignment,
3386 except for packed fields (which here have correct
3387 align/typealign). */
3388 if ((f
->v
& SYM_FIRST_ANOM
))
3391 bt
= f
->type
.t
& VT_BTYPE
;
3392 if ((bit_pos
+ bit_size
> size
* 8) ||
3393 (bit_size
> 0) == (bt
!= prevbt
)) {
3394 c
= (c
+ typealign
- 1) & -typealign
;
3397 /* In MS bitfield mode a bit-field run always uses
3398 at least as many bits as the underlying type.
3399 To start a new run it's also required that this
3400 or the last bit-field had non-zero width. */
3401 if (bit_size
|| prev_bit_size
)
3404 /* In MS layout the records alignment is normally
3405 influenced by the field, except for a zero-width
3406 field at the start of a run (but by further zero-width
3407 fields it is again). */
3408 if (bit_size
== 0 && prevbt
!= bt
)
3411 prev_bit_size
= bit_size
;
3413 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3414 | (bit_pos
<< VT_STRUCT_SHIFT
);
3415 bit_pos
+= bit_size
;
3416 if (pcc
&& bit_pos
>= size
* 8) {
3418 bit_pos
-= size
* 8;
3421 if (align
> maxalign
)
3424 printf("set field %s offset=%d",
3425 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
);
3426 if (f
->type
.t
& VT_BITFIELD
) {
3427 printf(" pos=%d size=%d",
3428 (f
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f,
3429 (f
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f);
3434 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3436 /* An anonymous struct/union. Adjust member offsets
3437 to reflect the real offset of our containing struct.
3438 Also set the offset of this anon member inside
3439 the outer struct to be zero. Via this it
3440 works when accessing the field offset directly
3441 (from base object), as well as when recursing
3442 members in initializer handling. */
3443 int v2
= f
->type
.ref
->v
;
3444 if (!(v2
& SYM_FIELD
) &&
3445 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3447 /* This happens only with MS extensions. The
3448 anon member has a named struct type, so it
3449 potentially is shared with other references.
3450 We need to unshare members so we can modify
3453 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3454 &f
->type
.ref
->type
, 0,
3456 pps
= &f
->type
.ref
->next
;
3457 while ((ass
= ass
->next
) != NULL
) {
3458 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3459 pps
= &((*pps
)->next
);
3463 struct_add_offset(f
->type
.ref
, offset
);
3471 /* store size and alignment */
3472 type
->ref
->c
= (c
+ (pcc
? (bit_pos
+ 7) >> 3 : 0)
3473 + maxalign
- 1) & -maxalign
;
3474 type
->ref
->r
= maxalign
;
3475 if (offset
+ size
> type
->ref
->c
&& type
->ref
->c
)
3476 tcc_warning("will touch memory past end of the struct (internal limitation)");
3479 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
3480 static void struct_decl(CType
*type
, AttributeDef
*ad
, int u
)
3482 int a
, v
, size
, align
, flexible
, alignoverride
;
3484 int bit_size
, bsize
, bt
;
3489 a
= tok
; /* save decl type */
3491 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3492 parse_attribute(ad
);
3496 /* struct already defined ? return it */
3498 expect("struct/union/enum name");
3500 if (s
&& (s
->scope
== local_scope
|| (tok
!= '{' && tok
!= ';'))) {
3502 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3508 /* Record the original enum/struct/union token. */
3511 /* we put an undefined size for struct/union */
3512 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3513 s
->r
= 0; /* default alignment is zero as gcc */
3514 /* put struct/union/enum name in type */
3522 tcc_error("struct/union/enum already defined");
3523 /* cannot be empty */
3525 /* non empty enums are not allowed */
3526 if (a
== TOK_ENUM
) {
3530 CType
*t
= &int_type
;
3533 expect("identifier");
3535 if (ss
&& !local_stack
)
3536 tcc_error("redefinition of enumerator '%s'",
3537 get_tok_str(v
, NULL
));
3544 /* We really want to support long long enums
3545 on i386 as well, but the Sym structure only
3546 holds a 'long' for associated constants,
3547 and enlarging it would bump its size (no
3548 available padding). So punt for now. */
3554 if (c
!= (int)c
&& (unsigned long)c
!= (unsigned int)c
)
3555 seen_wide
= 1, t
= &size_type
;
3556 /* enum symbols have static storage */
3557 ss
= sym_push(v
, t
, VT_CONST
, c
);
3558 ss
->type
.t
|= VT_STATIC
;
3563 /* NOTE: we accept a trailing comma */
3568 s
->a
.unsigned_enum
= 1;
3569 s
->c
= type_size(seen_wide
? &size_type
: &int_type
, &align
);
3574 while (tok
!= '}') {
3575 if (!parse_btype(&btype
, &ad1
)) {
3581 tcc_error("flexible array member '%s' not at the end of struct",
3582 get_tok_str(v
, NULL
));
3588 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
3590 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3591 expect("identifier");
3593 int v
= btype
.ref
->v
;
3594 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3595 if (tcc_state
->ms_extensions
== 0)
3596 expect("identifier");
3600 if (type_size(&type1
, &align
) < 0) {
3601 if ((a
== TOK_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3604 tcc_error("field '%s' has incomplete type",
3605 get_tok_str(v
, NULL
));
3607 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3608 (type1
.t
& VT_STORAGE
))
3609 tcc_error("invalid type for '%s'",
3610 get_tok_str(v
, NULL
));
3614 bit_size
= expr_const();
3615 /* XXX: handle v = 0 case for messages */
3617 tcc_error("negative width in bit-field '%s'",
3618 get_tok_str(v
, NULL
));
3619 if (v
&& bit_size
== 0)
3620 tcc_error("zero width for bit-field '%s'",
3621 get_tok_str(v
, NULL
));
3622 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3623 parse_attribute(&ad1
);
3625 size
= type_size(&type1
, &align
);
3626 /* Only remember non-default alignment. */
3628 if (ad1
.a
.aligned
) {
3629 int speca
= 1 << (ad1
.a
.aligned
- 1);
3630 alignoverride
= speca
;
3631 } else if (ad1
.a
.packed
|| ad
->a
.packed
) {
3633 } else if (*tcc_state
->pack_stack_ptr
) {
3634 if (align
>= *tcc_state
->pack_stack_ptr
)
3635 alignoverride
= *tcc_state
->pack_stack_ptr
;
3637 if (bit_size
>= 0) {
3638 bt
= type1
.t
& VT_BTYPE
;
3645 tcc_error("bitfields must have scalar type");
3647 if (bit_size
> bsize
) {
3648 tcc_error("width of '%s' exceeds its type",
3649 get_tok_str(v
, NULL
));
3650 } else if (bit_size
== bsize
) {
3651 /* no need for bit fields */
3654 type1
.t
|= VT_BITFIELD
|
3655 (0 << VT_STRUCT_SHIFT
) |
3656 (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
3659 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3660 /* Remember we've seen a real field to check
3661 for placement of flexible array member. */
3664 /* If member is a struct or bit-field, enforce
3665 placing into the struct (as anonymous). */
3667 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
3672 ss
= sym_push(v
| SYM_FIELD
, &type1
, alignoverride
, 0);
3676 if (tok
== ';' || tok
== TOK_EOF
)
3683 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3684 parse_attribute(ad
);
3685 struct_layout(type
, ad
);
3690 /* return 1 if basic type is a type size (short, long, long long) */
3691 ST_FUNC
int is_btype_size(int bt
)
3693 return bt
== VT_SHORT
|| bt
== VT_LONG
|| bt
== VT_LLONG
;
3696 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3697 are added to the element type, copied because it could be a typedef. */
3698 static void parse_btype_qualify(CType
*type
, int qualifiers
)
3700 while (type
->t
& VT_ARRAY
) {
3701 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
3702 type
= &type
->ref
->type
;
3704 type
->t
|= qualifiers
;
3707 /* return 0 if no type declaration. otherwise, return the basic type
3710 static int parse_btype(CType
*type
, AttributeDef
*ad
)
3712 int t
, u
, bt_size
, complete
, type_found
, typespec_found
, g
;
3716 memset(ad
, 0, sizeof(AttributeDef
));
3724 /* currently, we really ignore extension */
3735 tcc_error("too many basic types");
3737 bt_size
= is_btype_size (u
& VT_BTYPE
);
3738 if (u
== VT_INT
|| (!bt_size
&& !(t
& VT_TYPEDEF
)))
3753 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
3754 #ifndef TCC_TARGET_PE
3755 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3757 } else if ((t
& VT_BTYPE
) == VT_LONG
) {
3758 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3764 #ifdef TCC_TARGET_ARM64
3766 /* GCC's __uint128_t appears in some Linux header files. Make it a
3767 synonym for long double to get the size and alignment right. */
3779 if ((t
& VT_BTYPE
) == VT_LONG
) {
3780 #ifdef TCC_TARGET_PE
3781 t
= (t
& ~VT_BTYPE
) | VT_DOUBLE
;
3783 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3791 struct_decl(&type1
, ad
, VT_ENUM
);
3794 type
->ref
= type1
.ref
;
3798 struct_decl(&type1
, ad
, VT_STRUCT
);
3801 /* type modifiers */
3806 parse_btype_qualify(type
, VT_CONSTANT
);
3814 parse_btype_qualify(type
, VT_VOLATILE
);
3821 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
3822 tcc_error("signed and unsigned modifier");
3835 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
3836 tcc_error("signed and unsigned modifier");
3837 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
3853 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
3854 tcc_error("multiple storage classes");
3865 /* GNUC attribute */
3866 case TOK_ATTRIBUTE1
:
3867 case TOK_ATTRIBUTE2
:
3868 parse_attribute(ad
);
3871 t
= (t
& ~VT_BTYPE
) | u
;
3879 parse_expr_type(&type1
);
3880 /* remove all storage modifiers except typedef */
3881 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
3887 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
3890 type
->t
= ((s
->type
.t
& ~VT_TYPEDEF
) |
3891 (t
& ~(VT_CONSTANT
| VT_VOLATILE
)));
3892 type
->ref
= s
->type
.ref
;
3893 if (t
& (VT_CONSTANT
| VT_VOLATILE
))
3894 parse_btype_qualify(type
, t
& (VT_CONSTANT
| VT_VOLATILE
));
3898 /* get attributes from typedef */
3899 if (0 == ad
->a
.aligned
)
3900 ad
->a
.aligned
= s
->a
.aligned
;
3901 if (0 == ad
->a
.func_call
)
3902 ad
->a
.func_call
= s
->a
.func_call
;
3903 ad
->a
.packed
|= s
->a
.packed
;
3912 if (tcc_state
->char_is_unsigned
) {
3913 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
3917 /* long is never used as type */
3918 if ((t
& VT_BTYPE
) == VT_LONG
)
3919 #if PTR_SIZE == 8 && !defined TCC_TARGET_PE
3920 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3922 t
= (t
& ~VT_BTYPE
) | VT_INT
;
3928 /* convert a function parameter type (array to pointer and function to
3929 function pointer) */
3930 static inline void convert_parameter_type(CType
*pt
)
3932 /* remove const and volatile qualifiers (XXX: const could be used
3933 to indicate a const function parameter */
3934 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3935 /* array must be transformed to pointer according to ANSI C */
3937 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
3942 ST_FUNC
void parse_asm_str(CString
*astr
)
3945 parse_mult_str(astr
, "string constant");
3948 /* Parse an asm label and return the token */
3949 static int asm_label_instr(void)
3955 parse_asm_str(&astr
);
3958 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
3960 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
3965 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
3967 int n
, l
, t1
, arg_size
, align
;
3968 Sym
**plast
, *s
, *first
;
3973 /* function type, or recursive declarator (return if so) */
3975 if (td
&& !(td
& TYPE_ABSTRACT
))
3979 else if (parse_btype(&pt
, &ad1
))
3990 /* read param name and compute offset */
3991 if (l
!= FUNC_OLD
) {
3992 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
3994 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
3995 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
3996 tcc_error("parameter declared as void");
3997 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4001 expect("identifier");
4002 pt
.t
= VT_VOID
; /* invalid type */
4005 convert_parameter_type(&pt
);
4006 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4012 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4017 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4018 tcc_error("invalid type");
4021 /* if no parameters, then old type prototype */
4024 /* NOTE: const is ignored in returned type as it has a special
4025 meaning in gcc / C++ */
4026 type
->t
&= ~VT_CONSTANT
;
4027 /* some ancient pre-K&R C allows a function to return an array
4028 and the array brackets to be put after the arguments, such
4029 that "int c()[]" means something like "int[] c()" */
4032 skip(']'); /* only handle simple "[]" */
4035 /* we push a anonymous symbol which will contain the function prototype */
4036 ad
->a
.func_args
= arg_size
;
4037 s
= sym_push(SYM_FIELD
, type
, 0, l
);
4042 } else if (tok
== '[') {
4043 int saved_nocode_wanted
= nocode_wanted
;
4044 /* array definition */
4046 if (tok
== TOK_RESTRICT1
)
4051 if (!local_stack
|| (storage
& VT_STATIC
))
4052 vpushi(expr_const());
4054 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4055 length must always be evaluated, even under nocode_wanted,
4056 so that its size slot is initialized (e.g. under sizeof
4061 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4064 tcc_error("invalid array size");
4066 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4067 tcc_error("size of variable length array should be an integer");
4072 /* parse next post type */
4073 post_type(type
, ad
, storage
, 0);
4074 if (type
->t
== VT_FUNC
)
4075 tcc_error("declaration of an array of functions");
4076 t1
|= type
->t
& VT_VLA
;
4079 loc
-= type_size(&int_type
, &align
);
4083 vla_runtime_type_size(type
, &align
);
4085 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4091 nocode_wanted
= saved_nocode_wanted
;
4093 /* we push an anonymous symbol which will contain the array
4095 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4096 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4102 /* Parse a type declarator (except basic type), and return the type
4103 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4104 expected. 'type' should contain the basic type. 'ad' is the
4105 attribute definition of the basic type. It can be modified by
4106 type_decl(). If this (possibly abstract) declarator is a pointer chain
4107 it returns the innermost pointed to type (equals *type, but is a different
4108 pointer), otherwise returns type itself, that's used for recursive calls. */
4109 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4112 int qualifiers
, storage
;
4114 /* recursive type, remove storage bits first, apply them later again */
4115 storage
= type
->t
& VT_STORAGE
;
4116 type
->t
&= ~VT_STORAGE
;
4118 while (tok
== '*') {
4126 qualifiers
|= VT_CONSTANT
;
4131 qualifiers
|= VT_VOLATILE
;
4137 /* XXX: clarify attribute handling */
4138 case TOK_ATTRIBUTE1
:
4139 case TOK_ATTRIBUTE2
:
4140 parse_attribute(ad
);
4144 type
->t
|= qualifiers
;
4146 /* innermost pointed to type is the one for the first derivation */
4147 ret
= pointed_type(type
);
4151 /* This is possibly a parameter type list for abstract declarators
4152 ('int ()'), use post_type for testing this. */
4153 if (!post_type(type
, ad
, 0, td
)) {
4154 /* It's not, so it's a nested declarator, and the post operations
4155 apply to the innermost pointed to type (if any). */
4156 /* XXX: this is not correct to modify 'ad' at this point, but
4157 the syntax is not clear */
4158 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
4159 parse_attribute(ad
);
4160 post
= type_decl(type
, ad
, v
, td
);
4163 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4164 /* type identifier */
4168 if (!(td
& TYPE_ABSTRACT
))
4169 expect("identifier");
4172 post_type(post
, ad
, storage
, 0);
4173 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
4174 parse_attribute(ad
);
4179 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4180 ST_FUNC
int lvalue_type(int t
)
4185 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4187 else if (bt
== VT_SHORT
)
4191 if (t
& VT_UNSIGNED
)
4192 r
|= VT_LVAL_UNSIGNED
;
4196 /* indirection with full error checking and bound check */
4197 ST_FUNC
void indir(void)
4199 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4200 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4204 if (vtop
->r
& VT_LVAL
)
4206 vtop
->type
= *pointed_type(&vtop
->type
);
4207 /* Arrays and functions are never lvalues */
4208 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4209 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4210 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4211 /* if bound checking, the referenced pointer must be checked */
4212 #ifdef CONFIG_TCC_BCHECK
4213 if (tcc_state
->do_bounds_check
)
4214 vtop
->r
|= VT_MUSTBOUND
;
4219 /* pass a parameter to a function and do type checking and casting */
4220 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4225 func_type
= func
->c
;
4226 if (func_type
== FUNC_OLD
||
4227 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4228 /* default casting : only need to convert float to double */
4229 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4232 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4233 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4234 type
.ref
= vtop
->type
.ref
;
4237 } else if (arg
== NULL
) {
4238 tcc_error("too many arguments to function");
4241 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4242 gen_assign_cast(&type
);
4246 /* parse an expression and return its type without any side effect.
4247 If UNRY we parse an unary expression, otherwise a full one. */
4248 static void expr_type(CType
*type
, int unry
)
4260 /* parse an expression of the form '(type)' or '(expr)' and return its
4262 static void parse_expr_type(CType
*type
)
4268 if (parse_btype(type
, &ad
)) {
4269 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4276 static void parse_type(CType
*type
)
4281 if (!parse_btype(type
, &ad
)) {
4284 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4287 static void parse_builtin_params(int nc
, const char *args
)
4294 while ((c
= *args
++)) {
4298 case 'e': expr_eq(); continue;
4299 case 't': parse_type(&t
); vpush(&t
); continue;
4300 default: tcc_error("internal error"); break;
4308 ST_FUNC
void unary(void)
4310 int n
, t
, align
, size
, r
, sizeof_caller
;
4315 sizeof_caller
= in_sizeof
;
4317 /* XXX: GCC 2.95.3 does not generate a table although it should be
4331 vsetc(&type
, VT_CONST
, &tokc
);
4335 t
= VT_INT
| VT_UNSIGNED
;
4341 t
= VT_LLONG
| VT_UNSIGNED
;
4353 case TOK___FUNCTION__
:
4355 goto tok_identifier
;
4361 /* special function name identifier */
4362 len
= strlen(funcname
) + 1;
4363 /* generate char[len] type */
4368 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4369 ptr
= section_ptr_add(data_section
, len
);
4370 memcpy(ptr
, funcname
, len
);
4375 #ifdef TCC_TARGET_PE
4376 t
= VT_SHORT
| VT_UNSIGNED
;
4382 /* string parsing */
4389 memset(&ad
, 0, sizeof(AttributeDef
));
4390 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4395 if (parse_btype(&type
, &ad
)) {
4396 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4398 /* check ISOC99 compound literal */
4400 /* data is allocated locally by default */
4405 /* all except arrays are lvalues */
4406 if (!(type
.t
& VT_ARRAY
))
4407 r
|= lvalue_type(type
.t
);
4408 memset(&ad
, 0, sizeof(AttributeDef
));
4409 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4411 if (sizeof_caller
) {
4418 } else if (tok
== '{') {
4419 int saved_nocode_wanted
= nocode_wanted
;
4421 tcc_error("expected constant");
4422 /* save all registers */
4424 /* statement expression : we do not accept break/continue
4425 inside as GCC does. We do retain the nocode_wanted state,
4426 as statement expressions can't ever be entered from the
4427 outside, so any reactivation of code emission (from labels
4428 or loop heads) can be disabled again after the end of it. */
4429 block(NULL
, NULL
, 1);
4430 nocode_wanted
= saved_nocode_wanted
;
4445 /* functions names must be treated as function pointers,
4446 except for unary '&' and sizeof. Since we consider that
4447 functions are not lvalues, we only have to handle it
4448 there and in function calls. */
4449 /* arrays can also be used although they are not lvalues */
4450 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4451 !(vtop
->type
.t
& VT_ARRAY
))
4453 mk_pointer(&vtop
->type
);
4459 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4461 boolean
.t
= VT_BOOL
;
4463 vtop
->c
.i
= !vtop
->c
.i
;
4464 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4468 vseti(VT_JMP
, gvtst(1, 0));
4480 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4481 tcc_error("pointer not accepted for unary plus");
4482 /* In order to force cast, we add zero, except for floating point
4483 where we really need an noop (otherwise -0.0 will be transformed
4485 if (!is_float(vtop
->type
.t
)) {
4496 expr_type(&type
, 1); // Perform a in_sizeof = 0;
4497 size
= type_size(&type
, &align
);
4498 if (t
== TOK_SIZEOF
) {
4499 if (!(type
.t
& VT_VLA
)) {
4501 tcc_error("sizeof applied to an incomplete type");
4504 vla_runtime_type_size(&type
, &align
);
4509 vtop
->type
.t
|= VT_UNSIGNED
;
4512 case TOK_builtin_expect
:
4513 /* __builtin_expect is a no-op for now */
4514 parse_builtin_params(0, "ee");
4517 case TOK_builtin_types_compatible_p
:
4518 parse_builtin_params(0, "tt");
4519 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4520 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4521 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4525 case TOK_builtin_choose_expr
:
4552 case TOK_builtin_constant_p
:
4553 parse_builtin_params(1, "e");
4554 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4558 case TOK_builtin_frame_address
:
4559 case TOK_builtin_return_address
:
4566 if (tok
!= TOK_CINT
) {
4567 tcc_error("%s only takes positive integers",
4568 tok1
== TOK_builtin_return_address
?
4569 "__builtin_return_address" :
4570 "__builtin_frame_address");
4572 level
= (uint32_t)tokc
.i
;
4577 vset(&type
, VT_LOCAL
, 0); /* local frame */
4579 mk_pointer(&vtop
->type
);
4580 indir(); /* -> parent frame */
4582 if (tok1
== TOK_builtin_return_address
) {
4583 // assume return address is just above frame pointer on stack
4586 mk_pointer(&vtop
->type
);
4591 #ifdef TCC_TARGET_X86_64
4592 #ifdef TCC_TARGET_PE
4593 case TOK_builtin_va_start
:
4594 parse_builtin_params(0, "ee");
4595 r
= vtop
->r
& VT_VALMASK
;
4599 tcc_error("__builtin_va_start expects a local variable");
4601 vtop
->type
= char_pointer_type
;
4606 case TOK_builtin_va_arg_types
:
4607 parse_builtin_params(0, "t");
4608 vpushi(classify_x86_64_va_arg(&vtop
->type
));
4615 #ifdef TCC_TARGET_ARM64
4616 case TOK___va_start
: {
4617 parse_builtin_params(0, "ee");
4621 vtop
->type
.t
= VT_VOID
;
4624 case TOK___va_arg
: {
4626 parse_builtin_params(0, "et");
4634 case TOK___arm64_clear_cache
: {
4635 parse_builtin_params(0, "ee");
4638 vtop
->type
.t
= VT_VOID
;
4642 /* pre operations */
4653 t
= vtop
->type
.t
& VT_BTYPE
;
4655 /* In IEEE negate(x) isn't subtract(0,x), but rather
4659 vtop
->c
.f
= -1.0 * 0.0;
4660 else if (t
== VT_DOUBLE
)
4661 vtop
->c
.d
= -1.0 * 0.0;
4663 vtop
->c
.ld
= -1.0 * 0.0;
4671 goto tok_identifier
;
4673 /* allow to take the address of a label */
4674 if (tok
< TOK_UIDENT
)
4675 expect("label identifier");
4676 s
= label_find(tok
);
4678 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
4680 if (s
->r
== LABEL_DECLARED
)
4681 s
->r
= LABEL_FORWARD
;
4684 s
->type
.t
= VT_VOID
;
4685 mk_pointer(&s
->type
);
4686 s
->type
.t
|= VT_STATIC
;
4688 vpushsym(&s
->type
, s
);
4692 // special qnan , snan and infinity values
4694 vpush64(VT_DOUBLE
, 0x7ff8000000000000ULL
);
4698 vpush64(VT_DOUBLE
, 0x7ff0000000000001ULL
);
4702 vpush64(VT_DOUBLE
, 0x7ff0000000000000ULL
);
4711 expect("identifier");
4714 const char *name
= get_tok_str(t
, NULL
);
4716 tcc_error("'%s' undeclared", name
);
4717 /* for simple function calls, we tolerate undeclared
4718 external reference to int() function */
4719 if (tcc_state
->warn_implicit_function_declaration
4720 #ifdef TCC_TARGET_PE
4721 /* people must be warned about using undeclared WINAPI functions
4722 (which usually start with uppercase letter) */
4723 || (name
[0] >= 'A' && name
[0] <= 'Z')
4726 tcc_warning("implicit declaration of function '%s'", name
);
4727 s
= external_global_sym(t
, &func_old_type
, 0);
4731 /* A symbol that has a register is a local register variable,
4732 which starts out as VT_LOCAL value. */
4733 if ((r
& VT_VALMASK
) < VT_CONST
)
4734 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
4736 vset(&s
->type
, r
, s
->c
);
4737 /* Point to s as backpointer (even without r&VT_SYM).
4738 Will be used by at least the x86 inline asm parser for
4741 if (vtop
->r
& VT_SYM
) {
4747 /* post operations */
4749 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
4752 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
4755 if (tok
== TOK_ARROW
)
4757 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
4760 /* expect pointer on structure */
4761 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
4762 expect("struct or union");
4763 if (tok
== TOK_CDOUBLE
)
4764 expect("field name");
4766 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
4767 expect("field name");
4768 s
= find_field(&vtop
->type
, tok
);
4770 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
4771 /* add field offset to pointer */
4772 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
4775 /* change type to field type, and set to lvalue */
4776 vtop
->type
= s
->type
;
4777 vtop
->type
.t
|= qualifiers
;
4778 /* an array is never an lvalue */
4779 if (!(vtop
->type
.t
& VT_ARRAY
)) {
4780 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4781 #ifdef CONFIG_TCC_BCHECK
4782 /* if bound checking, the referenced pointer must be checked */
4783 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
4784 vtop
->r
|= VT_MUSTBOUND
;
4788 } else if (tok
== '[') {
4794 } else if (tok
== '(') {
4797 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
4800 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4801 /* pointer test (no array accepted) */
4802 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
4803 vtop
->type
= *pointed_type(&vtop
->type
);
4804 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4808 expect("function pointer");
4811 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
4813 /* get return type */
4816 sa
= s
->next
; /* first parameter */
4817 nb_args
= regsize
= 0;
4819 /* compute first implicit argument if a structure is returned */
4820 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
4821 variadic
= (s
->c
== FUNC_ELLIPSIS
);
4822 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
4823 &ret_align
, ®size
);
4825 /* get some space for the returned structure */
4826 size
= type_size(&s
->type
, &align
);
4827 #ifdef TCC_TARGET_ARM64
4828 /* On arm64, a small struct is return in registers.
4829 It is much easier to write it to memory if we know
4830 that we are allowed to write some extra bytes, so
4831 round the allocated space up to a power of 2: */
4833 while (size
& (size
- 1))
4834 size
= (size
| (size
- 1)) + 1;
4836 loc
= (loc
- size
) & -align
;
4838 ret
.r
= VT_LOCAL
| VT_LVAL
;
4839 /* pass it as 'int' to avoid structure arg passing
4841 vseti(VT_LOCAL
, loc
);
4851 /* return in register */
4852 if (is_float(ret
.type
.t
)) {
4853 ret
.r
= reg_fret(ret
.type
.t
);
4854 #ifdef TCC_TARGET_X86_64
4855 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
4859 #ifndef TCC_TARGET_ARM64
4860 #ifdef TCC_TARGET_X86_64
4861 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
4863 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
4874 gfunc_param_typed(s
, sa
);
4884 tcc_error("too few arguments to function");
4886 gfunc_call(nb_args
);
4889 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
4890 vsetc(&ret
.type
, r
, &ret
.c
);
4891 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
4894 /* handle packed struct return */
4895 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
4898 size
= type_size(&s
->type
, &align
);
4899 /* We're writing whole regs often, make sure there's enough
4900 space. Assume register size is power of 2. */
4901 if (regsize
> align
)
4903 loc
= (loc
- size
) & -align
;
4907 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
4911 if (--ret_nregs
== 0)
4915 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
4923 ST_FUNC
void expr_prod(void)
4928 while (tok
== '*' || tok
== '/' || tok
== '%') {
4936 ST_FUNC
void expr_sum(void)
4941 while (tok
== '+' || tok
== '-') {
4949 static void expr_shift(void)
4954 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
4962 static void expr_cmp(void)
4967 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
4968 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
4976 static void expr_cmpeq(void)
4981 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
4989 static void expr_and(void)
4992 while (tok
== '&') {
4999 static void expr_xor(void)
5002 while (tok
== '^') {
5009 static void expr_or(void)
5012 while (tok
== '|') {
5019 static void expr_land(void)
5022 if (tok
== TOK_LAND
) {
5025 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5033 while (tok
== TOK_LAND
) {
5041 gen_cast(&int_type
);
5049 if (tok
!= TOK_LAND
) {
5062 static void expr_lor(void)
5065 if (tok
== TOK_LOR
) {
5068 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5076 while (tok
== TOK_LOR
) {
5084 gen_cast(&int_type
);
5092 if (tok
!= TOK_LOR
) {
5105 /* Assuming vtop is a value used in a conditional context
5106 (i.e. compared with zero) return 0 if it's false, 1 if
5107 true and -1 if it can't be statically determined. */
5108 static int condition_3way(void)
5111 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5112 (!(vtop
->r
& VT_SYM
) ||
5113 !(vtop
->sym
->type
.t
& VT_WEAK
))) {
5115 boolean
.t
= VT_BOOL
;
5124 static void expr_cond(void)
5126 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5128 CType type
, type1
, type2
;
5133 c
= condition_3way();
5134 g
= (tok
== ':' && gnu_ext
);
5136 /* needed to avoid having different registers saved in
5138 if (is_float(vtop
->type
.t
)) {
5140 #ifdef TCC_TARGET_X86_64
5141 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5166 sv
= *vtop
; /* save value to handle it later */
5167 vtop
--; /* no vpop so that FP stack is not flushed */
5185 bt1
= t1
& VT_BTYPE
;
5187 bt2
= t2
& VT_BTYPE
;
5188 /* cast operands to correct type according to ISOC rules */
5189 if (is_float(bt1
) || is_float(bt2
)) {
5190 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5191 type
.t
= VT_LDOUBLE
;
5193 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5198 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5199 /* cast to biggest op */
5201 /* convert to unsigned if it does not fit in a long long */
5202 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5203 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5204 type
.t
|= VT_UNSIGNED
;
5205 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5206 /* If one is a null ptr constant the result type
5208 if (is_null_pointer (vtop
))
5210 else if (is_null_pointer (&sv
))
5212 /* XXX: test pointer compatibility, C99 has more elaborate
5216 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
5217 /* XXX: test function pointer compatibility */
5218 type
= bt1
== VT_FUNC
? type1
: type2
;
5219 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5220 /* XXX: test structure compatibility */
5221 type
= bt1
== VT_STRUCT
? type1
: type2
;
5222 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5223 /* NOTE: as an extension, we accept void on only one side */
5226 /* integer operations */
5228 /* convert to unsigned if it does not fit in an integer */
5229 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5230 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5231 type
.t
|= VT_UNSIGNED
;
5233 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5234 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5235 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5238 /* now we convert second operand */
5242 mk_pointer(&vtop
->type
);
5244 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5249 if (is_float(type
.t
)) {
5251 #ifdef TCC_TARGET_X86_64
5252 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5256 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5257 /* for long longs, we use fixed registers to avoid having
5258 to handle a complicated move */
5269 /* this is horrible, but we must also convert first
5275 mk_pointer(&vtop
->type
);
5277 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5283 move_reg(r2
, r1
, type
.t
);
5293 static void expr_eq(void)
5299 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5300 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5301 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5316 ST_FUNC
void gexpr(void)
5327 /* parse a constant expression and return value in vtop. */
5328 static void expr_const1(void)
5335 /* parse an integer constant and return its value. */
5336 static inline int64_t expr_const64(void)
5340 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5341 expect("constant expression");
5347 /* parse an integer constant and return its value.
5348 Complain if it doesn't fit 32bit (signed or unsigned). */
5349 ST_FUNC
int expr_const(void)
5352 int64_t wc
= expr_const64();
5354 if (c
!= wc
&& (unsigned)c
!= wc
)
5355 tcc_error("constant exceeds 32 bit");
5359 /* return the label token if current token is a label, otherwise
5361 static int is_label(void)
5365 /* fast test first */
5366 if (tok
< TOK_UIDENT
)
5368 /* no need to save tokc because tok is an identifier */
5374 unget_tok(last_tok
);
5379 #ifndef TCC_TARGET_ARM64
5380 static void gfunc_return(CType
*func_type
)
5382 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5383 CType type
, ret_type
;
5384 int ret_align
, ret_nregs
, regsize
;
5385 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5386 &ret_align
, ®size
);
5387 if (0 == ret_nregs
) {
5388 /* if returning structure, must copy it to implicit
5389 first pointer arg location */
5392 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5395 /* copy structure value to pointer */
5398 /* returning structure packed into registers */
5399 int r
, size
, addr
, align
;
5400 size
= type_size(func_type
,&align
);
5401 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5402 (vtop
->c
.i
& (ret_align
-1)))
5403 && (align
& (ret_align
-1))) {
5404 loc
= (loc
- size
) & -ret_align
;
5407 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5411 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5413 vtop
->type
= ret_type
;
5414 if (is_float(ret_type
.t
))
5415 r
= rc_fret(ret_type
.t
);
5426 if (--ret_nregs
== 0)
5428 /* We assume that when a structure is returned in multiple
5429 registers, their classes are consecutive values of the
5432 vtop
->c
.i
+= regsize
;
5436 } else if (is_float(func_type
->t
)) {
5437 gv(rc_fret(func_type
->t
));
5441 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5445 static int case_cmp(const void *pa
, const void *pb
)
5447 int64_t a
= (*(struct case_t
**) pa
)->v1
;
5448 int64_t b
= (*(struct case_t
**) pb
)->v1
;
5449 return a
< b
? -1 : a
> b
;
5452 static void gcase(struct case_t
**base
, int len
, int *bsym
)
5456 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
5474 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
5476 gcase(base
, len
/2, bsym
);
5477 if (cur_switch
->def_sym
)
5478 gjmp_addr(cur_switch
->def_sym
);
5480 *bsym
= gjmp(*bsym
);
5484 base
+= e
; len
-= e
;
5494 if (p
->v1
== p
->v2
) {
5496 gtst_addr(0, p
->sym
);
5506 gtst_addr(0, p
->sym
);
5512 static void block(int *bsym
, int *csym
, int is_expr
)
5514 int a
, b
, c
, d
, cond
;
5517 /* generate line number info */
5518 if (tcc_state
->do_debug
)
5519 tcc_debug_line(tcc_state
);
5522 /* default return value is (void) */
5524 vtop
->type
.t
= VT_VOID
;
5527 if (tok
== TOK_IF
) {
5529 int saved_nocode_wanted
= nocode_wanted
;
5534 cond
= condition_3way();
5540 nocode_wanted
|= 0x20000000;
5541 block(bsym
, csym
, 0);
5543 nocode_wanted
= saved_nocode_wanted
;
5545 if (c
== TOK_ELSE
) {
5550 nocode_wanted
|= 0x20000000;
5551 block(bsym
, csym
, 0);
5552 gsym(d
); /* patch else jmp */
5554 nocode_wanted
= saved_nocode_wanted
;
5557 } else if (tok
== TOK_WHILE
) {
5558 int saved_nocode_wanted
;
5559 nocode_wanted
&= ~0x20000000;
5569 saved_nocode_wanted
= nocode_wanted
;
5571 nocode_wanted
= saved_nocode_wanted
;
5576 } else if (tok
== '{') {
5578 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
5581 /* record local declaration stack position */
5583 llabel
= local_label_stack
;
5586 /* handle local labels declarations */
5587 if (tok
== TOK_LABEL
) {
5590 if (tok
< TOK_UIDENT
)
5591 expect("label identifier");
5592 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
5602 while (tok
!= '}') {
5603 if ((a
= is_label()))
5610 block(bsym
, csym
, is_expr
);
5613 /* pop locally defined labels */
5614 label_pop(&local_label_stack
, llabel
);
5615 /* pop locally defined symbols */
5617 /* In the is_expr case (a statement expression is finished here),
5618 vtop might refer to symbols on the local_stack. Either via the
5619 type or via vtop->sym. We can't pop those nor any that in turn
5620 might be referred to. To make it easier we don't roll back
5621 any symbols in that case; some upper level call to block() will
5622 do that. We do have to remove such symbols from the lookup
5623 tables, though. sym_pop will do that. */
5624 sym_pop(&local_stack
, s
, is_expr
);
5626 /* Pop VLA frames and restore stack pointer if required */
5627 if (vlas_in_scope
> saved_vlas_in_scope
) {
5628 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
5631 vlas_in_scope
= saved_vlas_in_scope
;
5634 } else if (tok
== TOK_RETURN
) {
5638 gen_assign_cast(&func_vt
);
5639 gfunc_return(&func_vt
);
5642 /* jump unless last stmt in top-level block */
5643 if (tok
!= '}' || local_scope
!= 1)
5645 nocode_wanted
|= 0x20000000;
5646 } else if (tok
== TOK_BREAK
) {
5649 tcc_error("cannot break");
5650 *bsym
= gjmp(*bsym
);
5653 nocode_wanted
|= 0x20000000;
5654 } else if (tok
== TOK_CONTINUE
) {
5657 tcc_error("cannot continue");
5658 vla_sp_restore_root();
5659 *csym
= gjmp(*csym
);
5662 } else if (tok
== TOK_FOR
) {
5664 int saved_nocode_wanted
;
5665 nocode_wanted
&= ~0x20000000;
5671 /* c99 for-loop init decl? */
5672 if (!decl0(VT_LOCAL
, 1, NULL
)) {
5673 /* no, regular for-loop init expr */
5699 saved_nocode_wanted
= nocode_wanted
;
5701 nocode_wanted
= saved_nocode_wanted
;
5706 sym_pop(&local_stack
, s
, 0);
5709 if (tok
== TOK_DO
) {
5710 int saved_nocode_wanted
;
5711 nocode_wanted
&= ~0x20000000;
5717 saved_nocode_wanted
= nocode_wanted
;
5725 nocode_wanted
= saved_nocode_wanted
;
5730 if (tok
== TOK_SWITCH
) {
5731 struct switch_t
*saved
, sw
;
5732 int saved_nocode_wanted
= nocode_wanted
;
5738 switchval
= *vtop
--;
5740 b
= gjmp(0); /* jump to first case */
5741 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
5745 nocode_wanted
= saved_nocode_wanted
;
5746 a
= gjmp(a
); /* add implicit break */
5749 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
5750 for (b
= 1; b
< sw
.n
; b
++)
5751 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
5752 tcc_error("duplicate case value");
5753 /* Our switch table sorting is signed, so the compared
5754 value needs to be as well when it's 64bit. */
5755 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5756 switchval
.type
.t
&= ~VT_UNSIGNED
;
5758 gcase(sw
.p
, sw
.n
, &a
);
5761 gjmp_addr(sw
.def_sym
);
5762 dynarray_reset(&sw
.p
, &sw
.n
);
5767 if (tok
== TOK_CASE
) {
5768 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
5771 nocode_wanted
&= ~0x20000000;
5773 cr
->v1
= cr
->v2
= expr_const64();
5774 if (gnu_ext
&& tok
== TOK_DOTS
) {
5776 cr
->v2
= expr_const64();
5777 if (cr
->v2
< cr
->v1
)
5778 tcc_warning("empty case range");
5781 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
5784 goto block_after_label
;
5786 if (tok
== TOK_DEFAULT
) {
5791 if (cur_switch
->def_sym
)
5792 tcc_error("too many 'default'");
5793 cur_switch
->def_sym
= ind
;
5795 goto block_after_label
;
5797 if (tok
== TOK_GOTO
) {
5799 if (tok
== '*' && gnu_ext
) {
5803 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
5806 } else if (tok
>= TOK_UIDENT
) {
5807 s
= label_find(tok
);
5808 /* put forward definition if needed */
5810 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5812 if (s
->r
== LABEL_DECLARED
)
5813 s
->r
= LABEL_FORWARD
;
5815 vla_sp_restore_root();
5816 if (s
->r
& LABEL_FORWARD
)
5817 s
->jnext
= gjmp(s
->jnext
);
5819 gjmp_addr(s
->jnext
);
5822 expect("label identifier");
5825 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
5834 if (s
->r
== LABEL_DEFINED
)
5835 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
5837 s
->r
= LABEL_DEFINED
;
5839 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
5843 /* we accept this, but it is a mistake */
5845 nocode_wanted
&= ~0x20000000;
5847 tcc_warning("deprecated use of label at end of compound statement");
5851 block(bsym
, csym
, is_expr
);
5854 /* expression case */
5869 /* This skips over a stream of tokens containing balanced {} and ()
5870 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
5871 with a '{'). If STR then allocates and stores the skipped tokens
5872 in *STR. This doesn't check if () and {} are nested correctly,
5873 i.e. "({)}" is accepted. */
5874 static void skip_or_save_block(TokenString
**str
)
5876 int braces
= tok
== '{';
5879 *str
= tok_str_alloc();
5881 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';'))) {
5883 if (tok
== TOK_EOF
) {
5884 if (str
|| level
> 0)
5885 tcc_error("unexpected end of file");
5890 tok_str_add_tok(*str
);
5893 if (t
== '{' || t
== '(') {
5895 } else if (t
== '}' || t
== ')') {
5897 if (level
== 0 && braces
&& t
== '}')
5902 tok_str_add(*str
, -1);
5903 tok_str_add(*str
, 0);
5907 #define EXPR_CONST 1
5910 static void parse_init_elem(int expr_type
)
5912 int saved_global_expr
;
5915 /* compound literals must be allocated globally in this case */
5916 saved_global_expr
= global_expr
;
5919 global_expr
= saved_global_expr
;
5920 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
5921 (compound literals). */
5922 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
5923 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
5924 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
5925 #ifdef TCC_TARGET_PE
5926 || (vtop
->type
.t
& VT_IMPORT
)
5929 tcc_error("initializer element is not constant");
5937 /* put zeros for variable based init */
5938 static void init_putz(Section
*sec
, unsigned long c
, int size
)
5941 /* nothing to do because globals are already set to zero */
5943 vpush_global_sym(&func_old_type
, TOK_memset
);
5945 #ifdef TCC_TARGET_ARM
5956 /* t is the array or struct type. c is the array or struct
5957 address. cur_field is the pointer to the current
5958 field, for arrays the 'c' member contains the current start
5959 index. 'size_only' is true if only size info is needed (only used
5960 in arrays). al contains the already initialized length of the
5961 current container (starting at c). This returns the new length of that. */
5962 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
5963 Sym
**cur_field
, int size_only
, int al
)
5966 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
5967 unsigned long corig
= c
;
5971 if (gnu_ext
&& (l
= is_label()) != 0)
5973 /* NOTE: we only support ranges for last designator */
5974 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
5976 if (!(type
->t
& VT_ARRAY
))
5977 expect("array type");
5979 index
= index_last
= expr_const();
5980 if (tok
== TOK_DOTS
&& gnu_ext
) {
5982 index_last
= expr_const();
5986 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
5988 tcc_error("invalid index");
5990 (*cur_field
)->c
= index_last
;
5991 type
= pointed_type(type
);
5992 elem_size
= type_size(type
, &align
);
5993 c
+= index
* elem_size
;
5994 nb_elems
= index_last
- index
+ 1;
6000 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6001 expect("struct/union type");
6002 f
= find_field(type
, l
);
6015 } else if (!gnu_ext
) {
6019 if (type
->t
& VT_ARRAY
) {
6020 index
= (*cur_field
)->c
;
6021 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6022 tcc_error("index too large");
6023 type
= pointed_type(type
);
6024 c
+= index
* type_size(type
, &align
);
6027 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6028 *cur_field
= f
= f
->next
;
6030 tcc_error("too many field init");
6035 /* must put zero in holes (note that doing it that way
6036 ensures that it even works with designators) */
6037 if (!size_only
&& c
- corig
> al
)
6038 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6039 decl_initializer(type
, sec
, c
, 0, size_only
);
6041 /* XXX: make it more general */
6042 if (!size_only
&& nb_elems
> 1) {
6043 unsigned long c_end
;
6048 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6049 for (i
= 1; i
< nb_elems
; i
++) {
6050 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6056 c_end
= c
+ nb_elems
* elem_size
;
6057 if (c_end
> sec
->data_allocated
)
6058 section_realloc(sec
, c_end
);
6059 src
= sec
->data
+ c
;
6061 for(i
= 1; i
< nb_elems
; i
++) {
6063 memcpy(dst
, src
, elem_size
);
6067 c
+= nb_elems
* type_size(type
, &align
);
6073 /* store a value or an expression directly in global data or in local array */
6074 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6076 int bt
, bit_pos
, bit_size
;
6078 unsigned long long bit_mask
;
6082 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6086 /* XXX: not portable */
6087 /* XXX: generate error if incorrect relocation */
6088 gen_assign_cast(&dtype
);
6089 bt
= type
->t
& VT_BTYPE
;
6090 size
= type_size(type
, &align
);
6091 section_reserve(sec
, c
+ size
);
6092 ptr
= sec
->data
+ c
;
6093 /* XXX: make code faster ? */
6094 if (!(type
->t
& VT_BITFIELD
)) {
6096 bit_size
= PTR_SIZE
* 8;
6099 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
6100 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
6101 bit_mask
= (1LL << bit_size
) - 1;
6103 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6104 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6105 /* XXX This rejects compound literals like
6106 '(void *){ptr}'. The problem is that '&sym' is
6107 represented the same way, which would be ruled out
6108 by the SYM_FIRST_ANOM check above, but also '"string"'
6109 in 'char *p = "string"' is represented the same
6110 with the type being VT_PTR and the symbol being an
6111 anonymous one. That is, there's no difference in vtop
6112 between '(void *){x}' and '&(void *){x}'. Ignore
6113 pointer typed entities here. Hopefully no real code
6114 will every use compound literals with scalar type. */
6115 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6116 /* These come from compound literals, memcpy stuff over. */
6120 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[vtop
->sym
->c
];
6121 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6122 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6124 /* We need to copy over all memory contents, and that
6125 includes relocations. Use the fact that relocs are
6126 created it order, so look from the end of relocs
6127 until we hit one before the copied region. */
6128 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6129 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6130 while (num_relocs
--) {
6132 if (rel
->r_offset
>= esym
->st_value
+ size
)
6134 if (rel
->r_offset
< esym
->st_value
)
6136 /* Note: if the same fields are initialized multiple
6137 times (possible with designators) then we possibly
6138 add multiple relocations for the same offset here.
6139 That would lead to wrong code, the last reloc needs
6140 to win. We clean this up later after the whole
6141 initializer is parsed. */
6142 put_elf_reloca(symtab_section
, sec
,
6143 c
+ rel
->r_offset
- esym
->st_value
,
6144 ELFW(R_TYPE
)(rel
->r_info
),
6145 ELFW(R_SYM
)(rel
->r_info
),
6155 if ((vtop
->r
& VT_SYM
) &&
6161 (bt
== VT_LLONG
&& bit_size
!= 64) ||
6165 (bt
== VT_INT
&& bit_size
!= 32)
6168 tcc_error("initializer element is not computable at load time");
6170 /* XXX: when cross-compiling we assume that each type has the
6171 same representation on host and target, which is likely to
6172 be wrong in the case of long double */
6174 vtop
->c
.i
= (vtop
->c
.i
!= 0);
6176 *(char *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6179 *(short *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6182 *(float*)ptr
= vtop
->c
.f
;
6185 *(double *)ptr
= vtop
->c
.d
;
6188 if (sizeof(long double) == LDOUBLE_SIZE
)
6189 *(long double *)ptr
= vtop
->c
.ld
;
6190 else if (sizeof(double) == LDOUBLE_SIZE
)
6191 *(double *)ptr
= (double)vtop
->c
.ld
;
6192 #if (defined __i386__ || defined __x86_64__) && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
6193 else if (sizeof (long double) >= 10)
6194 memcpy(memset(ptr
, 0, LDOUBLE_SIZE
), &vtop
->c
.ld
, 10);
6196 else if (sizeof (long double) == sizeof (double))
6197 __asm__("fldl %1\nfstpt %0\n" : "=m"
6198 (memset(ptr
, 0, LDOUBLE_SIZE
), ptr
) : "m" (vtop
->c
.ld
));
6202 tcc_error("can't cross compile long double constants");
6206 *(long long *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6213 addr_t val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6215 if (vtop
->r
& VT_SYM
)
6216 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6218 *(addr_t
*)ptr
|= val
;
6220 if (vtop
->r
& VT_SYM
)
6221 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6222 *(addr_t
*)ptr
|= val
;
6228 int val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6230 if (vtop
->r
& VT_SYM
)
6231 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6235 if (vtop
->r
& VT_SYM
)
6236 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6245 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6252 /* 't' contains the type and storage info. 'c' is the offset of the
6253 object in section 'sec'. If 'sec' is NULL, it means stack based
6254 allocation. 'first' is true if array '{' must be read (multi
6255 dimension implicit array init handling). 'size_only' is true if
6256 size only evaluation is wanted (only for arrays). */
6257 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6258 int first
, int size_only
)
6260 int len
, n
, no_oblock
, nb
, i
;
6267 /* If we currently are at an '}' or ',' we have read an initializer
6268 element in one of our callers, and not yet consumed it. */
6269 have_elem
= tok
== '}' || tok
== ',';
6270 if (!have_elem
&& tok
!= '{' &&
6271 /* In case of strings we have special handling for arrays, so
6272 don't consume them as initializer value (which would commit them
6273 to some anonymous symbol). */
6274 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6276 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6281 !(type
->t
& VT_ARRAY
) &&
6282 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6283 The source type might have VT_CONSTANT set, which is
6284 of course assignable to non-const elements. */
6285 is_compatible_parameter_types(type
, &vtop
->type
)) {
6286 init_putv(type
, sec
, c
);
6287 } else if (type
->t
& VT_ARRAY
) {
6290 t1
= pointed_type(type
);
6291 size1
= type_size(t1
, &align1
);
6294 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6297 tcc_error("character array initializer must be a literal,"
6298 " optionally enclosed in braces");
6303 /* only parse strings here if correct type (otherwise: handle
6304 them as ((w)char *) expressions */
6305 if ((tok
== TOK_LSTR
&&
6306 #ifdef TCC_TARGET_PE
6307 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6309 (t1
->t
& VT_BTYPE
) == VT_INT
6311 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6313 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6316 /* compute maximum number of chars wanted */
6318 cstr_len
= tokc
.str
.size
;
6320 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6323 if (n
>= 0 && nb
> (n
- len
))
6327 tcc_warning("initializer-string for array is too long");
6328 /* in order to go faster for common case (char
6329 string in global variable, we handle it
6331 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6332 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
6336 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6338 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6340 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
6347 /* only add trailing zero if enough storage (no
6348 warning in this case since it is standard) */
6349 if (n
< 0 || len
< n
) {
6352 init_putv(t1
, sec
, c
+ (len
* size1
));
6363 while (tok
!= '}' || have_elem
) {
6364 len
= decl_designator(type
, sec
, c
, &f
, size_only
, len
);
6366 if (type
->t
& VT_ARRAY
) {
6368 /* special test for multi dimensional arrays (may not
6369 be strictly correct if designators are used at the
6371 if (no_oblock
&& len
>= n
*size1
)
6374 if (s
->type
.t
== TOK_UNION
)
6378 if (no_oblock
&& f
== NULL
)
6387 /* put zeros at the end */
6388 if (!size_only
&& len
< n
*size1
)
6389 init_putz(sec
, c
+ len
, n
*size1
- len
);
6392 /* patch type size if needed, which happens only for array types */
6394 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
6395 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6398 if (first
|| tok
== '{') {
6406 } else if (tok
== '{') {
6408 decl_initializer(type
, sec
, c
, first
, size_only
);
6410 } else if (size_only
) {
6411 /* If we supported only ISO C we wouldn't have to accept calling
6412 this on anything than an array size_only==1 (and even then
6413 only on the outermost level, so no recursion would be needed),
6414 because initializing a flex array member isn't supported.
6415 But GNU C supports it, so we need to recurse even into
6416 subfields of structs and arrays when size_only is set. */
6417 /* just skip expression */
6418 skip_or_save_block(NULL
);
6421 /* This should happen only when we haven't parsed
6422 the init element above for fear of committing a
6423 string constant to memory too early. */
6424 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
6425 expect("string constant");
6426 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6428 init_putv(type
, sec
, c
);
6432 /* parse an initializer for type 't' if 'has_init' is non zero, and
6433 allocate space in local or global data space ('r' is either
6434 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6435 variable 'v' of scope 'scope' is declared before initializers
6436 are parsed. If 'v' is zero, then a reference to the new object
6437 is put in the value stack. If 'has_init' is 2, a special parsing
6438 is done to handle string constants. */
6439 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
6440 int has_init
, int v
, int scope
)
6442 int size
, align
, addr
;
6443 ParseState saved_parse_state
= {0};
6444 TokenString
*init_str
= NULL
;
6446 Sym
*flexible_array
;
6448 flexible_array
= NULL
;
6449 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6450 Sym
*field
= type
->ref
->next
;
6453 field
= field
->next
;
6454 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
6455 flexible_array
= field
;
6459 size
= type_size(type
, &align
);
6460 /* If unknown size, we must evaluate it before
6461 evaluating initializers because
6462 initializers can generate global data too
6463 (e.g. string pointers or ISOC99 compound
6464 literals). It also simplifies local
6465 initializers handling */
6466 if (size
< 0 || (flexible_array
&& has_init
)) {
6468 tcc_error("unknown type size");
6469 /* get all init string */
6470 if (has_init
== 2) {
6471 init_str
= tok_str_alloc();
6472 /* only get strings */
6473 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6474 tok_str_add_tok(init_str
);
6477 tok_str_add(init_str
, -1);
6478 tok_str_add(init_str
, 0);
6480 skip_or_save_block(&init_str
);
6484 save_parse_state(&saved_parse_state
);
6486 begin_macro(init_str
, 1);
6488 decl_initializer(type
, NULL
, 0, 1, 1);
6489 /* prepare second initializer parsing */
6490 macro_ptr
= init_str
->str
;
6493 /* if still unknown size, error */
6494 size
= type_size(type
, &align
);
6496 tcc_error("unknown type size");
6498 /* If there's a flex member and it was used in the initializer
6500 if (flexible_array
&&
6501 flexible_array
->type
.ref
->c
> 0)
6502 size
+= flexible_array
->type
.ref
->c
6503 * pointed_size(&flexible_array
->type
);
6504 /* take into account specified alignment if bigger */
6505 if (ad
->a
.aligned
) {
6506 int speca
= 1 << (ad
->a
.aligned
- 1);
6509 } else if (ad
->a
.packed
) {
6512 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
6514 #ifdef CONFIG_TCC_BCHECK
6515 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
6519 loc
= (loc
- size
) & -align
;
6521 #ifdef CONFIG_TCC_BCHECK
6522 /* handles bounds */
6523 /* XXX: currently, since we do only one pass, we cannot track
6524 '&' operators, so we add only arrays */
6525 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
6527 /* add padding between regions */
6529 /* then add local bound info */
6530 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
6531 bounds_ptr
[0] = addr
;
6532 bounds_ptr
[1] = size
;
6536 /* local variable */
6537 #ifdef CONFIG_TCC_ASM
6538 if (ad
->asm_label
) {
6539 int reg
= asm_parse_regvar(ad
->asm_label
);
6541 r
= (r
& ~VT_VALMASK
) | reg
;
6544 sym_push(v
, type
, r
, addr
);
6546 /* push local reference */
6547 vset(type
, r
, addr
);
6551 if (v
&& scope
== VT_CONST
) {
6552 /* see if the symbol was already defined */
6555 patch_storage(sym
, type
);
6556 if (sym
->type
.t
& VT_EXTERN
) {
6557 /* if the variable is extern, it was not allocated */
6558 sym
->type
.t
&= ~VT_EXTERN
;
6559 /* set array size if it was omitted in extern
6561 if ((sym
->type
.t
& VT_ARRAY
) &&
6562 sym
->type
.ref
->c
< 0 &&
6564 sym
->type
.ref
->c
= type
->ref
->c
;
6565 } else if (!has_init
) {
6566 /* we accept several definitions of the same
6567 global variable. this is tricky, because we
6568 must play with the SHN_COMMON type of the symbol */
6569 /* no init data, we won't add more to the symbol */
6570 update_storage(sym
);
6572 } else if (sym
->c
) {
6574 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
6575 if (esym
->st_shndx
== data_section
->sh_num
)
6576 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6581 /* allocate symbol in corresponding section */
6586 else if (tcc_state
->nocommon
)
6591 addr
= section_add(sec
, size
, align
);
6592 #ifdef CONFIG_TCC_BCHECK
6593 /* add padding if bound check */
6594 if (tcc_state
->do_bounds_check
)
6595 section_add(sec
, 1, 1);
6598 addr
= align
; /* SHN_COMMON is special, symbol value is align */
6599 sec
= common_section
;
6604 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
6605 sym
->asm_label
= ad
->asm_label
;
6607 /* update symbol definition */
6608 put_extern_sym(sym
, sec
, addr
, size
);
6610 /* push global reference */
6611 sym
= get_sym_ref(type
, sec
, addr
, size
);
6612 vpushsym(type
, sym
);
6616 #ifdef CONFIG_TCC_BCHECK
6617 /* handles bounds now because the symbol must be defined
6618 before for the relocation */
6619 if (tcc_state
->do_bounds_check
) {
6622 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
6623 /* then add global bound info */
6624 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
6625 bounds_ptr
[0] = 0; /* relocated */
6626 bounds_ptr
[1] = size
;
6631 if (type
->t
& VT_VLA
) {
6634 /* save current stack pointer */
6635 if (vlas_in_scope
== 0) {
6636 if (vla_sp_root_loc
== -1)
6637 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
6638 gen_vla_sp_save(vla_sp_root_loc
);
6641 vla_runtime_type_size(type
, &a
);
6642 gen_vla_alloc(type
, a
);
6643 gen_vla_sp_save(addr
);
6647 } else if (has_init
) {
6648 size_t oldreloc_offset
= 0;
6649 if (sec
&& sec
->reloc
)
6650 oldreloc_offset
= sec
->reloc
->data_offset
;
6651 decl_initializer(type
, sec
, addr
, 1, 0);
6652 if (sec
&& sec
->reloc
)
6653 squeeze_multi_relocs(sec
, oldreloc_offset
);
6654 /* patch flexible array member size back to -1, */
6655 /* for possible subsequent similar declarations */
6657 flexible_array
->type
.ref
->c
= -1;
6661 /* restore parse state if needed */
6664 restore_parse_state(&saved_parse_state
);
6668 /* parse a function defined by symbol 'sym' and generate its code in
6669 'cur_text_section' */
6670 static void gen_function(Sym
*sym
)
6673 ind
= cur_text_section
->data_offset
;
6674 /* NOTE: we patch the symbol size later */
6675 put_extern_sym(sym
, cur_text_section
, ind
, 0);
6676 funcname
= get_tok_str(sym
->v
, NULL
);
6678 /* Initialize VLA state */
6680 vla_sp_root_loc
= -1;
6681 /* put debug symbol */
6682 tcc_debug_funcstart(tcc_state
, sym
);
6683 /* push a dummy symbol to enable local sym storage */
6684 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
6685 local_scope
= 1; /* for function parameters */
6686 gfunc_prolog(&sym
->type
);
6689 block(NULL
, NULL
, 0);
6693 cur_text_section
->data_offset
= ind
;
6694 label_pop(&global_label_stack
, NULL
);
6695 /* reset local stack */
6697 sym_pop(&local_stack
, NULL
, 0);
6698 /* end of function */
6699 /* patch symbol size */
6700 ((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
].st_size
=
6702 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
6703 /* It's better to crash than to generate wrong code */
6704 cur_text_section
= NULL
;
6705 funcname
= ""; /* for safety */
6706 func_vt
.t
= VT_VOID
; /* for safety */
6707 func_var
= 0; /* for safety */
6708 ind
= 0; /* for safety */
6713 static void gen_inline_functions(TCCState
*s
)
6716 int inline_generated
, i
, ln
;
6717 struct InlineFunc
*fn
;
6719 ln
= file
->line_num
;
6720 /* iterate while inline function are referenced */
6722 inline_generated
= 0;
6723 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
6724 fn
= s
->inline_fns
[i
];
6726 if (sym
&& sym
->c
) {
6727 /* the function was used: generate its code and
6728 convert it to a normal function */
6731 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
6732 sym
->type
.t
&= ~VT_INLINE
;
6734 begin_macro(fn
->func_str
, 1);
6736 cur_text_section
= text_section
;
6740 inline_generated
= 1;
6743 if (!inline_generated
)
6746 file
->line_num
= ln
;
/* Release the token strings saved for inline functions that were never
   emitted, then free the InlineFunc array itself.
   NOTE(review): a few original lines (e.g. the declaration of 'i') are
   elided in this excerpt. */
6749 ST_FUNC
void free_inline_functions(TCCState
*s
)
6752 /* free tokens of unused inline functions */
6753 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
6754 struct InlineFunc
*fn
= s
->inline_fns
[i
];
/* free the saved token stream of each recorded inline function */
6756 tok_str_free(fn
->func_str
);
/* release the dynarray of InlineFunc pointers */
6758 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
6761 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
6762 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* Parse a sequence of declarations/definitions at the current scope.
   NOTE(review): a large number of original lines are elided in this
   excerpt (local declarations, braces, and several statements); the
   comments below annotate only the statements that are visible. */
6763 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
/* no base type could be parsed: handle the non-declaration cases */
6771 if (!parse_btype(&btype
, &ad
)) {
6772 if (is_for_loop_init
)
6774 /* skip redundant ';' if not in old parameter decl scope */
6775 if (tok
== ';' && l
!= VT_CMP
) {
/* top-level asm("...") statement is only accepted at file scope */
6779 if (l
== VT_CONST
&&
6780 (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
6781 /* global asm block */
6785 /* special test for old K&R protos without explicit int
6786 type. Only accepted when defining global data */
6787 if (l
!= VT_CONST
|| tok
< TOK_UIDENT
)
/* bare struct/union/enum declaration with no declarator */
6791 if (((btype
.t
& VT_BTYPE
) == VT_ENUM
||
6792 (btype
.t
& VT_BTYPE
) == VT_STRUCT
) &&
6794 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
6795 int v
= btype
.ref
->v
;
/* anonymous tag (auto-generated name) that declares nothing */
6796 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
6797 tcc_warning("unnamed struct/union that defines no instances");
6802 while (1) { /* iterate thru each declaration */
6804 /* If the base type itself was an array type of unspecified
6805 size (like in 'typedef int arr[]; arr x = {1};') then
6806 we will overwrite the unknown size by the real one for
6807 this decl. We need to unshare the ref symbol holding
6809 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
/* clone the array ref so the typedef's own ref stays unsized */
6810 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
/* parse the declarator: fills in 'type' and the identifier 'v' */
6812 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed type (enabled on an elided line) */
6816 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
6817 printf("type = '%s'\n", buf
);
6820 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
6821 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
6822 tcc_error("function without file scope cannot be static");
6824 /* if old style function prototype, we accept a
/* recurse with VT_CMP to read the K&R parameter declarations */
6827 if (sym
->c
== FUNC_OLD
&& l
== VT_CONST
)
6828 decl0(VT_CMP
, 0, sym
);
/* GNU extension: asm("name") label after the declarator */
6831 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
6832 ad
.asm_label
= asm_label_instr();
6833 /* parse one last attribute list, after asm label */
6834 parse_attribute(&ad
);
6841 #ifdef TCC_TARGET_PE
/* PE: map dllimport/dllexport attributes onto VT_IMPORT/VT_EXPORT */
6842 if (ad
.a
.func_import
|| ad
.a
.func_export
) {
6843 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
6844 tcc_error("cannot have dll linkage with static or typedef");
6845 if (ad
.a
.func_export
)
6846 type
.t
|= VT_EXPORT
;
6847 else if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
6848 type
.t
|= VT_IMPORT
|VT_EXTERN
;
/* fold ELF visibility (default/hidden/...) into the type word */
6851 type
.t
|= ad
.a
.visibility
<< VT_VIS_SHIFT
;
6855 tcc_error("cannot use local functions");
6856 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
6857 expect("function definition");
6859 /* reject abstract declarators in function definition
6860 make old style params without decl have int type */
6862 while ((sym
= sym
->next
) != NULL
) {
6863 if (!(sym
->v
& ~SYM_FIELD
))
6864 expect("identifier");
/* K&R parameter declared without a type: default to int */
6865 if (sym
->type
.t
== VT_VOID
)
6866 sym
->type
= int_type
;
6869 /* XXX: cannot do better now: convert extern line to static inline */
6870 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
6871 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
/* a previous declaration of this name exists: merge attributes */
6876 if ((sym
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6879 ref
= sym
->type
.ref
;
6881 /* use func_call from prototype if not defined */
6882 if (ref
->a
.func_call
!= FUNC_CDECL
6883 && type
.ref
->a
.func_call
== FUNC_CDECL
)
6884 type
.ref
->a
.func_call
= ref
->a
.func_call
;
6886 /* use static from prototype */
6887 if (sym
->type
.t
& VT_STATIC
)
6888 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
6890 /* If the definition has no visibility use the
6891 one from prototype. */
6892 if (! (type
.t
& VT_VIS_MASK
))
6893 type
.t
|= sym
->type
.t
& VT_VIS_MASK
;
6895 /* apply other storage attributes from prototype */
6896 type
.t
|= sym
->type
.t
& (VT_EXPORT
|VT_WEAK
);
6898 if (!is_compatible_types(&sym
->type
, &type
)) {
6900 tcc_error("incompatible types for redefinition of '%s'",
6901 get_tok_str(v
, NULL
));
/* func_body already set means a body was already emitted */
6903 if (ref
->a
.func_body
)
6904 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6905 /* if symbol is already defined, then put complete type */
6909 /* put function symbol */
6910 sym
= global_identifier_push(v
, type
.t
, 0);
6911 sym
->type
.ref
= type
.ref
;
/* mark the prototype's ref as having a body now */
6914 sym
->type
.ref
->a
.func_body
= 1;
6915 sym
->r
= VT_SYM
| VT_CONST
;
6917 /* static inline functions are just recorded as a kind
6918 of macro. Their code will be emitted at the end of
6919 the compilation unit only if they are used */
6920 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
6921 (VT_INLINE
| VT_STATIC
)) {
6922 struct InlineFunc
*fn
;
6923 const char *filename
;
6925 filename
= file
? file
->filename
: "";
/* InlineFunc has a trailing flexible filename buffer, hence
   the strlen(filename) added to the allocation size */
6926 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
6927 strcpy(fn
->filename
, filename
);
/* save the body tokens instead of compiling them now */
6929 skip_or_save_block(&fn
->func_str
);
6930 dynarray_add(&tcc_state
->inline_fns
,
6931 &tcc_state
->nb_inline_fns
, fn
);
6933 /* compute text section */
/* honor __attribute__((section("..."))) if one was given */
6934 cur_text_section
= ad
.section
;
6935 if (!cur_text_section
)
6936 cur_text_section
= text_section
;
6942 /* find parameter in function parameter list */
/* VT_CMP mode: this declaration must name one of func_sym's params */
6943 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
6944 if ((sym
->v
& ~SYM_FIELD
) == v
)
6946 tcc_error("declaration for parameter '%s' but no such parameter",
6947 get_tok_str(v
, NULL
));
6949 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
6950 tcc_error("storage class specified for '%s'",
6951 get_tok_str(v
, NULL
));
/* non-VT_VOID means this parameter was already given a type */
6952 if (sym
->type
.t
!= VT_VOID
)
6953 tcc_error("redefinition of parameter '%s'",
6954 get_tok_str(v
, NULL
));
/* apply array->pointer and function->pointer parameter decay */
6955 convert_parameter_type(&type
);
6957 } else if (type
.t
& VT_TYPEDEF
) {
6958 /* save typedefed type */
6959 /* XXX: test storage specifiers ? */
/* redefinition of a typedef in the same scope must be compatible */
6961 if (sym
&& sym
->scope
== local_scope
) {
6962 if (!is_compatible_types(&sym
->type
, &type
)
6963 || !(sym
->type
.t
& VT_TYPEDEF
))
6964 tcc_error("incompatible redefinition of '%s'",
6965 get_tok_str(v
, NULL
));
6968 sym
= sym_push(v
, &type
, 0, 0);
6973 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
6974 /* external function definition */
6975 /* specific case for func_call attribute */
6977 } else if (!(type
.t
& VT_ARRAY
)) {
6978 /* not lvalue if array */
6979 r
|= lvalue_type(type
.t
);
6981 has_init
= (tok
== '=');
6982 if (has_init
&& (type
.t
& VT_VLA
))
6983 tcc_error("variable length array cannot be initialized");
/* decide between an external symbol and an allocated definition */
6984 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
6985 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
6986 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
6987 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
6988 /* external variable or function */
6989 /* NOTE: as GCC, uninitialized global static
6990 arrays of null size are considered as
6992 sym
= external_sym(v
, &type
, r
);
6993 sym
->asm_label
= ad
.asm_label
;
/* __attribute__((alias("target"))): bind this symbol to the
   already-defined target's section/value/size */
6994 if (ad
.alias_target
) {
6999 alias_target
= sym_find(ad
.alias_target
);
7000 if (!alias_target
|| !alias_target
->c
)
7001 tcc_error("unsupported forward __alias__ attribute");
7002 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[alias_target
->c
];
7003 tsec
.sh_num
= esym
->st_shndx
;
7004 put_extern_sym2(sym
, &tsec
, esym
->st_value
, esym
->st_size
, 0);
7007 if (type
.t
& VT_STATIC
)
/* allocate storage and parse the initializer, if any */
7013 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
/* in a for(...) init clause, stop after one declaration */
7017 if (is_for_loop_init
)
7030 ST_FUNC
void decl(int l
)
7035 /* ------------------------------------------------------------------------- */