2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
29 anon_sym: anonymous symbol index
31 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Sym
*sym_free_first
;
34 ST_DATA
void **sym_pools
;
35 ST_DATA
int nb_sym_pools
;
37 ST_DATA Sym
*global_stack
;
38 ST_DATA Sym
*local_stack
;
39 ST_DATA Sym
*define_stack
;
40 ST_DATA Sym
*global_label_stack
;
41 ST_DATA Sym
*local_label_stack
;
42 static int local_scope
;
44 static int section_sym
;
46 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
47 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
52 ST_DATA
int const_wanted
; /* true if constant wanted */
53 ST_DATA
int nocode_wanted
; /* no code generation wanted */
54 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
55 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
56 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
57 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
58 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
60 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
61 ST_DATA
const char *funcname
;
64 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
;
66 ST_DATA
struct switch_t
{
70 } **p
; int n
; /* list of case ranges */
71 int def_sym
; /* default symbol */
72 } *cur_switch
; /* current switch */
74 /* ------------------------------------------------------------------------- */
76 static void gen_cast(CType
*type
);
77 static void gen_cast_s(int t
);
78 static inline CType
*pointed_type(CType
*type
);
79 static int is_compatible_types(CType
*type1
, CType
*type2
);
80 static int parse_btype(CType
*type
, AttributeDef
*ad
);
81 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
82 static void parse_expr_type(CType
*type
);
83 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
84 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
85 static void block(int *bsym
, int *csym
, int is_expr
);
86 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
87 static int decl0(int l
, int is_for_loop_init
, Sym
*);
88 static void expr_eq(void);
89 static void vla_runtime_type_size(CType
*type
, int *a
);
90 static void vla_sp_restore(void);
91 static void vla_sp_restore_root(void);
92 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
93 static inline int64_t expr_const64(void);
94 ST_FUNC
void vpush64(int ty
, unsigned long long v
);
95 ST_FUNC
void vpush(CType
*type
);
96 ST_FUNC
int gvtst(int inv
, int t
);
97 static void gen_inline_functions(TCCState
*s
);
98 static void skip_or_save_block(TokenString
**str
);
99 static void gv_dup(void);
101 ST_INLN
int is_float(int t
)
105 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
108 /* we use our own 'finite' function to avoid potential problems with
109 non standard math libs */
110 /* XXX: endianness dependent */
111 ST_FUNC
int ieee_finite(double d
)
114 memcpy(p
, &d
, sizeof(double));
115 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
118 ST_FUNC
void test_lvalue(void)
120 if (!(vtop
->r
& VT_LVAL
))
124 ST_FUNC
void check_vstack(void)
127 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
130 /* ------------------------------------------------------------------------- */
131 /* vstack debugging aid */
134 void pv (const char *lbl
, int a
, int b
)
137 for (i
= a
; i
< a
+ b
; ++i
) {
138 SValue
*p
= &vtop
[-i
];
139 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
140 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
145 /* ------------------------------------------------------------------------- */
146 /* start of translation unit info */
147 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
152 /* file info: full path + filename */
153 section_sym
= put_elf_sym(symtab_section
, 0, 0,
154 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
155 text_section
->sh_num
, NULL
);
156 getcwd(buf
, sizeof(buf
));
158 normalize_slashes(buf
);
160 pstrcat(buf
, sizeof(buf
), "/");
161 put_stabs_r(buf
, N_SO
, 0, 0,
162 text_section
->data_offset
, text_section
, section_sym
);
163 put_stabs_r(file
->filename
, N_SO
, 0, 0,
164 text_section
->data_offset
, text_section
, section_sym
);
169 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
170 symbols can be safely used */
171 put_elf_sym(symtab_section
, 0, 0,
172 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
173 SHN_ABS
, file
->filename
);
176 /* put end of translation unit info */
177 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
181 put_stabs_r(NULL
, N_SO
, 0, 0,
182 text_section
->data_offset
, text_section
, section_sym
);
186 /* generate line number info */
187 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
191 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
192 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
194 last_line_num
= file
->line_num
;
198 /* put function symbol */
199 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
207 /* XXX: we put here a dummy type */
208 snprintf(buf
, sizeof(buf
), "%s:%c1",
209 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
210 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
211 cur_text_section
, sym
->c
);
212 /* //gr gdb wants a line at the function */
213 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
219 /* put function size */
220 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
224 put_stabn(N_FUN
, 0, 0, size
);
227 /* ------------------------------------------------------------------------- */
228 ST_FUNC
void tccgen_start(TCCState
*s1
)
230 cur_text_section
= NULL
;
232 anon_sym
= SYM_FIRST_ANOM
;
235 nocode_wanted
= 0x80000000;
237 /* define some often used types */
239 char_pointer_type
.t
= VT_BYTE
;
240 mk_pointer(&char_pointer_type
);
242 size_type
.t
= VT_INT
;
244 size_type
.t
= VT_LLONG
;
246 func_old_type
.t
= VT_FUNC
;
247 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
248 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
249 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
253 #ifdef TCC_TARGET_ARM
258 ST_FUNC
void tccgen_end(TCCState
*s1
)
260 gen_inline_functions(s1
);
262 /* end of translation unit info */
266 /* ------------------------------------------------------------------------- */
267 /* apply storage attributes to Elf symbol */
269 static void update_storage(Sym
*sym
)
274 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
275 if (sym
->a
.visibility
)
276 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
279 esym
->st_info
= ELFW(ST_INFO
)(STB_WEAK
, ELFW(ST_TYPE
)(esym
->st_info
));
281 if (sym
->a
.dllimport
)
282 esym
->st_other
|= ST_PE_IMPORT
;
283 if (sym
->a
.dllexport
)
284 esym
->st_other
|= ST_PE_EXPORT
;
287 printf("storage %s: vis=%d weak=%d exp=%d imp=%d\n",
288 get_tok_str(sym
->v
, NULL
),
297 /* ------------------------------------------------------------------------- */
298 /* update sym->c so that it points to an external symbol in section
299 'section' with value 'value' */
301 ST_FUNC
void put_extern_sym2(Sym
*sym
, Section
*section
,
302 addr_t value
, unsigned long size
,
303 int can_add_underscore
)
305 int sym_type
, sym_bind
, sh_num
, info
, other
, t
;
309 #ifdef CONFIG_TCC_BCHECK
315 else if (section
== SECTION_ABS
)
318 sh_num
= section
->sh_num
;
321 name
= get_tok_str(sym
->v
, NULL
);
322 #ifdef CONFIG_TCC_BCHECK
323 if (tcc_state
->do_bounds_check
) {
324 /* XXX: avoid doing that for statics ? */
325 /* if bound checking is activated, we change some function
326 names by adding the "__bound" prefix */
329 /* XXX: we rely only on malloc hooks */
342 strcpy(buf
, "__bound_");
350 if ((t
& VT_BTYPE
) == VT_FUNC
) {
352 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
353 sym_type
= STT_NOTYPE
;
355 sym_type
= STT_OBJECT
;
358 sym_bind
= STB_LOCAL
;
360 sym_bind
= STB_GLOBAL
;
363 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
364 Sym
*ref
= sym
->type
.ref
;
365 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
366 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
368 other
|= ST_PE_STDCALL
;
369 can_add_underscore
= 0;
373 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
375 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
379 name
= get_tok_str(sym
->asm_label
, NULL
);
380 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
381 sym
->c
= set_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
383 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
384 esym
->st_value
= value
;
385 esym
->st_size
= size
;
386 esym
->st_shndx
= sh_num
;
391 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
392 addr_t value
, unsigned long size
)
394 put_extern_sym2(sym
, section
, value
, size
, 1);
397 /* add a new relocation entry to symbol 'sym' in section 's' */
398 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
403 if (nocode_wanted
&& s
== cur_text_section
)
408 put_extern_sym(sym
, NULL
, 0, 0);
412 /* now we can add ELF relocation info */
413 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
417 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
419 greloca(s
, sym
, offset
, type
, 0);
423 /* ------------------------------------------------------------------------- */
424 /* symbol allocator */
425 static Sym
*__sym_malloc(void)
427 Sym
*sym_pool
, *sym
, *last_sym
;
430 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
431 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
433 last_sym
= sym_free_first
;
435 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
436 sym
->next
= last_sym
;
440 sym_free_first
= last_sym
;
444 static inline Sym
*sym_malloc(void)
448 sym
= sym_free_first
;
450 sym
= __sym_malloc();
451 sym_free_first
= sym
->next
;
454 sym
= tcc_malloc(sizeof(Sym
));
459 ST_INLN
void sym_free(Sym
*sym
)
462 sym
->next
= sym_free_first
;
463 sym_free_first
= sym
;
469 /* push, without hashing */
470 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
475 memset(s
, 0, sizeof *s
);
485 /* find a symbol and return its associated structure. 's' is the top
486 of the symbol stack */
487 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
499 /* structure lookup */
500 ST_INLN Sym
*struct_find(int v
)
503 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
505 return table_ident
[v
]->sym_struct
;
508 /* find an identifier */
509 ST_INLN Sym
*sym_find(int v
)
512 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
514 return table_ident
[v
]->sym_identifier
;
517 /* push a given symbol on the symbol stack */
518 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
527 s
= sym_push2(ps
, v
, type
->t
, c
);
528 s
->type
.ref
= type
->ref
;
530 /* don't record fields or anonymous symbols */
532 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
533 /* record symbol in token array */
534 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
536 ps
= &ts
->sym_struct
;
538 ps
= &ts
->sym_identifier
;
541 s
->sym_scope
= local_scope
;
542 if (s
->prev_tok
&& s
->prev_tok
->sym_scope
== s
->sym_scope
)
543 tcc_error("redeclaration of '%s'",
544 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
549 /* push a global identifier */
550 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
553 s
= sym_push2(&global_stack
, v
, t
, c
);
554 /* don't record anonymous symbol */
555 if (v
< SYM_FIRST_ANOM
) {
556 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
557 /* modify the top most local identifier, so that
558 sym_identifier will point to 's' when popped */
560 ps
= &(*ps
)->prev_tok
;
567 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
568 pop them yet from the list, but do remove them from the token array. */
569 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
579 /* remove symbol in token array */
581 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
582 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
584 ps
= &ts
->sym_struct
;
586 ps
= &ts
->sym_identifier
;
597 /* ------------------------------------------------------------------------- */
599 static void vsetc(CType
*type
, int r
, CValue
*vc
)
603 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
604 tcc_error("memory full (vstack)");
605 /* cannot let cpu flags if other instruction are generated. Also
606 avoid leaving VT_JMP anywhere except on the top of the stack
607 because it would complicate the code generator.
609 Don't do this when nocode_wanted. vtop might come from
610 !nocode_wanted regions (see 88_codeopt.c) and transforming
611 it to a register without actually generating code is wrong
612 as their value might still be used for real. All values
613 we push under nocode_wanted will eventually be popped
614 again, so that the VT_CMP/VT_JMP value will be in vtop
615 when code is unsuppressed again.
617 Same logic below in vswap(); */
618 if (vtop
>= vstack
&& !nocode_wanted
) {
619 v
= vtop
->r
& VT_VALMASK
;
620 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
632 ST_FUNC
void vswap(void)
635 /* cannot vswap cpu flags. See comment at vsetc() above */
636 if (vtop
>= vstack
&& !nocode_wanted
) {
637 int v
= vtop
->r
& VT_VALMASK
;
638 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
646 /* pop stack value */
647 ST_FUNC
void vpop(void)
650 v
= vtop
->r
& VT_VALMASK
;
651 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
652 /* for x86, we need to pop the FP stack */
654 o(0xd8dd); /* fstp %st(0) */
657 if (v
== VT_JMP
|| v
== VT_JMPI
) {
658 /* need to put correct jump if && or || without test */
664 /* push constant of type "type" with useless value */
665 ST_FUNC
void vpush(CType
*type
)
667 vset(type
, VT_CONST
, 0);
670 /* push integer constant */
671 ST_FUNC
void vpushi(int v
)
675 vsetc(&int_type
, VT_CONST
, &cval
);
678 /* push a pointer sized constant */
679 static void vpushs(addr_t v
)
683 vsetc(&size_type
, VT_CONST
, &cval
);
686 /* push arbitrary 64bit constant */
687 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
694 vsetc(&ctype
, VT_CONST
, &cval
);
697 /* push long long constant */
698 static inline void vpushll(long long v
)
700 vpush64(VT_LLONG
, v
);
703 ST_FUNC
void vset(CType
*type
, int r
, int v
)
708 vsetc(type
, r
, &cval
);
711 static void vseti(int r
, int v
)
719 ST_FUNC
void vpushv(SValue
*v
)
721 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
722 tcc_error("memory full (vstack)");
727 static void vdup(void)
732 /* rotate n first stack elements to the bottom
733 I1 ... In -> I2 ... In I1 [top is right]
735 ST_FUNC
void vrotb(int n
)
746 /* rotate the n elements before entry e towards the top
747 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
749 ST_FUNC
void vrote(SValue
*e
, int n
)
755 for(i
= 0;i
< n
- 1; i
++)
760 /* rotate n first stack elements to the top
761 I1 ... In -> In I1 ... I(n-1) [top is right]
763 ST_FUNC
void vrott(int n
)
768 /* push a symbol value of TYPE */
769 static inline void vpushsym(CType
*type
, Sym
*sym
)
773 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
777 /* Return a static symbol pointing to a section */
778 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
784 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
785 sym
->type
.ref
= type
->ref
;
786 sym
->r
= VT_CONST
| VT_SYM
;
787 put_extern_sym(sym
, sec
, offset
, size
);
791 /* push a reference to a section offset by adding a dummy symbol */
792 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
794 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
797 /* define a new external reference to a symbol 'v' of type 'u' */
798 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
804 /* push forward reference */
805 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
806 s
->type
.ref
= type
->ref
;
807 s
->r
= r
| VT_CONST
| VT_SYM
;
812 /* Merge some storage attributes. */
813 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
815 if (type
&& !is_compatible_types(&sym
->type
, type
))
816 tcc_error("incompatible types for redefinition of '%s'",
817 get_tok_str(sym
->v
, NULL
));
819 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
820 tcc_error("incompatible dll linkage for redefinition of '%s'",
821 get_tok_str(sym
->v
, NULL
));
823 sym
->a
.dllexport
|= ad
->a
.dllexport
;
824 sym
->a
.weak
|= ad
->a
.weak
;
825 if (ad
->a
.visibility
) {
826 int vis
= sym
->a
.visibility
;
827 int vis2
= ad
->a
.visibility
;
828 if (vis
== STV_DEFAULT
)
830 else if (vis2
!= STV_DEFAULT
)
831 vis
= (vis
< vis2
) ? vis
: vis2
;
832 sym
->a
.visibility
= vis
;
835 sym
->a
.aligned
= ad
->a
.aligned
;
837 sym
->asm_label
= ad
->asm_label
;
841 /* define a new external reference to a symbol 'v' */
842 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
847 /* push forward reference */
848 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
849 s
->type
.t
|= VT_EXTERN
;
853 if (s
->type
.ref
== func_old_type
.ref
) {
854 s
->type
.ref
= type
->ref
;
855 s
->r
= r
| VT_CONST
| VT_SYM
;
856 s
->type
.t
|= VT_EXTERN
;
858 patch_storage(s
, ad
, type
);
863 /* push a reference to global symbol v */
864 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
866 vpushsym(type
, external_global_sym(v
, type
, 0));
869 /* save registers up to (vtop - n) stack entry */
870 ST_FUNC
void save_regs(int n
)
873 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
877 /* save r to the memory stack, and mark it as being free */
878 ST_FUNC
void save_reg(int r
)
880 save_reg_upstack(r
, 0);
883 /* save r to the memory stack, and mark it as being free,
884 if seen up to (vtop - n) stack entry */
885 ST_FUNC
void save_reg_upstack(int r
, int n
)
887 int l
, saved
, size
, align
;
891 if ((r
&= VT_VALMASK
) >= VT_CONST
)
896 /* modify all stack values */
899 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
900 if ((p
->r
& VT_VALMASK
) == r
||
901 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
902 /* must save value on stack if not already done */
904 /* NOTE: must reload 'r' because r might be equal to r2 */
905 r
= p
->r
& VT_VALMASK
;
906 /* store register in the stack */
908 if ((p
->r
& VT_LVAL
) ||
909 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
911 type
= &char_pointer_type
;
915 size
= type_size(type
, &align
);
916 loc
= (loc
- size
) & -align
;
918 sv
.r
= VT_LOCAL
| VT_LVAL
;
921 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
922 /* x86 specific: need to pop fp register ST0 if saved */
924 o(0xd8dd); /* fstp %st(0) */
928 /* special long long case */
929 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
937 /* mark that stack entry as being saved on the stack */
938 if (p
->r
& VT_LVAL
) {
939 /* also clear the bounded flag because the
940 relocation address of the function was stored in
942 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
944 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
952 #ifdef TCC_TARGET_ARM
953 /* find a register of class 'rc2' with at most one reference on stack.
954 * If none, call get_reg(rc) */
955 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
960 for(r
=0;r
<NB_REGS
;r
++) {
961 if (reg_classes
[r
] & rc2
) {
964 for(p
= vstack
; p
<= vtop
; p
++) {
965 if ((p
->r
& VT_VALMASK
) == r
||
966 (p
->r2
& VT_VALMASK
) == r
)
977 /* find a free register of class 'rc'. If none, save one register */
978 ST_FUNC
int get_reg(int rc
)
983 /* find a free register */
984 for(r
=0;r
<NB_REGS
;r
++) {
985 if (reg_classes
[r
] & rc
) {
988 for(p
=vstack
;p
<=vtop
;p
++) {
989 if ((p
->r
& VT_VALMASK
) == r
||
990 (p
->r2
& VT_VALMASK
) == r
)
998 /* no register left : free the first one on the stack (VERY
999 IMPORTANT to start from the bottom to ensure that we don't
1000 spill registers used in gen_opi()) */
1001 for(p
=vstack
;p
<=vtop
;p
++) {
1002 /* look at second register (if long long) */
1003 r
= p
->r2
& VT_VALMASK
;
1004 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1006 r
= p
->r
& VT_VALMASK
;
1007 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1013 /* Should never comes here */
1017 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1019 static void move_reg(int r
, int s
, int t
)
1033 /* get address of vtop (vtop MUST BE an lvalue) */
1034 ST_FUNC
void gaddrof(void)
1036 vtop
->r
&= ~VT_LVAL
;
1037 /* tricky: if saved lvalue, then we can go back to lvalue */
1038 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1039 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1044 #ifdef CONFIG_TCC_BCHECK
1045 /* generate lvalue bound code */
1046 static void gbound(void)
1051 vtop
->r
&= ~VT_MUSTBOUND
;
1052 /* if lvalue, then use checking code before dereferencing */
1053 if (vtop
->r
& VT_LVAL
) {
1054 /* if not VT_BOUNDED value, then make one */
1055 if (!(vtop
->r
& VT_BOUNDED
)) {
1056 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1057 /* must save type because we must set it to int to get pointer */
1059 vtop
->type
.t
= VT_PTR
;
1062 gen_bounded_ptr_add();
1063 vtop
->r
|= lval_type
;
1066 /* then check for dereferencing */
1067 gen_bounded_ptr_deref();
1072 static void incr_bf_adr(int o
)
1074 vtop
->type
= char_pointer_type
;
1078 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1079 | (VT_BYTE
|VT_UNSIGNED
);
1080 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1081 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1084 /* single-byte load mode for packed or otherwise unaligned bitfields */
1085 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1088 save_reg_upstack(vtop
->r
, 1);
1089 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1090 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1099 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1101 vpushi((1 << n
) - 1), gen_op('&');
1104 vpushi(bits
), gen_op(TOK_SHL
);
1107 bits
+= n
, bit_size
-= n
, o
= 1;
1110 if (!(type
->t
& VT_UNSIGNED
)) {
1111 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1112 vpushi(n
), gen_op(TOK_SHL
);
1113 vpushi(n
), gen_op(TOK_SAR
);
1117 /* single-byte store mode for packed or otherwise unaligned bitfields */
1118 static void store_packed_bf(int bit_pos
, int bit_size
)
1120 int bits
, n
, o
, m
, c
;
1122 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1124 save_reg_upstack(vtop
->r
, 1);
1125 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1127 incr_bf_adr(o
); // X B
1129 c
? vdup() : gv_dup(); // B V X
1132 vpushi(bits
), gen_op(TOK_SHR
);
1134 vpushi(bit_pos
), gen_op(TOK_SHL
);
1139 m
= ((1 << n
) - 1) << bit_pos
;
1140 vpushi(m
), gen_op('&'); // X B V1
1141 vpushv(vtop
-1); // X B V1 B
1142 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1143 gen_op('&'); // X B V1 B1
1144 gen_op('|'); // X B V2
1146 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1147 vstore(), vpop(); // X B
1148 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1153 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1156 if (0 == sv
->type
.ref
)
1158 t
= sv
->type
.ref
->auxtype
;
1159 if (t
!= -1 && t
!= VT_STRUCT
) {
1160 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1161 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1166 /* store vtop a register belonging to class 'rc'. lvalues are
1167 converted to values. Cannot be used if cannot be converted to
1168 register value (such as structures). */
1169 ST_FUNC
int gv(int rc
)
1171 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1173 /* NOTE: get_reg can modify vstack[] */
1174 if (vtop
->type
.t
& VT_BITFIELD
) {
1177 bit_pos
= BIT_POS(vtop
->type
.t
);
1178 bit_size
= BIT_SIZE(vtop
->type
.t
);
1179 /* remove bit field info to avoid loops */
1180 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1183 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1184 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1185 type
.t
|= VT_UNSIGNED
;
1187 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1189 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1194 if (r
== VT_STRUCT
) {
1195 load_packed_bf(&type
, bit_pos
, bit_size
);
1197 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1198 /* cast to int to propagate signedness in following ops */
1200 /* generate shifts */
1201 vpushi(bits
- (bit_pos
+ bit_size
));
1203 vpushi(bits
- bit_size
);
1204 /* NOTE: transformed to SHR if unsigned */
1209 if (is_float(vtop
->type
.t
) &&
1210 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1211 /* CPUs usually cannot use float constants, so we store them
1212 generically in data segment */
1213 size
= type_size(&vtop
->type
, &align
);
1214 vpush_ref(&vtop
->type
, data_section
, data_section
->data_offset
, size
);
1216 init_putv(&vtop
->type
, data_section
, data_section
->data_offset
);
1219 #ifdef CONFIG_TCC_BCHECK
1220 if (vtop
->r
& VT_MUSTBOUND
)
1224 r
= vtop
->r
& VT_VALMASK
;
1225 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1226 #ifndef TCC_TARGET_ARM64
1229 #ifdef TCC_TARGET_X86_64
1230 else if (rc
== RC_FRET
)
1234 /* need to reload if:
1236 - lvalue (need to dereference pointer)
1237 - already a register, but not in the right class */
1239 || (vtop
->r
& VT_LVAL
)
1240 || !(reg_classes
[r
] & rc
)
1242 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1243 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1245 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1251 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1252 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1254 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1255 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1256 unsigned long long ll
;
1258 int r2
, original_type
;
1259 original_type
= vtop
->type
.t
;
1260 /* two register type load : expand to two words
1263 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1266 vtop
->c
.i
= ll
; /* first word */
1268 vtop
->r
= r
; /* save register value */
1269 vpushi(ll
>> 32); /* second word */
1272 if (vtop
->r
& VT_LVAL
) {
1273 /* We do not want to modifier the long long
1274 pointer here, so the safest (and less
1275 efficient) is to save all the other registers
1276 in the stack. XXX: totally inefficient. */
1280 /* lvalue_save: save only if used further down the stack */
1281 save_reg_upstack(vtop
->r
, 1);
1283 /* load from memory */
1284 vtop
->type
.t
= load_type
;
1287 vtop
[-1].r
= r
; /* save register value */
1288 /* increment pointer to get second word */
1289 vtop
->type
.t
= addr_type
;
1294 vtop
->type
.t
= load_type
;
1296 /* move registers */
1299 vtop
[-1].r
= r
; /* save register value */
1300 vtop
->r
= vtop
[-1].r2
;
1302 /* Allocate second register. Here we rely on the fact that
1303 get_reg() tries first to free r2 of an SValue. */
1307 /* write second register */
1309 vtop
->type
.t
= original_type
;
1310 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1312 /* lvalue of scalar type : need to use lvalue type
1313 because of possible cast */
1316 /* compute memory access type */
1317 if (vtop
->r
& VT_LVAL_BYTE
)
1319 else if (vtop
->r
& VT_LVAL_SHORT
)
1321 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1325 /* restore wanted type */
1328 /* one register type load */
1333 #ifdef TCC_TARGET_C67
1334 /* uses register pairs for doubles */
1335 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1342 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1343 ST_FUNC
void gv2(int rc1
, int rc2
)
1347 /* generate more generic register first. But VT_JMP or VT_CMP
1348 values must be generated first in all cases to avoid possible
1350 v
= vtop
[0].r
& VT_VALMASK
;
1351 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1356 /* test if reload is needed for first register */
1357 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1367 /* test if reload is needed for first register */
1368 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1374 #ifndef TCC_TARGET_ARM64
1375 /* wrapper around RC_FRET to return a register by type */
1376 static int rc_fret(int t
)
1378 #ifdef TCC_TARGET_X86_64
1379 if (t
== VT_LDOUBLE
) {
1387 /* wrapper around REG_FRET to return a register by type */
1388 static int reg_fret(int t
)
1390 #ifdef TCC_TARGET_X86_64
1391 if (t
== VT_LDOUBLE
) {
1399 /* expand 64bit on stack in two ints */
1400 static void lexpand(void)
1403 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1404 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1405 if (v
== VT_CONST
) {
1408 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1414 vtop
[0].r
= vtop
[-1].r2
;
1415 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1417 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1421 #ifdef TCC_TARGET_ARM
1422 /* expand long long on stack */
1423 ST_FUNC
void lexpand_nr(void)
1427 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1429 vtop
->r2
= VT_CONST
;
1430 vtop
->type
.t
= VT_INT
| u
;
1431 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1432 if (v
== VT_CONST
) {
1433 vtop
[-1].c
.i
= vtop
->c
.i
;
1434 vtop
->c
.i
= vtop
->c
.i
>> 32;
1436 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1438 vtop
->r
= vtop
[-1].r
;
1439 } else if (v
> VT_CONST
) {
1443 vtop
->r
= vtop
[-1].r2
;
1444 vtop
[-1].r2
= VT_CONST
;
1445 vtop
[-1].type
.t
= VT_INT
| u
;
1450 /* build a long long from two ints */
1451 static void lbuild(int t
)
1453 gv2(RC_INT
, RC_INT
);
1454 vtop
[-1].r2
= vtop
[0].r
;
1455 vtop
[-1].type
.t
= t
;
1460 /* convert stack entry to register and duplicate its value in another
1462 static void gv_dup(void)
1469 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1470 if (t
& VT_BITFIELD
) {
1480 /* stack: H L L1 H1 */
1490 /* duplicate value */
1495 #ifdef TCC_TARGET_X86_64
1496 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1506 load(r1
, &sv
); /* move r to r1 */
1508 /* duplicates value */
1514 /* Generate value test
1516 * Generate a test for any value (jump, comparison and integers) */
1517 ST_FUNC
int gvtst(int inv
, int t
)
1519 int v
= vtop
->r
& VT_VALMASK
;
1520 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1524 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1525 /* constant jmp optimization */
1526 if ((vtop
->c
.i
!= 0) != inv
)
1531 return gtst(inv
, t
);
1535 /* generate CPU independent (unsigned) long long operations */
1536 static void gen_opl(int op
)
1538 int t
, a
, b
, op1
, c
, i
;
1540 unsigned short reg_iret
= REG_IRET
;
1541 unsigned short reg_lret
= REG_LRET
;
1547 func
= TOK___divdi3
;
1550 func
= TOK___udivdi3
;
1553 func
= TOK___moddi3
;
1556 func
= TOK___umoddi3
;
1563 /* call generic long long function */
1564 vpush_global_sym(&func_old_type
, func
);
1569 vtop
->r2
= reg_lret
;
1577 //pv("gen_opl A",0,2);
1583 /* stack: L1 H1 L2 H2 */
1588 vtop
[-2] = vtop
[-3];
1591 /* stack: H1 H2 L1 L2 */
1592 //pv("gen_opl B",0,4);
1598 /* stack: H1 H2 L1 L2 ML MH */
1601 /* stack: ML MH H1 H2 L1 L2 */
1605 /* stack: ML MH H1 L2 H2 L1 */
1610 /* stack: ML MH M1 M2 */
1613 } else if (op
== '+' || op
== '-') {
1614 /* XXX: add non carry method too (for MIPS or alpha) */
1620 /* stack: H1 H2 (L1 op L2) */
1623 gen_op(op1
+ 1); /* TOK_xxxC2 */
1626 /* stack: H1 H2 (L1 op L2) */
1629 /* stack: (L1 op L2) H1 H2 */
1631 /* stack: (L1 op L2) (H1 op H2) */
1639 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1640 t
= vtop
[-1].type
.t
;
1644 /* stack: L H shift */
1646 /* constant: simpler */
1647 /* NOTE: all comments are for SHL. the other cases are
1648 done by swapping words */
1659 if (op
!= TOK_SAR
) {
1692 /* XXX: should provide a faster fallback on x86 ? */
1695 func
= TOK___ashrdi3
;
1698 func
= TOK___lshrdi3
;
1701 func
= TOK___ashldi3
;
1707 /* compare operations */
1713 /* stack: L1 H1 L2 H2 */
1715 vtop
[-1] = vtop
[-2];
1717 /* stack: L1 L2 H1 H2 */
1720 /* when values are equal, we need to compare low words. since
1721 the jump is inverted, we invert the test too. */
1724 else if (op1
== TOK_GT
)
1726 else if (op1
== TOK_ULT
)
1728 else if (op1
== TOK_UGT
)
1738 /* generate non equal test */
1744 /* compare low. Always unsigned */
1748 else if (op1
== TOK_LE
)
1750 else if (op1
== TOK_GT
)
1752 else if (op1
== TOK_GE
)
1763 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
1765 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
1766 return (a
^ b
) >> 63 ? -x
: x
;
1769 static int gen_opic_lt(uint64_t a
, uint64_t b
)
1771 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
1774 /* handle integer constant optimizations and various machine
1776 static void gen_opic(int op
)
1778 SValue
*v1
= vtop
- 1;
1780 int t1
= v1
->type
.t
& VT_BTYPE
;
1781 int t2
= v2
->type
.t
& VT_BTYPE
;
1782 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1783 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1784 uint64_t l1
= c1
? v1
->c
.i
: 0;
1785 uint64_t l2
= c2
? v2
->c
.i
: 0;
1786 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1788 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1789 l1
= ((uint32_t)l1
|
1790 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1791 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1792 l2
= ((uint32_t)l2
|
1793 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1797 case '+': l1
+= l2
; break;
1798 case '-': l1
-= l2
; break;
1799 case '&': l1
&= l2
; break;
1800 case '^': l1
^= l2
; break;
1801 case '|': l1
|= l2
; break;
1802 case '*': l1
*= l2
; break;
1809 /* if division by zero, generate explicit division */
1812 tcc_error("division by zero in constant");
1816 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1817 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1818 case TOK_UDIV
: l1
= l1
/ l2
; break;
1819 case TOK_UMOD
: l1
= l1
% l2
; break;
1822 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1823 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1825 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1828 case TOK_ULT
: l1
= l1
< l2
; break;
1829 case TOK_UGE
: l1
= l1
>= l2
; break;
1830 case TOK_EQ
: l1
= l1
== l2
; break;
1831 case TOK_NE
: l1
= l1
!= l2
; break;
1832 case TOK_ULE
: l1
= l1
<= l2
; break;
1833 case TOK_UGT
: l1
= l1
> l2
; break;
1834 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1835 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1836 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1837 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1839 case TOK_LAND
: l1
= l1
&& l2
; break;
1840 case TOK_LOR
: l1
= l1
|| l2
; break;
1844 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1845 l1
= ((uint32_t)l1
|
1846 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1850 /* if commutative ops, put c2 as constant */
1851 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1852 op
== '|' || op
== '*')) {
1854 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1855 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1857 if (!const_wanted
&&
1859 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1860 (l1
== -1 && op
== TOK_SAR
))) {
1861 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1863 } else if (!const_wanted
&&
1864 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1866 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
1867 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1868 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1873 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1876 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1877 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1880 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
1881 /* filter out NOP operations like x*1, x-0, x&-1... */
1883 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1884 /* try to use shifts instead of muls or divs */
1885 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1894 else if (op
== TOK_PDIV
)
1900 } else if (c2
&& (op
== '+' || op
== '-') &&
1901 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1902 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1903 /* symbol + constant case */
1907 /* The backends can't always deal with addends to symbols
1908 larger than +-1<<31. Don't construct such. */
1915 /* call low level op generator */
1916 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
1917 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
1925 /* generate a floating point operation with constant propagation */
1926 static void gen_opif(int op
)
1930 #if defined _MSC_VER && defined _AMD64_
1931 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
1938 /* currently, we cannot do computations with forward symbols */
1939 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1940 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1942 if (v1
->type
.t
== VT_FLOAT
) {
1945 } else if (v1
->type
.t
== VT_DOUBLE
) {
1953 /* NOTE: we only do constant propagation if finite number (not
1954 NaN or infinity) (ANSI spec) */
1955 if (!ieee_finite(f1
) || !ieee_finite(f2
))
1959 case '+': f1
+= f2
; break;
1960 case '-': f1
-= f2
; break;
1961 case '*': f1
*= f2
; break;
1965 tcc_error("division by zero in constant");
1970 /* XXX: also handles tests ? */
1974 /* XXX: overflow test ? */
1975 if (v1
->type
.t
== VT_FLOAT
) {
1977 } else if (v1
->type
.t
== VT_DOUBLE
) {
1989 static int pointed_size(CType
*type
)
1992 return type_size(pointed_type(type
), &align
);
1995 static void vla_runtime_pointed_size(CType
*type
)
1998 vla_runtime_type_size(pointed_type(type
), &align
);
2001 static inline int is_null_pointer(SValue
*p
)
2003 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2005 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2006 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2007 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2008 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
2011 static inline int is_integer_btype(int bt
)
2013 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2014 bt
== VT_INT
|| bt
== VT_LLONG
);
2017 /* check types for comparison or subtraction of pointers */
2018 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2020 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2023 /* null pointers are accepted for all comparisons as gcc */
2024 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2028 bt1
= type1
->t
& VT_BTYPE
;
2029 bt2
= type2
->t
& VT_BTYPE
;
2030 /* accept comparison between pointer and integer with a warning */
2031 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2032 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2033 tcc_warning("comparison between pointer and integer");
2037 /* both must be pointers or implicit function pointers */
2038 if (bt1
== VT_PTR
) {
2039 type1
= pointed_type(type1
);
2040 } else if (bt1
!= VT_FUNC
)
2041 goto invalid_operands
;
2043 if (bt2
== VT_PTR
) {
2044 type2
= pointed_type(type2
);
2045 } else if (bt2
!= VT_FUNC
) {
2047 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2049 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2050 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2054 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2055 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2056 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2057 /* gcc-like error if '-' is used */
2059 goto invalid_operands
;
2061 tcc_warning("comparison of distinct pointer types lacks a cast");
2065 /* generic gen_op: handles types problems */
2066 ST_FUNC
void gen_op(int op
)
2068 int u
, t1
, t2
, bt1
, bt2
, t
;
2072 t1
= vtop
[-1].type
.t
;
2073 t2
= vtop
[0].type
.t
;
2074 bt1
= t1
& VT_BTYPE
;
2075 bt2
= t2
& VT_BTYPE
;
2077 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2078 tcc_error("operation on a struct");
2079 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2080 if (bt2
== VT_FUNC
) {
2081 mk_pointer(&vtop
->type
);
2084 if (bt1
== VT_FUNC
) {
2086 mk_pointer(&vtop
->type
);
2091 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2092 /* at least one operand is a pointer */
2093 /* relational op: must be both pointers */
2094 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2095 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2096 /* pointers are handled are unsigned */
2098 t
= VT_LLONG
| VT_UNSIGNED
;
2100 t
= VT_INT
| VT_UNSIGNED
;
2104 /* if both pointers, then it must be the '-' op */
2105 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2107 tcc_error("cannot use pointers here");
2108 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2109 /* XXX: check that types are compatible */
2110 if (vtop
[-1].type
.t
& VT_VLA
) {
2111 vla_runtime_pointed_size(&vtop
[-1].type
);
2113 vpushi(pointed_size(&vtop
[-1].type
));
2117 /* set to integer type */
2119 vtop
->type
.t
= VT_LLONG
;
2121 vtop
->type
.t
= VT_INT
;
2126 /* exactly one pointer : must be '+' or '-'. */
2127 if (op
!= '-' && op
!= '+')
2128 tcc_error("cannot use pointers here");
2129 /* Put pointer as first operand */
2130 if (bt2
== VT_PTR
) {
2132 t
= t1
, t1
= t2
, t2
= t
;
2135 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2136 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2139 type1
= vtop
[-1].type
;
2140 type1
.t
&= ~VT_ARRAY
;
2141 if (vtop
[-1].type
.t
& VT_VLA
)
2142 vla_runtime_pointed_size(&vtop
[-1].type
);
2144 u
= pointed_size(&vtop
[-1].type
);
2146 tcc_error("unknown array element size");
2150 /* XXX: cast to int ? (long long case) */
2156 /* #ifdef CONFIG_TCC_BCHECK
2157 The main reason to removing this code:
2164 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2165 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2167 When this code is on. then the output looks like
2169 v+(i-j) = 0xbff84000
2171 /* if evaluating constant expression, no code should be
2172 generated, so no bound check */
2173 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2174 /* if bounded pointers, we generate a special code to
2181 gen_bounded_ptr_add();
2187 /* put again type if gen_opic() swaped operands */
2190 } else if (is_float(bt1
) || is_float(bt2
)) {
2191 /* compute bigger type and do implicit casts */
2192 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2194 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2199 /* floats can only be used for a few operations */
2200 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2201 (op
< TOK_ULT
|| op
> TOK_GT
))
2202 tcc_error("invalid operands for binary operation");
2204 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2205 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2206 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2209 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2210 /* cast to biggest op */
2212 /* convert to unsigned if it does not fit in a long long */
2213 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2214 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2218 /* integer operations */
2220 /* convert to unsigned if it does not fit in an integer */
2221 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2222 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2225 /* XXX: currently, some unsigned operations are explicit, so
2226 we modify them here */
2227 if (t
& VT_UNSIGNED
) {
2234 else if (op
== TOK_LT
)
2236 else if (op
== TOK_GT
)
2238 else if (op
== TOK_LE
)
2240 else if (op
== TOK_GE
)
2248 /* special case for shifts and long long: we keep the shift as
2250 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2257 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2258 /* relational op: the result is an int */
2259 vtop
->type
.t
= VT_INT
;
2264 // Make sure that we have converted to an rvalue:
2265 if (vtop
->r
& VT_LVAL
)
2266 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2269 #ifndef TCC_TARGET_ARM
2270 /* generic itof for unsigned long long case */
2271 static void gen_cvt_itof1(int t
)
2273 #ifdef TCC_TARGET_ARM64
2276 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2277 (VT_LLONG
| VT_UNSIGNED
)) {
2280 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2281 #if LDOUBLE_SIZE != 8
2282 else if (t
== VT_LDOUBLE
)
2283 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2286 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2290 vtop
->r
= reg_fret(t
);
2298 /* generic ftoi for unsigned long long case */
2299 static void gen_cvt_ftoi1(int t
)
2301 #ifdef TCC_TARGET_ARM64
2306 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2307 /* not handled natively */
2308 st
= vtop
->type
.t
& VT_BTYPE
;
2310 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2311 #if LDOUBLE_SIZE != 8
2312 else if (st
== VT_LDOUBLE
)
2313 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2316 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2321 vtop
->r2
= REG_LRET
;
2328 /* force char or short cast */
2329 static void force_charshort_cast(int t
)
2333 /* cannot cast static initializers */
2334 if (STATIC_DATA_WANTED
)
2338 /* XXX: add optimization if lvalue : just change type and offset */
2343 if (t
& VT_UNSIGNED
) {
2344 vpushi((1 << bits
) - 1);
2347 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2353 /* result must be signed or the SAR is converted to an SHL
2354 This was not the case when "t" was a signed short
2355 and the last value on the stack was an unsigned int */
2356 vtop
->type
.t
&= ~VT_UNSIGNED
;
2362 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2363 static void gen_cast_s(int t
)
2371 static void gen_cast(CType
*type
)
2373 int sbt
, dbt
, sf
, df
, c
, p
;
2375 /* special delayed cast for char/short */
2376 /* XXX: in some cases (multiple cascaded casts), it may still
2378 if (vtop
->r
& VT_MUSTCAST
) {
2379 vtop
->r
&= ~VT_MUSTCAST
;
2380 force_charshort_cast(vtop
->type
.t
);
2383 /* bitfields first get cast to ints */
2384 if (vtop
->type
.t
& VT_BITFIELD
) {
2388 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2389 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2394 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2395 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2397 /* constant case: we can do it now */
2398 /* XXX: in ISOC, cannot do it if error in convert */
2399 if (sbt
== VT_FLOAT
)
2400 vtop
->c
.ld
= vtop
->c
.f
;
2401 else if (sbt
== VT_DOUBLE
)
2402 vtop
->c
.ld
= vtop
->c
.d
;
2405 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2406 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2407 vtop
->c
.ld
= vtop
->c
.i
;
2409 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2411 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2412 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2414 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2417 if (dbt
== VT_FLOAT
)
2418 vtop
->c
.f
= (float)vtop
->c
.ld
;
2419 else if (dbt
== VT_DOUBLE
)
2420 vtop
->c
.d
= (double)vtop
->c
.ld
;
2421 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2422 vtop
->c
.i
= vtop
->c
.ld
;
2423 } else if (sf
&& dbt
== VT_BOOL
) {
2424 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2427 vtop
->c
.i
= vtop
->c
.ld
;
2428 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2430 else if (sbt
& VT_UNSIGNED
)
2431 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2433 else if (sbt
== VT_PTR
)
2436 else if (sbt
!= VT_LLONG
)
2437 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2438 -(vtop
->c
.i
& 0x80000000));
2440 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2442 else if (dbt
== VT_BOOL
)
2443 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2445 else if (dbt
== VT_PTR
)
2448 else if (dbt
!= VT_LLONG
) {
2449 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2450 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2453 if (!(dbt
& VT_UNSIGNED
))
2454 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2457 } else if (p
&& dbt
== VT_BOOL
) {
2461 /* non constant case: generate code */
2463 /* convert from fp to fp */
2466 /* convert int to fp */
2469 /* convert fp to int */
2470 if (dbt
== VT_BOOL
) {
2474 /* we handle char/short/etc... with generic code */
2475 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2476 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2480 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2481 /* additional cast for char/short... */
2487 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2488 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2489 /* scalar to long long */
2490 /* machine independent conversion */
2492 /* generate high word */
2493 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2497 if (sbt
== VT_PTR
) {
2498 /* cast from pointer to int before we apply
2499 shift operation, which pointers don't support*/
2506 /* patch second register */
2507 vtop
[-1].r2
= vtop
->r
;
2511 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2512 (dbt
& VT_BTYPE
) == VT_PTR
||
2513 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2514 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2515 (sbt
& VT_BTYPE
) != VT_PTR
&&
2516 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2517 /* need to convert from 32bit to 64bit */
2519 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2520 #if defined(TCC_TARGET_ARM64)
2522 #elif defined(TCC_TARGET_X86_64)
2524 /* x86_64 specific: movslq */
2526 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2533 } else if (dbt
== VT_BOOL
) {
2534 /* scalar to bool */
2537 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2538 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2539 if (sbt
== VT_PTR
) {
2540 vtop
->type
.t
= VT_INT
;
2541 tcc_warning("nonportable conversion from pointer to char/short");
2543 force_charshort_cast(dbt
);
2545 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2547 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2548 /* from long long: just take low order word */
2552 /* if lvalue and single word type, nothing to do because
2553 the lvalue already contains the real type size (see
2554 VT_LVAL_xxx constants) */
2558 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2559 /* if we are casting between pointer types,
2560 we must update the VT_LVAL_xxx size */
2561 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2562 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2567 /* return type size as known at compile time. Put alignment at 'a' */
2568 ST_FUNC
int type_size(CType
*type
, int *a
)
2573 bt
= type
->t
& VT_BTYPE
;
2574 if (bt
== VT_STRUCT
) {
2579 } else if (bt
== VT_PTR
) {
2580 if (type
->t
& VT_ARRAY
) {
2584 ts
= type_size(&s
->type
, a
);
2586 if (ts
< 0 && s
->c
< 0)
2594 } else if (IS_ENUM(type
->t
) && type
->ref
->c
== -1) {
2595 return -1; /* incomplete enum */
2596 } else if (bt
== VT_LDOUBLE
) {
2598 return LDOUBLE_SIZE
;
2599 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2600 #ifdef TCC_TARGET_I386
2601 #ifdef TCC_TARGET_PE
2606 #elif defined(TCC_TARGET_ARM)
2616 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2619 } else if (bt
== VT_SHORT
) {
2622 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2626 /* char, void, function, _Bool */
2632 /* push type size as known at runtime time on top of value stack. Put
2634 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2636 if (type
->t
& VT_VLA
) {
2637 type_size(&type
->ref
->type
, a
);
2638 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2640 vpushi(type_size(type
, a
));
2644 static void vla_sp_restore(void) {
2645 if (vlas_in_scope
) {
2646 gen_vla_sp_restore(vla_sp_loc
);
2650 static void vla_sp_restore_root(void) {
2651 if (vlas_in_scope
) {
2652 gen_vla_sp_restore(vla_sp_root_loc
);
2656 /* return the pointed type of t */
2657 static inline CType
*pointed_type(CType
*type
)
2659 return &type
->ref
->type
;
2662 /* modify type so that its it is a pointer to type. */
2663 ST_FUNC
void mk_pointer(CType
*type
)
2666 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2667 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2671 /* compare function types. OLD functions match any new functions */
2672 static int is_compatible_func(CType
*type1
, CType
*type2
)
2678 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2680 /* check func_call */
2681 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2683 /* XXX: not complete */
2684 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2686 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2688 while (s1
!= NULL
) {
2691 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2701 /* return true if type1 and type2 are the same. If unqualified is
2702 true, qualifiers on the types are ignored.
2704 - enums are not checked as gcc __builtin_types_compatible_p ()
2706 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2710 t1
= type1
->t
& VT_TYPE
;
2711 t2
= type2
->t
& VT_TYPE
;
2713 /* strip qualifiers before comparing */
2714 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2715 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2717 /* Default Vs explicit signedness only matters for char */
2718 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2723 /* XXX: bitfields ? */
2726 /* test more complicated cases */
2727 bt1
= t1
& VT_BTYPE
;
2728 if (bt1
== VT_PTR
) {
2729 type1
= pointed_type(type1
);
2730 type2
= pointed_type(type2
);
2731 return is_compatible_types(type1
, type2
);
2732 } else if (bt1
== VT_STRUCT
) {
2733 return (type1
->ref
== type2
->ref
);
2734 } else if (bt1
== VT_FUNC
) {
2735 return is_compatible_func(type1
, type2
);
2741 /* return true if type1 and type2 are exactly the same (including
2744 static int is_compatible_types(CType
*type1
, CType
*type2
)
2746 return compare_types(type1
,type2
,0);
2749 /* return true if type1 and type2 are the same (ignoring qualifiers).
2751 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2753 return compare_types(type1
,type2
,1);
2756 /* print a type. If 'varstr' is not NULL, then the variable is also
2757 printed in the type */
2759 /* XXX: add array and function pointers */
2760 static void type_to_str(char *buf
, int buf_size
,
2761 CType
*type
, const char *varstr
)
2771 if (t
& VT_CONSTANT
)
2772 pstrcat(buf
, buf_size
, "const ");
2773 if (t
& VT_VOLATILE
)
2774 pstrcat(buf
, buf_size
, "volatile ");
2775 if ((t
& (VT_DEFSIGN
| VT_UNSIGNED
)) == (VT_DEFSIGN
| VT_UNSIGNED
))
2776 pstrcat(buf
, buf_size
, "unsigned ");
2777 else if (t
& VT_DEFSIGN
)
2778 pstrcat(buf
, buf_size
, "signed ");
2780 pstrcat(buf
, buf_size
, "extern ");
2782 pstrcat(buf
, buf_size
, "static ");
2784 pstrcat(buf
, buf_size
, "typedef ");
2786 pstrcat(buf
, buf_size
, "inline ");
2787 buf_size
-= strlen(buf
);
2822 tstr
= "long double";
2824 pstrcat(buf
, buf_size
, tstr
);
2831 pstrcat(buf
, buf_size
, tstr
);
2832 v
= type
->ref
->v
& ~SYM_STRUCT
;
2833 if (v
>= SYM_FIRST_ANOM
)
2834 pstrcat(buf
, buf_size
, "<anonymous>");
2836 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2840 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2841 pstrcat(buf
, buf_size
, "(");
2843 while (sa
!= NULL
) {
2844 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2845 pstrcat(buf
, buf_size
, buf1
);
2848 pstrcat(buf
, buf_size
, ", ");
2850 pstrcat(buf
, buf_size
, ")");
2855 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2856 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2859 pstrcpy(buf1
, sizeof(buf1
), "*");
2860 if (t
& VT_CONSTANT
)
2861 pstrcat(buf1
, buf_size
, "const ");
2862 if (t
& VT_VOLATILE
)
2863 pstrcat(buf1
, buf_size
, "volatile ");
2865 pstrcat(buf1
, sizeof(buf1
), varstr
);
2866 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2870 pstrcat(buf
, buf_size
, " ");
2871 pstrcat(buf
, buf_size
, varstr
);
/* verify type compatibility to store vtop in 'dt' type, and generate
   casts if needed */
static void gen_assign_cast(CType *dt)
    CType *st, *type1, *type2, tmp_type1, tmp_type2;
    char buf1[256], buf2[256];

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (sbt == VT_VOID || dbt == VT_VOID) {
        if (sbt == VT_VOID && dbt == VT_VOID)
            ; /* It is Ok if both are void;
                 gcc accepts this program */
        else
            tcc_error("cannot cast from/to void");
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
        type1 = pointed_type(dt);
        /* a function is implicitly a function pointer */
        if (sbt == VT_FUNC) {
            if ((type1->t & VT_BTYPE) != VT_VOID &&
                !is_compatible_types(pointed_type(dt), st))
                tcc_warning("assignment from incompatible pointer type");
        type2 = pointed_type(st);
        if ((type1->t & VT_BTYPE) == VT_VOID ||
            (type2->t & VT_BTYPE) == VT_VOID) {
            /* void * can match anything */
            //printf("types %08x %08x\n", type1->t, type2->t);
            /* exact type match, except for qualifiers */
            if (!is_compatible_unqualified_types(type1, type2)) {
                /* Like GCC don't warn by default for merely changes
                   in pointer target signedness.  Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets.  */
                if ((type1->t & VT_BTYPE) != (type2->t & VT_BTYPE)
                    || IS_ENUM(type1->t) || IS_ENUM(type2->t))
                    tcc_warning("assignment from incompatible pointer type");
        /* check const and volatile */
        if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
            (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
            tcc_warning("assignment discards qualifiers from pointer target type");
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        /* XXX: more tests */
        tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
        tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
        if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
            type_to_str(buf1, sizeof(buf1), st, NULL);
            type_to_str(buf2, sizeof(buf2), dt, NULL);
            tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
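
/* A few cases that illustrate the checks above (sketch only):
       int *p = 3;                   // "assignment makes pointer from integer without a cast"
       int i = p;                    // "assignment makes integer from pointer without a cast"
       const int *cp; int *q = cp;   // "assignment discards qualifiers from pointer target type"
   Incompatible non-pointer types fall through to the final
   is_compatible_types() check and produce the hard
   "cannot cast '%s' to '%s'" error instead of a warning. */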
/* store vtop in lvalue pushed on stack */
ST_FUNC void vstore(void)
    int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
         (sbt == VT_INT && dbt == VT_SHORT))
        && !(vtop->type.t & VT_BITFIELD)) {
        /* optimize char/short casts */
        delayed_cast = VT_MUSTCAST;
        vtop->type.t = ft & VT_TYPE;
        /* XXX: factorize */
        if (ft & VT_CONSTANT)
            tcc_warning("assignment of read-only location");
        if (!(ft & VT_BITFIELD))
            gen_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        /* XXX: optimize if small size */
        size = type_size(&vtop->type, &align);
        vtop->type.t = VT_PTR;
        /* address of memcpy() */
            vpush_global_sym(&func_old_type, TOK_memcpy8);
        else if(!(align & 3))
            vpush_global_sym(&func_old_type, TOK_memcpy4);
            /* Use memmove, rather than memcpy, as dest and src may be same: */
            vpush_global_sym(&func_old_type, TOK_memmove);
        vtop->type.t = VT_PTR;
        /* leave source on stack */
    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */
        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if ((ft & VT_BTYPE) == VT_BOOL) {
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);

        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (r == VT_STRUCT) {
            gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
            store_packed_bf(bit_pos, bit_size);
            unsigned long long mask = (1ULL << bit_size) - 1;
            if ((ft & VT_BTYPE) != VT_BOOL) {
                if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
                    vpushi((unsigned)mask);
            /* duplicate destination */
            /* load destination, mask and or with source */
            if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
                vpushll(~(mask << bit_pos));
                vpushi(~((unsigned)mask << bit_pos));
            /* ... and discard */
#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
#ifdef TCC_TARGET_X86_64
        if ((ft & VT_BTYPE) == VT_LDOUBLE) {
        } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
        r = gv(rc); /* generate value */
        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            t = get_reg(RC_INT);
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            vtop[-1].r = t | VT_LVAL;
        /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
        if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
            int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
        if ((ft & VT_BTYPE) == VT_LLONG) {
            int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
            vtop[-1].type.t = load_type;
            /* convert to int to increment easily */
            vtop->type.t = addr_type;
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
        vtop->r |= delayed_cast;
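
/* The bitfield branch above implements the usual read-modify-write:
   with mask = (1ULL << bit_size) - 1, the stored word becomes
       dest = (dest & ~(mask << bit_pos)) | ((src & mask) << bit_pos);
   a rough sketch of what the pushed mask/shift/or sequence computes. */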
/* post defines POST/PRE add. c is the token ++ or -- */
ST_FUNC void inc(int post, int c)
    vdup(); /* save lvalue */
        gv_dup(); /* duplicate value */
    vpushi(c - TOK_MID);
    vstore(); /* store value */
        vpop(); /* if post op, return saved value */

ST_FUNC void parse_mult_str (CString *astr, const char *msg)
    /* read the string */
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(astr, tokc.str.data, -1);
    cstr_ccat(astr, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.  */
static int exact_log2p1(int i)
    for (ret = 1; i >= 1 << 8; ret += 8)
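
/* Contract check (sketch): exact_log2p1(1) == 1, exact_log2p1(16) == 5,
   exact_log2p1(0) == 0.  The loop above strides over 8 bits at a time;
   the remaining bits are handled afterwards, so for powers of two the
   result is log2(i) + 1. */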
/* Parse __attribute__((...)) GNUC extension. */
static void parse_attribute(AttributeDef *ad)
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
            parse_mult_str(&astr, "section name");
            ad->section = find_section(tcc_state, (char *)astr.data);
            parse_mult_str(&astr, "alias(\"target\")");
            ad->alias_target = /* save string as token, for later */
                tok_alloc((char*)astr.data, astr.size-1)->tok;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            parse_mult_str(&astr,
                           "visibility(\"default|hidden|internal|protected\")");
            if (!strcmp (astr.data, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr.data, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr.data, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr.data, "protected"))
                ad->a.visibility = STV_PROTECTED;
                expect("visibility(\"default|hidden|internal|protected\")");
            if (n <= 0 || (n & (n - 1)) != 0)
                tcc_error("alignment must be a positive power of two");
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            /* currently, no need to handle it because tcc does not
               track unused objects */
            /* currently, no need to handle it because tcc does not
               track unused objects */
            ad->f.func_call = FUNC_CDECL;
            ad->f.func_call = FUNC_STDCALL;
#ifdef TCC_TARGET_I386
            ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            ad->f.func_call = FUNC_FASTCALLW;
                ad->attr_mode = VT_LLONG + 1;
                ad->attr_mode = VT_BYTE + 1;
                ad->attr_mode = VT_SHORT + 1;
                ad->attr_mode = VT_INT + 1;
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
            ad->a.dllexport = 1;
            ad->a.dllimport = 1;
            if (tcc_state->warn_unsupported)
                tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
                int parenthesis = 0;
                    else if (tok == ')')
                } while (parenthesis && tok != -1);
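
/* Example of input handled here (sketch):
       int x __attribute__((aligned(16), section(".mydata")));
   parses to ad->a.aligned == exact_log2p1(16) == 5 and ad->section
   pointing at the ".mydata" section, while unknown attribute names are
   skipped with their parameters (and a warning if warn_unsupported is
   set). */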
static Sym * find_field (CType *type, int v)
    while ((s = s->next) != NULL) {
        if ((s->v & SYM_FIELD) &&
            (s->type.t & VT_BTYPE) == VT_STRUCT &&
            (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
            Sym *ret = find_field (&s->type, v);

static void struct_add_offset (Sym *s, int offset)
    while ((s = s->next) != NULL) {
        if ((s->v & SYM_FIELD) &&
            (s->type.t & VT_BTYPE) == VT_STRUCT &&
            (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
            struct_add_offset(s->type.ref, offset);
static void struct_layout(CType *type, AttributeDef *ad)
    int size, align, maxalign, offset, c, bit_pos, bit_size;
    int packed, a, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    int pragma_pack = *tcc_state->pack_stack_ptr;

    prevbt = VT_STRUCT; /* make it never match */

    for (f = type->ref->next; f; f = f->next) {
        if (f->type.t & VT_BITFIELD)
            bit_size = BIT_SIZE(f->type.t);
        size = type_size(&f->type, &align);
        a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;

        if (pcc && bit_size == 0) {
            /* in pcc mode, packing does not affect zero-width bitfields */
            /* in pcc mode, attribute packed overrides if set. */
            if (pcc && (f->a.packed || ad->a.packed))

            /* pragma pack overrides align if lesser and packs bitfields always */
            if (pragma_pack < align)
                align = pragma_pack;
            /* in pcc mode pragma pack also overrides individual align */
            if (pcc && pragma_pack < a)
        /* some individual align was specified */

        if (type->ref->type.t == VT_UNION) {
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
        } else if (bit_size < 0) {
            c += (bit_pos + 7) >> 3;
            c = (c + align - 1) & -align;
            /* A bit-field.  Layout is more complicated.  There are two
               options: PCC (GCC) compatible and MS compatible */
                /* In PCC layout a bit-field is placed adjacent to the
                   preceding bit-fields, except if:
                   - an individual alignment was given
                   - it would overflow its base type container and
                     there is no packing */
                if (bit_size == 0) {
                    c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
                } else if (f->a.aligned) {
                } else if (!packed) {
                    int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
                    if (ofs > size / align)
                /* in pcc mode, long long bitfields have type int if they fit */
                if (size == 8 && bit_size <= 32)
                    f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
                while (bit_pos >= align * 8)
                    c += align, bit_pos -= align * 8;
                /* In PCC layout named bit-fields influence the alignment
                   of the containing struct using the base types alignment,
                   except for packed fields (which here have correct align). */
                if (f->v & SYM_FIRST_ANOM)
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8)
                    || (bit_size > 0) == (bt != prevbt))
                    c = (c + align - 1) & -align;
                /* In MS bitfield mode a bit-field run always uses
                   at least as many bits as the underlying type.
                   To start a new run it's also required that this
                   or the last bit-field had non-zero width. */
                if (bit_size || prev_bit_size)
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again). */
                if (bit_size == 0 && prevbt != bt)
                prev_bit_size = bit_size;

            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
        if (align > maxalign)
        printf("set field %s offset %-2d size %-2d align %-2d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos %-2d bits %-2d",
        if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
            /* An anonymous struct/union.  Adjust member offsets
               to reflect the real offset of our containing struct.
               Also set the offset of this anon member inside
               the outer struct to be zero.  Via this it
               works when accessing the field offset directly
               (from base object), as well as when recursing
               members in initializer handling. */
            int v2 = f->type.ref->v;
            if (!(v2 & SYM_FIELD) &&
                (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                /* This happens only with MS extensions.  The
                   anon member has a named struct type, so it
                   potentially is shared with other references.
                   We need to unshare members so we can modify
                   them. */
                f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
                                       &f->type.ref->type, 0,
                pps = &f->type.ref->next;
                while ((ass = ass->next) != NULL) {
                    *pps = sym_push(ass->v, &ass->type, 0, ass->c);
                    pps = &((*pps)->next);
            struct_add_offset(f->type.ref, offset);
        c += (bit_pos + 7) >> 3;

    /* store size and alignment */
    a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
    if (pragma_pack && pragma_pack < maxalign) {
        /* can happen if individual align for some member was given.  In
           this case MSVC ignores maxalign when aligning the size */
    c = (c + a - 1) & -a;
    printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
    /* check whether we can access bitfields by their type */
    for (f = type->ref->next; f; f = f->next) {
        if (0 == (f->type.t & VT_BITFIELD))
        bit_size = BIT_SIZE(f->type.t);
        bit_pos = BIT_POS(f->type.t);
        size = type_size(&f->type, &align);
        if (bit_pos + bit_size <= size * 8 && f->c + size <= c)

        /* try to access the field using a different type */
        c0 = -1, s = align = 1;
            px = f->c * 8 + bit_pos;
            cx = (px >> 3) & -align;
            px = px - (cx << 3);
                s = (px + bit_size + 7) >> 3;
            s = type_size(&t, &align);
            if (px + bit_size <= s * 8 && cx + s <= c) {
                /* update offset and bit position */
                f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                            | (bit_pos << VT_STRUCT_SHIFT);
                printf("FIX field %s offset %-2d size %-2d align %-2d "
                       "pos %-2d bits %-2d\n",
                       get_tok_str(f->v & ~SYM_FIELD, NULL),
                       cx, s, align, px, bit_size);
            /* fall back to load/store single-byte wise */
            f->auxtype = VT_STRUCT;
            printf("FIX field %s : load byte-wise\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL));
/* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
static void struct_decl(CType *type, int u)
    int v, c, size, align, flexible;
    int bit_size, bsize, bt;
    AttributeDef ad, ad1;

    memset(&ad, 0, sizeof ad);
        parse_attribute(&ad);
        /* struct already defined ? return it */
            expect("struct/union/enum name");
        if (s && (s->sym_scope == local_scope || tok != '{')) {
            if (u == VT_ENUM && IS_ENUM(s->type.t))
            tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
    /* Record the original enum/struct/union token. */
    type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
    type->t = s->type.t;
            tcc_error("struct/union/enum already defined");
        /* cannot be empty */
        /* non empty enums are not allowed */
            long long ll = 0, pl = 0, nl = 0;
            /* enum symbols have static storage */
            t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
                    expect("identifier");
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                    ll = expr_const64();
                ss = sym_push(v, &t, VT_CONST, 0);
                *ps = ss, ps = &ss->next;
                /* NOTE: we accept a trailing comma */

            /* set integral type of the enum */
                if (pl != (unsigned)pl)
            } else if (pl != (int)pl || nl != (int)nl)
            s->type.t = type->t = t.t | VT_ENUM;

            /* set type for enum members */
            for (ss = s->next; ss; ss = ss->next) {
                if (ll == (int)ll) /* default is int if it fits */
                if (t.t & VT_UNSIGNED) {
                    ss->type.t |= VT_UNSIGNED;
                    if (ll == (unsigned)ll)
                ss->type.t = (ss->type.t & ~VT_BTYPE) | VT_LLONG;
            while (tok != '}') {
                if (!parse_btype(&btype, &ad1)) {
                    tcc_error("flexible array member '%s' not at the end of struct",
                              get_tok_str(v, NULL));
                        type_decl(&type1, &ad1, &v, TYPE_DIRECT);
                        if ((type1.t & VT_BTYPE) != VT_STRUCT)
                            expect("identifier");
                            int v = btype.ref->v;
                            if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                if (tcc_state->ms_extensions == 0)
                                    expect("identifier");
                    if (type_size(&type1, &align) < 0) {
                        if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
                            tcc_error("field '%s' has incomplete type",
                                      get_tok_str(v, NULL));
                    if ((type1.t & VT_BTYPE) == VT_FUNC ||
                        (type1.t & VT_STORAGE))
                        tcc_error("invalid type for '%s'",
                                  get_tok_str(v, NULL));
                        bit_size = expr_const();
                        /* XXX: handle v = 0 case for messages */
                            tcc_error("negative width in bit-field '%s'",
                                      get_tok_str(v, NULL));
                        if (v && bit_size == 0)
                            tcc_error("zero width for bit-field '%s'",
                                      get_tok_str(v, NULL));
                        parse_attribute(&ad1);
                    size = type_size(&type1, &align);
                    if (bit_size >= 0) {
                        bt = type1.t & VT_BTYPE;
                            tcc_error("bitfields must have scalar type");
                        if (bit_size > bsize) {
                            tcc_error("width of '%s' exceeds its type",
                                      get_tok_str(v, NULL));
                        } else if (bit_size == bsize
                                   && !ad.a.packed && !ad1.a.packed) {
                            /* no need for bit fields */
                        } else if (bit_size == 64) {
                            tcc_error("field width 64 not implemented");
                            type1.t = (type1.t & ~VT_STRUCT_MASK)
                                      | (bit_size << (VT_STRUCT_SHIFT + 6));
                    if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* Remember we've seen a real field to check
                           for placement of flexible array member. */
                        /* If member is a struct or bit-field, enforce
                           placing into the struct (as anonymous). */
                        ((type1.t & VT_BTYPE) == VT_STRUCT ||
                        ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
                    if (tok == ';' || tok == TOK_EOF)
            parse_attribute(&ad);
            struct_layout(type, &ad);
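
/* Enum base-type selection above, roughly by example (sketch):
       enum e1 { A = 1, B = 2 };        // all values fit in int  -> int
       enum e2 { C = 0x80000000u };     // no negative values     -> unsigned
       enum e3 { D = 0x100000000LL };   // exceeds 32 bits        -> (unsigned) long long
   and the member symbols are then retyped in the same loop so each
   enumerator carries the chosen integral type. */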
static void sym_to_attr(AttributeDef *ad, Sym *s)
    if (s->a.aligned && 0 == ad->a.aligned)
        ad->a.aligned = s->a.aligned;
    if (s->f.func_call && 0 == ad->f.func_call)
        ad->f.func_call = s->f.func_call;
    if (s->f.func_type && 0 == ad->f.func_type)
        ad->f.func_type = s->f.func_type;

/* Add type qualifiers to a type. If the type is an array then the qualifiers
   are added to the element type, copied because it could be a typedef. */
static void parse_btype_qualify(CType *type, int qualifiers)
    while (type->t & VT_ARRAY) {
        type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
        type = &type->ref->type;
    type->t |= qualifiers;
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it. */
static int parse_btype(CType *type, AttributeDef *ad)
    int t, u, bt, st, type_found, typespec_found, g;

    memset(ad, 0, sizeof(AttributeDef));
            /* currently, we really ignore extension */
            if (u == VT_SHORT || u == VT_LONG) {
                if (st != -1 || (bt != -1 && bt != VT_INT))
           tmbt: tcc_error("too many basic types");
                if (bt != -1 || (st != -1 && u != VT_INT))
            t = (t & ~VT_BTYPE) | u;
            if ((t & VT_BTYPE) == VT_DOUBLE) {
#ifndef TCC_TARGET_PE
                t = (t & ~VT_BTYPE) | VT_LDOUBLE;
            } else if ((t & VT_BTYPE) == VT_LONG) {
                t = (t & ~VT_BTYPE) | VT_LLONG;
#ifdef TCC_TARGET_ARM64
            /* GCC's __uint128_t appears in some Linux header files.  Make it a
               synonym for long double to get the size and alignment right. */
            if ((t & VT_BTYPE) == VT_LONG) {
#ifdef TCC_TARGET_PE
                t = (t & ~VT_BTYPE) | VT_DOUBLE;
                t = (t & ~VT_BTYPE) | VT_LDOUBLE;
            struct_decl(&type1, VT_ENUM);
            type->ref = type1.ref;
            struct_decl(&type1, VT_STRUCT);
            struct_decl(&type1, VT_UNION);

            /* type modifiers */
                parse_btype_qualify(type, VT_CONSTANT);
                parse_btype_qualify(type, VT_VOLATILE);
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
                tcc_error("multiple storage classes");

            /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->attr_mode) {
                u = ad->attr_mode -1;
                t = (t & ~VT_BTYPE) | u;
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
                sym_to_attr(ad, type1.ref);
            if (!s || !(s->type.t & VT_TYPEDEF))
            u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
            type->t = (s->type.t & ~VT_TYPEDEF) | u;
            type->ref = s->type.ref;
                parse_btype_qualify(type, t);
            /* get attributes from typedef */
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
    /* long is never used as type */
    if ((t & VT_BTYPE) == VT_LONG)
#if PTR_SIZE == 8 && !defined TCC_TARGET_PE
        t = (t & ~VT_BTYPE) | VT_LLONG;
        t = (t & ~VT_BTYPE) | VT_INT;
/* convert a function parameter type (array to pointer and function to
   function pointer) */
static inline void convert_parameter_type(CType *pt)
    /* remove const and volatile qualifiers (XXX: const could be used
       to indicate a const function parameter */
    pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
    /* array must be transformed to pointer according to ANSI C */
    if ((pt->t & VT_BTYPE) == VT_FUNC) {

ST_FUNC void parse_asm_str(CString *astr)
    parse_mult_str(astr, "string constant");

/* Parse an asm label and return the token */
static int asm_label_instr(void)
    parse_asm_str(&astr);
    printf("asm_alias: \"%s\"\n", (char *)astr.data);
    v = tok_alloc(astr.data, astr.size - 1)->tok;
static int post_type(CType *type, AttributeDef *ad, int storage, int td)
    int n, l, t1, arg_size, align;
    Sym **plast, *s, *first;

        /* function type, or recursive declarator (return if so) */
        if (td && !(td & TYPE_ABSTRACT))
        else if (parse_btype(&pt, &ad1))
                /* read param name and compute offset */
                if (l != FUNC_OLD) {
                    if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
                    type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
                    if ((pt.t & VT_BTYPE) == VT_VOID)
                        tcc_error("parameter declared as void");
                    arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
                        expect("identifier");
                    pt.t = VT_VOID; /* invalid type */
                convert_parameter_type(&pt);
                s = sym_push(n | SYM_FIELD, &pt, 0, 0);
                if (l == FUNC_NEW && tok == TOK_DOTS) {
                if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
                    tcc_error("invalid type");
        /* if no parameters, then old type prototype */
        /* NOTE: const is ignored in returned type as it has a special
           meaning in gcc / C++ */
        type->t &= ~VT_CONSTANT;
        /* some ancient pre-K&R C allows a function to return an array
           and the array brackets to be put after the arguments, such
           that "int c()[]" means something like "int[] c()" */
            skip(']'); /* only handle simple "[]" */
        /* we push an anonymous symbol which will contain the function prototype */
        ad->f.func_args = arg_size;
        ad->f.func_type = l;
        s = sym_push(SYM_FIELD, type, 0, 0);
    } else if (tok == '[') {
        int saved_nocode_wanted = nocode_wanted;
        /* array definition */
        if (tok == TOK_RESTRICT1)
            if (!local_stack || (storage & VT_STATIC))
                vpushi(expr_const());
                /* VLAs (which can only happen with local_stack && !VT_STATIC)
                   length must always be evaluated, even under nocode_wanted,
                   so that its size slot is initialized (e.g. under sizeof
                   or typeof). */
            if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
                    tcc_error("invalid array size");
                if (!is_integer_btype(vtop->type.t & VT_BTYPE))
                    tcc_error("size of variable length array should be an integer");
        /* parse next post type */
        post_type(type, ad, storage, 0);
        if (type->t == VT_FUNC)
            tcc_error("declaration of an array of functions");
        t1 |= type->t & VT_VLA;
            loc -= type_size(&int_type, &align);
            vla_runtime_type_size(type, &align);
            vset(&int_type, VT_LOCAL|VT_LVAL, n);
            nocode_wanted = saved_nocode_wanted;
        /* we push an anonymous symbol which will contain the array
           element type */
        s = sym_push(SYM_FIELD, type, 0, n);
        type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
/* Parse a type declarator (except basic type), and return the type
   in 'type'. 'td' is a bitmask indicating which kind of type decl is
   expected. 'type' should contain the basic type. 'ad' is the
   attribute definition of the basic type. It can be modified by
   type_decl().  If this (possibly abstract) declarator is a pointer chain
   it returns the innermost pointed to type (equals *type, but is a different
   pointer), otherwise returns type itself, that's used for recursive calls.  */
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
    int qualifiers, storage;

    /* recursive type, remove storage bits first, apply them later again */
    storage = type->t & VT_STORAGE;
    type->t &= ~VT_STORAGE;

    while (tok == '*') {
                qualifiers |= VT_CONSTANT;
                qualifiers |= VT_VOLATILE;
            /* XXX: clarify attribute handling */
            case TOK_ATTRIBUTE1:
            case TOK_ATTRIBUTE2:
                parse_attribute(ad);
        type->t |= qualifiers;
        /* innermost pointed to type is the one for the first derivation */
        ret = pointed_type(type);

        /* This is possibly a parameter type list for abstract declarators
           ('int ()'), use post_type for testing this.  */
        if (!post_type(type, ad, 0, td)) {
            /* It's not, so it's a nested declarator, and the post operations
               apply to the innermost pointed to type (if any).  */
            /* XXX: this is not correct to modify 'ad' at this point, but
               the syntax is not clear */
            parse_attribute(ad);
            post = type_decl(type, ad, v, td);
    } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
        /* type identifier */
        if (!(td & TYPE_ABSTRACT))
            expect("identifier");
    post_type(post, ad, storage, 0);
    parse_attribute(ad);
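
/* Declarator recursion, by example (a rough sketch): for
       int (*fp)(char);
   parse_btype() has already consumed "int"; type_decl() then eats the
   '*', recurses into the parenthesised nested declarator to bind the
   name 'fp', and the post_type() call afterwards turns the trailing
   "(char)" into the function type that the pointer points to. */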
/* compute the lvalue VT_LVAL_xxx needed to match type t. */
ST_FUNC int lvalue_type(int t)
    if (bt == VT_BYTE || bt == VT_BOOL)
    else if (bt == VT_SHORT)
    if (t & VT_UNSIGNED)
        r |= VT_LVAL_UNSIGNED;

/* indirection with full error checking and bound check */
ST_FUNC void indir(void)
    if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
    if (vtop->r & VT_LVAL)
    vtop->type = *pointed_type(&vtop->type);
    /* Arrays and functions are never lvalues */
    if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
        && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
        vtop->r |= lvalue_type(vtop->type.t);
        /* if bound checking, the referenced pointer must be checked */
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check)
            vtop->r |= VT_MUSTBOUND;
/* pass a parameter to a function and do type checking and casting */
static void gfunc_param_typed(Sym *func, Sym *arg)
    func_type = func->f.func_type;
    if (func_type == FUNC_OLD ||
        (func_type == FUNC_ELLIPSIS && arg == NULL)) {
        /* default casting : only need to convert float to double */
        if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
            gen_cast_s(VT_DOUBLE);
        } else if (vtop->type.t & VT_BITFIELD) {
            type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
            type.ref = vtop->type.ref;
    } else if (arg == NULL) {
        tcc_error("too many arguments to function");
        type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
        gen_assign_cast(&type);
/* parse an expression and return its type without any side effect.
   If UNRY we parse a unary expression, otherwise a full one. */
static void expr_type(CType *type, int unry)

/* parse an expression of the form '(type)' or '(expr)' and return its
   type */
static void parse_expr_type(CType *type)
    if (parse_btype(type, &ad)) {
        type_decl(type, &ad, &n, TYPE_ABSTRACT);

static void parse_type(CType *type)
    if (!parse_btype(type, &ad)) {
    type_decl(type, &ad, &n, TYPE_ABSTRACT);

static void parse_builtin_params(int nc, const char *args)
    while ((c = *args++)) {
        case 'e': expr_eq(); continue;
        case 't': parse_type(&t); vpush(&t); continue;
        default: tcc_error("internal error"); break;
ST_FUNC void unary(void)
    int n, t, align, size, r, sizeof_caller;

    sizeof_caller = in_sizeof;
    /* XXX: GCC 2.95.3 does not generate a table although it should be
       better here */
        vsetc(&type, VT_CONST, &tokc);
        t = VT_INT | VT_UNSIGNED;
        t = VT_LLONG | VT_UNSIGNED;
    case TOK___FUNCTION__:
        goto tok_identifier;
        /* special function name identifier */
        len = strlen(funcname) + 1;
        /* generate char[len] type */
        vpush_ref(&type, data_section, data_section->data_offset, len);
        if (!NODATA_WANTED) {
            ptr = section_ptr_add(data_section, len);
            memcpy(ptr, funcname, len);
#ifdef TCC_TARGET_PE
        t = VT_SHORT | VT_UNSIGNED;
        /* string parsing */
        if (tcc_state->char_is_unsigned)
            t = VT_BYTE | VT_UNSIGNED;
        if (tcc_state->warn_write_strings)
        memset(&ad, 0, sizeof(AttributeDef));
        decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
        if (parse_btype(&type, &ad)) {
            type_decl(&type, &ad, &n, TYPE_ABSTRACT);
            /* check ISOC99 compound literal */
                /* data is allocated locally by default */
                /* all except arrays are lvalues */
                if (!(type.t & VT_ARRAY))
                    r |= lvalue_type(type.t);
                memset(&ad, 0, sizeof(AttributeDef));
                decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
                if (sizeof_caller) {
        } else if (tok == '{') {
            int saved_nocode_wanted = nocode_wanted;
                tcc_error("expected constant");
            /* save all registers */
            /* statement expression : we do not accept break/continue
               inside as GCC does.  We do retain the nocode_wanted state,
               as statement expressions can't ever be entered from the
               outside, so any reactivation of code emission (from labels
               or loop heads) can be disabled again after the end of it. */
            block(NULL, NULL, 1);
            nocode_wanted = saved_nocode_wanted;
        /* functions names must be treated as function pointers,
           except for unary '&' and sizeof. Since we consider that
           functions are not lvalues, we only have to handle it
           there and in function calls. */
        /* arrays can also be used although they are not lvalues */
        if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
            !(vtop->type.t & VT_ARRAY))
        mk_pointer(&vtop->type);
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            gen_cast_s(VT_BOOL);
            vtop->c.i = !vtop->c.i;
        } else if ((vtop->r & VT_VALMASK) == VT_CMP)
            vseti(VT_JMP, gvtst(1, 0));
        if ((vtop->type.t & VT_BTYPE) == VT_PTR)
            tcc_error("pointer not accepted for unary plus");
        /* In order to force cast, we add zero, except for floating point
           where we really need a noop (otherwise -0.0 will be transformed
           into +0.0). */
        if (!is_float(vtop->type.t)) {
        expr_type(&type, 1); // Perform a in_sizeof = 0;
        s = vtop[1].sym; /* hack: accessing previous vtop */
        size = type_size(&type, &align);
        if (s && s->a.aligned)
            align = 1 << (s->a.aligned - 1);
        if (t == TOK_SIZEOF) {
            if (!(type.t & VT_VLA)) {
                    tcc_error("sizeof applied to an incomplete type");
                vla_runtime_type_size(&type, &align);
        vtop->type.t |= VT_UNSIGNED;
    case TOK_builtin_expect:
        /* __builtin_expect is a no-op for now */
        parse_builtin_params(0, "ee");
    case TOK_builtin_types_compatible_p:
        parse_builtin_params(0, "tt");
        vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
        vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
        n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
    case TOK_builtin_choose_expr:
    case TOK_builtin_constant_p:
        parse_builtin_params(1, "e");
        n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    case TOK_builtin_frame_address:
    case TOK_builtin_return_address:
        if (tok != TOK_CINT) {
            tcc_error("%s only takes positive integers",
                      tok1 == TOK_builtin_return_address ?
                      "__builtin_return_address" :
                      "__builtin_frame_address");
        level = (uint32_t)tokc.i;
        vset(&type, VT_LOCAL, 0); /* local frame */
            mk_pointer(&vtop->type);
            indir(); /* -> parent frame */
        if (tok1 == TOK_builtin_return_address) {
            // assume return address is just above frame pointer on stack
            mk_pointer(&vtop->type);
#ifdef TCC_TARGET_X86_64
#ifdef TCC_TARGET_PE
    case TOK_builtin_va_start:
        parse_builtin_params(0, "ee");
        r = vtop->r & VT_VALMASK;
            tcc_error("__builtin_va_start expects a local variable");
        vtop->type = char_pointer_type;
    case TOK_builtin_va_arg_types:
        parse_builtin_params(0, "t");
        vpushi(classify_x86_64_va_arg(&vtop->type));
#ifdef TCC_TARGET_ARM64
    case TOK___va_start: {
        parse_builtin_params(0, "ee");
        vtop->type.t = VT_VOID;
    case TOK___va_arg: {
        parse_builtin_params(0, "et");
    case TOK___arm64_clear_cache: {
        parse_builtin_params(0, "ee");
        vtop->type.t = VT_VOID;
    /* pre operations */
        t = vtop->type.t & VT_BTYPE;
            /* In IEEE negate(x) isn't subtract(0,x), but rather
               subtract(-0, x). */
                vtop->c.f = -1.0 * 0.0;
            else if (t == VT_DOUBLE)
                vtop->c.d = -1.0 * 0.0;
                vtop->c.ld = -1.0 * 0.0;
            goto tok_identifier;
        /* allow to take the address of a label */
        if (tok < TOK_UIDENT)
            expect("label identifier");
        s = label_find(tok);
            s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;
            s->type.t = VT_VOID;
            mk_pointer(&s->type);
            s->type.t |= VT_STATIC;
        vpushsym(&s->type, s);
        CType controlling_type;
        int has_default = 0;
        AttributeDef ad_tmp;
        TokenString *str = NULL;
        ParseState saved_parse_state;

        expr_type(&controlling_type, 1);
        if (controlling_type.t & VT_ARRAY)
            controlling_type.t = VT_PTR;
        controlling_type.t &= ~VT_CONSTANT;
            if (tok == TOK_DEFAULT) {
                    tcc_error("too many 'default'");
                parse_btype(&cur_type, &ad_tmp);
                type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
                if (compare_types(&controlling_type, &cur_type, 0)) {
                    // tcc_error("type match twice");
                skip_or_save_block(&str);
                skip_or_save_block(NULL);
            else if (tok == ')')
        if (!has_match && !has_default) {
            type_to_str(buf, 256, &controlling_type, NULL);
            tcc_error("_Generic selector of type '%s' is not compatible with any association",
        save_parse_state(&saved_parse_state);
        begin_macro(str, 1);
        restore_parse_state(&saved_parse_state);
    // special qnan , snan and infinity values
        vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
        vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
        vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
            expect("identifier");
            const char *name = get_tok_str(t, NULL);
                tcc_error("'%s' undeclared", name);
            /* for simple function calls, we tolerate undeclared
               external reference to int() function */
            if (tcc_state->warn_implicit_function_declaration
#ifdef TCC_TARGET_PE
                /* people must be warned about using undeclared WINAPI functions
                   (which usually start with uppercase letter) */
                || (name[0] >= 'A' && name[0] <= 'Z')
                tcc_warning("implicit declaration of function '%s'", name);
            s = external_global_sym(t, &func_old_type, 0);
        /* A symbol that has a register is a local register variable,
           which starts out as VT_LOCAL value. */
        if ((r & VT_VALMASK) < VT_CONST)
            r = (r & ~VT_VALMASK) | VT_LOCAL;
        vset(&s->type, r, s->c);
        /* Point to s as backpointer (even without r&VT_SYM).
           Will be used by at least the x86 inline asm parser for
           regvars. */
        } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
            vtop->c.i = s->enum_val;
    /* post operations */
        if (tok == TOK_INC || tok == TOK_DEC) {
        } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
            if (tok == TOK_ARROW)
            qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
            /* expect pointer on structure */
            if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
                expect("struct or union");
            if (tok == TOK_CDOUBLE)
                expect("field name");
            if (tok == TOK_CINT || tok == TOK_CUINT)
                expect("field name");
            s = find_field(&vtop->type, tok);
                tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
            /* add field offset to pointer */
            vtop->type = char_pointer_type; /* change type to 'char *' */
            /* change type to field type, and set to lvalue */
            vtop->type = s->type;
            vtop->type.t |= qualifiers;
            /* an array is never an lvalue */
            if (!(vtop->type.t & VT_ARRAY)) {
                vtop->r |= lvalue_type(vtop->type.t);
#ifdef CONFIG_TCC_BCHECK
                /* if bound checking, the referenced pointer must be checked */
                if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
                    vtop->r |= VT_MUSTBOUND;
        } else if (tok == '[') {
        } else if (tok == '(') {
            int nb_args, ret_nregs, ret_align, regsize, variadic;

            if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
                /* pointer test (no array accepted) */
                if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
                    vtop->type = *pointed_type(&vtop->type);
                    if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
                    expect("function pointer");
            vtop->r &= ~VT_LVAL; /* no lvalue */
            /* get return type */
            sa = s->next; /* first parameter */
            nb_args = regsize = 0;

            /* compute first implicit argument if a structure is returned */
            if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
                variadic = (s->f.func_type == FUNC_ELLIPSIS);
                ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
                                       &ret_align, &regsize);
                    /* get some space for the returned structure */
                    size = type_size(&s->type, &align);
#ifdef TCC_TARGET_ARM64
                    /* On arm64, a small struct is returned in registers.
                       It is much easier to write it to memory if we know
                       that we are allowed to write some extra bytes, so
                       round the allocated space up to a power of 2: */
                    while (size & (size - 1))
                        size = (size | (size - 1)) + 1;
                    loc = (loc - size) & -align;
                    ret.r = VT_LOCAL | VT_LVAL;
                    /* pass it as 'int' to avoid structure arg passing
                       problems */
                    vseti(VT_LOCAL, loc);
                /* return in register */
                if (is_float(ret.type.t)) {
                    ret.r = reg_fret(ret.type.t);
#ifdef TCC_TARGET_X86_64
                    if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
#ifndef TCC_TARGET_ARM64
#ifdef TCC_TARGET_X86_64
                    if ((ret.type.t & VT_BTYPE) == VT_QLONG)
                    if ((ret.type.t & VT_BTYPE) == VT_LLONG)
                    gfunc_param_typed(s, sa);
                tcc_error("too few arguments to function");
            gfunc_call(nb_args);

            for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
                vsetc(&ret.type, r, &ret.c);
                vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */

            /* handle packed struct return */
            if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
                size = type_size(&s->type, &align);
                /* We're writing whole regs often, make sure there's enough
                   space.  Assume register size is power of 2. */
                if (regsize > align)
                loc = (loc - size) & -align;
                    vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
                    if (--ret_nregs == 0)
                vset(&s->type, VT_LOCAL | VT_LVAL, addr);
ST_FUNC void expr_prod(void)
    while (tok == '*' || tok == '/' || tok == '%') {

ST_FUNC void expr_sum(void)
    while (tok == '+' || tok == '-') {

static void expr_shift(void)
    while (tok == TOK_SHL || tok == TOK_SAR) {

static void expr_cmp(void)
    while ((tok >= TOK_ULE && tok <= TOK_GT) ||
           tok == TOK_ULT || tok == TOK_UGE) {

static void expr_cmpeq(void)
    while (tok == TOK_EQ || tok == TOK_NE) {

static void expr_and(void)
    while (tok == '&') {

static void expr_xor(void)
    while (tok == '^') {

static void expr_or(void)
    while (tok == '|') {

static void expr_land(void)
    if (tok == TOK_LAND) {
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            gen_cast_s(VT_BOOL);
        while (tok == TOK_LAND) {
            if (tok != TOK_LAND) {

static void expr_lor(void)
    if (tok == TOK_LOR) {
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            gen_cast_s(VT_BOOL);
        while (tok == TOK_LOR) {
            if (tok != TOK_LOR) {
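
/* The chain above implements operator precedence by delegation: each
   level parses its operands with the next tighter-binding level and
   then loops over its own operators.  E.g. for "a + b * c", expr_sum()
   obtains each operand from expr_prod(), so "b * c" is grouped before
   the addition is generated (a sketch of the usual recursive-descent
   scheme). */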
/* Assuming vtop is a value used in a conditional context
   (i.e. compared with zero) return 0 if it's false, 1 if
   true and -1 if it can't be statically determined.  */
static int condition_3way(void)
    if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
        (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
        gen_cast_s(VT_BOOL);

static void expr_cond(void)
    int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
    CType type, type1, type2;

    c = condition_3way();
    g = (tok == ':' && gnu_ext);
        /* needed to avoid having different registers saved in
           each branch */
        if (is_float(vtop->type.t)) {
#ifdef TCC_TARGET_X86_64
            if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
        sv = *vtop; /* save value to handle it later */
        vtop--; /* no vpop so that FP stack is not flushed */

        bt1 = t1 & VT_BTYPE;
        bt2 = t2 & VT_BTYPE;

        /* cast operands to correct type according to ISOC rules */
        if (is_float(bt1) || is_float(bt2)) {
            if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
                type.t = VT_LDOUBLE;
            } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
        } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
            /* cast to biggest op */
            /* convert to unsigned if it does not fit in a long long */
            if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
                (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
                type.t |= VT_UNSIGNED;
        } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
            /* If one is a null ptr constant the result type
               is the other. */
            if (is_null_pointer (vtop))
            else if (is_null_pointer (&sv))
            /* XXX: test pointer compatibility, C99 has more elaborate
               rules here */
        } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
            /* XXX: test function pointer compatibility */
            type = bt1 == VT_FUNC ? type1 : type2;
        } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
            /* XXX: test structure compatibility */
            type = bt1 == VT_STRUCT ? type1 : type2;
        } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
            /* NOTE: as an extension, we accept void on only one side */
            /* integer operations */
            /* convert to unsigned if it does not fit in an integer */
            if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
                (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
                type.t |= VT_UNSIGNED;

        /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
           that `(expr ? a : b).mem` does not error with "lvalue expected" */
        islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);

        /* now we convert second operand */
            mk_pointer(&vtop->type);
        } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))

        if (is_float(type.t)) {
#ifdef TCC_TARGET_X86_64
            if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
        } else if ((type.t & VT_BTYPE) == VT_LLONG) {
            /* for long longs, we use fixed registers to avoid having
               to handle a complicated move */
            /* this is horrible, but we must also convert first
               operand */
            mk_pointer(&vtop->type);
        } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
        move_reg(r2, r1, type.t);
static void expr_eq(void)
        (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
        tok == TOK_A_XOR || tok == TOK_A_OR ||
        tok == TOK_A_SHL || tok == TOK_A_SAR) {

ST_FUNC void gexpr(void)

/* parse a constant expression and return value in vtop.  */
static void expr_const1(void)

/* parse an integer constant and return its value. */
static inline int64_t expr_const64(void)
    if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
        expect("constant expression");

/* parse an integer constant and return its value.
   Complain if it doesn't fit 32bit (signed or unsigned).  */
ST_FUNC int expr_const(void)
    int64_t wc = expr_const64();
    if (c != wc && (unsigned)c != wc)
        tcc_error("constant exceeds 32 bit");
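
/* The 32-bit check above accepts any value that survives a round trip
   through either int or unsigned int (sketch):
       expr_const() on -1           -> ok, c == wc
       expr_const() on 0xffffffff   -> ok, (unsigned)c == wc
       expr_const() on 0x100000000  -> "constant exceeds 32 bit" */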
/* return the label token if current token is a label, otherwise
   return zero */
static int is_label(void)
    /* fast test first */
    if (tok < TOK_UIDENT)
    /* no need to save tokc because tok is an identifier */
    unget_tok(last_tok);
#ifndef TCC_TARGET_ARM64
static void gfunc_return(CType *func_type)
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            /* copy structure value to pointer */
            /* returning structure packed into registers */
            int r, size, addr, align;
            size = type_size(func_type,&align);
            if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
                 (vtop->c.i & (ret_align-1)))
                && (align & (ret_align-1))) {
                loc = (loc - size) & -ret_align;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
            vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            vtop->type = ret_type;
            if (is_float(ret_type.t))
                r = rc_fret(ret_type.t);
                if (--ret_nregs == 0)
                /* We assume that when a structure is returned in multiple
                   registers, their classes are consecutive values of the
                   suite s(n) = 2^n */
                vtop->c.i += regsize;
    } else if (is_float(func_type->t)) {
        gv(rc_fret(func_type->t));
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
static int case_cmp(const void *pa, const void *pb)
    int64_t a = (*(struct case_t **) pa)->v1;
    int64_t b = (*(struct case_t **) pb)->v1;
    return a < b ? -1 : a > b;
static void gcase(struct case_t **base, int len, int *bsym)
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
            gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        gcase(base, len/2, bsym);
        if (cur_switch->def_sym)
            gjmp_addr(cur_switch->def_sym);
            *bsym = gjmp(*bsym);
        base += e; len -= e;
        if (p->v1 == p->v2) {
            gtst_addr(0, p->sym);
            gtst_addr(0, p->sym);
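
/* gcase() dispatches over the case ranges sorted by case_cmp(): it
   tests the switch value against the middle range, recurses into the
   lower half and continues with the upper half, so larger switches need
   roughly log2(n) comparisons before the final short run of range tests
   (a sketch of the intent, not an exact trace). */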
static void block(int *bsym, int *csym, int is_expr)
    int a, b, c, d, cond;

    /* generate line number info */
    if (tcc_state->do_debug)
        tcc_debug_line(tcc_state);

        /* default return value is (void) */
        vtop->type.t = VT_VOID;

    if (tok == TOK_IF) {
        int saved_nocode_wanted = nocode_wanted;
        cond = condition_3way();
            nocode_wanted |= 0x20000000;
        block(bsym, csym, 0);
            nocode_wanted = saved_nocode_wanted;
        if (c == TOK_ELSE) {
                nocode_wanted |= 0x20000000;
            block(bsym, csym, 0);
            gsym(d); /* patch else jmp */
                nocode_wanted = saved_nocode_wanted;
    } else if (tok == TOK_WHILE) {
        int saved_nocode_wanted;
        nocode_wanted &= ~0x20000000;
        saved_nocode_wanted = nocode_wanted;
        nocode_wanted = saved_nocode_wanted;
    } else if (tok == '{') {
        int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;

        /* record local declaration stack position */
        llabel = local_label_stack;

        /* handle local labels declarations */
        if (tok == TOK_LABEL) {
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
        while (tok != '}') {
            if ((a = is_label()))
                block(bsym, csym, is_expr);
        /* pop locally defined labels */
        label_pop(&local_label_stack, llabel, is_expr);
        /* pop locally defined symbols */
            /* In the is_expr case (a statement expression is finished here),
               vtop might refer to symbols on the local_stack.  Either via the
               type or via vtop->sym.  We can't pop those nor any that in turn
               might be referred to.  To make it easier we don't roll back
               any symbols in that case; some upper level call to block() will
               do that.  We do have to remove such symbols from the lookup
               tables, though.  sym_pop will do that.  */
        sym_pop(&local_stack, s, is_expr);

        /* Pop VLA frames and restore stack pointer if required */
        if (vlas_in_scope > saved_vlas_in_scope) {
            vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
        vlas_in_scope = saved_vlas_in_scope;
    } else if (tok == TOK_RETURN) {
            gen_assign_cast(&func_vt);
            gfunc_return(&func_vt);
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
        nocode_wanted |= 0x20000000;
    } else if (tok == TOK_BREAK) {
            tcc_error("cannot break");
        *bsym = gjmp(*bsym);
        nocode_wanted |= 0x20000000;
    } else if (tok == TOK_CONTINUE) {
            tcc_error("cannot continue");
        vla_sp_restore_root();
        *csym = gjmp(*csym);
    } else if (tok == TOK_FOR) {
        int saved_nocode_wanted;
        nocode_wanted &= ~0x20000000;
        /* c99 for-loop init decl? */
        if (!decl0(VT_LOCAL, 1, NULL)) {
            /* no, regular for-loop init expr */
        saved_nocode_wanted = nocode_wanted;
        nocode_wanted = saved_nocode_wanted;
        sym_pop(&local_stack, s, 0);
    if (tok == TOK_DO) {
        int saved_nocode_wanted;
        nocode_wanted &= ~0x20000000;
        saved_nocode_wanted = nocode_wanted;
        nocode_wanted = saved_nocode_wanted;
    if (tok == TOK_SWITCH) {
        struct switch_t *saved, sw;
        int saved_nocode_wanted = nocode_wanted;
        switchval = *vtop--;
        b = gjmp(0); /* jump to first case */
        sw.p = NULL; sw.n = 0; sw.def_sym = 0;
        nocode_wanted = saved_nocode_wanted;
        a = gjmp(a); /* add implicit break */
        qsort(sw.p, sw.n, sizeof(void*), case_cmp);
        for (b = 1; b < sw.n; b++)
            if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
                tcc_error("duplicate case value");
        /* Our switch table sorting is signed, so the compared
           value needs to be as well when it's 64bit.  */
        if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
            switchval.type.t &= ~VT_UNSIGNED;
        gcase(sw.p, sw.n, &a);
            gjmp_addr(sw.def_sym);
        dynarray_reset(&sw.p, &sw.n);
    if (tok == TOK_CASE) {
        struct case_t *cr = tcc_malloc(sizeof(struct case_t));
        nocode_wanted &= ~0x20000000;
        cr->v1 = cr->v2 = expr_const64();
        if (gnu_ext && tok == TOK_DOTS) {
            cr->v2 = expr_const64();
            if (cr->v2 < cr->v1)
                tcc_warning("empty case range");
        dynarray_add(&cur_switch->p, &cur_switch->n, cr);
        goto block_after_label;
    if (tok == TOK_DEFAULT) {
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        cur_switch->def_sym = ind;
        goto block_after_label;
    if (tok == TOK_GOTO) {
        if (tok == '*' && gnu_ext) {
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
                if (s->r == LABEL_DECLARED)
                    s->r = LABEL_FORWARD;
            vla_sp_restore_root();
            if (s->r & LABEL_FORWARD)
                s->jnext = gjmp(s->jnext);
                gjmp_addr(s->jnext);
            expect("label identifier");
    } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
            if (s->r == LABEL_DEFINED)
                tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
            s->r = LABEL_DEFINED;
            s = label_push(&global_label_stack, b, LABEL_DEFINED);
            /* we accept this, but it is a mistake */
            nocode_wanted &= ~0x20000000;
                tcc_warning("deprecated use of label at end of compound statement");
            block(bsym, csym, is_expr);
    /* expression case */
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
   with a '{').  If STR then allocates and stores the skipped tokens
   in *STR.  This doesn't check if () and {} are nested correctly,
   i.e. "({)}" is accepted.  */
static void skip_or_save_block(TokenString **str)
    int braces = tok == '{';
        *str = tok_str_alloc();
    while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
        if (tok == TOK_EOF) {
            if (str || level > 0)
                tcc_error("unexpected end of file");
            tok_str_add_tok(*str);
        if (t == '{' || t == '(') {
        } else if (t == '}' || t == ')') {
            if (level == 0 && braces && t == '}')
        tok_str_add(*str, -1);
        tok_str_add(*str, 0);

#define EXPR_CONST 1

static void parse_init_elem(int expr_type)
    int saved_global_expr;
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals). */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
            || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
            tcc_error("initializer element is not constant");

/* put zeros for variable based init */
static void init_putz(Section *sec, unsigned long c, int size)
        /* nothing to do because globals are already set to zero */
        vpush_global_sym(&func_old_type, TOK_memset);
#ifdef TCC_TARGET_ARM
/* t is the array or struct type. c is the array or struct
   address. cur_field is the pointer to the current
   field, for arrays the 'c' member contains the current start
   index. 'size_only' is true if only size info is needed (only used
   in arrays). al contains the already initialized length of the
   current container (starting at c). This returns the new length of that. */
static int decl_designator(CType *type, Section *sec, unsigned long c,
                           Sym **cur_field, int size_only, int al)
{
    Sym *s, *f;
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;

    elem_size = 0;
    nb_elems = 1;
    if (gnu_ext && (l = is_label()) != 0)
        goto struct_field;
    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        if (tok == '[') {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            index = index_last = expr_const();
            if (tok == TOK_DOTS && gnu_ext) {
                index_last = expr_const();
            }
            s = type->ref;
            if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
                index_last < index)
                tcc_error("invalid index");
            if (cur_field)
                (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
        } else {
        struct_field:
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            f = find_field(type, l);
            if (cur_field)
                *cur_field = f;
            type = &f->type;
            c += f->c;
        }
        cur_field = NULL;
    }
    if (!cur_field) {
        if (tok == '=') {
            next();
        } else if (!gnu_ext) {
            expect("=");
        }
    } else {
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            if (type->ref->c >= 0 && index >= type->ref->c)
                tcc_error("index too large");
            type = pointed_type(type);
            c += index * type_size(type, &align);
        } else {
            f = *cur_field;
            while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
                *cur_field = f = f->next;
            if (!f)
                tcc_error("too many field init");
            type = &f->type;
            c += f->c;
        }
    }
    /* must put zero in holes (note that doing it that way
       ensures that it even works with designators) */
    if (!size_only && c - corig > al)
        init_putz(sec, corig + al, c - corig - al);
    decl_initializer(type, sec, c, 0, size_only);

    /* XXX: make it more general */
    if (!size_only && nb_elems > 1) {
        unsigned long c_end;
        uint8_t *src, *dst;
        int i;

        if (!sec) {
            vset(type, VT_LOCAL|VT_LVAL, c);
            for (i = 1; i < nb_elems; i++) {
                vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
                vswap();
                vstore();
            }
            vpop();
        } else if (!NODATA_WANTED) {
            c_end = c + nb_elems * elem_size;
            if (c_end > sec->data_allocated)
                section_realloc(sec, c_end);
            src = sec->data + c;
            dst = src;
            for(i = 1; i < nb_elems; i++) {
                dst += elem_size;
                memcpy(dst, src, elem_size);
            }
        }
    }
    c += nb_elems * type_size(type, &align);
    if (c - corig > al)
        al = c - corig;
    return al;
}
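/* Illustrative example (added comment, not part of the original source):
   designators handled above, including the GNU range form:

       int a[10]  = { [2] = 1, [4 ... 6] = 7 };
       struct P s = { .y = 2, .x = 1 };

   for the range form nb_elems > 1 and the single parsed initializer is
   replicated over the remaining elements. */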
/* store a value or an expression directly in global data or in local array */
static void init_putv(CType *type, Section *sec, unsigned long c)
{
    int bt;
    void *ptr;
    CType dtype;

    dtype = *type;
    dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */

    if (sec) {
        int size, align;
        /* XXX: not portable */
        /* XXX: generate error if incorrect relocation */
        gen_assign_cast(&dtype);
        bt = type->t & VT_BTYPE;

        if ((vtop->r & VT_SYM)
            && bt != VT_PTR
            && bt != VT_FUNC
            && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
                || (type->t & VT_BITFIELD))
            && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
            )
            tcc_error("initializer element is not computable at load time");

        if (NODATA_WANTED) {
            vtop--;
            return;
        }

        size = type_size(type, &align);
        section_reserve(sec, c + size);
        ptr = sec->data + c;

        /* XXX: make code faster ? */
        if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
            vtop->sym->v >= SYM_FIRST_ANOM &&
            /* XXX This rejects compound literals like
               '(void *){ptr}'. The problem is that '&sym' is
               represented the same way, which would be ruled out
               by the SYM_FIRST_ANOM check above, but also '"string"'
               in 'char *p = "string"' is represented the same
               with the type being VT_PTR and the symbol being an
               anonymous one. That is, there's no difference in vtop
               between '(void *){x}' and '&(void *){x}'. Ignore
               pointer typed entities here. Hopefully no real code
               will every use compound literals with scalar type. */
            (vtop->type.t & VT_BTYPE) != VT_PTR) {
            /* These come from compound literals, memcpy stuff over. */
            Section *ssec;
            ElfW(Sym) *esym;
            ElfW_Rel *rel;
            esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
            ssec = tcc_state->sections[esym->st_shndx];
            memmove (ptr, ssec->data + esym->st_value, size);
            if (ssec->reloc) {
                /* We need to copy over all memory contents, and that
                   includes relocations. Use the fact that relocs are
                   created it order, so look from the end of relocs
                   until we hit one before the copied region. */
                int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
                rel = (ElfW_Rel *)(ssec->reloc->data + ssec->reloc->data_offset);
                while (num_relocs--) {
                    rel--;
                    if (rel->r_offset >= esym->st_value + size)
                        continue;
                    if (rel->r_offset < esym->st_value)
                        break;
                    /* Note: if the same fields are initialized multiple
                       times (possible with designators) then we possibly
                       add multiple relocations for the same offset here.
                       That would lead to wrong code, the last reloc needs
                       to win. We clean this up later after the whole
                       initializer is parsed. */
                    put_elf_reloca(symtab_section, sec,
                                   c + rel->r_offset - esym->st_value,
                                   ELFW(R_TYPE)(rel->r_info),
                                   ELFW(R_SYM)(rel->r_info),
#if PTR_SIZE == 8
                                   rel->r_addend
#else
                                   0
#endif
                                  );
                }
            }
        } else {
            if (type->t & VT_BITFIELD) {
                int bit_pos, bit_size, bits, n;
                unsigned char *p, v, m;
                bit_pos = BIT_POS(vtop->type.t);
                bit_size = BIT_SIZE(vtop->type.t);
                p = (unsigned char*)ptr + (bit_pos >> 3);
                bit_pos &= 7, bits = 0;
                while (bit_size) {
                    n = 8 - bit_pos;
                    if (n > bit_size)
                        n = bit_size;
                    v = vtop->c.i >> bits << bit_pos;
                    m = ((1 << n) - 1) << bit_pos;
                    *p = (*p & ~m) | (v & m);
                    bits += n, bit_size -= n, bit_pos = 0, ++p;
                }
            } else
            switch(bt) {
                /* XXX: when cross-compiling we assume that each type has the
                   same representation on host and target, which is likely to
                   be wrong in the case of long double */
            case VT_BOOL:
                vtop->c.i = vtop->c.i != 0;
            case VT_BYTE:
                *(char *)ptr |= vtop->c.i;
                break;
            case VT_SHORT:
                *(short *)ptr |= vtop->c.i;
                break;
            case VT_FLOAT:
                *(float*)ptr = vtop->c.f;
                break;
            case VT_DOUBLE:
                *(double *)ptr = vtop->c.d;
                break;
            case VT_LDOUBLE:
                if (sizeof(long double) == LDOUBLE_SIZE)
                    *(long double *)ptr = vtop->c.ld;
                else if (sizeof(double) == LDOUBLE_SIZE)
                    *(double *)ptr = (double)vtop->c.ld;
#if (defined __i386__ || defined __x86_64__) && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
                else if (sizeof (long double) >= 10)
                    memcpy(memset(ptr, 0, LDOUBLE_SIZE), &vtop->c.ld, 10);
                else if (sizeof (long double) == sizeof (double))
                    __asm__("fldl %1\nfstpt %0\n" : "=m"
                            (memset(ptr, 0, LDOUBLE_SIZE), ptr) : "m" (vtop->c.ld));
#endif
                else
                    tcc_error("can't cross compile long double constants");
                break;
            case VT_LLONG:
                *(long long *)ptr |= vtop->c.i;
                break;
            case VT_PTR:
                {
                    addr_t val = vtop->c.i;
#if PTR_SIZE == 8
                    if (vtop->r & VT_SYM)
                        greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                    else
                        *(addr_t *)ptr |= val;
#else
                    if (vtop->r & VT_SYM)
                        greloc(sec, vtop->sym, c, R_DATA_PTR);
                    *(addr_t *)ptr |= val;
#endif
                    break;
                }
            default:
                {
                    int val = vtop->c.i;
#if PTR_SIZE == 8
                    if (vtop->r & VT_SYM)
                        greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                    else
                        *(int *)ptr |= val;
#else
                    if (vtop->r & VT_SYM)
                        greloc(sec, vtop->sym, c, R_DATA_PTR);
                    *(int *)ptr |= val;
#endif
                    break;
                }
            }
        }
        vtop--;
    } else {
        vset(&dtype, VT_LOCAL|VT_LVAL, c);
        vswap();
        vstore();
        vpop();
    }
}
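/* Illustrative example (added comment, not part of the original source):
   a static initializer that exercises both the bitfield path and the
   relocation path above:

       struct F { unsigned a:3, b:5; };
       static struct F f = { 5, 17 };     -- packed via the bit loop
       static int x, *px = &x;            -- emits an R_DATA_PTR reloc
*/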
/* 't' contains the type and storage info. 'c' is the offset of the
   object in section 'sec'. If 'sec' is NULL, it means stack based
   allocation. 'first' is true if array '{' must be read (multi
   dimension implicit array init handling). 'size_only' is true if
   size only evaluation is wanted (only for arrays). */
static void decl_initializer(CType *type, Section *sec, unsigned long c,
                             int first, int size_only)
{
    int len, n, no_oblock, nb, i;
    int size1, align1;
    int have_elem;
    Sym *s, *f;
    CType *t1;

    /* If we currently are at an '}' or ',' we have read an initializer
       element in one of our callers, and not yet consumed it. */
    have_elem = tok == '}' || tok == ',';
    if (!have_elem && tok != '{' &&
        /* In case of strings we have special handling for arrays, so
           don't consume them as initializer value (which would commit them
           to some anonymous symbol). */
        tok != TOK_LSTR && tok != TOK_STR &&
        !size_only) {
        parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
        have_elem = 1;
    }

    if (have_elem &&
        !(type->t & VT_ARRAY) &&
        /* Use i_c_parameter_t, to strip toplevel qualifiers.
           The source type might have VT_CONSTANT set, which is
           of course assignable to non-const elements. */
        is_compatible_unqualified_types(type, &vtop->type)) {
        init_putv(type, sec, c);
    } else if (type->t & VT_ARRAY) {
        s = type->ref;
        n = s->c;
        t1 = pointed_type(type);
        size1 = type_size(t1, &align1);

        no_oblock = 1;
        if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
            tok == '{') {
            if (tok != '{')
                tcc_error("character array initializer must be a literal,"
                          " optionally enclosed in braces");
            no_oblock = 0;
        }

        /* only parse strings here if correct type (otherwise: handle
           them as ((w)char *) expressions */
        if ((tok == TOK_LSTR &&
#ifdef TCC_TARGET_PE
             (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
#else
             (t1->t & VT_BTYPE) == VT_INT
#endif
            ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
            len = 0;
            while (tok == TOK_STR || tok == TOK_LSTR) {
                int cstr_len, ch;

                /* compute maximum number of chars wanted */
                if (tok == TOK_STR)
                    cstr_len = tokc.str.size;
                else
                    cstr_len = tokc.str.size / sizeof(nwchar_t);
                cstr_len--;
                nb = cstr_len;
                if (n >= 0 && nb > (n - len))
                    nb = n - len;
                if (!size_only) {
                    if (cstr_len > nb)
                        tcc_warning("initializer-string for array is too long");
                    /* in order to go faster for common case (char
                       string in global variable, we handle it
                       specifically) */
                    if (sec && tok == TOK_STR && size1 == 1) {
                        if (!NODATA_WANTED)
                            memcpy(sec->data + c + len, tokc.str.data, nb);
                    } else {
                        for(i = 0; i < nb; i++) {
                            if (tok == TOK_STR)
                                ch = ((unsigned char *)tokc.str.data)[i];
                            else
                                ch = ((nwchar_t *)tokc.str.data)[i];
                            vpushi(ch);
                            init_putv(t1, sec, c + (len + i) * size1);
                        }
                    }
                }
                len += nb;
                next();
            }
            /* only add trailing zero if enough storage (no
               warning in this case since it is standard) */
            if (n < 0 || len < n) {
                if (!size_only) {
                    vpushi(0);
                    init_putv(t1, sec, c + (len * size1));
                }
                len++;
            }
            len *= size1;
        } else {
            len = 0;
            while (tok != '}' || have_elem) {
                len = decl_designator(type, sec, c, &f, size_only, len);
                have_elem = 0;
                if (type->t & VT_ARRAY) {
                    /* special test for multi dimensional arrays (may not
                       be strictly correct if designators are used at the
                       same time) */
                    if (no_oblock && len >= n*size1)
                        break;
                } else {
                    if (s->type.t == VT_UNION)
                        f = NULL;
                    else
                        f = f->next;
                    if (no_oblock && f == NULL)
                        break;
                }
                if (tok == '}')
                    break;
            }
        }
        /* put zeros at the end */
        if (!size_only && len < n*size1)
            init_putz(sec, c + len, n*size1 - len);
        /* patch type size if needed, which happens only for array types */
        if (n < 0)
            s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
    } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
        no_oblock = 1;
        if (first || tok == '{') {
            no_oblock = 0;
        }
    } else if (tok == '{') {
        next();
        decl_initializer(type, sec, c, first, size_only);
        skip('}');
    } else if (size_only) {
        /* If we supported only ISO C we wouldn't have to accept calling
           this on anything than an array size_only==1 (and even then
           only on the outermost level, so no recursion would be needed),
           because initializing a flex array member isn't supported.
           But GNU C supports it, so we need to recurse even into
           subfields of structs and arrays when size_only is set. */
        /* just skip expression */
        skip_or_save_block(NULL);
    } else {
        if (!have_elem) {
            /* This should happen only when we haven't parsed
               the init element above for fear of committing a
               string constant to memory too early. */
            if (tok != TOK_STR && tok != TOK_LSTR)
                expect("string constant");
            parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
        }
        init_putv(type, sec, c);
    }
}
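/* Illustrative example (added comment, not part of the original source):
   the array branch above gives string literals special treatment:

       char msg[] = "hi";       -- 3 bytes, trailing zero added
       char tag[2] = "hi";      -- fits exactly, no zero, no warning
       char t2[1]  = "hi";      -- "initializer-string ... too long"
*/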
/* parse an initializer for type 't' if 'has_init' is non zero, and
   allocate space in local or global data space ('r' is either
   VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
   variable 'v' of scope 'scope' is declared before initializers
   are parsed. If 'v' is zero, then a reference to the new object
   is put in the value stack. If 'has_init' is 2, a special parsing
   is done to handle string constants. */
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
                                   int has_init, int v, int scope)
{
    int size, align, addr;
    ParseState saved_parse_state = {0};
    TokenString *init_str = NULL;
    Section *sec;
    Sym *flexible_array;
    Sym *sym = NULL;
    int saved_nocode_wanted = nocode_wanted;
#ifdef CONFIG_TCC_BCHECK
    int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
#endif

    if (type->t & VT_STATIC)
        nocode_wanted |= NODATA_WANTED ? 0x40000000 : 0x80000000;

    flexible_array = NULL;
    if ((type->t & VT_BTYPE) == VT_STRUCT) {
        Sym *field = type->ref->next;
        if (field) {
            while (field->next)
                field = field->next;
            if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
                flexible_array = field;
        }
    }

    size = type_size(type, &align);
    /* If unknown size, we must evaluate it before
       evaluating initializers because
       initializers can generate global data too
       (e.g. string pointers or ISOC99 compound
       literals). It also simplifies local
       initializers handling */
    if (size < 0 || (flexible_array && has_init)) {
        if (!has_init)
            tcc_error("unknown type size");
        /* get all init string */
        if (has_init == 2) {
            init_str = tok_str_alloc();
            /* only get strings */
            while (tok == TOK_STR || tok == TOK_LSTR) {
                tok_str_add_tok(init_str);
                next();
            }
            tok_str_add(init_str, -1);
            tok_str_add(init_str, 0);
        } else {
            skip_or_save_block(&init_str);
        }

        /* compute size with a first, size-only pass */
        save_parse_state(&saved_parse_state);
        begin_macro(init_str, 1);
        next();
        decl_initializer(type, NULL, 0, 1, 1);
        /* prepare second initializer parsing */
        macro_ptr = init_str->str;
        next();

        /* if still unknown size, error */
        size = type_size(type, &align);
        if (size < 0)
            tcc_error("unknown type size");
    }
    /* If there's a flex member and it was used in the initializer
       adjust the size. */
    if (flexible_array &&
        flexible_array->type.ref->c > 0)
        size += flexible_array->type.ref->c
                * pointed_size(&flexible_array->type);
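    /* Illustrative example (added comment, not part of the original
       source): the flexible array member adjustment above covers

           struct V { int n; int data[]; };
           static struct V v = { 3, { 1, 2, 3 } };

       where the first (size-only) pass records 3 elements in
       data's type, so 'size' grows by 3 * sizeof(int). */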
    /* take into account specified alignment if bigger */
    if (ad->a.aligned) {
        int speca = 1 << (ad->a.aligned - 1);
        if (speca > align)
            align = speca;
    } else if (ad->a.packed) {
        align = 1;
    }

    if (NODATA_WANTED)
        size = 0, align = 1;

    if ((r & VT_VALMASK) == VT_LOCAL) {
        sec = NULL;
#ifdef CONFIG_TCC_BCHECK
        if (bcheck && (type->t & VT_ARRAY)) {
            loc--;
        }
#endif
        loc = (loc - size) & -align;
        addr = loc;
#ifdef CONFIG_TCC_BCHECK
        /* handles bounds */
        /* XXX: currently, since we do only one pass, we cannot track
           '&' operators, so we add only arrays */
        if (bcheck && (type->t & VT_ARRAY)) {
            addr_t *bounds_ptr;
            /* add padding between regions */
            loc--;
            /* then add local bound info */
            bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = addr;
            bounds_ptr[1] = size;
        }
#endif
        if (v) {
            /* local variable */
#ifdef CONFIG_TCC_ASM
            if (ad->asm_label) {
                int reg = asm_parse_regvar(ad->asm_label);
                if (reg >= 0)
                    r = (r & ~VT_VALMASK) | reg;
            }
#endif
            sym = sym_push(v, type, r, addr);
        } else {
            /* push local reference */
            vset(type, r, addr);
        }
    } else {
        if (v && scope == VT_CONST) {
            /* see if the symbol was already defined */
            sym = sym_find(v);
            if (sym) {
                patch_storage(sym, ad, type);
                if (sym->type.t & VT_EXTERN) {
                    /* if the variable is extern, it was not allocated */
                    sym->type.t &= ~VT_EXTERN;
                    /* set array size if it was omitted in extern
                       declaration */
                    if ((sym->type.t & VT_ARRAY) &&
                        sym->type.ref->c < 0 &&
                        type->ref->c >= 0)
                        sym->type.ref->c = type->ref->c;
                } else if (!has_init) {
                    /* we accept several definitions of the same
                       global variable. this is tricky, because we
                       must play with the SHN_COMMON type of the symbol */
                    /* no init data, we won't add more to the symbol */
                } else if (sym->c) {
                    ElfW(Sym) *esym;
                    esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
                    if (esym->st_shndx == data_section->sh_num)
                        tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
                }
            }
        }

        /* allocate symbol in corresponding section */
        sec = ad->section;
        if (!sec) {
            if (has_init)
                sec = data_section;
            else if (tcc_state->nocommon)
                sec = bss_section;
        }

        if (sec) {
            addr = section_add(sec, size, align);
#ifdef CONFIG_TCC_BCHECK
            /* add padding if bound check */
            if (bcheck)
                section_add(sec, 1, 1);
#endif
        } else {
            addr = align; /* SHN_COMMON is special, symbol value is align */
            sec = common_section;
        }

        if (v) {
            if (!sym) {
                sym = sym_push(v, type, r | VT_SYM, 0);
                patch_storage(sym, ad, NULL);
            }
            /* Local statics have a scope until now (for
               warnings), remove it here. */
            sym->sym_scope = 0;
            /* update symbol definition */
            put_extern_sym(sym, sec, addr, size);
        } else {
            /* push global reference */
            sym = get_sym_ref(type, sec, addr, size);
            vpushsym(type, sym);
        }

#ifdef CONFIG_TCC_BCHECK
        /* handles bounds now because the symbol must be defined
           before for the relocation */
        if (bcheck) {
            addr_t *bounds_ptr;
            greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
            /* then add global bound info */
            bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = 0; /* relocated */
            bounds_ptr[1] = size;
        }
#endif
    }
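    /* Illustrative example (added comment, not part of the original
       source): the VT_VLA branch below handles declarations like

           void f(int n) { int buf[n]; ... }

       the current stack pointer is saved once per scope so that it can
       be restored when the VLA goes out of scope or on a goto. */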
    if (type->t & VT_VLA) {
        int a;

        if (NODATA_WANTED)
            goto no_alloc;

        /* save current stack pointer */
        if (vlas_in_scope == 0) {
            if (vla_sp_root_loc == -1)
                vla_sp_root_loc = (loc -= PTR_SIZE);
            gen_vla_sp_save(vla_sp_root_loc);
        }

        vla_runtime_type_size(type, &a);
        gen_vla_alloc(type, a);
        gen_vla_sp_save(addr);
        vla_sp_loc = addr;
        vlas_in_scope++;

    } else if (has_init) {
        size_t oldreloc_offset = 0;
        if (sec && sec->reloc)
            oldreloc_offset = sec->reloc->data_offset;
        decl_initializer(type, sec, addr, 1, 0);
        if (sec && sec->reloc)
            squeeze_multi_relocs(sec, oldreloc_offset);
        /* patch flexible array member size back to -1, */
        /* for possible subsequent similar declarations */
        if (flexible_array)
            flexible_array->type.ref->c = -1;
    }

 no_alloc:
    /* restore parse state if needed */
    if (init_str) {
        end_macro();
        restore_parse_state(&saved_parse_state);
    }

    nocode_wanted = saved_nocode_wanted;
}
/* parse a function defined by symbol 'sym' and generate its code in
   'cur_text_section' */
static void gen_function(Sym *sym)
{
    nocode_wanted = 0;
    ind = cur_text_section->data_offset;
    /* NOTE: we patch the symbol size later */
    put_extern_sym(sym, cur_text_section, ind, 0);
    funcname = get_tok_str(sym->v, NULL);
    func_ind = ind;
    /* Initialize VLA state */
    vla_sp_loc = -1;
    vla_sp_root_loc = -1;
    /* put debug symbol */
    tcc_debug_funcstart(tcc_state, sym);
    /* push a dummy symbol to enable local sym storage */
    sym_push2(&local_stack, SYM_FIELD, 0, 0);
    local_scope = 1; /* for function parameters */
    gfunc_prolog(&sym->type);
    local_scope = 0;
    rsym = 0;
    block(NULL, NULL, 0);
    nocode_wanted = 0;
    gsym(rsym);
    gfunc_epilog();
    cur_text_section->data_offset = ind;
    label_pop(&global_label_stack, NULL, 0);
    /* reset local stack */
    local_scope = 0;
    sym_pop(&local_stack, NULL, 0);
    /* end of function */
    /* patch symbol size */
    ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size = ind - func_ind;
    tcc_debug_funcend(tcc_state, ind - func_ind);
    /* It's better to crash than to generate wrong code */
    cur_text_section = NULL;
    funcname = ""; /* for safety */
    func_vt.t = VT_VOID; /* for safety */
    func_var = 0; /* for safety */
    ind = 0; /* for safety */
    nocode_wanted = 0x80000000;
static void gen_inline_functions(TCCState *s)
{
    Sym *sym;
    int inline_generated, i, ln;
    struct InlineFunc *fn;

    ln = file->line_num;
    /* iterate while inline function are referenced */
    do {
        inline_generated = 0;
        for (i = 0; i < s->nb_inline_fns; ++i) {
            fn = s->inline_fns[i];
            sym = fn->sym;
            if (sym && sym->c) {
                /* the function was used: generate its code and
                   convert it to a normal function */
                fn->sym = NULL;
                if (file)
                    pstrcpy(file->filename, sizeof file->filename, fn->filename);
                sym->type.t &= ~VT_INLINE;

                begin_macro(fn->func_str, 1);
                next();
                cur_text_section = text_section;
                gen_function(sym);
                end_macro();

                inline_generated = 1;
            }
        }
    } while (inline_generated);
    file->line_num = ln;
}
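/* Illustrative example (added comment, not part of the original source):

       static inline int sq(int x) { return x * x; }   -- body only saved
       int use(int y) { return sq(y); }                -- marks sq as used

   sq() is stored as a token string by decl0() and only compiled here,
   on the pass above, because a reference to it was emitted; an unused
   static inline produces no code at all. */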
ST_FUNC void free_inline_functions(TCCState *s)
{
    int i;
    /* free tokens of unused inline functions */
    for (i = 0; i < s->nb_inline_fns; ++i) {
        struct InlineFunc *fn = s->inline_fns[i];
        if (fn->sym)
            tok_str_free(fn->func_str);
    }
    dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
}
/* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
   if parsing old style parameter decl list (and FUNC_SYM is set then) */
static int decl0(int l, int is_for_loop_init, Sym *func_sym)
{
    int v, has_init, r;
    CType type, btype;
    Sym *sym;
    AttributeDef ad;

    while (1) {
        if (!parse_btype(&btype, &ad)) {
            if (is_for_loop_init)
                return 0;
            /* skip redundant ';' if not in old parameter decl scope */
            if (tok == ';' && l != VT_CMP) {
                next();
                continue;
            }
            if (l == VT_CONST &&
                (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                /* global asm block */
                asm_global_instr();
                continue;
            }
            /* special test for old K&R protos without explicit int
               type. Only accepted when defining global data */
            if (l != VT_CONST || tok < TOK_UIDENT)
                break;
            btype.t = VT_INT;
        }
        if (tok == ';') {
            if ((btype.t & VT_BTYPE) == VT_STRUCT) {
                int v = btype.ref->v;
                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
                    tcc_warning("unnamed struct/union that defines no instances");
                next();
                continue;
            }
            if (IS_ENUM(btype.t)) {
                next();
                continue;
            }
        }
        while (1) { /* iterate thru each declaration */
            type = btype;
            /* If the base type itself was an array type of unspecified
               size (like in 'typedef int arr[]; arr x = {1};') then
               we will overwrite the unknown size by the real one for
               this decl. We need to unshare the ref symbol holding
               that size. */
            if ((type.t & VT_ARRAY) && type.ref->c < 0) {
                type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
            }
            type_decl(&type, &ad, &v, TYPE_DIRECT);
#if 0
            {
                char buf[500];
                type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
                printf("type = '%s'\n", buf);
            }
#endif
            if ((type.t & VT_BTYPE) == VT_FUNC) {
                if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
                    tcc_error("function without file scope cannot be static");
                }
                /* if old style function prototype, we accept a
                   declaration list */
                sym = type.ref;
                if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
                    decl0(VT_CMP, 0, sym);
            }

            if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                ad.asm_label = asm_label_instr();
                /* parse one last attribute list, after asm label */
                parse_attribute(&ad);
            }

#ifdef TCC_TARGET_PE
            if (ad.a.dllimport || ad.a.dllexport) {
                if (type.t & (VT_STATIC|VT_TYPEDEF))
                    tcc_error("cannot have dll linkage with static or typedef");
                if (ad.a.dllimport) {
                    if ((type.t & VT_BTYPE) == VT_FUNC)
                        ad.a.dllimport = 0;
                    else
                        type.t |= VT_EXTERN;
                }
            }
#endif
            if (tok == '{') {
                if (l != VT_CONST)
                    tcc_error("cannot use local functions");
                if ((type.t & VT_BTYPE) != VT_FUNC)
                    expect("function definition");

                /* reject abstract declarators in function definition
                   make old style params without decl have int type */
                sym = type.ref;
                while ((sym = sym->next) != NULL) {
                    if (!(sym->v & ~SYM_FIELD))
                        expect("identifier");
                    if (sym->type.t == VT_VOID)
                        sym->type = int_type;
                }
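                /* Illustrative example (added comment, not part of the
                   original source): the old-style definitions accepted
                   here look like

                       int add(a, b)
                           int a, b;
                       { return a + b; }

                   parameters left without a declaration default to int. */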
                /* XXX: cannot do better now: convert extern line to static inline */
                if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
                    type.t = (type.t & ~VT_EXTERN) | VT_STATIC;

                sym = sym_find(v);
                if (sym) {
                    Sym *ref;
                    if ((sym->type.t & VT_BTYPE) != VT_FUNC)
                        goto func_error1;

                    ref = sym->type.ref;

                    /* use func_call from prototype if not defined */
                    if (ref->f.func_call != FUNC_CDECL
                        && type.ref->f.func_call == FUNC_CDECL)
                        type.ref->f.func_call = ref->f.func_call;

                    /* use static from prototype */
                    if (sym->type.t & VT_STATIC)
                        type.t = (type.t & ~VT_EXTERN) | VT_STATIC;

                    /* If the definition has no visibility use the
                       one from prototype. */
                    if (!type.ref->a.visibility)
                        type.ref->a.visibility = ref->a.visibility;
                    /* apply other storage attributes from prototype */
                    type.ref->a.dllexport |= ref->a.dllexport;
                    type.ref->a.weak |= ref->a.weak;

                    if (!is_compatible_types(&sym->type, &type)) {
                    func_error1:
                        tcc_error("incompatible types for redefinition of '%s'",
                                  get_tok_str(v, NULL));
                    }
                    if (ref->f.func_body)
                        tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
                    /* if symbol is already defined, then put complete type */
                    sym->type = type;
                } else {
                    /* put function symbol */
                    sym = global_identifier_push(v, type.t, 0);
                    sym->type.ref = type.ref;
                }

                sym->type.ref->f.func_body = 1;
                sym->r = VT_SYM | VT_CONST;
                patch_storage(sym, &ad, NULL);

                /* static inline functions are just recorded as a kind
                   of macro. Their code will be emitted at the end of
                   the compilation unit only if they are used */
                if ((type.t & (VT_INLINE | VT_STATIC)) ==
                    (VT_INLINE | VT_STATIC)) {
                    struct InlineFunc *fn;
                    const char *filename;

                    filename = file ? file->filename : "";
                    fn = tcc_malloc(sizeof *fn + strlen(filename));
                    strcpy(fn->filename, filename);
                    fn->sym = sym;
                    skip_or_save_block(&fn->func_str);
                    dynarray_add(&tcc_state->inline_fns,
                                 &tcc_state->nb_inline_fns, fn);
                } else {
                    /* compute text section */
                    cur_text_section = ad.section;
                    if (!cur_text_section)
                        cur_text_section = text_section;
                    gen_function(sym);
                }
                break;
            } else {
                if (l == VT_CMP) {
                    /* find parameter in function parameter list */
                    for (sym = func_sym->next; sym; sym = sym->next)
                        if ((sym->v & ~SYM_FIELD) == v)
                            goto found;
                    tcc_error("declaration for parameter '%s' but no such parameter",
                              get_tok_str(v, NULL));
                found:
                    if (type.t & VT_STORAGE) /* 'register' is okay */
                        tcc_error("storage class specified for '%s'",
                                  get_tok_str(v, NULL));
                    if (sym->type.t != VT_VOID)
                        tcc_error("redefinition of parameter '%s'",
                                  get_tok_str(v, NULL));
                    convert_parameter_type(&type);
                    sym->type = type;
                } else if (type.t & VT_TYPEDEF) {
                    /* save typedefed type */
                    /* XXX: test storage specifiers ? */
                    sym = sym_find(v);
                    if (sym && sym->sym_scope == local_scope) {
                        if (!is_compatible_types(&sym->type, &type)
                            || !(sym->type.t & VT_TYPEDEF))
                            tcc_error("incompatible redefinition of '%s'",
                                      get_tok_str(v, NULL));
                        sym->type = type;
                    } else {
                        sym = sym_push(v, &type, 0, 0);
                    }
                    sym->a = ad.a;
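                    /* Illustrative example (added comment, not part of
                       the original source): a typedef may be repeated at
                       the same scope only with a compatible type:

                           typedef int T;
                           typedef int T;     -- accepted
                           typedef long T;    -- "incompatible redefinition of 'T'"
                     */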
                } else {
                    r = 0;
                    if ((type.t & VT_BTYPE) == VT_FUNC) {
                        /* external function definition */
                        /* specific case for func_call attribute */
                        type.ref->f = ad.f;
                    } else if (!(type.t & VT_ARRAY)) {
                        /* not lvalue if array */
                        r |= lvalue_type(type.t);
                    }
                    has_init = (tok == '=');
                    if (has_init && (type.t & VT_VLA))
                        tcc_error("variable length array cannot be initialized");
                    if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
                        ((type.t & VT_BTYPE) == VT_FUNC) ||
                        ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
                         !has_init && l == VT_CONST && type.ref->c < 0)) {
                        /* external variable or function */
                        /* NOTE: as GCC, uninitialized global static
                           arrays of null size are considered as
                           extern */
                        sym = external_sym(v, &type, r, &ad);
                        if (ad.alias_target) {
                            Section tsec;
                            ElfW(Sym) *esym;
                            Sym *alias_target;

                            alias_target = sym_find(ad.alias_target);
                            if (!alias_target || !alias_target->c)
                                tcc_error("unsupported forward __alias__ attribute");
                            esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
                            tsec.sh_num = esym->st_shndx;
                            /* Local statics have a scope until now (for
                               warnings), remove it here. */
                            sym->sym_scope = 0;
                            put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
                        }
                    } else {
                        if (type.t & VT_STATIC)
                            r |= VT_CONST;
                        else
                            r |= l;
                        if (has_init)
                            next();
                        decl_initializer_alloc(&type, &ad, r, has_init, v, l);
                    }
                }
                if (tok != ',') {
                    if (is_for_loop_init)
                        return 1;
                    skip(';');
                    break;
                }
                next();
            }
        }
    }
    return 0;
}

ST_FUNC void decl(int l)
{
    decl0(l, 0, NULL);
}
/* ------------------------------------------------------------------------- */