/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
23 /********************************************************/
24 /* global variables */
/* loc : local variable index
   ind : output code index
   anon_sym: anonymous symbol index
*/
31 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Sym
*sym_free_first
;
34 ST_DATA
void **sym_pools
;
35 ST_DATA
int nb_sym_pools
;
37 ST_DATA Sym
*global_stack
;
38 ST_DATA Sym
*local_stack
;
39 ST_DATA Sym
*define_stack
;
40 ST_DATA Sym
*global_label_stack
;
41 ST_DATA Sym
*local_label_stack
;
42 static int local_scope
;
44 static int section_sym
;
46 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
47 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
52 ST_DATA
int const_wanted
; /* true if constant wanted */
53 ST_DATA
int nocode_wanted
; /* no code generation wanted */
54 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
55 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
56 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
57 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
58 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
60 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
61 ST_DATA
const char *funcname
;
64 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
66 ST_DATA
struct switch_t
{
70 } **p
; int n
; /* list of case ranges */
71 int def_sym
; /* default symbol */
72 } *cur_switch
; /* current switch */
74 /* ------------------------------------------------------------------------- */
76 static void gen_cast(CType
*type
);
77 static void gen_cast_s(int t
);
78 static inline CType
*pointed_type(CType
*type
);
79 static int is_compatible_types(CType
*type1
, CType
*type2
);
80 static int parse_btype(CType
*type
, AttributeDef
*ad
);
81 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
82 static void parse_expr_type(CType
*type
);
83 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
84 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
85 static void block(int *bsym
, int *csym
, int is_expr
);
86 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
87 static void decl(int l
);
88 static int decl0(int l
, int is_for_loop_init
, Sym
*);
89 static void expr_eq(void);
90 static void vla_runtime_type_size(CType
*type
, int *a
);
91 static void vla_sp_restore(void);
92 static void vla_sp_restore_root(void);
93 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
94 static inline int64_t expr_const64(void);
95 static void vpush64(int ty
, unsigned long long v
);
96 static void vpush(CType
*type
);
97 static int gvtst(int inv
, int t
);
98 static void gen_inline_functions(TCCState
*s
);
99 static void skip_or_save_block(TokenString
**str
);
100 static void gv_dup(void);
102 ST_INLN
int is_float(int t
)
106 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
109 /* we use our own 'finite' function to avoid potential problems with
110 non standard math libs */
111 /* XXX: endianness dependent */
112 ST_FUNC
int ieee_finite(double d
)
115 memcpy(p
, &d
, sizeof(double));
116 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
119 /* compiling intel long double natively */
120 #if (defined __i386__ || defined __x86_64__) \
121 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
122 # define TCC_IS_NATIVE_387
125 ST_FUNC
void test_lvalue(void)
127 if (!(vtop
->r
& VT_LVAL
))
131 ST_FUNC
void check_vstack(void)
134 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
137 /* ------------------------------------------------------------------------- */
138 /* vstack debugging aid */
141 void pv (const char *lbl
, int a
, int b
)
144 for (i
= a
; i
< a
+ b
; ++i
) {
145 SValue
*p
= &vtop
[-i
];
146 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
147 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
152 /* ------------------------------------------------------------------------- */
153 /* start of translation unit info */
154 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
159 /* file info: full path + filename */
160 section_sym
= put_elf_sym(symtab_section
, 0, 0,
161 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
162 text_section
->sh_num
, NULL
);
163 getcwd(buf
, sizeof(buf
));
165 normalize_slashes(buf
);
167 pstrcat(buf
, sizeof(buf
), "/");
168 put_stabs_r(buf
, N_SO
, 0, 0,
169 text_section
->data_offset
, text_section
, section_sym
);
170 put_stabs_r(file
->filename
, N_SO
, 0, 0,
171 text_section
->data_offset
, text_section
, section_sym
);
176 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
177 symbols can be safely used */
178 put_elf_sym(symtab_section
, 0, 0,
179 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
180 SHN_ABS
, file
->filename
);
183 /* put end of translation unit info */
184 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
188 put_stabs_r(NULL
, N_SO
, 0, 0,
189 text_section
->data_offset
, text_section
, section_sym
);
193 /* generate line number info */
194 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
198 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
199 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
201 last_line_num
= file
->line_num
;
205 /* put function symbol */
206 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
214 /* XXX: we put here a dummy type */
215 snprintf(buf
, sizeof(buf
), "%s:%c1",
216 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
217 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
218 cur_text_section
, sym
->c
);
219 /* //gr gdb wants a line at the function */
220 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
226 /* put function size */
227 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
231 put_stabn(N_FUN
, 0, 0, size
);
234 /* ------------------------------------------------------------------------- */
235 ST_FUNC
int tccgen_compile(TCCState
*s1
)
237 cur_text_section
= NULL
;
239 anon_sym
= SYM_FIRST_ANOM
;
242 nocode_wanted
= 0x80000000;
244 /* define some often used types */
246 char_pointer_type
.t
= VT_BYTE
;
247 mk_pointer(&char_pointer_type
);
249 size_type
.t
= VT_INT
| VT_UNSIGNED
;
250 ptrdiff_type
.t
= VT_INT
;
252 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
253 ptrdiff_type
.t
= VT_LLONG
;
255 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
256 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
258 func_old_type
.t
= VT_FUNC
;
259 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
260 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
261 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
265 #ifdef TCC_TARGET_ARM
270 printf("%s: **** new file\n", file
->filename
);
273 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
276 gen_inline_functions(s1
);
279 #ifdef CONFIG_TCC_ASM
283 /* end of translation unit info */
288 /* ------------------------------------------------------------------------- */
289 /* apply storage attributes to Elf symbol */
291 static void update_storage(Sym
*sym
)
296 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
297 if (sym
->a
.visibility
)
298 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
301 esym
->st_info
= ELFW(ST_INFO
)(STB_WEAK
, ELFW(ST_TYPE
)(esym
->st_info
));
303 if (sym
->a
.dllimport
)
304 esym
->st_other
|= ST_PE_IMPORT
;
305 if (sym
->a
.dllexport
)
306 esym
->st_other
|= ST_PE_EXPORT
;
309 printf("storage %s: vis=%d weak=%d exp=%d imp=%d\n",
310 get_tok_str(sym
->v
, NULL
),
319 /* ------------------------------------------------------------------------- */
320 /* update sym->c so that it points to an external symbol in section
321 'section' with value 'value' */
323 ST_FUNC
void put_extern_sym2(Sym
*sym
, Section
*section
,
324 addr_t value
, unsigned long size
,
325 int can_add_underscore
)
327 int sym_type
, sym_bind
, sh_num
, info
, other
, t
;
331 #ifdef CONFIG_TCC_BCHECK
337 else if (section
== SECTION_ABS
)
340 sh_num
= section
->sh_num
;
343 name
= get_tok_str(sym
->v
, NULL
);
344 #ifdef CONFIG_TCC_BCHECK
345 if (tcc_state
->do_bounds_check
) {
346 /* XXX: avoid doing that for statics ? */
347 /* if bound checking is activated, we change some function
348 names by adding the "__bound" prefix */
351 /* XXX: we rely only on malloc hooks */
364 strcpy(buf
, "__bound_");
372 if ((t
& VT_BTYPE
) == VT_FUNC
) {
374 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
375 sym_type
= STT_NOTYPE
;
377 sym_type
= STT_OBJECT
;
380 sym_bind
= STB_LOCAL
;
382 sym_bind
= STB_GLOBAL
;
385 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
386 Sym
*ref
= sym
->type
.ref
;
387 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
388 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
390 other
|= ST_PE_STDCALL
;
391 can_add_underscore
= 0;
395 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
397 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
401 name
= get_tok_str(sym
->asm_label
, NULL
);
402 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
403 sym
->c
= set_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
405 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
406 esym
->st_value
= value
;
407 esym
->st_size
= size
;
408 esym
->st_shndx
= sh_num
;
413 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
414 addr_t value
, unsigned long size
)
416 put_extern_sym2(sym
, section
, value
, size
, 1);
419 /* add a new relocation entry to symbol 'sym' in section 's' */
420 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
425 if (nocode_wanted
&& s
== cur_text_section
)
430 put_extern_sym(sym
, NULL
, 0, 0);
434 /* now we can add ELF relocation info */
435 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
439 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
441 greloca(s
, sym
, offset
, type
, 0);
445 /* ------------------------------------------------------------------------- */
446 /* symbol allocator */
447 static Sym
*__sym_malloc(void)
449 Sym
*sym_pool
, *sym
, *last_sym
;
452 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
453 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
455 last_sym
= sym_free_first
;
457 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
458 sym
->next
= last_sym
;
462 sym_free_first
= last_sym
;
466 static inline Sym
*sym_malloc(void)
470 sym
= sym_free_first
;
472 sym
= __sym_malloc();
473 sym_free_first
= sym
->next
;
476 sym
= tcc_malloc(sizeof(Sym
));
481 ST_INLN
void sym_free(Sym
*sym
)
484 sym
->next
= sym_free_first
;
485 sym_free_first
= sym
;
491 /* push, without hashing */
492 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
497 memset(s
, 0, sizeof *s
);
507 /* find a symbol and return its associated structure. 's' is the top
508 of the symbol stack */
509 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
521 /* structure lookup */
522 ST_INLN Sym
*struct_find(int v
)
525 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
527 return table_ident
[v
]->sym_struct
;
530 /* find an identifier */
531 ST_INLN Sym
*sym_find(int v
)
534 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
536 return table_ident
[v
]->sym_identifier
;
539 /* push a given symbol on the symbol stack */
540 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
549 s
= sym_push2(ps
, v
, type
->t
, c
);
550 s
->type
.ref
= type
->ref
;
552 /* don't record fields or anonymous symbols */
554 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
555 /* record symbol in token array */
556 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
558 ps
= &ts
->sym_struct
;
560 ps
= &ts
->sym_identifier
;
563 s
->sym_scope
= local_scope
;
564 if (s
->prev_tok
&& s
->prev_tok
->sym_scope
== s
->sym_scope
)
565 tcc_error("redeclaration of '%s'",
566 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
571 /* push a global identifier */
572 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
575 s
= sym_push2(&global_stack
, v
, t
, c
);
576 /* don't record anonymous symbol */
577 if (v
< SYM_FIRST_ANOM
) {
578 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
579 /* modify the top most local identifier, so that
580 sym_identifier will point to 's' when popped */
582 ps
= &(*ps
)->prev_tok
;
589 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
590 pop them yet from the list, but do remove them from the token array. */
591 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
601 /* remove symbol in token array */
603 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
604 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
606 ps
= &ts
->sym_struct
;
608 ps
= &ts
->sym_identifier
;
619 /* ------------------------------------------------------------------------- */
621 static void vsetc(CType
*type
, int r
, CValue
*vc
)
625 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
626 tcc_error("memory full (vstack)");
627 /* cannot let cpu flags if other instruction are generated. Also
628 avoid leaving VT_JMP anywhere except on the top of the stack
629 because it would complicate the code generator.
631 Don't do this when nocode_wanted. vtop might come from
632 !nocode_wanted regions (see 88_codeopt.c) and transforming
633 it to a register without actually generating code is wrong
634 as their value might still be used for real. All values
635 we push under nocode_wanted will eventually be popped
636 again, so that the VT_CMP/VT_JMP value will be in vtop
637 when code is unsuppressed again.
639 Same logic below in vswap(); */
640 if (vtop
>= vstack
&& !nocode_wanted
) {
641 v
= vtop
->r
& VT_VALMASK
;
642 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
654 ST_FUNC
void vswap(void)
657 /* cannot vswap cpu flags. See comment at vsetc() above */
658 if (vtop
>= vstack
&& !nocode_wanted
) {
659 int v
= vtop
->r
& VT_VALMASK
;
660 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
668 /* pop stack value */
669 ST_FUNC
void vpop(void)
672 v
= vtop
->r
& VT_VALMASK
;
673 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
674 /* for x86, we need to pop the FP stack */
676 o(0xd8dd); /* fstp %st(0) */
679 if (v
== VT_JMP
|| v
== VT_JMPI
) {
680 /* need to put correct jump if && or || without test */
686 /* push constant of type "type" with useless value */
687 ST_FUNC
void vpush(CType
*type
)
689 vset(type
, VT_CONST
, 0);
692 /* push integer constant */
693 ST_FUNC
void vpushi(int v
)
697 vsetc(&int_type
, VT_CONST
, &cval
);
700 /* push a pointer sized constant */
701 static void vpushs(addr_t v
)
705 vsetc(&size_type
, VT_CONST
, &cval
);
708 /* push arbitrary 64bit constant */
709 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
716 vsetc(&ctype
, VT_CONST
, &cval
);
719 /* push long long constant */
720 static inline void vpushll(long long v
)
722 vpush64(VT_LLONG
, v
);
725 ST_FUNC
void vset(CType
*type
, int r
, int v
)
730 vsetc(type
, r
, &cval
);
733 static void vseti(int r
, int v
)
741 ST_FUNC
void vpushv(SValue
*v
)
743 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
744 tcc_error("memory full (vstack)");
749 static void vdup(void)
/* rotate n first stack elements to the bottom
   I1 ... In -> I2 ... In I1 [top is right]
*/
757 ST_FUNC
void vrotb(int n
)
/* rotate the n elements before entry e towards the top
   I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
*/
771 ST_FUNC
void vrote(SValue
*e
, int n
)
777 for(i
= 0;i
< n
- 1; i
++)
/* rotate n first stack elements to the top
   I1 ... In -> In I1 ... I(n-1) [top is right]
*/
785 ST_FUNC
void vrott(int n
)
790 /* push a symbol value of TYPE */
791 static inline void vpushsym(CType
*type
, Sym
*sym
)
795 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
799 /* Return a static symbol pointing to a section */
800 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
806 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
807 sym
->type
.ref
= type
->ref
;
808 sym
->r
= VT_CONST
| VT_SYM
;
809 put_extern_sym(sym
, sec
, offset
, size
);
813 /* push a reference to a section offset by adding a dummy symbol */
814 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
816 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
819 /* define a new external reference to a symbol 'v' of type 'u' */
820 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
826 /* push forward reference */
827 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
828 s
->type
.ref
= type
->ref
;
829 s
->r
= r
| VT_CONST
| VT_SYM
;
834 /* Merge some storage attributes. */
835 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
837 if (type
&& !is_compatible_types(&sym
->type
, type
))
838 tcc_error("incompatible types for redefinition of '%s'",
839 get_tok_str(sym
->v
, NULL
));
841 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
842 tcc_error("incompatible dll linkage for redefinition of '%s'",
843 get_tok_str(sym
->v
, NULL
));
845 sym
->a
.dllexport
|= ad
->a
.dllexport
;
846 sym
->a
.weak
|= ad
->a
.weak
;
847 if (ad
->a
.visibility
) {
848 int vis
= sym
->a
.visibility
;
849 int vis2
= ad
->a
.visibility
;
850 if (vis
== STV_DEFAULT
)
852 else if (vis2
!= STV_DEFAULT
)
853 vis
= (vis
< vis2
) ? vis
: vis2
;
854 sym
->a
.visibility
= vis
;
857 sym
->a
.aligned
= ad
->a
.aligned
;
859 sym
->asm_label
= ad
->asm_label
;
863 /* define a new external reference to a symbol 'v' */
864 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
869 /* push forward reference */
870 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
871 s
->type
.t
|= VT_EXTERN
;
875 if (s
->type
.ref
== func_old_type
.ref
) {
876 s
->type
.ref
= type
->ref
;
877 s
->r
= r
| VT_CONST
| VT_SYM
;
878 s
->type
.t
|= VT_EXTERN
;
880 patch_storage(s
, ad
, type
);
885 /* push a reference to global symbol v */
886 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
888 vpushsym(type
, external_global_sym(v
, type
, 0));
891 /* save registers up to (vtop - n) stack entry */
892 ST_FUNC
void save_regs(int n
)
895 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
899 /* save r to the memory stack, and mark it as being free */
900 ST_FUNC
void save_reg(int r
)
902 save_reg_upstack(r
, 0);
905 /* save r to the memory stack, and mark it as being free,
906 if seen up to (vtop - n) stack entry */
907 ST_FUNC
void save_reg_upstack(int r
, int n
)
909 int l
, saved
, size
, align
;
913 if ((r
&= VT_VALMASK
) >= VT_CONST
)
918 /* modify all stack values */
921 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
922 if ((p
->r
& VT_VALMASK
) == r
||
923 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
924 /* must save value on stack if not already done */
926 /* NOTE: must reload 'r' because r might be equal to r2 */
927 r
= p
->r
& VT_VALMASK
;
928 /* store register in the stack */
930 if ((p
->r
& VT_LVAL
) ||
931 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
933 type
= &char_pointer_type
;
937 size
= type_size(type
, &align
);
938 loc
= (loc
- size
) & -align
;
940 sv
.r
= VT_LOCAL
| VT_LVAL
;
943 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
944 /* x86 specific: need to pop fp register ST0 if saved */
946 o(0xd8dd); /* fstp %st(0) */
950 /* special long long case */
951 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
959 /* mark that stack entry as being saved on the stack */
960 if (p
->r
& VT_LVAL
) {
961 /* also clear the bounded flag because the
962 relocation address of the function was stored in
964 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
966 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
974 #ifdef TCC_TARGET_ARM
975 /* find a register of class 'rc2' with at most one reference on stack.
976 * If none, call get_reg(rc) */
977 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
982 for(r
=0;r
<NB_REGS
;r
++) {
983 if (reg_classes
[r
] & rc2
) {
986 for(p
= vstack
; p
<= vtop
; p
++) {
987 if ((p
->r
& VT_VALMASK
) == r
||
988 (p
->r2
& VT_VALMASK
) == r
)
999 /* find a free register of class 'rc'. If none, save one register */
1000 ST_FUNC
int get_reg(int rc
)
1005 /* find a free register */
1006 for(r
=0;r
<NB_REGS
;r
++) {
1007 if (reg_classes
[r
] & rc
) {
1010 for(p
=vstack
;p
<=vtop
;p
++) {
1011 if ((p
->r
& VT_VALMASK
) == r
||
1012 (p
->r2
& VT_VALMASK
) == r
)
1020 /* no register left : free the first one on the stack (VERY
1021 IMPORTANT to start from the bottom to ensure that we don't
1022 spill registers used in gen_opi()) */
1023 for(p
=vstack
;p
<=vtop
;p
++) {
1024 /* look at second register (if long long) */
1025 r
= p
->r2
& VT_VALMASK
;
1026 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1028 r
= p
->r
& VT_VALMASK
;
1029 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1035 /* Should never comes here */
/* move register 's' (of type 't') to 'r', and flush previous value of r to memory
   if needed */
1041 static void move_reg(int r
, int s
, int t
)
1055 /* get address of vtop (vtop MUST BE an lvalue) */
1056 ST_FUNC
void gaddrof(void)
1058 vtop
->r
&= ~VT_LVAL
;
1059 /* tricky: if saved lvalue, then we can go back to lvalue */
1060 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1061 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1066 #ifdef CONFIG_TCC_BCHECK
1067 /* generate lvalue bound code */
1068 static void gbound(void)
1073 vtop
->r
&= ~VT_MUSTBOUND
;
1074 /* if lvalue, then use checking code before dereferencing */
1075 if (vtop
->r
& VT_LVAL
) {
1076 /* if not VT_BOUNDED value, then make one */
1077 if (!(vtop
->r
& VT_BOUNDED
)) {
1078 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1079 /* must save type because we must set it to int to get pointer */
1081 vtop
->type
.t
= VT_PTR
;
1084 gen_bounded_ptr_add();
1085 vtop
->r
|= lval_type
;
1088 /* then check for dereferencing */
1089 gen_bounded_ptr_deref();
1094 static void incr_bf_adr(int o
)
1096 vtop
->type
= char_pointer_type
;
1100 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1101 | (VT_BYTE
|VT_UNSIGNED
);
1102 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1103 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1106 /* single-byte load mode for packed or otherwise unaligned bitfields */
1107 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1110 save_reg_upstack(vtop
->r
, 1);
1111 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1112 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1121 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1123 vpushi((1 << n
) - 1), gen_op('&');
1126 vpushi(bits
), gen_op(TOK_SHL
);
1129 bits
+= n
, bit_size
-= n
, o
= 1;
1132 if (!(type
->t
& VT_UNSIGNED
)) {
1133 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1134 vpushi(n
), gen_op(TOK_SHL
);
1135 vpushi(n
), gen_op(TOK_SAR
);
1139 /* single-byte store mode for packed or otherwise unaligned bitfields */
1140 static void store_packed_bf(int bit_pos
, int bit_size
)
1142 int bits
, n
, o
, m
, c
;
1144 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1146 save_reg_upstack(vtop
->r
, 1);
1147 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1149 incr_bf_adr(o
); // X B
1151 c
? vdup() : gv_dup(); // B V X
1154 vpushi(bits
), gen_op(TOK_SHR
);
1156 vpushi(bit_pos
), gen_op(TOK_SHL
);
1161 m
= ((1 << n
) - 1) << bit_pos
;
1162 vpushi(m
), gen_op('&'); // X B V1
1163 vpushv(vtop
-1); // X B V1 B
1164 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1165 gen_op('&'); // X B V1 B1
1166 gen_op('|'); // X B V2
1168 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1169 vstore(), vpop(); // X B
1170 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1175 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1178 if (0 == sv
->type
.ref
)
1180 t
= sv
->type
.ref
->auxtype
;
1181 if (t
!= -1 && t
!= VT_STRUCT
) {
1182 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1183 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1188 /* store vtop a register belonging to class 'rc'. lvalues are
1189 converted to values. Cannot be used if cannot be converted to
1190 register value (such as structures). */
1191 ST_FUNC
int gv(int rc
)
1193 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1195 /* NOTE: get_reg can modify vstack[] */
1196 if (vtop
->type
.t
& VT_BITFIELD
) {
1199 bit_pos
= BIT_POS(vtop
->type
.t
);
1200 bit_size
= BIT_SIZE(vtop
->type
.t
);
1201 /* remove bit field info to avoid loops */
1202 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1205 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1206 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1207 type
.t
|= VT_UNSIGNED
;
1209 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1211 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1216 if (r
== VT_STRUCT
) {
1217 load_packed_bf(&type
, bit_pos
, bit_size
);
1219 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1220 /* cast to int to propagate signedness in following ops */
1222 /* generate shifts */
1223 vpushi(bits
- (bit_pos
+ bit_size
));
1225 vpushi(bits
- bit_size
);
1226 /* NOTE: transformed to SHR if unsigned */
1231 if (is_float(vtop
->type
.t
) &&
1232 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1233 unsigned long offset
;
1234 /* CPUs usually cannot use float constants, so we store them
1235 generically in data segment */
1236 size
= type_size(&vtop
->type
, &align
);
1238 size
= 0, align
= 1;
1239 offset
= section_add(data_section
, size
, align
);
1240 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1242 init_putv(&vtop
->type
, data_section
, offset
);
1245 #ifdef CONFIG_TCC_BCHECK
1246 if (vtop
->r
& VT_MUSTBOUND
)
1250 r
= vtop
->r
& VT_VALMASK
;
1251 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1252 #ifndef TCC_TARGET_ARM64
1255 #ifdef TCC_TARGET_X86_64
1256 else if (rc
== RC_FRET
)
1260 /* need to reload if:
1262 - lvalue (need to dereference pointer)
1263 - already a register, but not in the right class */
1265 || (vtop
->r
& VT_LVAL
)
1266 || !(reg_classes
[r
] & rc
)
1268 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1269 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1271 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1277 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1278 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1280 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1281 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1282 unsigned long long ll
;
1284 int r2
, original_type
;
1285 original_type
= vtop
->type
.t
;
1286 /* two register type load : expand to two words
1289 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1292 vtop
->c
.i
= ll
; /* first word */
1294 vtop
->r
= r
; /* save register value */
1295 vpushi(ll
>> 32); /* second word */
1298 if (vtop
->r
& VT_LVAL
) {
1299 /* We do not want to modifier the long long
1300 pointer here, so the safest (and less
1301 efficient) is to save all the other registers
1302 in the stack. XXX: totally inefficient. */
1306 /* lvalue_save: save only if used further down the stack */
1307 save_reg_upstack(vtop
->r
, 1);
1309 /* load from memory */
1310 vtop
->type
.t
= load_type
;
1313 vtop
[-1].r
= r
; /* save register value */
1314 /* increment pointer to get second word */
1315 vtop
->type
.t
= addr_type
;
1320 vtop
->type
.t
= load_type
;
1322 /* move registers */
1325 vtop
[-1].r
= r
; /* save register value */
1326 vtop
->r
= vtop
[-1].r2
;
1328 /* Allocate second register. Here we rely on the fact that
1329 get_reg() tries first to free r2 of an SValue. */
1333 /* write second register */
1335 vtop
->type
.t
= original_type
;
1336 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1338 /* lvalue of scalar type : need to use lvalue type
1339 because of possible cast */
1342 /* compute memory access type */
1343 if (vtop
->r
& VT_LVAL_BYTE
)
1345 else if (vtop
->r
& VT_LVAL_SHORT
)
1347 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1351 /* restore wanted type */
1354 /* one register type load */
1359 #ifdef TCC_TARGET_C67
1360 /* uses register pairs for doubles */
1361 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1368 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1369 ST_FUNC
void gv2(int rc1
, int rc2
)
1373 /* generate more generic register first. But VT_JMP or VT_CMP
1374 values must be generated first in all cases to avoid possible
1376 v
= vtop
[0].r
& VT_VALMASK
;
1377 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1382 /* test if reload is needed for first register */
1383 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1393 /* test if reload is needed for first register */
1394 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1400 #ifndef TCC_TARGET_ARM64
1401 /* wrapper around RC_FRET to return a register by type */
1402 static int rc_fret(int t
)
1404 #ifdef TCC_TARGET_X86_64
1405 if (t
== VT_LDOUBLE
) {
1413 /* wrapper around REG_FRET to return a register by type */
1414 static int reg_fret(int t
)
1416 #ifdef TCC_TARGET_X86_64
1417 if (t
== VT_LDOUBLE
) {
1425 /* expand 64bit on stack in two ints */
1426 static void lexpand(void)
1429 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1430 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1431 if (v
== VT_CONST
) {
1434 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1440 vtop
[0].r
= vtop
[-1].r2
;
1441 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1443 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1447 #ifdef TCC_TARGET_ARM
1448 /* expand long long on stack */
1449 ST_FUNC
void lexpand_nr(void)
1453 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1455 vtop
->r2
= VT_CONST
;
1456 vtop
->type
.t
= VT_INT
| u
;
1457 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1458 if (v
== VT_CONST
) {
1459 vtop
[-1].c
.i
= vtop
->c
.i
;
1460 vtop
->c
.i
= vtop
->c
.i
>> 32;
1462 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1464 vtop
->r
= vtop
[-1].r
;
1465 } else if (v
> VT_CONST
) {
1469 vtop
->r
= vtop
[-1].r2
;
1470 vtop
[-1].r2
= VT_CONST
;
1471 vtop
[-1].type
.t
= VT_INT
| u
;
1476 /* build a long long from two ints */
1477 static void lbuild(int t
)
1479 gv2(RC_INT
, RC_INT
);
1480 vtop
[-1].r2
= vtop
[0].r
;
1481 vtop
[-1].type
.t
= t
;
/* convert stack entry to register and duplicate its value in another
   register */
1488 static void gv_dup(void)
1495 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1496 if (t
& VT_BITFIELD
) {
1506 /* stack: H L L1 H1 */
1516 /* duplicate value */
1521 #ifdef TCC_TARGET_X86_64
1522 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1532 load(r1
, &sv
); /* move r to r1 */
1534 /* duplicates value */
1540 /* Generate value test
1542 * Generate a test for any value (jump, comparison and integers) */
1543 ST_FUNC
int gvtst(int inv
, int t
)
1545 int v
= vtop
->r
& VT_VALMASK
;
1546 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1550 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1551 /* constant jmp optimization */
1552 if ((vtop
->c
.i
!= 0) != inv
)
1557 return gtst(inv
, t
);
1561 /* generate CPU independent (unsigned) long long operations */
1562 static void gen_opl(int op
)
1564 int t
, a
, b
, op1
, c
, i
;
1566 unsigned short reg_iret
= REG_IRET
;
1567 unsigned short reg_lret
= REG_LRET
;
1573 func
= TOK___divdi3
;
1576 func
= TOK___udivdi3
;
1579 func
= TOK___moddi3
;
1582 func
= TOK___umoddi3
;
1589 /* call generic long long function */
1590 vpush_global_sym(&func_old_type
, func
);
1595 vtop
->r2
= reg_lret
;
1603 //pv("gen_opl A",0,2);
1609 /* stack: L1 H1 L2 H2 */
1614 vtop
[-2] = vtop
[-3];
1617 /* stack: H1 H2 L1 L2 */
1618 //pv("gen_opl B",0,4);
1624 /* stack: H1 H2 L1 L2 ML MH */
1627 /* stack: ML MH H1 H2 L1 L2 */
1631 /* stack: ML MH H1 L2 H2 L1 */
1636 /* stack: ML MH M1 M2 */
1639 } else if (op
== '+' || op
== '-') {
1640 /* XXX: add non carry method too (for MIPS or alpha) */
1646 /* stack: H1 H2 (L1 op L2) */
1649 gen_op(op1
+ 1); /* TOK_xxxC2 */
1652 /* stack: H1 H2 (L1 op L2) */
1655 /* stack: (L1 op L2) H1 H2 */
1657 /* stack: (L1 op L2) (H1 op H2) */
1665 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1666 t
= vtop
[-1].type
.t
;
1670 /* stack: L H shift */
1672 /* constant: simpler */
1673 /* NOTE: all comments are for SHL. the other cases are
1674 done by swapping words */
1685 if (op
!= TOK_SAR
) {
1718 /* XXX: should provide a faster fallback on x86 ? */
1721 func
= TOK___ashrdi3
;
1724 func
= TOK___lshrdi3
;
1727 func
= TOK___ashldi3
;
1733 /* compare operations */
1739 /* stack: L1 H1 L2 H2 */
1741 vtop
[-1] = vtop
[-2];
1743 /* stack: L1 L2 H1 H2 */
1746 /* when values are equal, we need to compare low words. since
1747 the jump is inverted, we invert the test too. */
1750 else if (op1
== TOK_GT
)
1752 else if (op1
== TOK_ULT
)
1754 else if (op1
== TOK_UGT
)
1764 /* generate non equal test */
1770 /* compare low. Always unsigned */
1774 else if (op1
== TOK_LE
)
1776 else if (op1
== TOK_GT
)
1778 else if (op1
== TOK_GE
)
1789 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
1791 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
1792 return (a
^ b
) >> 63 ? -x
: x
;
1795 static int gen_opic_lt(uint64_t a
, uint64_t b
)
1797 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
1800 /* handle integer constant optimizations and various machine
1802 static void gen_opic(int op
)
1804 SValue
*v1
= vtop
- 1;
1806 int t1
= v1
->type
.t
& VT_BTYPE
;
1807 int t2
= v2
->type
.t
& VT_BTYPE
;
1808 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1809 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1810 uint64_t l1
= c1
? v1
->c
.i
: 0;
1811 uint64_t l2
= c2
? v2
->c
.i
: 0;
1812 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1814 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1815 l1
= ((uint32_t)l1
|
1816 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1817 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1818 l2
= ((uint32_t)l2
|
1819 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1823 case '+': l1
+= l2
; break;
1824 case '-': l1
-= l2
; break;
1825 case '&': l1
&= l2
; break;
1826 case '^': l1
^= l2
; break;
1827 case '|': l1
|= l2
; break;
1828 case '*': l1
*= l2
; break;
1835 /* if division by zero, generate explicit division */
1838 tcc_error("division by zero in constant");
1842 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1843 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1844 case TOK_UDIV
: l1
= l1
/ l2
; break;
1845 case TOK_UMOD
: l1
= l1
% l2
; break;
1848 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1849 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1851 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1854 case TOK_ULT
: l1
= l1
< l2
; break;
1855 case TOK_UGE
: l1
= l1
>= l2
; break;
1856 case TOK_EQ
: l1
= l1
== l2
; break;
1857 case TOK_NE
: l1
= l1
!= l2
; break;
1858 case TOK_ULE
: l1
= l1
<= l2
; break;
1859 case TOK_UGT
: l1
= l1
> l2
; break;
1860 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1861 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1862 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1863 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1865 case TOK_LAND
: l1
= l1
&& l2
; break;
1866 case TOK_LOR
: l1
= l1
|| l2
; break;
1870 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1871 l1
= ((uint32_t)l1
|
1872 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1876 /* if commutative ops, put c2 as constant */
1877 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1878 op
== '|' || op
== '*')) {
1880 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1881 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1883 if (!const_wanted
&&
1885 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1886 (l1
== -1 && op
== TOK_SAR
))) {
1887 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1889 } else if (!const_wanted
&&
1890 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1892 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
1893 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1894 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1899 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1902 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1903 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1906 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
1907 /* filter out NOP operations like x*1, x-0, x&-1... */
1909 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1910 /* try to use shifts instead of muls or divs */
1911 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1920 else if (op
== TOK_PDIV
)
1926 } else if (c2
&& (op
== '+' || op
== '-') &&
1927 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1928 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1929 /* symbol + constant case */
1933 /* The backends can't always deal with addends to symbols
1934 larger than +-1<<31. Don't construct such. */
1941 /* call low level op generator */
1942 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
1943 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
1951 /* generate a floating point operation with constant propagation */
1952 static void gen_opif(int op
)
1956 #if defined _MSC_VER && defined _AMD64_
1957 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
1964 /* currently, we cannot do computations with forward symbols */
1965 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1966 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1968 if (v1
->type
.t
== VT_FLOAT
) {
1971 } else if (v1
->type
.t
== VT_DOUBLE
) {
1979 /* NOTE: we only do constant propagation if finite number (not
1980 NaN or infinity) (ANSI spec) */
1981 if (!ieee_finite(f1
) || !ieee_finite(f2
))
1985 case '+': f1
+= f2
; break;
1986 case '-': f1
-= f2
; break;
1987 case '*': f1
*= f2
; break;
1991 tcc_error("division by zero in constant");
1996 /* XXX: also handles tests ? */
2000 /* XXX: overflow test ? */
2001 if (v1
->type
.t
== VT_FLOAT
) {
2003 } else if (v1
->type
.t
== VT_DOUBLE
) {
2015 static int pointed_size(CType
*type
)
2018 return type_size(pointed_type(type
), &align
);
2021 static void vla_runtime_pointed_size(CType
*type
)
2024 vla_runtime_type_size(pointed_type(type
), &align
);
2027 static inline int is_null_pointer(SValue
*p
)
2029 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2031 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2032 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2033 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2034 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
2037 static inline int is_integer_btype(int bt
)
2039 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2040 bt
== VT_INT
|| bt
== VT_LLONG
);
2043 /* check types for comparison or subtraction of pointers */
2044 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2046 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2049 /* null pointers are accepted for all comparisons as gcc */
2050 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2054 bt1
= type1
->t
& VT_BTYPE
;
2055 bt2
= type2
->t
& VT_BTYPE
;
2056 /* accept comparison between pointer and integer with a warning */
2057 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2058 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2059 tcc_warning("comparison between pointer and integer");
2063 /* both must be pointers or implicit function pointers */
2064 if (bt1
== VT_PTR
) {
2065 type1
= pointed_type(type1
);
2066 } else if (bt1
!= VT_FUNC
)
2067 goto invalid_operands
;
2069 if (bt2
== VT_PTR
) {
2070 type2
= pointed_type(type2
);
2071 } else if (bt2
!= VT_FUNC
) {
2073 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2075 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2076 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2080 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2081 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2082 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2083 /* gcc-like error if '-' is used */
2085 goto invalid_operands
;
2087 tcc_warning("comparison of distinct pointer types lacks a cast");
2091 /* generic gen_op: handles types problems */
2092 ST_FUNC
void gen_op(int op
)
2094 int u
, t1
, t2
, bt1
, bt2
, t
;
2098 t1
= vtop
[-1].type
.t
;
2099 t2
= vtop
[0].type
.t
;
2100 bt1
= t1
& VT_BTYPE
;
2101 bt2
= t2
& VT_BTYPE
;
2103 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2104 tcc_error("operation on a struct");
2105 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2106 if (bt2
== VT_FUNC
) {
2107 mk_pointer(&vtop
->type
);
2110 if (bt1
== VT_FUNC
) {
2112 mk_pointer(&vtop
->type
);
2117 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2118 /* at least one operand is a pointer */
2119 /* relational op: must be both pointers */
2120 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2121 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2122 /* pointers are handled are unsigned */
2124 t
= VT_LLONG
| VT_UNSIGNED
;
2126 t
= VT_INT
| VT_UNSIGNED
;
2130 /* if both pointers, then it must be the '-' op */
2131 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2133 tcc_error("cannot use pointers here");
2134 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2135 /* XXX: check that types are compatible */
2136 if (vtop
[-1].type
.t
& VT_VLA
) {
2137 vla_runtime_pointed_size(&vtop
[-1].type
);
2139 vpushi(pointed_size(&vtop
[-1].type
));
2143 vtop
->type
.t
= ptrdiff_type
.t
;
2147 /* exactly one pointer : must be '+' or '-'. */
2148 if (op
!= '-' && op
!= '+')
2149 tcc_error("cannot use pointers here");
2150 /* Put pointer as first operand */
2151 if (bt2
== VT_PTR
) {
2153 t
= t1
, t1
= t2
, t2
= t
;
2156 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2157 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2160 type1
= vtop
[-1].type
;
2161 type1
.t
&= ~VT_ARRAY
;
2162 if (vtop
[-1].type
.t
& VT_VLA
)
2163 vla_runtime_pointed_size(&vtop
[-1].type
);
2165 u
= pointed_size(&vtop
[-1].type
);
2167 tcc_error("unknown array element size");
2171 /* XXX: cast to int ? (long long case) */
2177 /* #ifdef CONFIG_TCC_BCHECK
2178 The main reason to removing this code:
2185 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2186 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2188 When this code is on. then the output looks like
2190 v+(i-j) = 0xbff84000
2192 /* if evaluating constant expression, no code should be
2193 generated, so no bound check */
2194 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2195 /* if bounded pointers, we generate a special code to
2202 gen_bounded_ptr_add();
2208 /* put again type if gen_opic() swaped operands */
2211 } else if (is_float(bt1
) || is_float(bt2
)) {
2212 /* compute bigger type and do implicit casts */
2213 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2215 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2220 /* floats can only be used for a few operations */
2221 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2222 (op
< TOK_ULT
|| op
> TOK_GT
))
2223 tcc_error("invalid operands for binary operation");
2225 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2226 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2227 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2229 t
|= (VT_LONG
& t1
);
2231 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2232 /* cast to biggest op */
2233 t
= VT_LLONG
| VT_LONG
;
2234 if (bt1
== VT_LLONG
)
2236 if (bt2
== VT_LLONG
)
2238 /* convert to unsigned if it does not fit in a long long */
2239 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2240 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2244 /* integer operations */
2245 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2246 /* convert to unsigned if it does not fit in an integer */
2247 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2248 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2251 /* XXX: currently, some unsigned operations are explicit, so
2252 we modify them here */
2253 if (t
& VT_UNSIGNED
) {
2260 else if (op
== TOK_LT
)
2262 else if (op
== TOK_GT
)
2264 else if (op
== TOK_LE
)
2266 else if (op
== TOK_GE
)
2274 /* special case for shifts and long long: we keep the shift as
2276 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2283 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2284 /* relational op: the result is an int */
2285 vtop
->type
.t
= VT_INT
;
2290 // Make sure that we have converted to an rvalue:
2291 if (vtop
->r
& VT_LVAL
)
2292 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2295 #ifndef TCC_TARGET_ARM
2296 /* generic itof for unsigned long long case */
2297 static void gen_cvt_itof1(int t
)
2299 #ifdef TCC_TARGET_ARM64
2302 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2303 (VT_LLONG
| VT_UNSIGNED
)) {
2306 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2307 #if LDOUBLE_SIZE != 8
2308 else if (t
== VT_LDOUBLE
)
2309 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2312 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2316 vtop
->r
= reg_fret(t
);
2324 /* generic ftoi for unsigned long long case */
2325 static void gen_cvt_ftoi1(int t
)
2327 #ifdef TCC_TARGET_ARM64
2332 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2333 /* not handled natively */
2334 st
= vtop
->type
.t
& VT_BTYPE
;
2336 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2337 #if LDOUBLE_SIZE != 8
2338 else if (st
== VT_LDOUBLE
)
2339 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2342 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2347 vtop
->r2
= REG_LRET
;
2354 /* force char or short cast */
2355 static void force_charshort_cast(int t
)
2359 /* cannot cast static initializers */
2360 if (STATIC_DATA_WANTED
)
2364 /* XXX: add optimization if lvalue : just change type and offset */
2369 if (t
& VT_UNSIGNED
) {
2370 vpushi((1 << bits
) - 1);
2373 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2379 /* result must be signed or the SAR is converted to an SHL
2380 This was not the case when "t" was a signed short
2381 and the last value on the stack was an unsigned int */
2382 vtop
->type
.t
&= ~VT_UNSIGNED
;
2388 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2389 static void gen_cast_s(int t
)
2397 static void gen_cast(CType
*type
)
2399 int sbt
, dbt
, sf
, df
, c
, p
;
2401 /* special delayed cast for char/short */
2402 /* XXX: in some cases (multiple cascaded casts), it may still
2404 if (vtop
->r
& VT_MUSTCAST
) {
2405 vtop
->r
&= ~VT_MUSTCAST
;
2406 force_charshort_cast(vtop
->type
.t
);
2409 /* bitfields first get cast to ints */
2410 if (vtop
->type
.t
& VT_BITFIELD
) {
2414 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2415 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2420 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2421 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2422 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2423 c
&= dbt
!= VT_LDOUBLE
;
2426 /* constant case: we can do it now */
2427 /* XXX: in ISOC, cannot do it if error in convert */
2428 if (sbt
== VT_FLOAT
)
2429 vtop
->c
.ld
= vtop
->c
.f
;
2430 else if (sbt
== VT_DOUBLE
)
2431 vtop
->c
.ld
= vtop
->c
.d
;
2434 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2435 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2436 vtop
->c
.ld
= vtop
->c
.i
;
2438 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2440 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2441 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2443 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2446 if (dbt
== VT_FLOAT
)
2447 vtop
->c
.f
= (float)vtop
->c
.ld
;
2448 else if (dbt
== VT_DOUBLE
)
2449 vtop
->c
.d
= (double)vtop
->c
.ld
;
2450 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2451 vtop
->c
.i
= vtop
->c
.ld
;
2452 } else if (sf
&& dbt
== VT_BOOL
) {
2453 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2456 vtop
->c
.i
= vtop
->c
.ld
;
2457 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2459 else if (sbt
& VT_UNSIGNED
)
2460 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2462 else if (sbt
== VT_PTR
)
2465 else if (sbt
!= VT_LLONG
)
2466 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2467 -(vtop
->c
.i
& 0x80000000));
2469 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2471 else if (dbt
== VT_BOOL
)
2472 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2474 else if (dbt
== VT_PTR
)
2477 else if (dbt
!= VT_LLONG
) {
2478 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2479 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2482 if (!(dbt
& VT_UNSIGNED
))
2483 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2486 } else if (p
&& dbt
== VT_BOOL
) {
2490 /* non constant case: generate code */
2492 /* convert from fp to fp */
2495 /* convert int to fp */
2498 /* convert fp to int */
2499 if (dbt
== VT_BOOL
) {
2503 /* we handle char/short/etc... with generic code */
2504 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2505 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2509 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2510 /* additional cast for char/short... */
2516 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2517 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2518 /* scalar to long long */
2519 /* machine independent conversion */
2521 /* generate high word */
2522 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2526 if (sbt
== VT_PTR
) {
2527 /* cast from pointer to int before we apply
2528 shift operation, which pointers don't support*/
2535 /* patch second register */
2536 vtop
[-1].r2
= vtop
->r
;
2540 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2541 (dbt
& VT_BTYPE
) == VT_PTR
||
2542 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2543 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2544 (sbt
& VT_BTYPE
) != VT_PTR
&&
2545 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2546 /* need to convert from 32bit to 64bit */
2548 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2549 #if defined(TCC_TARGET_ARM64)
2551 #elif defined(TCC_TARGET_X86_64)
2553 /* x86_64 specific: movslq */
2555 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2562 } else if (dbt
== VT_BOOL
) {
2563 /* scalar to bool */
2566 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2567 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2568 if (sbt
== VT_PTR
) {
2569 vtop
->type
.t
= VT_INT
;
2570 tcc_warning("nonportable conversion from pointer to char/short");
2572 force_charshort_cast(dbt
);
2574 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2576 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2577 /* from long long: just take low order word */
2581 /* if lvalue and single word type, nothing to do because
2582 the lvalue already contains the real type size (see
2583 VT_LVAL_xxx constants) */
2587 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2588 /* if we are casting between pointer types,
2589 we must update the VT_LVAL_xxx size */
2590 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2591 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2596 /* return type size as known at compile time. Put alignment at 'a' */
2597 ST_FUNC
int type_size(CType
*type
, int *a
)
2602 bt
= type
->t
& VT_BTYPE
;
2603 if (bt
== VT_STRUCT
) {
2608 } else if (bt
== VT_PTR
) {
2609 if (type
->t
& VT_ARRAY
) {
2613 ts
= type_size(&s
->type
, a
);
2615 if (ts
< 0 && s
->c
< 0)
2623 } else if (IS_ENUM(type
->t
) && type
->ref
->c
== -1) {
2624 return -1; /* incomplete enum */
2625 } else if (bt
== VT_LDOUBLE
) {
2627 return LDOUBLE_SIZE
;
2628 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2629 #ifdef TCC_TARGET_I386
2630 #ifdef TCC_TARGET_PE
2635 #elif defined(TCC_TARGET_ARM)
2645 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2648 } else if (bt
== VT_SHORT
) {
2651 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2655 /* char, void, function, _Bool */
2661 /* push type size as known at runtime time on top of value stack. Put
2663 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2665 if (type
->t
& VT_VLA
) {
2666 type_size(&type
->ref
->type
, a
);
2667 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2669 vpushi(type_size(type
, a
));
2673 static void vla_sp_restore(void) {
2674 if (vlas_in_scope
) {
2675 gen_vla_sp_restore(vla_sp_loc
);
2679 static void vla_sp_restore_root(void) {
2680 if (vlas_in_scope
) {
2681 gen_vla_sp_restore(vla_sp_root_loc
);
2685 /* return the pointed type of t */
2686 static inline CType
*pointed_type(CType
*type
)
2688 return &type
->ref
->type
;
2691 /* modify type so that its it is a pointer to type. */
2692 ST_FUNC
void mk_pointer(CType
*type
)
2695 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2696 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2700 /* compare function types. OLD functions match any new functions */
2701 static int is_compatible_func(CType
*type1
, CType
*type2
)
2707 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2709 /* check func_call */
2710 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2712 /* XXX: not complete */
2713 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2715 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2717 while (s1
!= NULL
) {
2720 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2730 /* return true if type1 and type2 are the same. If unqualified is
2731 true, qualifiers on the types are ignored.
2733 - enums are not checked as gcc __builtin_types_compatible_p ()
2735 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2739 t1
= type1
->t
& VT_TYPE
;
2740 t2
= type2
->t
& VT_TYPE
;
2742 /* strip qualifiers before comparing */
2743 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2744 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2747 /* Default Vs explicit signedness only matters for char */
2748 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2752 /* XXX: bitfields ? */
2755 /* test more complicated cases */
2756 bt1
= t1
& VT_BTYPE
;
2757 if (bt1
== VT_PTR
) {
2758 type1
= pointed_type(type1
);
2759 type2
= pointed_type(type2
);
2760 return is_compatible_types(type1
, type2
);
2761 } else if (bt1
== VT_STRUCT
) {
2762 return (type1
->ref
== type2
->ref
);
2763 } else if (bt1
== VT_FUNC
) {
2764 return is_compatible_func(type1
, type2
);
2770 /* return true if type1 and type2 are exactly the same (including
2773 static int is_compatible_types(CType
*type1
, CType
*type2
)
2775 return compare_types(type1
,type2
,0);
2778 /* return true if type1 and type2 are the same (ignoring qualifiers).
2780 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2782 return compare_types(type1
,type2
,1);
2785 /* print a type. If 'varstr' is not NULL, then the variable is also
2786 printed in the type */
2788 /* XXX: add array and function pointers */
2789 static void type_to_str(char *buf
, int buf_size
,
2790 CType
*type
, const char *varstr
)
2802 pstrcat(buf
, buf_size
, "extern ");
2804 pstrcat(buf
, buf_size
, "static ");
2806 pstrcat(buf
, buf_size
, "typedef ");
2808 pstrcat(buf
, buf_size
, "inline ");
2809 if (t
& VT_VOLATILE
)
2810 pstrcat(buf
, buf_size
, "volatile ");
2811 if (t
& VT_CONSTANT
)
2812 pstrcat(buf
, buf_size
, "const ");
2814 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2815 || ((t
& VT_UNSIGNED
)
2816 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2819 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2821 buf_size
-= strlen(buf
);
2856 tstr
= "long double";
2858 pstrcat(buf
, buf_size
, tstr
);
2865 pstrcat(buf
, buf_size
, tstr
);
2866 v
= type
->ref
->v
& ~SYM_STRUCT
;
2867 if (v
>= SYM_FIRST_ANOM
)
2868 pstrcat(buf
, buf_size
, "<anonymous>");
2870 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2874 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2875 pstrcat(buf
, buf_size
, "(");
2877 while (sa
!= NULL
) {
2878 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2879 pstrcat(buf
, buf_size
, buf1
);
2882 pstrcat(buf
, buf_size
, ", ");
2884 pstrcat(buf
, buf_size
, ")");
2889 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2890 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2893 pstrcpy(buf1
, sizeof(buf1
), "*");
2894 if (t
& VT_CONSTANT
)
2895 pstrcat(buf1
, buf_size
, "const ");
2896 if (t
& VT_VOLATILE
)
2897 pstrcat(buf1
, buf_size
, "volatile ");
2899 pstrcat(buf1
, sizeof(buf1
), varstr
);
2900 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2904 pstrcat(buf
, buf_size
, " ");
2905 pstrcat(buf
, buf_size
, varstr
);
2910 /* verify type compatibility to store vtop in 'dt' type, and generate
2912 static void gen_assign_cast(CType
*dt
)
2914 CType
*st
, *type1
, *type2
;
2915 char buf1
[256], buf2
[256];
2918 st
= &vtop
->type
; /* source type */
2919 dbt
= dt
->t
& VT_BTYPE
;
2920 sbt
= st
->t
& VT_BTYPE
;
2921 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
2922 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
2924 It is Ok if both are void
2930 gcc accepts this program
2933 tcc_error("cannot cast from/to void");
2935 if (dt
->t
& VT_CONSTANT
)
2936 tcc_warning("assignment of read-only location");
2939 /* special cases for pointers */
2940 /* '0' can also be a pointer */
2941 if (is_null_pointer(vtop
))
2943 /* accept implicit pointer to integer cast with warning */
2944 if (is_integer_btype(sbt
)) {
2945 tcc_warning("assignment makes pointer from integer without a cast");
2948 type1
= pointed_type(dt
);
2949 /* a function is implicitly a function pointer */
2950 if (sbt
== VT_FUNC
) {
2951 if ((type1
->t
& VT_BTYPE
) != VT_VOID
&&
2952 !is_compatible_types(pointed_type(dt
), st
))
2953 tcc_warning("assignment from incompatible pointer type");
2958 type2
= pointed_type(st
);
2959 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2960 (type2
->t
& VT_BTYPE
) == VT_VOID
) {
2961 /* void * can match anything */
2963 //printf("types %08x %08x\n", type1->t, type2->t);
2964 /* exact type match, except for qualifiers */
2965 if (!is_compatible_unqualified_types(type1
, type2
)) {
2966 /* Like GCC don't warn by default for merely changes
2967 in pointer target signedness. Do warn for different
2968 base types, though, in particular for unsigned enums
2969 and signed int targets. */
2970 if ((type1
->t
& (VT_BTYPE
|VT_LONG
)) != (type2
->t
& (VT_BTYPE
|VT_LONG
))
2971 || IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)
2973 tcc_warning("assignment from incompatible pointer type");
2976 /* check const and volatile */
2977 if ((!(type1
->t
& VT_CONSTANT
) && (type2
->t
& VT_CONSTANT
)) ||
2978 (!(type1
->t
& VT_VOLATILE
) && (type2
->t
& VT_VOLATILE
)))
2979 tcc_warning("assignment discards qualifiers from pointer target type");
2985 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
2986 tcc_warning("assignment makes integer from pointer without a cast");
2987 } else if (sbt
== VT_STRUCT
) {
2988 goto case_VT_STRUCT
;
2990 /* XXX: more tests */
2994 if (!is_compatible_unqualified_types(dt
, st
)) {
2996 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2997 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2998 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3006 /* store vtop in lvalue pushed on stack */
3007 ST_FUNC
void vstore(void)
3009 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3011 ft
= vtop
[-1].type
.t
;
3012 sbt
= vtop
->type
.t
& VT_BTYPE
;
3013 dbt
= ft
& VT_BTYPE
;
3014 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3015 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3016 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3017 /* optimize char/short casts */
3018 delayed_cast
= VT_MUSTCAST
;
3019 vtop
->type
.t
= ft
& VT_TYPE
;
3020 /* XXX: factorize */
3021 if (ft
& VT_CONSTANT
)
3022 tcc_warning("assignment of read-only location");
3025 if (!(ft
& VT_BITFIELD
))
3026 gen_assign_cast(&vtop
[-1].type
);
3029 if (sbt
== VT_STRUCT
) {
3030 /* if structure, only generate pointer */
3031 /* structure assignment : generate memcpy */
3032 /* XXX: optimize if small size */
3033 size
= type_size(&vtop
->type
, &align
);
3037 vtop
->type
.t
= VT_PTR
;
3040 /* address of memcpy() */
3043 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3044 else if(!(align
& 3))
3045 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3048 /* Use memmove, rather than memcpy, as dest and src may be same: */
3049 vpush_global_sym(&func_old_type
, TOK_memmove
);
3054 vtop
->type
.t
= VT_PTR
;
3060 /* leave source on stack */
3061 } else if (ft
& VT_BITFIELD
) {
3062 /* bitfield store handling */
3064 /* save lvalue as expression result (example: s.b = s.a = n;) */
3065 vdup(), vtop
[-1] = vtop
[-2];
3067 bit_pos
= BIT_POS(ft
);
3068 bit_size
= BIT_SIZE(ft
);
3069 /* remove bit field info to avoid loops */
3070 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3072 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3073 gen_cast(&vtop
[-1].type
);
3074 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3077 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3078 if (r
== VT_STRUCT
) {
3079 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3080 store_packed_bf(bit_pos
, bit_size
);
3082 unsigned long long mask
= (1ULL << bit_size
) - 1;
3083 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3085 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3088 vpushi((unsigned)mask
);
3095 /* duplicate destination */
3098 /* load destination, mask and or with source */
3099 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3100 vpushll(~(mask
<< bit_pos
));
3102 vpushi(~((unsigned)mask
<< bit_pos
));
3107 /* ... and discard */
3110 } else if (dbt
== VT_VOID
) {
3113 #ifdef CONFIG_TCC_BCHECK
3114 /* bound check case */
3115 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3124 #ifdef TCC_TARGET_X86_64
3125 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3127 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3132 r
= gv(rc
); /* generate value */
3133 /* if lvalue was saved on stack, must read it */
3134 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3136 t
= get_reg(RC_INT
);
3142 sv
.r
= VT_LOCAL
| VT_LVAL
;
3143 sv
.c
.i
= vtop
[-1].c
.i
;
3145 vtop
[-1].r
= t
| VT_LVAL
;
3147 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3149 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3150 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3152 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3153 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3155 vtop
[-1].type
.t
= load_type
;
3158 /* convert to int to increment easily */
3159 vtop
->type
.t
= addr_type
;
3165 vtop
[-1].type
.t
= load_type
;
3166 /* XXX: it works because r2 is spilled last ! */
3167 store(vtop
->r2
, vtop
- 1);
3173 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3174 vtop
->r
|= delayed_cast
;
3178 /* post defines POST/PRE add. c is the token ++ or -- */
3179 ST_FUNC
void inc(int post
, int c
)
3182 vdup(); /* save lvalue */
3184 gv_dup(); /* duplicate value */
3189 vpushi(c
- TOK_MID
);
3191 vstore(); /* store value */
3193 vpop(); /* if post op, return saved value */
3196 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3198 /* read the string */
3202 while (tok
== TOK_STR
) {
3203 /* XXX: add \0 handling too ? */
3204 cstr_cat(astr
, tokc
.str
.data
, -1);
3207 cstr_ccat(astr
, '\0');
3210 /* If I is >= 1 and a power of two, returns log2(i)+1.
3211 If I is 0 returns 0. */
3212 static int exact_log2p1(int i
)
3217 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3228 /* Parse __attribute__((...)) GNUC extension. */
3229 static void parse_attribute(AttributeDef
*ad
)
3235 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3240 while (tok
!= ')') {
3241 if (tok
< TOK_IDENT
)
3242 expect("attribute name");
3249 parse_mult_str(&astr
, "section name");
3250 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3257 parse_mult_str(&astr
, "alias(\"target\")");
3258 ad
->alias_target
= /* save string as token, for later */
3259 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3263 case TOK_VISIBILITY1
:
3264 case TOK_VISIBILITY2
:
3266 parse_mult_str(&astr
,
3267 "visibility(\"default|hidden|internal|protected\")");
3268 if (!strcmp (astr
.data
, "default"))
3269 ad
->a
.visibility
= STV_DEFAULT
;
3270 else if (!strcmp (astr
.data
, "hidden"))
3271 ad
->a
.visibility
= STV_HIDDEN
;
3272 else if (!strcmp (astr
.data
, "internal"))
3273 ad
->a
.visibility
= STV_INTERNAL
;
3274 else if (!strcmp (astr
.data
, "protected"))
3275 ad
->a
.visibility
= STV_PROTECTED
;
3277 expect("visibility(\"default|hidden|internal|protected\")");
3286 if (n
<= 0 || (n
& (n
- 1)) != 0)
3287 tcc_error("alignment must be a positive power of two");
3292 ad
->a
.aligned
= exact_log2p1(n
);
3293 if (n
!= 1 << (ad
->a
.aligned
- 1))
3294 tcc_error("alignment of %d is larger than implemented", n
);
3306 /* currently, no need to handle it because tcc does not
3307 track unused objects */
3311 /* currently, no need to handle it because tcc does not
3312 track unused objects */
3317 ad
->f
.func_call
= FUNC_CDECL
;
3322 ad
->f
.func_call
= FUNC_STDCALL
;
3324 #ifdef TCC_TARGET_I386
3334 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3340 ad
->f
.func_call
= FUNC_FASTCALLW
;
3347 ad
->attr_mode
= VT_LLONG
+ 1;
3350 ad
->attr_mode
= VT_BYTE
+ 1;
3353 ad
->attr_mode
= VT_SHORT
+ 1;
3357 ad
->attr_mode
= VT_INT
+ 1;
3360 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3367 ad
->a
.dllexport
= 1;
3370 ad
->a
.dllimport
= 1;
3373 if (tcc_state
->warn_unsupported
)
3374 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3375 /* skip parameters */
3377 int parenthesis
= 0;
3381 else if (tok
== ')')
3384 } while (parenthesis
&& tok
!= -1);
3397 static Sym
* find_field (CType
*type
, int v
)
3401 while ((s
= s
->next
) != NULL
) {
3402 if ((s
->v
& SYM_FIELD
) &&
3403 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3404 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3405 Sym
*ret
= find_field (&s
->type
, v
);
3415 static void struct_add_offset (Sym
*s
, int offset
)
3417 while ((s
= s
->next
) != NULL
) {
3418 if ((s
->v
& SYM_FIELD
) &&
3419 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3420 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3421 struct_add_offset(s
->type
.ref
, offset
);
3427 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3429 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3430 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3431 int pcc
= !tcc_state
->ms_bitfields
;
3432 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3439 prevbt
= VT_STRUCT
; /* make it never match */
3444 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3445 if (f
->type
.t
& VT_BITFIELD
)
3446 bit_size
= BIT_SIZE(f
->type
.t
);
3449 size
= type_size(&f
->type
, &align
);
3450 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3453 if (pcc
&& bit_size
== 0) {
3454 /* in pcc mode, packing does not affect zero-width bitfields */
3457 /* in pcc mode, attribute packed overrides if set. */
3458 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3461 /* pragma pack overrides align if lesser and packs bitfields always */
3464 if (pragma_pack
< align
)
3465 align
= pragma_pack
;
3466 /* in pcc mode pragma pack also overrides individual align */
3467 if (pcc
&& pragma_pack
< a
)
3471 /* some individual align was specified */
3475 if (type
->ref
->type
.t
== VT_UNION
) {
3476 if (pcc
&& bit_size
>= 0)
3477 size
= (bit_size
+ 7) >> 3;
3482 } else if (bit_size
< 0) {
3484 c
+= (bit_pos
+ 7) >> 3;
3485 c
= (c
+ align
- 1) & -align
;
3494 /* A bit-field. Layout is more complicated. There are two
3495 options: PCC (GCC) compatible and MS compatible */
3497 /* In PCC layout a bit-field is placed adjacent to the
3498 preceding bit-fields, except if:
3500 - an individual alignment was given
3501 - it would overflow its base type container and
3502 there is no packing */
3503 if (bit_size
== 0) {
3505 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3507 } else if (f
->a
.aligned
) {
3509 } else if (!packed
) {
3511 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3512 if (ofs
> size
/ align
)
3516 /* in pcc mode, long long bitfields have type int if they fit */
3517 if (size
== 8 && bit_size
<= 32)
3518 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3520 while (bit_pos
>= align
* 8)
3521 c
+= align
, bit_pos
-= align
* 8;
3524 /* In PCC layout named bit-fields influence the alignment
3525 of the containing struct using the base types alignment,
3526 except for packed fields (which here have correct align). */
3527 if (f
->v
& SYM_FIRST_ANOM
3528 // && bit_size // ??? gcc on ARM/rpi does that
3533 bt
= f
->type
.t
& VT_BTYPE
;
3534 if ((bit_pos
+ bit_size
> size
* 8)
3535 || (bit_size
> 0) == (bt
!= prevbt
)
3537 c
= (c
+ align
- 1) & -align
;
3540 /* In MS bitfield mode a bit-field run always uses
3541 at least as many bits as the underlying type.
3542 To start a new run it's also required that this
3543 or the last bit-field had non-zero width. */
3544 if (bit_size
|| prev_bit_size
)
3547 /* In MS layout the records alignment is normally
3548 influenced by the field, except for a zero-width
3549 field at the start of a run (but by further zero-width
3550 fields it is again). */
3551 if (bit_size
== 0 && prevbt
!= bt
)
3554 prev_bit_size
= bit_size
;
3557 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3558 | (bit_pos
<< VT_STRUCT_SHIFT
);
3559 bit_pos
+= bit_size
;
3561 if (align
> maxalign
)
3565 printf("set field %s offset %-2d size %-2d align %-2d",
3566 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3567 if (f
->type
.t
& VT_BITFIELD
) {
3568 printf(" pos %-2d bits %-2d",
3576 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3578 /* An anonymous struct/union. Adjust member offsets
3579 to reflect the real offset of our containing struct.
3580 Also set the offset of this anon member inside
3581 the outer struct to be zero. Via this it
3582 works when accessing the field offset directly
3583 (from base object), as well as when recursing
3584 members in initializer handling. */
3585 int v2
= f
->type
.ref
->v
;
3586 if (!(v2
& SYM_FIELD
) &&
3587 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3589 /* This happens only with MS extensions. The
3590 anon member has a named struct type, so it
3591 potentially is shared with other references.
3592 We need to unshare members so we can modify
3595 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3596 &f
->type
.ref
->type
, 0,
3598 pps
= &f
->type
.ref
->next
;
3599 while ((ass
= ass
->next
) != NULL
) {
3600 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3601 pps
= &((*pps
)->next
);
3605 struct_add_offset(f
->type
.ref
, offset
);
3615 c
+= (bit_pos
+ 7) >> 3;
3617 /* store size and alignment */
3618 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3622 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3623 /* can happen if individual align for some member was given. In
3624 this case MSVC ignores maxalign when aligning the size */
3629 c
= (c
+ a
- 1) & -a
;
3633 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3636 /* check whether we can access bitfields by their type */
3637 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3641 if (0 == (f
->type
.t
& VT_BITFIELD
))
3645 bit_size
= BIT_SIZE(f
->type
.t
);
3648 bit_pos
= BIT_POS(f
->type
.t
);
3649 size
= type_size(&f
->type
, &align
);
3650 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3653 /* try to access the field using a different type */
3654 c0
= -1, s
= align
= 1;
3656 px
= f
->c
* 8 + bit_pos
;
3657 cx
= (px
>> 3) & -align
;
3658 px
= px
- (cx
<< 3);
3661 s
= (px
+ bit_size
+ 7) >> 3;
3671 s
= type_size(&t
, &align
);
3675 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3676 /* update offset and bit position */
3679 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3680 | (bit_pos
<< VT_STRUCT_SHIFT
);
3684 printf("FIX field %s offset %-2d size %-2d align %-2d "
3685 "pos %-2d bits %-2d\n",
3686 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3687 cx
, s
, align
, px
, bit_size
);
3690 /* fall back to load/store single-byte wise */
3691 f
->auxtype
= VT_STRUCT
;
3693 printf("FIX field %s : load byte-wise\n",
3694 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3700 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3701 static void struct_decl(CType
*type
, int u
)
3703 int v
, c
, size
, align
, flexible
;
3704 int bit_size
, bsize
, bt
;
3706 AttributeDef ad
, ad1
;
3709 memset(&ad
, 0, sizeof ad
);
3711 parse_attribute(&ad
);
3715 /* struct already defined ? return it */
3717 expect("struct/union/enum name");
3719 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3722 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3724 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3729 /* Record the original enum/struct/union token. */
3730 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3732 /* we put an undefined size for struct/union */
3733 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3734 s
->r
= 0; /* default alignment is zero as gcc */
3736 type
->t
= s
->type
.t
;
3742 tcc_error("struct/union/enum already defined");
3743 /* cannot be empty */
3744 /* non empty enums are not allowed */
3747 long long ll
= 0, pl
= 0, nl
= 0;
3750 /* enum symbols have static storage */
3751 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3755 expect("identifier");
3757 if (ss
&& !local_stack
)
3758 tcc_error("redefinition of enumerator '%s'",
3759 get_tok_str(v
, NULL
));
3763 ll
= expr_const64();
3765 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3767 *ps
= ss
, ps
= &ss
->next
;
3776 /* NOTE: we accept a trailing comma */
3781 /* set integral type of the enum */
3784 if (pl
!= (unsigned)pl
)
3785 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3787 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
3788 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3789 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
3791 /* set type for enum members */
3792 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
3794 if (ll
== (int)ll
) /* default is int if it fits */
3796 if (t
.t
& VT_UNSIGNED
) {
3797 ss
->type
.t
|= VT_UNSIGNED
;
3798 if (ll
== (unsigned)ll
)
3801 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
3802 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3807 while (tok
!= '}') {
3808 if (!parse_btype(&btype
, &ad1
)) {
3814 tcc_error("flexible array member '%s' not at the end of struct",
3815 get_tok_str(v
, NULL
));
3821 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
3823 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3824 expect("identifier");
3826 int v
= btype
.ref
->v
;
3827 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3828 if (tcc_state
->ms_extensions
== 0)
3829 expect("identifier");
3833 if (type_size(&type1
, &align
) < 0) {
3834 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3837 tcc_error("field '%s' has incomplete type",
3838 get_tok_str(v
, NULL
));
3840 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3841 (type1
.t
& VT_STORAGE
))
3842 tcc_error("invalid type for '%s'",
3843 get_tok_str(v
, NULL
));
3847 bit_size
= expr_const();
3848 /* XXX: handle v = 0 case for messages */
3850 tcc_error("negative width in bit-field '%s'",
3851 get_tok_str(v
, NULL
));
3852 if (v
&& bit_size
== 0)
3853 tcc_error("zero width for bit-field '%s'",
3854 get_tok_str(v
, NULL
));
3855 parse_attribute(&ad1
);
3857 size
= type_size(&type1
, &align
);
3858 if (bit_size
>= 0) {
3859 bt
= type1
.t
& VT_BTYPE
;
3865 tcc_error("bitfields must have scalar type");
3867 if (bit_size
> bsize
) {
3868 tcc_error("width of '%s' exceeds its type",
3869 get_tok_str(v
, NULL
));
3870 } else if (bit_size
== bsize
3871 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
3872 /* no need for bit fields */
3874 } else if (bit_size
== 64) {
3875 tcc_error("field width 64 not implemented");
3877 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
3879 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
3882 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3883 /* Remember we've seen a real field to check
3884 for placement of flexible array member. */
3887 /* If member is a struct or bit-field, enforce
3888 placing into the struct (as anonymous). */
3890 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
3895 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
3900 if (tok
== ';' || tok
== TOK_EOF
)
3907 parse_attribute(&ad
);
3908 struct_layout(type
, &ad
);
3913 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
3915 if (s
->a
.aligned
&& 0 == ad
->a
.aligned
)
3916 ad
->a
.aligned
= s
->a
.aligned
;
3917 if (s
->f
.func_call
&& 0 == ad
->f
.func_call
)
3918 ad
->f
.func_call
= s
->f
.func_call
;
3919 if (s
->f
.func_type
&& 0 == ad
->f
.func_type
)
3920 ad
->f
.func_type
= s
->f
.func_type
;
3925 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3926 are added to the element type, copied because it could be a typedef. */
3927 static void parse_btype_qualify(CType
*type
, int qualifiers
)
3929 while (type
->t
& VT_ARRAY
) {
3930 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
3931 type
= &type
->ref
->type
;
3933 type
->t
|= qualifiers
;
3936 /* return 0 if no type declaration. otherwise, return the basic type
3939 static int parse_btype(CType
*type
, AttributeDef
*ad
)
3941 int t
, u
, bt
, st
, type_found
, typespec_found
, g
;
3945 memset(ad
, 0, sizeof(AttributeDef
));
3955 /* currently, we really ignore extension */
3965 if (u
== VT_SHORT
|| u
== VT_LONG
) {
3966 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
3967 tmbt
: tcc_error("too many basic types");
3970 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
3975 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
3988 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
3989 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
3990 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
3991 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
3998 #ifdef TCC_TARGET_ARM64
4000 /* GCC's __uint128_t appears in some Linux header files. Make it a
4001 synonym for long double to get the size and alignment right. */
4012 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4013 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4021 struct_decl(&type1
, VT_ENUM
);
4024 type
->ref
= type1
.ref
;
4027 struct_decl(&type1
, VT_STRUCT
);
4030 struct_decl(&type1
, VT_UNION
);
4033 /* type modifiers */
4038 parse_btype_qualify(type
, VT_CONSTANT
);
4046 parse_btype_qualify(type
, VT_VOLATILE
);
4053 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4054 tcc_error("signed and unsigned modifier");
4067 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4068 tcc_error("signed and unsigned modifier");
4069 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4085 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4086 tcc_error("multiple storage classes");
4097 /* GNUC attribute */
4098 case TOK_ATTRIBUTE1
:
4099 case TOK_ATTRIBUTE2
:
4100 parse_attribute(ad
);
4101 if (ad
->attr_mode
) {
4102 u
= ad
->attr_mode
-1;
4103 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4111 parse_expr_type(&type1
);
4112 /* remove all storage modifiers except typedef */
4113 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4115 sym_to_attr(ad
, type1
.ref
);
4121 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4123 t
&= ~(VT_BTYPE
|VT_LONG
);
4124 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4125 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4126 type
->ref
= s
->type
.ref
;
4128 parse_btype_qualify(type
, t
);
4130 /* get attributes from typedef */
4140 if (tcc_state
->char_is_unsigned
) {
4141 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4144 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4145 bt
= t
& (VT_BTYPE
|VT_LONG
);
4147 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4148 #ifdef TCC_TARGET_PE
4149 if (bt
== VT_LDOUBLE
)
4150 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4156 /* convert a function parameter type (array to pointer and function to
4157 function pointer) */
4158 static inline void convert_parameter_type(CType
*pt
)
4160 /* remove const and volatile qualifiers (XXX: const could be used
4161 to indicate a const function parameter */
4162 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4163 /* array must be transformed to pointer according to ANSI C */
4165 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4170 ST_FUNC
void parse_asm_str(CString
*astr
)
4173 parse_mult_str(astr
, "string constant");
4176 /* Parse an asm label and return the token */
4177 static int asm_label_instr(void)
4183 parse_asm_str(&astr
);
4186 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4188 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4193 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4195 int n
, l
, t1
, arg_size
, align
;
4196 Sym
**plast
, *s
, *first
;
4201 /* function type, or recursive declarator (return if so) */
4203 if (td
&& !(td
& TYPE_ABSTRACT
))
4207 else if (parse_btype(&pt
, &ad1
))
4218 /* read param name and compute offset */
4219 if (l
!= FUNC_OLD
) {
4220 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4222 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4223 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4224 tcc_error("parameter declared as void");
4225 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4229 expect("identifier");
4230 pt
.t
= VT_VOID
; /* invalid type */
4233 convert_parameter_type(&pt
);
4234 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4240 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4245 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4246 tcc_error("invalid type");
4249 /* if no parameters, then old type prototype */
4252 /* NOTE: const is ignored in returned type as it has a special
4253 meaning in gcc / C++ */
4254 type
->t
&= ~VT_CONSTANT
;
4255 /* some ancient pre-K&R C allows a function to return an array
4256 and the array brackets to be put after the arguments, such
4257 that "int c()[]" means something like "int[] c()" */
4260 skip(']'); /* only handle simple "[]" */
4263 /* we push a anonymous symbol which will contain the function prototype */
4264 ad
->f
.func_args
= arg_size
;
4265 ad
->f
.func_type
= l
;
4266 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4272 } else if (tok
== '[') {
4273 int saved_nocode_wanted
= nocode_wanted
;
4274 /* array definition */
4276 if (tok
== TOK_RESTRICT1
)
4281 if (!local_stack
|| (storage
& VT_STATIC
))
4282 vpushi(expr_const());
4284 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4285 length must always be evaluated, even under nocode_wanted,
4286 so that its size slot is initialized (e.g. under sizeof
4291 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4294 tcc_error("invalid array size");
4296 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4297 tcc_error("size of variable length array should be an integer");
4302 /* parse next post type */
4303 post_type(type
, ad
, storage
, 0);
4304 if (type
->t
== VT_FUNC
)
4305 tcc_error("declaration of an array of functions");
4306 t1
|= type
->t
& VT_VLA
;
4309 loc
-= type_size(&int_type
, &align
);
4313 vla_runtime_type_size(type
, &align
);
4315 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4321 nocode_wanted
= saved_nocode_wanted
;
4323 /* we push an anonymous symbol which will contain the array
4325 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4326 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4332 /* Parse a type declarator (except basic type), and return the type
4333 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4334 expected. 'type' should contain the basic type. 'ad' is the
4335 attribute definition of the basic type. It can be modified by
4336 type_decl(). If this (possibly abstract) declarator is a pointer chain
4337 it returns the innermost pointed to type (equals *type, but is a different
4338 pointer), otherwise returns type itself, that's used for recursive calls. */
4339 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4342 int qualifiers
, storage
;
4344 /* recursive type, remove storage bits first, apply them later again */
4345 storage
= type
->t
& VT_STORAGE
;
4346 type
->t
&= ~VT_STORAGE
;
4349 while (tok
== '*') {
4357 qualifiers
|= VT_CONSTANT
;
4362 qualifiers
|= VT_VOLATILE
;
4368 /* XXX: clarify attribute handling */
4369 case TOK_ATTRIBUTE1
:
4370 case TOK_ATTRIBUTE2
:
4371 parse_attribute(ad
);
4375 type
->t
|= qualifiers
;
4377 /* innermost pointed to type is the one for the first derivation */
4378 ret
= pointed_type(type
);
4382 /* This is possibly a parameter type list for abstract declarators
4383 ('int ()'), use post_type for testing this. */
4384 if (!post_type(type
, ad
, 0, td
)) {
4385 /* It's not, so it's a nested declarator, and the post operations
4386 apply to the innermost pointed to type (if any). */
4387 /* XXX: this is not correct to modify 'ad' at this point, but
4388 the syntax is not clear */
4389 parse_attribute(ad
);
4390 post
= type_decl(type
, ad
, v
, td
);
4393 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4394 /* type identifier */
4398 if (!(td
& TYPE_ABSTRACT
))
4399 expect("identifier");
4402 post_type(post
, ad
, storage
, 0);
4403 parse_attribute(ad
);
4408 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4409 ST_FUNC
int lvalue_type(int t
)
4414 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4416 else if (bt
== VT_SHORT
)
4420 if (t
& VT_UNSIGNED
)
4421 r
|= VT_LVAL_UNSIGNED
;
4425 /* indirection with full error checking and bound check */
4426 ST_FUNC
void indir(void)
4428 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4429 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4433 if (vtop
->r
& VT_LVAL
)
4435 vtop
->type
= *pointed_type(&vtop
->type
);
4436 /* Arrays and functions are never lvalues */
4437 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4438 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4439 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4440 /* if bound checking, the referenced pointer must be checked */
4441 #ifdef CONFIG_TCC_BCHECK
4442 if (tcc_state
->do_bounds_check
)
4443 vtop
->r
|= VT_MUSTBOUND
;
4448 /* pass a parameter to a function and do type checking and casting */
4449 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4454 func_type
= func
->f
.func_type
;
4455 if (func_type
== FUNC_OLD
||
4456 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4457 /* default casting : only need to convert float to double */
4458 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4459 gen_cast_s(VT_DOUBLE
);
4460 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4461 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4462 type
.ref
= vtop
->type
.ref
;
4465 } else if (arg
== NULL
) {
4466 tcc_error("too many arguments to function");
4469 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4470 gen_assign_cast(&type
);
4474 /* parse an expression and return its type without any side effect. */
4475 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4484 /* parse an expression of the form '(type)' or '(expr)' and return its
4486 static void parse_expr_type(CType
*type
)
4492 if (parse_btype(type
, &ad
)) {
4493 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4495 expr_type(type
, gexpr
);
4500 static void parse_type(CType
*type
)
4505 if (!parse_btype(type
, &ad
)) {
4508 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4511 static void parse_builtin_params(int nc
, const char *args
)
4518 while ((c
= *args
++)) {
4522 case 'e': expr_eq(); continue;
4523 case 't': parse_type(&t
); vpush(&t
); continue;
4524 default: tcc_error("internal error"); break;
4532 ST_FUNC
void unary(void)
4534 int n
, t
, align
, size
, r
, sizeof_caller
;
4539 sizeof_caller
= in_sizeof
;
4542 /* XXX: GCC 2.95.3 does not generate a table although it should be
4550 #ifdef TCC_TARGET_PE
4551 t
= VT_SHORT
|VT_UNSIGNED
;
4559 vsetc(&type
, VT_CONST
, &tokc
);
4563 t
= VT_INT
| VT_UNSIGNED
;
4569 t
= VT_LLONG
| VT_UNSIGNED
;
4581 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4584 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4586 case TOK___FUNCTION__
:
4588 goto tok_identifier
;
4594 /* special function name identifier */
4595 len
= strlen(funcname
) + 1;
4596 /* generate char[len] type */
4601 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4602 if (!NODATA_WANTED
) {
4603 ptr
= section_ptr_add(data_section
, len
);
4604 memcpy(ptr
, funcname
, len
);
4610 #ifdef TCC_TARGET_PE
4611 t
= VT_SHORT
| VT_UNSIGNED
;
4617 /* string parsing */
4619 if (tcc_state
->char_is_unsigned
)
4620 t
= VT_BYTE
| VT_UNSIGNED
;
4622 if (tcc_state
->warn_write_strings
)
4627 memset(&ad
, 0, sizeof(AttributeDef
));
4628 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4633 if (parse_btype(&type
, &ad
)) {
4634 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4636 /* check ISOC99 compound literal */
4638 /* data is allocated locally by default */
4643 /* all except arrays are lvalues */
4644 if (!(type
.t
& VT_ARRAY
))
4645 r
|= lvalue_type(type
.t
);
4646 memset(&ad
, 0, sizeof(AttributeDef
));
4647 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4649 if (sizeof_caller
) {
4656 } else if (tok
== '{') {
4657 int saved_nocode_wanted
= nocode_wanted
;
4659 tcc_error("expected constant");
4660 /* save all registers */
4662 /* statement expression : we do not accept break/continue
4663 inside as GCC does. We do retain the nocode_wanted state,
4664 as statement expressions can't ever be entered from the
4665 outside, so any reactivation of code emission (from labels
4666 or loop heads) can be disabled again after the end of it. */
4667 block(NULL
, NULL
, 1);
4668 nocode_wanted
= saved_nocode_wanted
;
4683 /* functions names must be treated as function pointers,
4684 except for unary '&' and sizeof. Since we consider that
4685 functions are not lvalues, we only have to handle it
4686 there and in function calls. */
4687 /* arrays can also be used although they are not lvalues */
4688 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4689 !(vtop
->type
.t
& VT_ARRAY
))
4691 mk_pointer(&vtop
->type
);
4697 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4698 gen_cast_s(VT_BOOL
);
4699 vtop
->c
.i
= !vtop
->c
.i
;
4700 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4704 vseti(VT_JMP
, gvtst(1, 0));
4716 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4717 tcc_error("pointer not accepted for unary plus");
4718 /* In order to force cast, we add zero, except for floating point
4719 where we really need an noop (otherwise -0.0 will be transformed
4721 if (!is_float(vtop
->type
.t
)) {
4732 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
4733 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
4734 size
= type_size(&type
, &align
);
4735 if (s
&& s
->a
.aligned
)
4736 align
= 1 << (s
->a
.aligned
- 1);
4737 if (t
== TOK_SIZEOF
) {
4738 if (!(type
.t
& VT_VLA
)) {
4740 tcc_error("sizeof applied to an incomplete type");
4743 vla_runtime_type_size(&type
, &align
);
4748 vtop
->type
.t
|= VT_UNSIGNED
;
4751 case TOK_builtin_expect
:
4752 /* __builtin_expect is a no-op for now */
4753 parse_builtin_params(0, "ee");
4756 case TOK_builtin_types_compatible_p
:
4757 parse_builtin_params(0, "tt");
4758 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4759 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4760 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4764 case TOK_builtin_choose_expr
:
4791 case TOK_builtin_constant_p
:
4792 parse_builtin_params(1, "e");
4793 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4797 case TOK_builtin_frame_address
:
4798 case TOK_builtin_return_address
:
4804 if (tok
!= TOK_CINT
) {
4805 tcc_error("%s only takes positive integers",
4806 tok1
== TOK_builtin_return_address
?
4807 "__builtin_return_address" :
4808 "__builtin_frame_address");
4810 level
= (uint32_t)tokc
.i
;
4815 vset(&type
, VT_LOCAL
, 0); /* local frame */
4817 mk_pointer(&vtop
->type
);
4818 indir(); /* -> parent frame */
4820 if (tok1
== TOK_builtin_return_address
) {
4821 // assume return address is just above frame pointer on stack
4824 mk_pointer(&vtop
->type
);
4829 #ifdef TCC_TARGET_X86_64
4830 #ifdef TCC_TARGET_PE
4831 case TOK_builtin_va_start
:
4832 parse_builtin_params(0, "ee");
4833 r
= vtop
->r
& VT_VALMASK
;
4837 tcc_error("__builtin_va_start expects a local variable");
4839 vtop
->type
= char_pointer_type
;
4844 case TOK_builtin_va_arg_types
:
4845 parse_builtin_params(0, "t");
4846 vpushi(classify_x86_64_va_arg(&vtop
->type
));
4853 #ifdef TCC_TARGET_ARM64
4854 case TOK___va_start
: {
4855 parse_builtin_params(0, "ee");
4859 vtop
->type
.t
= VT_VOID
;
4862 case TOK___va_arg
: {
4863 parse_builtin_params(0, "et");
4871 case TOK___arm64_clear_cache
: {
4872 parse_builtin_params(0, "ee");
4875 vtop
->type
.t
= VT_VOID
;
4879 /* pre operations */
4890 t
= vtop
->type
.t
& VT_BTYPE
;
4892 /* In IEEE negate(x) isn't subtract(0,x), but rather
4896 vtop
->c
.f
= -1.0 * 0.0;
4897 else if (t
== VT_DOUBLE
)
4898 vtop
->c
.d
= -1.0 * 0.0;
4900 vtop
->c
.ld
= -1.0 * 0.0;
4908 goto tok_identifier
;
4910 /* allow to take the address of a label */
4911 if (tok
< TOK_UIDENT
)
4912 expect("label identifier");
4913 s
= label_find(tok
);
4915 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
4917 if (s
->r
== LABEL_DECLARED
)
4918 s
->r
= LABEL_FORWARD
;
4921 s
->type
.t
= VT_VOID
;
4922 mk_pointer(&s
->type
);
4923 s
->type
.t
|= VT_STATIC
;
4925 vpushsym(&s
->type
, s
);
4931 CType controlling_type
;
4932 int has_default
= 0;
4935 TokenString
*str
= NULL
;
4939 expr_type(&controlling_type
, expr_eq
);
4940 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
4944 if (tok
== TOK_DEFAULT
) {
4946 tcc_error("too many 'default'");
4952 AttributeDef ad_tmp
;
4955 parse_btype(&cur_type
, &ad_tmp
);
4956 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
4957 if (compare_types(&controlling_type
, &cur_type
, 0)) {
4959 tcc_error("type match twice");
4969 skip_or_save_block(&str
);
4971 skip_or_save_block(NULL
);
4978 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
4979 tcc_error("type '%s' does not match any association", buf
);
4981 begin_macro(str
, 1);
4990 // special qnan , snan and infinity values
4992 vpush64(VT_DOUBLE
, 0x7ff8000000000000ULL
);
4996 vpush64(VT_DOUBLE
, 0x7ff0000000000001ULL
);
5000 vpush64(VT_DOUBLE
, 0x7ff0000000000000ULL
);
5009 expect("identifier");
5012 const char *name
= get_tok_str(t
, NULL
);
5014 tcc_error("'%s' undeclared", name
);
5015 /* for simple function calls, we tolerate undeclared
5016 external reference to int() function */
5017 if (tcc_state
->warn_implicit_function_declaration
5018 #ifdef TCC_TARGET_PE
5019 /* people must be warned about using undeclared WINAPI functions
5020 (which usually start with uppercase letter) */
5021 || (name
[0] >= 'A' && name
[0] <= 'Z')
5024 tcc_warning("implicit declaration of function '%s'", name
);
5025 s
= external_global_sym(t
, &func_old_type
, 0);
5029 /* A symbol that has a register is a local register variable,
5030 which starts out as VT_LOCAL value. */
5031 if ((r
& VT_VALMASK
) < VT_CONST
)
5032 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5034 vset(&s
->type
, r
, s
->c
);
5035 /* Point to s as backpointer (even without r&VT_SYM).
5036 Will be used by at least the x86 inline asm parser for
5042 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5043 vtop
->c
.i
= s
->enum_val
;
5048 /* post operations */
5050 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5053 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5056 if (tok
== TOK_ARROW
)
5058 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5061 /* expect pointer on structure */
5062 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5063 expect("struct or union");
5064 if (tok
== TOK_CDOUBLE
)
5065 expect("field name");
5067 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5068 expect("field name");
5069 s
= find_field(&vtop
->type
, tok
);
5071 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5072 /* add field offset to pointer */
5073 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5076 /* change type to field type, and set to lvalue */
5077 vtop
->type
= s
->type
;
5078 vtop
->type
.t
|= qualifiers
;
5079 /* an array is never an lvalue */
5080 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5081 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5082 #ifdef CONFIG_TCC_BCHECK
5083 /* if bound checking, the referenced pointer must be checked */
5084 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5085 vtop
->r
|= VT_MUSTBOUND
;
5089 } else if (tok
== '[') {
5095 } else if (tok
== '(') {
5098 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5101 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5102 /* pointer test (no array accepted) */
5103 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5104 vtop
->type
= *pointed_type(&vtop
->type
);
5105 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5109 expect("function pointer");
5112 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5114 /* get return type */
5117 sa
= s
->next
; /* first parameter */
5118 nb_args
= regsize
= 0;
5120 /* compute first implicit argument if a structure is returned */
5121 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5122 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5123 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5124 &ret_align
, ®size
);
5126 /* get some space for the returned structure */
5127 size
= type_size(&s
->type
, &align
);
5128 #ifdef TCC_TARGET_ARM64
5129 /* On arm64, a small struct is return in registers.
5130 It is much easier to write it to memory if we know
5131 that we are allowed to write some extra bytes, so
5132 round the allocated space up to a power of 2: */
5134 while (size
& (size
- 1))
5135 size
= (size
| (size
- 1)) + 1;
5137 loc
= (loc
- size
) & -align
;
5139 ret
.r
= VT_LOCAL
| VT_LVAL
;
5140 /* pass it as 'int' to avoid structure arg passing
5142 vseti(VT_LOCAL
, loc
);
5152 /* return in register */
5153 if (is_float(ret
.type
.t
)) {
5154 ret
.r
= reg_fret(ret
.type
.t
);
5155 #ifdef TCC_TARGET_X86_64
5156 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5160 #ifndef TCC_TARGET_ARM64
5161 #ifdef TCC_TARGET_X86_64
5162 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5164 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5175 gfunc_param_typed(s
, sa
);
5185 tcc_error("too few arguments to function");
5187 gfunc_call(nb_args
);
5190 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5191 vsetc(&ret
.type
, r
, &ret
.c
);
5192 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5195 /* handle packed struct return */
5196 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5199 size
= type_size(&s
->type
, &align
);
5200 /* We're writing whole regs often, make sure there's enough
5201 space. Assume register size is power of 2. */
5202 if (regsize
> align
)
5204 loc
= (loc
- size
) & -align
;
5208 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5212 if (--ret_nregs
== 0)
5216 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5224 ST_FUNC
void expr_prod(void)
5229 while (tok
== '*' || tok
== '/' || tok
== '%') {
5237 ST_FUNC
void expr_sum(void)
5242 while (tok
== '+' || tok
== '-') {
5250 static void expr_shift(void)
5255 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5263 static void expr_cmp(void)
5268 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5269 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5277 static void expr_cmpeq(void)
5282 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5290 static void expr_and(void)
5293 while (tok
== '&') {
5300 static void expr_xor(void)
5303 while (tok
== '^') {
5310 static void expr_or(void)
5313 while (tok
== '|') {
5320 static void expr_land(void)
5323 if (tok
== TOK_LAND
) {
5326 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5327 gen_cast_s(VT_BOOL
);
5332 while (tok
== TOK_LAND
) {
5348 if (tok
!= TOK_LAND
) {
5361 static void expr_lor(void)
5364 if (tok
== TOK_LOR
) {
5367 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5368 gen_cast_s(VT_BOOL
);
5373 while (tok
== TOK_LOR
) {
5389 if (tok
!= TOK_LOR
) {
5402 /* Assuming vtop is a value used in a conditional context
5403 (i.e. compared with zero) return 0 if it's false, 1 if
5404 true and -1 if it can't be statically determined. */
5405 static int condition_3way(void)
5408 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5409 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5411 gen_cast_s(VT_BOOL
);
5418 static void expr_cond(void)
5420 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5422 CType type
, type1
, type2
;
5427 c
= condition_3way();
5428 g
= (tok
== ':' && gnu_ext
);
5430 /* needed to avoid having different registers saved in
5432 if (is_float(vtop
->type
.t
)) {
5434 #ifdef TCC_TARGET_X86_64
5435 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5460 sv
= *vtop
; /* save value to handle it later */
5461 vtop
--; /* no vpop so that FP stack is not flushed */
5479 bt1
= t1
& VT_BTYPE
;
5481 bt2
= t2
& VT_BTYPE
;
5484 /* cast operands to correct type according to ISOC rules */
5485 if (is_float(bt1
) || is_float(bt2
)) {
5486 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5487 type
.t
= VT_LDOUBLE
;
5489 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5494 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5495 /* cast to biggest op */
5496 type
.t
= VT_LLONG
| VT_LONG
;
5497 if (bt1
== VT_LLONG
)
5499 if (bt2
== VT_LLONG
)
5501 /* convert to unsigned if it does not fit in a long long */
5502 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5503 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5504 type
.t
|= VT_UNSIGNED
;
5505 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5506 /* If one is a null ptr constant the result type
5508 if (is_null_pointer (vtop
))
5510 else if (is_null_pointer (&sv
))
5512 /* XXX: test pointer compatibility, C99 has more elaborate
5516 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
5517 /* XXX: test function pointer compatibility */
5518 type
= bt1
== VT_FUNC
? type1
: type2
;
5519 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5520 /* XXX: test structure compatibility */
5521 type
= bt1
== VT_STRUCT
? type1
: type2
;
5522 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5523 /* NOTE: as an extension, we accept void on only one side */
5526 /* integer operations */
5527 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5528 /* convert to unsigned if it does not fit in an integer */
5529 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5530 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5531 type
.t
|= VT_UNSIGNED
;
5533 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5534 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5535 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5538 /* now we convert second operand */
5542 mk_pointer(&vtop
->type
);
5544 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5549 if (is_float(type
.t
)) {
5551 #ifdef TCC_TARGET_X86_64
5552 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5556 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5557 /* for long longs, we use fixed registers to avoid having
5558 to handle a complicated move */
5569 /* this is horrible, but we must also convert first
5575 mk_pointer(&vtop
->type
);
5577 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5583 move_reg(r2
, r1
, type
.t
);
5593 static void expr_eq(void)
5599 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5600 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5601 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5616 ST_FUNC
void gexpr(void)
5627 /* parse a constant expression and return value in vtop. */
5628 static void expr_const1(void)
5637 /* parse an integer constant and return its value. */
5638 static inline int64_t expr_const64(void)
5642 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5643 expect("constant expression");
5649 /* parse an integer constant and return its value.
5650 Complain if it doesn't fit 32bit (signed or unsigned). */
5651 ST_FUNC
int expr_const(void)
5654 int64_t wc
= expr_const64();
5656 if (c
!= wc
&& (unsigned)c
!= wc
)
5657 tcc_error("constant exceeds 32 bit");
5661 /* return the label token if current token is a label, otherwise
5663 static int is_label(void)
5667 /* fast test first */
5668 if (tok
< TOK_UIDENT
)
5670 /* no need to save tokc because tok is an identifier */
5676 unget_tok(last_tok
);
5681 #ifndef TCC_TARGET_ARM64
5682 static void gfunc_return(CType
*func_type
)
5684 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5685 CType type
, ret_type
;
5686 int ret_align
, ret_nregs
, regsize
;
5687 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5688 &ret_align
, ®size
);
5689 if (0 == ret_nregs
) {
5690 /* if returning structure, must copy it to implicit
5691 first pointer arg location */
5694 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5697 /* copy structure value to pointer */
5700 /* returning structure packed into registers */
5701 int r
, size
, addr
, align
;
5702 size
= type_size(func_type
,&align
);
5703 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5704 (vtop
->c
.i
& (ret_align
-1)))
5705 && (align
& (ret_align
-1))) {
5706 loc
= (loc
- size
) & -ret_align
;
5709 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5713 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5715 vtop
->type
= ret_type
;
5716 if (is_float(ret_type
.t
))
5717 r
= rc_fret(ret_type
.t
);
5728 if (--ret_nregs
== 0)
5730 /* We assume that when a structure is returned in multiple
5731 registers, their classes are consecutive values of the
5734 vtop
->c
.i
+= regsize
;
5738 } else if (is_float(func_type
->t
)) {
5739 gv(rc_fret(func_type
->t
));
5743 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5747 static int case_cmp(const void *pa
, const void *pb
)
5749 int64_t a
= (*(struct case_t
**) pa
)->v1
;
5750 int64_t b
= (*(struct case_t
**) pb
)->v1
;
5751 return a
< b
? -1 : a
> b
;
5754 static void gcase(struct case_t
**base
, int len
, int *bsym
)
5758 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
5776 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
5778 gcase(base
, len
/2, bsym
);
5779 if (cur_switch
->def_sym
)
5780 gjmp_addr(cur_switch
->def_sym
);
5782 *bsym
= gjmp(*bsym
);
5786 base
+= e
; len
-= e
;
5796 if (p
->v1
== p
->v2
) {
5798 gtst_addr(0, p
->sym
);
5808 gtst_addr(0, p
->sym
);
5814 static void block(int *bsym
, int *csym
, int is_expr
)
5816 int a
, b
, c
, d
, cond
;
5819 /* generate line number info */
5820 if (tcc_state
->do_debug
)
5821 tcc_debug_line(tcc_state
);
5824 /* default return value is (void) */
5826 vtop
->type
.t
= VT_VOID
;
5829 if (tok
== TOK_IF
) {
5831 int saved_nocode_wanted
= nocode_wanted
;
5836 cond
= condition_3way();
5842 nocode_wanted
|= 0x20000000;
5843 block(bsym
, csym
, 0);
5845 nocode_wanted
= saved_nocode_wanted
;
5847 if (c
== TOK_ELSE
) {
5852 nocode_wanted
|= 0x20000000;
5853 block(bsym
, csym
, 0);
5854 gsym(d
); /* patch else jmp */
5856 nocode_wanted
= saved_nocode_wanted
;
5859 } else if (tok
== TOK_WHILE
) {
5860 int saved_nocode_wanted
;
5861 nocode_wanted
&= ~0x20000000;
5871 saved_nocode_wanted
= nocode_wanted
;
5873 nocode_wanted
= saved_nocode_wanted
;
5878 } else if (tok
== '{') {
5880 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
5883 /* record local declaration stack position */
5885 llabel
= local_label_stack
;
5888 /* handle local labels declarations */
5889 if (tok
== TOK_LABEL
) {
5892 if (tok
< TOK_UIDENT
)
5893 expect("label identifier");
5894 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
5904 while (tok
!= '}') {
5905 if ((a
= is_label()))
5912 block(bsym
, csym
, is_expr
);
5915 /* pop locally defined labels */
5916 label_pop(&local_label_stack
, llabel
, is_expr
);
5917 /* pop locally defined symbols */
5919 /* In the is_expr case (a statement expression is finished here),
5920 vtop might refer to symbols on the local_stack. Either via the
5921 type or via vtop->sym. We can't pop those nor any that in turn
5922 might be referred to. To make it easier we don't roll back
5923 any symbols in that case; some upper level call to block() will
5924 do that. We do have to remove such symbols from the lookup
5925 tables, though. sym_pop will do that. */
5926 sym_pop(&local_stack
, s
, is_expr
);
5928 /* Pop VLA frames and restore stack pointer if required */
5929 if (vlas_in_scope
> saved_vlas_in_scope
) {
5930 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
5933 vlas_in_scope
= saved_vlas_in_scope
;
5936 } else if (tok
== TOK_RETURN
) {
5940 gen_assign_cast(&func_vt
);
5941 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
5944 gfunc_return(&func_vt
);
5947 /* jump unless last stmt in top-level block */
5948 if (tok
!= '}' || local_scope
!= 1)
5950 nocode_wanted
|= 0x20000000;
5951 } else if (tok
== TOK_BREAK
) {
5954 tcc_error("cannot break");
5955 *bsym
= gjmp(*bsym
);
5958 nocode_wanted
|= 0x20000000;
5959 } else if (tok
== TOK_CONTINUE
) {
5962 tcc_error("cannot continue");
5963 vla_sp_restore_root();
5964 *csym
= gjmp(*csym
);
5967 } else if (tok
== TOK_FOR
) {
5969 int saved_nocode_wanted
;
5970 nocode_wanted
&= ~0x20000000;
5976 /* c99 for-loop init decl? */
5977 if (!decl0(VT_LOCAL
, 1, NULL
)) {
5978 /* no, regular for-loop init expr */
6004 saved_nocode_wanted
= nocode_wanted
;
6006 nocode_wanted
= saved_nocode_wanted
;
6011 sym_pop(&local_stack
, s
, 0);
6014 if (tok
== TOK_DO
) {
6015 int saved_nocode_wanted
;
6016 nocode_wanted
&= ~0x20000000;
6022 saved_nocode_wanted
= nocode_wanted
;
6030 nocode_wanted
= saved_nocode_wanted
;
6035 if (tok
== TOK_SWITCH
) {
6036 struct switch_t
*saved
, sw
;
6037 int saved_nocode_wanted
= nocode_wanted
;
6043 switchval
= *vtop
--;
6045 b
= gjmp(0); /* jump to first case */
6046 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6050 nocode_wanted
= saved_nocode_wanted
;
6051 a
= gjmp(a
); /* add implicit break */
6054 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6055 for (b
= 1; b
< sw
.n
; b
++)
6056 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6057 tcc_error("duplicate case value");
6058 /* Our switch table sorting is signed, so the compared
6059 value needs to be as well when it's 64bit. */
6060 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6061 switchval
.type
.t
&= ~VT_UNSIGNED
;
6063 gcase(sw
.p
, sw
.n
, &a
);
6066 gjmp_addr(sw
.def_sym
);
6067 dynarray_reset(&sw
.p
, &sw
.n
);
6072 if (tok
== TOK_CASE
) {
6073 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6076 nocode_wanted
&= ~0x20000000;
6078 cr
->v1
= cr
->v2
= expr_const64();
6079 if (gnu_ext
&& tok
== TOK_DOTS
) {
6081 cr
->v2
= expr_const64();
6082 if (cr
->v2
< cr
->v1
)
6083 tcc_warning("empty case range");
6086 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6089 goto block_after_label
;
6091 if (tok
== TOK_DEFAULT
) {
6096 if (cur_switch
->def_sym
)
6097 tcc_error("too many 'default'");
6098 cur_switch
->def_sym
= ind
;
6100 goto block_after_label
;
6102 if (tok
== TOK_GOTO
) {
6104 if (tok
== '*' && gnu_ext
) {
6108 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6111 } else if (tok
>= TOK_UIDENT
) {
6112 s
= label_find(tok
);
6113 /* put forward definition if needed */
6115 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6117 if (s
->r
== LABEL_DECLARED
)
6118 s
->r
= LABEL_FORWARD
;
6120 vla_sp_restore_root();
6121 if (s
->r
& LABEL_FORWARD
)
6122 s
->jnext
= gjmp(s
->jnext
);
6124 gjmp_addr(s
->jnext
);
6127 expect("label identifier");
6130 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
6139 if (s
->r
== LABEL_DEFINED
)
6140 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6142 s
->r
= LABEL_DEFINED
;
6144 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
6148 /* we accept this, but it is a mistake */
6150 nocode_wanted
&= ~0x20000000;
6152 tcc_warning("deprecated use of label at end of compound statement");
6156 block(bsym
, csym
, is_expr
);
6159 /* expression case */
6174 /* This skips over a stream of tokens containing balanced {} and ()
6175 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6176 with a '{'). If STR then allocates and stores the skipped tokens
6177 in *STR. This doesn't check if () and {} are nested correctly,
6178 i.e. "({)}" is accepted. */
6179 static void skip_or_save_block(TokenString
**str
)
6181 int braces
= tok
== '{';
6184 *str
= tok_str_alloc();
6186 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6188 if (tok
== TOK_EOF
) {
6189 if (str
|| level
> 0)
6190 tcc_error("unexpected end of file");
6195 tok_str_add_tok(*str
);
6198 if (t
== '{' || t
== '(') {
6200 } else if (t
== '}' || t
== ')') {
6202 if (level
== 0 && braces
&& t
== '}')
6207 tok_str_add(*str
, -1);
6208 tok_str_add(*str
, 0);
6212 #define EXPR_CONST 1
6215 static void parse_init_elem(int expr_type
)
6217 int saved_global_expr
;
6220 /* compound literals must be allocated globally in this case */
6221 saved_global_expr
= global_expr
;
6224 global_expr
= saved_global_expr
;
6225 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6226 (compound literals). */
6227 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6228 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6229 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6230 #ifdef TCC_TARGET_PE
6231 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6234 tcc_error("initializer element is not constant");
6242 /* put zeros for variable based init */
6243 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6246 /* nothing to do because globals are already set to zero */
6248 vpush_global_sym(&func_old_type
, TOK_memset
);
6250 #ifdef TCC_TARGET_ARM
6261 /* t is the array or struct type. c is the array or struct
6262 address. cur_field is the pointer to the current
6263 field, for arrays the 'c' member contains the current start
6264 index. 'size_only' is true if only size info is needed (only used
6265 in arrays). al contains the already initialized length of the
6266 current container (starting at c). This returns the new length of that. */
6267 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6268 Sym
**cur_field
, int size_only
, int al
)
6271 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6272 unsigned long corig
= c
;
6276 if (gnu_ext
&& (l
= is_label()) != 0)
6278 /* NOTE: we only support ranges for last designator */
6279 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6281 if (!(type
->t
& VT_ARRAY
))
6282 expect("array type");
6284 index
= index_last
= expr_const();
6285 if (tok
== TOK_DOTS
&& gnu_ext
) {
6287 index_last
= expr_const();
6291 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6293 tcc_error("invalid index");
6295 (*cur_field
)->c
= index_last
;
6296 type
= pointed_type(type
);
6297 elem_size
= type_size(type
, &align
);
6298 c
+= index
* elem_size
;
6299 nb_elems
= index_last
- index
+ 1;
6305 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6306 expect("struct/union type");
6307 f
= find_field(type
, l
);
6320 } else if (!gnu_ext
) {
6324 if (type
->t
& VT_ARRAY
) {
6325 index
= (*cur_field
)->c
;
6326 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6327 tcc_error("index too large");
6328 type
= pointed_type(type
);
6329 c
+= index
* type_size(type
, &align
);
6332 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6333 *cur_field
= f
= f
->next
;
6335 tcc_error("too many field init");
6340 /* must put zero in holes (note that doing it that way
6341 ensures that it even works with designators) */
6342 if (!size_only
&& c
- corig
> al
)
6343 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6344 decl_initializer(type
, sec
, c
, 0, size_only
);
6346 /* XXX: make it more general */
6347 if (!size_only
&& nb_elems
> 1) {
6348 unsigned long c_end
;
6353 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6354 for (i
= 1; i
< nb_elems
; i
++) {
6355 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6360 } else if (!NODATA_WANTED
) {
6361 c_end
= c
+ nb_elems
* elem_size
;
6362 if (c_end
> sec
->data_allocated
)
6363 section_realloc(sec
, c_end
);
6364 src
= sec
->data
+ c
;
6366 for(i
= 1; i
< nb_elems
; i
++) {
6368 memcpy(dst
, src
, elem_size
);
6372 c
+= nb_elems
* type_size(type
, &align
);
6378 /* store a value or an expression directly in global data or in local array */
6379 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6386 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6390 /* XXX: not portable */
6391 /* XXX: generate error if incorrect relocation */
6392 gen_assign_cast(&dtype
);
6393 bt
= type
->t
& VT_BTYPE
;
6395 if ((vtop
->r
& VT_SYM
)
6398 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6399 || (type
->t
& VT_BITFIELD
))
6400 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6402 tcc_error("initializer element is not computable at load time");
6404 if (NODATA_WANTED
) {
6409 size
= type_size(type
, &align
);
6410 section_reserve(sec
, c
+ size
);
6411 ptr
= sec
->data
+ c
;
6413 /* XXX: make code faster ? */
6414 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6415 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6416 /* XXX This rejects compound literals like
6417 '(void *){ptr}'. The problem is that '&sym' is
6418 represented the same way, which would be ruled out
6419 by the SYM_FIRST_ANOM check above, but also '"string"'
6420 in 'char *p = "string"' is represented the same
6421 with the type being VT_PTR and the symbol being an
6422 anonymous one. That is, there's no difference in vtop
6423 between '(void *){x}' and '&(void *){x}'. Ignore
6424 pointer typed entities here. Hopefully no real code
6425 will every use compound literals with scalar type. */
6426 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6427 /* These come from compound literals, memcpy stuff over. */
6431 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[vtop
->sym
->c
];
6432 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6433 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6435 /* We need to copy over all memory contents, and that
6436 includes relocations. Use the fact that relocs are
6437 created it order, so look from the end of relocs
6438 until we hit one before the copied region. */
6439 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6440 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6441 while (num_relocs
--) {
6443 if (rel
->r_offset
>= esym
->st_value
+ size
)
6445 if (rel
->r_offset
< esym
->st_value
)
6447 /* Note: if the same fields are initialized multiple
6448 times (possible with designators) then we possibly
6449 add multiple relocations for the same offset here.
6450 That would lead to wrong code, the last reloc needs
6451 to win. We clean this up later after the whole
6452 initializer is parsed. */
6453 put_elf_reloca(symtab_section
, sec
,
6454 c
+ rel
->r_offset
- esym
->st_value
,
6455 ELFW(R_TYPE
)(rel
->r_info
),
6456 ELFW(R_SYM
)(rel
->r_info
),
6466 if (type
->t
& VT_BITFIELD
) {
6467 int bit_pos
, bit_size
, bits
, n
;
6468 unsigned char *p
, v
, m
;
6469 bit_pos
= BIT_POS(vtop
->type
.t
);
6470 bit_size
= BIT_SIZE(vtop
->type
.t
);
6471 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6472 bit_pos
&= 7, bits
= 0;
6477 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6478 m
= ((1 << n
) - 1) << bit_pos
;
6479 *p
= (*p
& ~m
) | (v
& m
);
6480 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6484 /* XXX: when cross-compiling we assume that each type has the
6485 same representation on host and target, which is likely to
6486 be wrong in the case of long double */
6488 vtop
->c
.i
= vtop
->c
.i
!= 0;
6490 *(char *)ptr
|= vtop
->c
.i
;
6493 *(short *)ptr
|= vtop
->c
.i
;
6496 *(float*)ptr
= vtop
->c
.f
;
6499 *(double *)ptr
= vtop
->c
.d
;
6502 #if defined TCC_IS_NATIVE_387
6503 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6504 memcpy(ptr
, &vtop
->c
.ld
, 10);
6506 else if (sizeof (long double) == sizeof (double))
6507 __asm__("fldl %1\nfstpt %0\n" : "=m" (ptr
) : "m" (vtop
->c
.ld
));
6511 if (sizeof(long double) == LDOUBLE_SIZE
)
6512 *(long double*)ptr
= vtop
->c
.ld
;
6513 else if (sizeof(double) == LDOUBLE_SIZE
)
6514 *(double *)ptr
= (double)vtop
->c
.ld
;
6516 tcc_error("can't cross compile long double constants");
6520 *(long long *)ptr
|= vtop
->c
.i
;
6527 addr_t val
= vtop
->c
.i
;
6529 if (vtop
->r
& VT_SYM
)
6530 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6532 *(addr_t
*)ptr
|= val
;
6534 if (vtop
->r
& VT_SYM
)
6535 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6536 *(addr_t
*)ptr
|= val
;
6542 int val
= vtop
->c
.i
;
6544 if (vtop
->r
& VT_SYM
)
6545 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6549 if (vtop
->r
& VT_SYM
)
6550 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6559 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6566 /* 't' contains the type and storage info. 'c' is the offset of the
6567 object in section 'sec'. If 'sec' is NULL, it means stack based
6568 allocation. 'first' is true if array '{' must be read (multi
6569 dimension implicit array init handling). 'size_only' is true if
6570 size only evaluation is wanted (only for arrays). */
6571 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6572 int first
, int size_only
)
6574 int len
, n
, no_oblock
, nb
, i
;
6581 /* If we currently are at an '}' or ',' we have read an initializer
6582 element in one of our callers, and not yet consumed it. */
6583 have_elem
= tok
== '}' || tok
== ',';
6584 if (!have_elem
&& tok
!= '{' &&
6585 /* In case of strings we have special handling for arrays, so
6586 don't consume them as initializer value (which would commit them
6587 to some anonymous symbol). */
6588 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6590 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6595 !(type
->t
& VT_ARRAY
) &&
6596 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6597 The source type might have VT_CONSTANT set, which is
6598 of course assignable to non-const elements. */
6599 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6600 init_putv(type
, sec
, c
);
6601 } else if (type
->t
& VT_ARRAY
) {
6604 t1
= pointed_type(type
);
6605 size1
= type_size(t1
, &align1
);
6608 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6611 tcc_error("character array initializer must be a literal,"
6612 " optionally enclosed in braces");
6617 /* only parse strings here if correct type (otherwise: handle
6618 them as ((w)char *) expressions */
6619 if ((tok
== TOK_LSTR
&&
6620 #ifdef TCC_TARGET_PE
6621 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6623 (t1
->t
& VT_BTYPE
) == VT_INT
6625 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6627 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6630 /* compute maximum number of chars wanted */
6632 cstr_len
= tokc
.str
.size
;
6634 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6637 if (n
>= 0 && nb
> (n
- len
))
6641 tcc_warning("initializer-string for array is too long");
6642 /* in order to go faster for common case (char
6643 string in global variable, we handle it
6645 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6647 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
6651 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6653 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6655 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
6662 /* only add trailing zero if enough storage (no
6663 warning in this case since it is standard) */
6664 if (n
< 0 || len
< n
) {
6667 init_putv(t1
, sec
, c
+ (len
* size1
));
6678 while (tok
!= '}' || have_elem
) {
6679 len
= decl_designator(type
, sec
, c
, &f
, size_only
, len
);
6681 if (type
->t
& VT_ARRAY
) {
6683 /* special test for multi dimensional arrays (may not
6684 be strictly correct if designators are used at the
6686 if (no_oblock
&& len
>= n
*size1
)
6689 if (s
->type
.t
== VT_UNION
)
6693 if (no_oblock
&& f
== NULL
)
6702 /* put zeros at the end */
6703 if (!size_only
&& len
< n
*size1
)
6704 init_putz(sec
, c
+ len
, n
*size1
- len
);
6707 /* patch type size if needed, which happens only for array types */
6709 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
6710 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6713 if (first
|| tok
== '{') {
6721 } else if (tok
== '{') {
6723 decl_initializer(type
, sec
, c
, first
, size_only
);
6725 } else if (size_only
) {
6726 /* If we supported only ISO C we wouldn't have to accept calling
6727 this on anything than an array size_only==1 (and even then
6728 only on the outermost level, so no recursion would be needed),
6729 because initializing a flex array member isn't supported.
6730 But GNU C supports it, so we need to recurse even into
6731 subfields of structs and arrays when size_only is set. */
6732 /* just skip expression */
6733 skip_or_save_block(NULL
);
6736 /* This should happen only when we haven't parsed
6737 the init element above for fear of committing a
6738 string constant to memory too early. */
6739 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
6740 expect("string constant");
6741 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6743 init_putv(type
, sec
, c
);
6747 /* parse an initializer for type 't' if 'has_init' is non zero, and
6748 allocate space in local or global data space ('r' is either
6749 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6750 variable 'v' of scope 'scope' is declared before initializers
6751 are parsed. If 'v' is zero, then a reference to the new object
6752 is put in the value stack. If 'has_init' is 2, a special parsing
6753 is done to handle string constants. */
6754 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
6755 int has_init
, int v
, int scope
)
6757 int size
, align
, addr
;
6758 TokenString
*init_str
= NULL
;
6761 Sym
*flexible_array
;
6763 int saved_nocode_wanted
= nocode_wanted
;
6764 #ifdef CONFIG_TCC_BCHECK
6765 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
6768 if (type
->t
& VT_STATIC
)
6769 nocode_wanted
|= NODATA_WANTED
? 0x40000000 : 0x80000000;
6771 flexible_array
= NULL
;
6772 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6773 Sym
*field
= type
->ref
->next
;
6776 field
= field
->next
;
6777 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
6778 flexible_array
= field
;
6782 size
= type_size(type
, &align
);
6783 /* If unknown size, we must evaluate it before
6784 evaluating initializers because
6785 initializers can generate global data too
6786 (e.g. string pointers or ISOC99 compound
6787 literals). It also simplifies local
6788 initializers handling */
6789 if (size
< 0 || (flexible_array
&& has_init
)) {
6791 tcc_error("unknown type size");
6792 /* get all init string */
6793 if (has_init
== 2) {
6794 init_str
= tok_str_alloc();
6795 /* only get strings */
6796 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6797 tok_str_add_tok(init_str
);
6800 tok_str_add(init_str
, -1);
6801 tok_str_add(init_str
, 0);
6803 skip_or_save_block(&init_str
);
6808 begin_macro(init_str
, 1);
6810 decl_initializer(type
, NULL
, 0, 1, 1);
6811 /* prepare second initializer parsing */
6812 macro_ptr
= init_str
->str
;
6815 /* if still unknown size, error */
6816 size
= type_size(type
, &align
);
6818 tcc_error("unknown type size");
6820 /* If there's a flex member and it was used in the initializer
6822 if (flexible_array
&&
6823 flexible_array
->type
.ref
->c
> 0)
6824 size
+= flexible_array
->type
.ref
->c
6825 * pointed_size(&flexible_array
->type
);
6826 /* take into account specified alignment if bigger */
6827 if (ad
->a
.aligned
) {
6828 int speca
= 1 << (ad
->a
.aligned
- 1);
6831 } else if (ad
->a
.packed
) {
6836 size
= 0, align
= 1;
6838 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
6840 #ifdef CONFIG_TCC_BCHECK
6841 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
6845 loc
= (loc
- size
) & -align
;
6847 #ifdef CONFIG_TCC_BCHECK
6848 /* handles bounds */
6849 /* XXX: currently, since we do only one pass, we cannot track
6850 '&' operators, so we add only arrays */
6851 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
6853 /* add padding between regions */
6855 /* then add local bound info */
6856 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
6857 bounds_ptr
[0] = addr
;
6858 bounds_ptr
[1] = size
;
6862 /* local variable */
6863 #ifdef CONFIG_TCC_ASM
6864 if (ad
->asm_label
) {
6865 int reg
= asm_parse_regvar(ad
->asm_label
);
6867 r
= (r
& ~VT_VALMASK
) | reg
;
6870 sym
= sym_push(v
, type
, r
, addr
);
6873 /* push local reference */
6874 vset(type
, r
, addr
);
6877 if (v
&& scope
== VT_CONST
) {
6878 /* see if the symbol was already defined */
6881 patch_storage(sym
, ad
, type
);
6882 if (sym
->type
.t
& VT_EXTERN
) {
6883 /* if the variable is extern, it was not allocated */
6884 sym
->type
.t
&= ~VT_EXTERN
;
6885 /* set array size if it was omitted in extern
6887 if ((sym
->type
.t
& VT_ARRAY
) &&
6888 sym
->type
.ref
->c
< 0 &&
6890 sym
->type
.ref
->c
= type
->ref
->c
;
6891 } else if (!has_init
) {
6892 /* we accept several definitions of the same
6893 global variable. this is tricky, because we
6894 must play with the SHN_COMMON type of the symbol */
6895 /* no init data, we won't add more to the symbol */
6897 } else if (sym
->c
) {
6899 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
6900 if (esym
->st_shndx
== data_section
->sh_num
)
6901 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6906 /* allocate symbol in corresponding section */
6911 else if (tcc_state
->nocommon
)
6916 addr
= section_add(sec
, size
, align
);
6917 #ifdef CONFIG_TCC_BCHECK
6918 /* add padding if bound check */
6920 section_add(sec
, 1, 1);
6923 addr
= align
; /* SHN_COMMON is special, symbol value is align */
6924 sec
= common_section
;
6929 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
6930 patch_storage(sym
, ad
, NULL
);
6932 /* Local statics have a scope until now (for
6933 warnings), remove it here. */
6935 /* update symbol definition */
6936 put_extern_sym(sym
, sec
, addr
, size
);
6938 /* push global reference */
6939 sym
= get_sym_ref(type
, sec
, addr
, size
);
6940 vpushsym(type
, sym
);
6944 #ifdef CONFIG_TCC_BCHECK
6945 /* handles bounds now because the symbol must be defined
6946 before for the relocation */
6950 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
6951 /* then add global bound info */
6952 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
6953 bounds_ptr
[0] = 0; /* relocated */
6954 bounds_ptr
[1] = size
;
6959 if (type
->t
& VT_VLA
) {
6965 /* save current stack pointer */
6966 if (vlas_in_scope
== 0) {
6967 if (vla_sp_root_loc
== -1)
6968 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
6969 gen_vla_sp_save(vla_sp_root_loc
);
6972 vla_runtime_type_size(type
, &a
);
6973 gen_vla_alloc(type
, a
);
6974 gen_vla_sp_save(addr
);
6978 } else if (has_init
) {
6979 size_t oldreloc_offset
= 0;
6980 if (sec
&& sec
->reloc
)
6981 oldreloc_offset
= sec
->reloc
->data_offset
;
6982 decl_initializer(type
, sec
, addr
, 1, 0);
6983 if (sec
&& sec
->reloc
)
6984 squeeze_multi_relocs(sec
, oldreloc_offset
);
6985 /* patch flexible array member size back to -1, */
6986 /* for possible subsequent similar declarations */
6988 flexible_array
->type
.ref
->c
= -1;
6992 /* restore parse state if needed */
6998 nocode_wanted
= saved_nocode_wanted
;
7001 /* parse a function defined by symbol 'sym' and generate its code in
7002 'cur_text_section' */
7003 static void gen_function(Sym
*sym
)
7006 ind
= cur_text_section
->data_offset
;
7007 /* NOTE: we patch the symbol size later */
7008 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7009 funcname
= get_tok_str(sym
->v
, NULL
);
7011 /* Initialize VLA state */
7013 vla_sp_root_loc
= -1;
7014 /* put debug symbol */
7015 tcc_debug_funcstart(tcc_state
, sym
);
7016 /* push a dummy symbol to enable local sym storage */
7017 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7018 local_scope
= 1; /* for function parameters */
7019 gfunc_prolog(&sym
->type
);
7022 block(NULL
, NULL
, 0);
7026 cur_text_section
->data_offset
= ind
;
7027 label_pop(&global_label_stack
, NULL
, 0);
7028 /* reset local stack */
7030 sym_pop(&local_stack
, NULL
, 0);
7031 /* end of function */
7032 /* patch symbol size */
7033 ((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
].st_size
=
7035 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7036 /* It's better to crash than to generate wrong code */
7037 cur_text_section
= NULL
;
7038 funcname
= ""; /* for safety */
7039 func_vt
.t
= VT_VOID
; /* for safety */
7040 func_var
= 0; /* for safety */
7041 ind
= 0; /* for safety */
7042 nocode_wanted
= 0x80000000;
7046 static void gen_inline_functions(TCCState
*s
)
7049 int inline_generated
, i
, ln
;
7050 struct InlineFunc
*fn
;
7052 ln
= file
->line_num
;
7053 /* iterate while inline function are referenced */
7055 inline_generated
= 0;
7056 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7057 fn
= s
->inline_fns
[i
];
7059 if (sym
&& sym
->c
) {
7060 /* the function was used: generate its code and
7061 convert it to a normal function */
7064 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7065 sym
->type
.t
&= ~VT_INLINE
;
7067 begin_macro(fn
->func_str
, 1);
7069 cur_text_section
= text_section
;
7073 inline_generated
= 1;
7076 } while (inline_generated
);
7077 file
->line_num
= ln
;
/* Release the saved token streams of inline functions that were never
   generated, then free the InlineFunc array itself.
   NOTE(review): extraction dropped lines 7081-7082 (opening brace and the
   'int i;' declaration), 7086, 7088 and the closing brace. */
7080 ST_FUNC
void free_inline_functions(TCCState
*s
)
7083 /* free tokens of unused inline functions */
7084 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7085 struct InlineFunc
*fn
= s
->inline_fns
[i
];
/* tok_str_free presumably also tolerates/handles the already-generated case;
   the guard that likely preceded this call (line 7086) is missing — TODO
   confirm against upstream tccgen.c */
7087 tok_str_free(fn
->func_str
);
/* frees each fn entry and the array, and zeroes the count */
7089 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7092 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7093 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* NOTE(review): this is the top-level declaration parser. The extraction is
   missing many original lines throughout (local declarations, braces, and
   whole statements — e.g. 7095-7101, 7104, 7107-7111, 7114-7116, 7120-7127,
   and numerous others), so the visible text is a partial skeleton. Comments
   below annotate only what survives; do not infer control structure from the
   visible braces alone. */
7094 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
/* no base type present: handle the special non-declaration cases below */
7102 if (!parse_btype(&btype
, &ad
)) {
7103 if (is_for_loop_init
)
7105 /* skip redundant ';' if not in old parameter decl scope */
7106 if (tok
== ';' && l
!= VT_CMP
) {
7112 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7113 /* global asm block */
7117 if (tok
>= TOK_UIDENT
) {
7118 /* special test for old K&R protos without explicit int
7119 type. Only accepted when defining global data */
7123 expect("declaration");
/* warn about 'struct { ... };' style declarations that declare nothing:
   anonymous tag (>= SYM_FIRST_ANOM) and no field flag */
7128 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7129 int v
= btype
.ref
->v
;
7130 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7131 tcc_warning("unnamed struct/union that defines no instances");
7135 if (IS_ENUM(btype
.t
)) {
7140 while (1) { /* iterate thru each declaration */
7142 /* If the base type itself was an array type of unspecified
7143 size (like in 'typedef int arr[]; arr x = {1};') then
7144 we will overwrite the unknown size by the real one for
7145 this decl. We need to unshare the ref symbol holding
7147 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
/* unshare: push a fresh anonymous array ref so the size patch below does
   not leak into other users of the typedef */
7148 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
/* parse the declarator; 'v' receives the declared identifier token */
7150 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed type — presumably under a missing #ifdef or
   verbosity guard (lines 7151-7153 absent); TODO confirm */
7154 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7155 printf("type = '%s'\n", buf
);
7158 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7159 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
7160 tcc_error("function without file scope cannot be static");
7162 /* if old style function prototype, we accept a
/* recurse with l == VT_CMP to parse the K&R-style parameter declaration
   list that may follow an old-style prototype */
7165 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7166 decl0(VT_CMP
, 0, sym
);
/* GNU extension: asm("label") renaming after the declarator */
7169 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7170 ad
.asm_label
= asm_label_instr();
7171 /* parse one last attribute list, after asm label */
7172 parse_attribute(&ad
);
7177 #ifdef TCC_TARGET_PE
/* PE (Windows) targets: validate dllimport/dllexport combinations */
7178 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7179 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7180 tcc_error("cannot have dll linkage with static or typedef");
7181 if (ad
.a
.dllimport
) {
7182 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
/* imported data objects are necessarily extern */
7185 type
.t
|= VT_EXTERN
;
7191 tcc_error("cannot use local functions");
7192 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7193 expect("function definition");
7195 /* reject abstract declarators in function definition
7196 make old style params without decl have int type */
7198 while ((sym
= sym
->next
) != NULL
) {
7199 if (!(sym
->v
& ~SYM_FIELD
))
7200 expect("identifier");
/* K&R parameters with no declaration default to int */
7201 if (sym
->type
.t
== VT_VOID
)
7202 sym
->type
= int_type
;
7205 /* XXX: cannot do better now: convert extern line to static inline */
7206 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7207 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
/* a previous declaration exists: merge attributes from the prototype */
7212 if ((sym
->type
.t
& VT_BTYPE
) != VT_FUNC
)
7215 ref
= sym
->type
.ref
;
7217 /* use func_call from prototype if not defined */
7218 if (ref
->f
.func_call
!= FUNC_CDECL
7219 && type
.ref
->f
.func_call
== FUNC_CDECL
)
7220 type
.ref
->f
.func_call
= ref
->f
.func_call
;
7222 /* use static from prototype */
7223 if (sym
->type
.t
& VT_STATIC
)
7224 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7226 /* If the definition has no visibility use the
7227 one from prototype. */
7228 if (!type
.ref
->a
.visibility
)
7229 type
.ref
->a
.visibility
= ref
->a
.visibility
;
7230 /* apply other storage attributes from prototype */
7231 type
.ref
->a
.dllexport
|= ref
->a
.dllexport
;
7232 type
.ref
->a
.weak
|= ref
->a
.weak
;
7234 if (!is_compatible_types(&sym
->type
, &type
)) {
7236 tcc_error("incompatible types for redefinition of '%s'",
7237 get_tok_str(v
, NULL
));
/* func_body set means a definition was already emitted */
7239 if (ref
->f
.func_body
)
7240 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
7241 /* if symbol is already defined, then put complete type */
7245 /* put function symbol */
7246 sym
= global_identifier_push(v
, type
.t
, 0);
7247 sym
->type
.ref
= type
.ref
;
/* mark as defined and give it a constant symbol-relative address */
7250 sym
->type
.ref
->f
.func_body
= 1;
7251 sym
->r
= VT_SYM
| VT_CONST
;
7252 patch_storage(sym
, &ad
, NULL
);
7254 /* static inline functions are just recorded as a kind
7255 of macro. Their code will be emitted at the end of
7256 the compilation unit only if they are used */
7257 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7258 (VT_INLINE
| VT_STATIC
)) {
7259 struct InlineFunc
*fn
;
7260 const char *filename
;
7262 filename
= file
? file
->filename
: "";
/* InlineFunc presumably ends in a flexible/trailing char array holding
   the filename, hence sizeof *fn + strlen(filename) — TODO confirm */
7263 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7264 strcpy(fn
->filename
, filename
);
/* save (not compile) the body's tokens for later gen_inline_functions */
7266 skip_or_save_block(&fn
->func_str
);
7267 dynarray_add(&tcc_state
->inline_fns
,
7268 &tcc_state
->nb_inline_fns
, fn
);
7270 /* compute text section */
7271 cur_text_section
= ad
.section
;
7272 if (!cur_text_section
)
7273 cur_text_section
= text_section
;
/* l == VT_CMP branch: old-style parameter declaration list */
7279 /* find parameter in function parameter list */
7280 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7281 if ((sym
->v
& ~SYM_FIELD
) == v
)
7283 tcc_error("declaration for parameter '%s' but no such parameter",
7284 get_tok_str(v
, NULL
));
7286 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7287 tcc_error("storage class specified for '%s'",
7288 get_tok_str(v
, NULL
));
/* VT_VOID here marks a parameter not yet declared; anything else means a
   duplicate declaration for the same parameter */
7289 if (sym
->type
.t
!= VT_VOID
)
7290 tcc_error("redefinition of parameter '%s'",
7291 get_tok_str(v
, NULL
));
7292 convert_parameter_type(&type
);
7294 } else if (type
.t
& VT_TYPEDEF
) {
7295 /* save typedefed type */
7296 /* XXX: test storage specifiers ? */
/* redefinition of a typedef in the same scope is only allowed when the
   types are compatible */
7298 if (sym
&& sym
->sym_scope
== local_scope
) {
7299 if (!is_compatible_types(&sym
->type
, &type
)
7300 || !(sym
->type
.t
& VT_TYPEDEF
))
7301 tcc_error("incompatible redefinition of '%s'",
7302 get_tok_str(v
, NULL
));
7305 sym
= sym_push(v
, &type
, 0, 0);
/* plain variable or function declaration (no definition, no typedef) */
7311 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7312 /* external function definition */
7313 /* specific case for func_call attribute */
7315 } else if (!(type
.t
& VT_ARRAY
)) {
7316 /* not lvalue if array */
7317 r
|= lvalue_type(type
.t
);
7319 has_init
= (tok
== '=');
7320 if (has_init
&& (type
.t
& VT_VLA
))
7321 tcc_error("variable length array cannot be initialized");
/* external when: extern without a global init, or a function type, or a
   tentative unsized static global array */
7322 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
7323 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7324 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7325 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7326 /* external variable or function */
7327 /* NOTE: as GCC, uninitialized global static
7328 arrays of null size are considered as
7330 sym
= external_sym(v
, &type
, r
, &ad
);
7331 if (ad
.alias_target
) {
/* __attribute__((alias("target"))): bind this symbol to the target's
   already-emitted section/value/size */
7335 alias_target
= sym_find(ad
.alias_target
);
7336 if (!alias_target
|| !alias_target
->c
)
7337 tcc_error("unsupported forward __alias__ attribute");
7338 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[alias_target
->c
];
7339 tsec
.sh_num
= esym
->st_shndx
;
7340 /* Local statics have a scope until now (for
7341 warnings), remove it here. */
7343 put_extern_sym2(sym
, &tsec
, esym
->st_value
, esym
->st_size
, 0);
7346 if (type
.t
& VT_STATIC
)
/* allocate storage and parse the initializer, if any */
7352 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7356 if (is_for_loop_init
)
7369 static void decl(int l
)
7374 /* ------------------------------------------------------------------------- */