2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
28 anon_sym: anonymous symbol index
30 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
32 ST_DATA Sym
*sym_free_first
;
33 ST_DATA
void **sym_pools
;
34 ST_DATA
int nb_sym_pools
;
36 ST_DATA Sym
*global_stack
;
37 ST_DATA Sym
*local_stack
;
38 ST_DATA Sym
*define_stack
;
39 ST_DATA Sym
*global_label_stack
;
40 ST_DATA Sym
*local_label_stack
;
41 static int local_scope
;
43 static int section_sym
;
45 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
46 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
47 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
49 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
51 ST_DATA
int const_wanted
; /* true if constant wanted */
52 ST_DATA
int nocode_wanted
; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
56 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
57 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
59 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
60 ST_DATA
const char *funcname
;
63 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
65 ST_DATA
struct switch_t
{
69 } **p
; int n
; /* list of case ranges */
70 int def_sym
; /* default symbol */
71 } *cur_switch
; /* current switch */
73 /* ------------------------------------------------------------------------- */
75 static void gen_cast(CType
*type
);
76 static void gen_cast_s(int t
);
77 static inline CType
*pointed_type(CType
*type
);
78 static int is_compatible_types(CType
*type1
, CType
*type2
);
79 static int parse_btype(CType
*type
, AttributeDef
*ad
);
80 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
81 static void parse_expr_type(CType
*type
);
82 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
83 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
84 static void block(int *bsym
, int *csym
, int is_expr
);
85 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
86 static void decl(int l
);
87 static int decl0(int l
, int is_for_loop_init
, Sym
*);
88 static void expr_eq(void);
89 static void vla_runtime_type_size(CType
*type
, int *a
);
90 static void vla_sp_restore(void);
91 static void vla_sp_restore_root(void);
92 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
93 static inline int64_t expr_const64(void);
94 static void vpush64(int ty
, unsigned long long v
);
95 static void vpush(CType
*type
);
96 static int gvtst(int inv
, int t
);
97 static void gen_inline_functions(TCCState
*s
);
98 static void skip_or_save_block(TokenString
**str
);
99 static void gv_dup(void);
101 ST_INLN
int is_float(int t
)
105 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
108 /* we use our own 'finite' function to avoid potential problems with
109 non standard math libs */
110 /* XXX: endianness dependent */
111 ST_FUNC
int ieee_finite(double d
)
114 memcpy(p
, &d
, sizeof(double));
115 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
118 /* compiling intel long double natively */
119 #if (defined __i386__ || defined __x86_64__) \
120 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
121 # define TCC_IS_NATIVE_387
124 ST_FUNC
void test_lvalue(void)
126 if (!(vtop
->r
& VT_LVAL
))
130 ST_FUNC
void check_vstack(void)
133 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
136 /* ------------------------------------------------------------------------- */
137 /* vstack debugging aid */
140 void pv (const char *lbl
, int a
, int b
)
143 for (i
= a
; i
< a
+ b
; ++i
) {
144 SValue
*p
= &vtop
[-i
];
145 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
146 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
151 /* ------------------------------------------------------------------------- */
152 /* start of translation unit info */
153 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
158 /* file info: full path + filename */
159 section_sym
= put_elf_sym(symtab_section
, 0, 0,
160 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
161 text_section
->sh_num
, NULL
);
162 getcwd(buf
, sizeof(buf
));
164 normalize_slashes(buf
);
166 pstrcat(buf
, sizeof(buf
), "/");
167 put_stabs_r(buf
, N_SO
, 0, 0,
168 text_section
->data_offset
, text_section
, section_sym
);
169 put_stabs_r(file
->filename
, N_SO
, 0, 0,
170 text_section
->data_offset
, text_section
, section_sym
);
175 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
176 symbols can be safely used */
177 put_elf_sym(symtab_section
, 0, 0,
178 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
179 SHN_ABS
, file
->filename
);
182 /* put end of translation unit info */
183 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
187 put_stabs_r(NULL
, N_SO
, 0, 0,
188 text_section
->data_offset
, text_section
, section_sym
);
192 /* generate line number info */
193 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
197 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
198 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
200 last_line_num
= file
->line_num
;
204 /* put function symbol */
205 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
213 /* XXX: we put here a dummy type */
214 snprintf(buf
, sizeof(buf
), "%s:%c1",
215 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
216 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
217 cur_text_section
, sym
->c
);
218 /* //gr gdb wants a line at the function */
219 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
225 /* put function size */
226 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
230 put_stabn(N_FUN
, 0, 0, size
);
233 /* ------------------------------------------------------------------------- */
234 ST_FUNC
int tccgen_compile(TCCState
*s1
)
236 cur_text_section
= NULL
;
238 anon_sym
= SYM_FIRST_ANOM
;
241 nocode_wanted
= 0x80000000;
243 /* define some often used types */
245 char_pointer_type
.t
= VT_BYTE
;
246 mk_pointer(&char_pointer_type
);
248 size_type
.t
= VT_INT
| VT_UNSIGNED
;
249 ptrdiff_type
.t
= VT_INT
;
251 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
252 ptrdiff_type
.t
= VT_LLONG
;
254 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
255 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
257 func_old_type
.t
= VT_FUNC
;
258 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
259 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
260 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
264 #ifdef TCC_TARGET_ARM
269 printf("%s: **** new file\n", file
->filename
);
272 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
275 gen_inline_functions(s1
);
277 /* end of translation unit info */
282 /* ------------------------------------------------------------------------- */
283 ST_FUNC ElfSym
*elfsym(Sym
*s
)
287 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
290 /* apply storage attributes to Elf symbol */
291 ST_FUNC
void update_storage(Sym
*sym
)
294 int sym_bind
, old_sym_bind
;
300 if (sym
->a
.visibility
)
301 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
304 if (sym
->type
.t
& VT_STATIC
)
305 sym_bind
= STB_LOCAL
;
306 else if (sym
->a
.weak
)
309 sym_bind
= STB_GLOBAL
;
310 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
311 if (sym_bind
!= old_sym_bind
) {
312 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
316 if (sym
->a
.dllimport
)
317 esym
->st_other
|= ST_PE_IMPORT
;
318 if (sym
->a
.dllexport
)
319 esym
->st_other
|= ST_PE_EXPORT
;
323 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
324 get_tok_str(sym
->v
, NULL
),
325 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
333 /* ------------------------------------------------------------------------- */
334 /* update sym->c so that it points to an external symbol in section
335 'section' with value 'value' */
337 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
338 addr_t value
, unsigned long size
,
339 int can_add_underscore
)
341 int sym_type
, sym_bind
, info
, other
, t
;
345 #ifdef CONFIG_TCC_BCHECK
350 name
= get_tok_str(sym
->v
, NULL
);
351 #ifdef CONFIG_TCC_BCHECK
352 if (tcc_state
->do_bounds_check
) {
353 /* XXX: avoid doing that for statics ? */
354 /* if bound checking is activated, we change some function
355 names by adding the "__bound" prefix */
358 /* XXX: we rely only on malloc hooks */
371 strcpy(buf
, "__bound_");
379 if ((t
& VT_BTYPE
) == VT_FUNC
) {
381 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
382 sym_type
= STT_NOTYPE
;
384 sym_type
= STT_OBJECT
;
387 sym_bind
= STB_LOCAL
;
389 sym_bind
= STB_GLOBAL
;
392 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
393 Sym
*ref
= sym
->type
.ref
;
394 if (ref
->a
.nodecorate
) {
395 can_add_underscore
= 0;
397 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
398 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
400 other
|= ST_PE_STDCALL
;
401 can_add_underscore
= 0;
405 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
407 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
411 name
= get_tok_str(sym
->asm_label
, NULL
);
412 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
413 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
416 esym
->st_value
= value
;
417 esym
->st_size
= size
;
418 esym
->st_shndx
= sh_num
;
423 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
424 addr_t value
, unsigned long size
)
426 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
427 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
430 /* add a new relocation entry to symbol 'sym' in section 's' */
431 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
436 if (nocode_wanted
&& s
== cur_text_section
)
441 put_extern_sym(sym
, NULL
, 0, 0);
445 /* now we can add ELF relocation info */
446 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
450 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
452 greloca(s
, sym
, offset
, type
, 0);
456 /* ------------------------------------------------------------------------- */
457 /* symbol allocator */
458 static Sym
*__sym_malloc(void)
460 Sym
*sym_pool
, *sym
, *last_sym
;
463 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
464 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
466 last_sym
= sym_free_first
;
468 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
469 sym
->next
= last_sym
;
473 sym_free_first
= last_sym
;
477 static inline Sym
*sym_malloc(void)
481 sym
= sym_free_first
;
483 sym
= __sym_malloc();
484 sym_free_first
= sym
->next
;
487 sym
= tcc_malloc(sizeof(Sym
));
492 ST_INLN
void sym_free(Sym
*sym
)
495 sym
->next
= sym_free_first
;
496 sym_free_first
= sym
;
502 /* push, without hashing */
503 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
508 memset(s
, 0, sizeof *s
);
518 /* find a symbol and return its associated structure. 's' is the top
519 of the symbol stack */
520 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
532 /* structure lookup */
533 ST_INLN Sym
*struct_find(int v
)
536 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
538 return table_ident
[v
]->sym_struct
;
541 /* find an identifier */
542 ST_INLN Sym
*sym_find(int v
)
545 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
547 return table_ident
[v
]->sym_identifier
;
550 /* push a given symbol on the symbol stack */
551 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
560 s
= sym_push2(ps
, v
, type
->t
, c
);
561 s
->type
.ref
= type
->ref
;
563 /* don't record fields or anonymous symbols */
565 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
566 /* record symbol in token array */
567 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
569 ps
= &ts
->sym_struct
;
571 ps
= &ts
->sym_identifier
;
574 s
->sym_scope
= local_scope
;
575 if (s
->prev_tok
&& s
->prev_tok
->sym_scope
== s
->sym_scope
)
576 tcc_error("redeclaration of '%s'",
577 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
582 /* push a global identifier */
583 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
586 s
= sym_push2(&global_stack
, v
, t
, c
);
587 /* don't record anonymous symbol */
588 if (v
< SYM_FIRST_ANOM
) {
589 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
590 /* modify the top most local identifier, so that
591 sym_identifier will point to 's' when popped */
592 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
593 ps
= &(*ps
)->prev_tok
;
600 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
601 pop them yet from the list, but do remove them from the token array. */
602 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
612 /* remove symbol in token array */
614 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
615 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
617 ps
= &ts
->sym_struct
;
619 ps
= &ts
->sym_identifier
;
630 /* ------------------------------------------------------------------------- */
632 static void vsetc(CType
*type
, int r
, CValue
*vc
)
636 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
637 tcc_error("memory full (vstack)");
638 /* cannot let cpu flags if other instruction are generated. Also
639 avoid leaving VT_JMP anywhere except on the top of the stack
640 because it would complicate the code generator.
642 Don't do this when nocode_wanted. vtop might come from
643 !nocode_wanted regions (see 88_codeopt.c) and transforming
644 it to a register without actually generating code is wrong
645 as their value might still be used for real. All values
646 we push under nocode_wanted will eventually be popped
647 again, so that the VT_CMP/VT_JMP value will be in vtop
648 when code is unsuppressed again.
650 Same logic below in vswap(); */
651 if (vtop
>= vstack
&& !nocode_wanted
) {
652 v
= vtop
->r
& VT_VALMASK
;
653 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
665 ST_FUNC
void vswap(void)
668 /* cannot vswap cpu flags. See comment at vsetc() above */
669 if (vtop
>= vstack
&& !nocode_wanted
) {
670 int v
= vtop
->r
& VT_VALMASK
;
671 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
679 /* pop stack value */
680 ST_FUNC
void vpop(void)
683 v
= vtop
->r
& VT_VALMASK
;
684 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
685 /* for x86, we need to pop the FP stack */
687 o(0xd8dd); /* fstp %st(0) */
690 if (v
== VT_JMP
|| v
== VT_JMPI
) {
691 /* need to put correct jump if && or || without test */
697 /* push constant of type "type" with useless value */
698 ST_FUNC
void vpush(CType
*type
)
700 vset(type
, VT_CONST
, 0);
703 /* push integer constant */
704 ST_FUNC
void vpushi(int v
)
708 vsetc(&int_type
, VT_CONST
, &cval
);
711 /* push a pointer sized constant */
712 static void vpushs(addr_t v
)
716 vsetc(&size_type
, VT_CONST
, &cval
);
719 /* push arbitrary 64bit constant */
720 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
727 vsetc(&ctype
, VT_CONST
, &cval
);
730 /* push long long constant */
731 static inline void vpushll(long long v
)
733 vpush64(VT_LLONG
, v
);
736 ST_FUNC
void vset(CType
*type
, int r
, int v
)
741 vsetc(type
, r
, &cval
);
744 static void vseti(int r
, int v
)
752 ST_FUNC
void vpushv(SValue
*v
)
754 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
755 tcc_error("memory full (vstack)");
760 static void vdup(void)
765 /* rotate n first stack elements to the bottom
766 I1 ... In -> I2 ... In I1 [top is right]
768 ST_FUNC
void vrotb(int n
)
779 /* rotate the n elements before entry e towards the top
780 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
782 ST_FUNC
void vrote(SValue
*e
, int n
)
788 for(i
= 0;i
< n
- 1; i
++)
793 /* rotate n first stack elements to the top
794 I1 ... In -> In I1 ... I(n-1) [top is right]
796 ST_FUNC
void vrott(int n
)
801 /* push a symbol value of TYPE */
802 static inline void vpushsym(CType
*type
, Sym
*sym
)
806 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
810 /* Return a static symbol pointing to a section */
811 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
817 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
818 sym
->type
.ref
= type
->ref
;
819 sym
->r
= VT_CONST
| VT_SYM
;
820 put_extern_sym(sym
, sec
, offset
, size
);
824 /* push a reference to a section offset by adding a dummy symbol */
825 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
827 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
830 /* define a new external reference to a symbol 'v' of type 'u' */
831 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
837 /* push forward reference */
838 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
839 s
->type
.ref
= type
->ref
;
840 s
->r
= r
| VT_CONST
| VT_SYM
;
841 } else if (IS_ASM_SYM(s
)) {
842 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
843 s
->type
.ref
= type
->ref
;
849 /* Merge some type attributes. */
850 static void patch_type(Sym
*sym
, CType
*type
)
852 if (!(type
->t
& VT_EXTERN
)) {
853 if (!(sym
->type
.t
& VT_EXTERN
))
854 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
855 sym
->type
.t
&= ~VT_EXTERN
;
858 if (IS_ASM_SYM(sym
)) {
859 /* stay static if both are static */
860 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
861 sym
->type
.ref
= type
->ref
;
864 if (!is_compatible_types(&sym
->type
, type
)) {
865 tcc_error("incompatible types for redefinition of '%s'",
866 get_tok_str(sym
->v
, NULL
));
868 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
869 int static_proto
= sym
->type
.t
& VT_STATIC
;
870 /* warn if static follows non-static function declaration */
871 if ((type
->t
& VT_STATIC
) && !static_proto
&& !(type
->t
& VT_INLINE
))
872 tcc_warning("static storage ignored for redefinition of '%s'",
873 get_tok_str(sym
->v
, NULL
));
875 if (0 == (type
->t
& VT_EXTERN
)) {
876 /* put complete type, use static from prototype */
877 sym
->type
.t
= (type
->t
& ~VT_STATIC
) | static_proto
;
878 if (type
->t
& VT_INLINE
)
879 sym
->type
.t
= type
->t
;
880 sym
->type
.ref
= type
->ref
;
884 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
885 /* set array size if it was omitted in extern declaration */
886 if (sym
->type
.ref
->c
< 0)
887 sym
->type
.ref
->c
= type
->ref
->c
;
888 else if (sym
->type
.ref
->c
!= type
->ref
->c
)
889 tcc_error("conflicting type for '%s'", get_tok_str(sym
->v
, NULL
));
891 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
892 tcc_warning("storage mismatch for redefinition of '%s'",
893 get_tok_str(sym
->v
, NULL
));
898 /* Merge some storage attributes. */
899 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
902 patch_type(sym
, type
);
905 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
906 tcc_error("incompatible dll linkage for redefinition of '%s'",
907 get_tok_str(sym
->v
, NULL
));
908 sym
->a
.dllexport
|= ad
->a
.dllexport
;
910 sym
->a
.weak
|= ad
->a
.weak
;
911 if (ad
->a
.visibility
) {
912 int vis
= sym
->a
.visibility
;
913 int vis2
= ad
->a
.visibility
;
914 if (vis
== STV_DEFAULT
)
916 else if (vis2
!= STV_DEFAULT
)
917 vis
= (vis
< vis2
) ? vis
: vis2
;
918 sym
->a
.visibility
= vis
;
921 sym
->a
.aligned
= ad
->a
.aligned
;
923 sym
->asm_label
= ad
->asm_label
;
927 /* define a new external reference to a symbol 'v' */
928 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
933 /* push forward reference */
934 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
935 s
->type
.t
|= VT_EXTERN
;
939 if (s
->type
.ref
== func_old_type
.ref
) {
940 s
->type
.ref
= type
->ref
;
941 s
->r
= r
| VT_CONST
| VT_SYM
;
942 s
->type
.t
|= VT_EXTERN
;
944 patch_storage(s
, ad
, type
);
949 /* push a reference to global symbol v */
950 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
952 vpushsym(type
, external_global_sym(v
, type
, 0));
955 /* save registers up to (vtop - n) stack entry */
956 ST_FUNC
void save_regs(int n
)
959 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
963 /* save r to the memory stack, and mark it as being free */
964 ST_FUNC
void save_reg(int r
)
966 save_reg_upstack(r
, 0);
969 /* save r to the memory stack, and mark it as being free,
970 if seen up to (vtop - n) stack entry */
971 ST_FUNC
void save_reg_upstack(int r
, int n
)
973 int l
, saved
, size
, align
;
977 if ((r
&= VT_VALMASK
) >= VT_CONST
)
982 /* modify all stack values */
985 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
986 if ((p
->r
& VT_VALMASK
) == r
||
987 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
988 /* must save value on stack if not already done */
990 /* NOTE: must reload 'r' because r might be equal to r2 */
991 r
= p
->r
& VT_VALMASK
;
992 /* store register in the stack */
994 if ((p
->r
& VT_LVAL
) ||
995 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
997 type
= &char_pointer_type
;
1001 size
= type_size(type
, &align
);
1002 loc
= (loc
- size
) & -align
;
1003 sv
.type
.t
= type
->t
;
1004 sv
.r
= VT_LOCAL
| VT_LVAL
;
1007 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1008 /* x86 specific: need to pop fp register ST0 if saved */
1009 if (r
== TREG_ST0
) {
1010 o(0xd8dd); /* fstp %st(0) */
1014 /* special long long case */
1015 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
1023 /* mark that stack entry as being saved on the stack */
1024 if (p
->r
& VT_LVAL
) {
1025 /* also clear the bounded flag because the
1026 relocation address of the function was stored in
1028 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1030 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1038 #ifdef TCC_TARGET_ARM
1039 /* find a register of class 'rc2' with at most one reference on stack.
1040 * If none, call get_reg(rc) */
1041 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1046 for(r
=0;r
<NB_REGS
;r
++) {
1047 if (reg_classes
[r
] & rc2
) {
1050 for(p
= vstack
; p
<= vtop
; p
++) {
1051 if ((p
->r
& VT_VALMASK
) == r
||
1052 (p
->r2
& VT_VALMASK
) == r
)
1063 /* find a free register of class 'rc'. If none, save one register */
1064 ST_FUNC
int get_reg(int rc
)
1069 /* find a free register */
1070 for(r
=0;r
<NB_REGS
;r
++) {
1071 if (reg_classes
[r
] & rc
) {
1074 for(p
=vstack
;p
<=vtop
;p
++) {
1075 if ((p
->r
& VT_VALMASK
) == r
||
1076 (p
->r2
& VT_VALMASK
) == r
)
1084 /* no register left : free the first one on the stack (VERY
1085 IMPORTANT to start from the bottom to ensure that we don't
1086 spill registers used in gen_opi()) */
1087 for(p
=vstack
;p
<=vtop
;p
++) {
1088 /* look at second register (if long long) */
1089 r
= p
->r2
& VT_VALMASK
;
1090 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1092 r
= p
->r
& VT_VALMASK
;
1093 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1099 /* Should never comes here */
1103 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1105 static void move_reg(int r
, int s
, int t
)
1119 /* get address of vtop (vtop MUST BE an lvalue) */
1120 ST_FUNC
void gaddrof(void)
1122 vtop
->r
&= ~VT_LVAL
;
1123 /* tricky: if saved lvalue, then we can go back to lvalue */
1124 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1125 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1130 #ifdef CONFIG_TCC_BCHECK
1131 /* generate lvalue bound code */
1132 static void gbound(void)
1137 vtop
->r
&= ~VT_MUSTBOUND
;
1138 /* if lvalue, then use checking code before dereferencing */
1139 if (vtop
->r
& VT_LVAL
) {
1140 /* if not VT_BOUNDED value, then make one */
1141 if (!(vtop
->r
& VT_BOUNDED
)) {
1142 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1143 /* must save type because we must set it to int to get pointer */
1145 vtop
->type
.t
= VT_PTR
;
1148 gen_bounded_ptr_add();
1149 vtop
->r
|= lval_type
;
1152 /* then check for dereferencing */
1153 gen_bounded_ptr_deref();
1158 static void incr_bf_adr(int o
)
1160 vtop
->type
= char_pointer_type
;
1164 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1165 | (VT_BYTE
|VT_UNSIGNED
);
1166 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1167 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1170 /* single-byte load mode for packed or otherwise unaligned bitfields */
1171 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1174 save_reg_upstack(vtop
->r
, 1);
1175 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1176 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1185 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1187 vpushi((1 << n
) - 1), gen_op('&');
1190 vpushi(bits
), gen_op(TOK_SHL
);
1193 bits
+= n
, bit_size
-= n
, o
= 1;
1196 if (!(type
->t
& VT_UNSIGNED
)) {
1197 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1198 vpushi(n
), gen_op(TOK_SHL
);
1199 vpushi(n
), gen_op(TOK_SAR
);
1203 /* single-byte store mode for packed or otherwise unaligned bitfields */
1204 static void store_packed_bf(int bit_pos
, int bit_size
)
1206 int bits
, n
, o
, m
, c
;
1208 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1210 save_reg_upstack(vtop
->r
, 1);
1211 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1213 incr_bf_adr(o
); // X B
1215 c
? vdup() : gv_dup(); // B V X
1218 vpushi(bits
), gen_op(TOK_SHR
);
1220 vpushi(bit_pos
), gen_op(TOK_SHL
);
1225 m
= ((1 << n
) - 1) << bit_pos
;
1226 vpushi(m
), gen_op('&'); // X B V1
1227 vpushv(vtop
-1); // X B V1 B
1228 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1229 gen_op('&'); // X B V1 B1
1230 gen_op('|'); // X B V2
1232 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1233 vstore(), vpop(); // X B
1234 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1239 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1242 if (0 == sv
->type
.ref
)
1244 t
= sv
->type
.ref
->auxtype
;
1245 if (t
!= -1 && t
!= VT_STRUCT
) {
1246 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1247 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1252 /* store vtop a register belonging to class 'rc'. lvalues are
1253 converted to values. Cannot be used if cannot be converted to
1254 register value (such as structures). */
1255 ST_FUNC
int gv(int rc
)
1257 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1259 /* NOTE: get_reg can modify vstack[] */
1260 if (vtop
->type
.t
& VT_BITFIELD
) {
1263 bit_pos
= BIT_POS(vtop
->type
.t
);
1264 bit_size
= BIT_SIZE(vtop
->type
.t
);
1265 /* remove bit field info to avoid loops */
1266 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1269 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1270 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1271 type
.t
|= VT_UNSIGNED
;
1273 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1275 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1280 if (r
== VT_STRUCT
) {
1281 load_packed_bf(&type
, bit_pos
, bit_size
);
1283 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1284 /* cast to int to propagate signedness in following ops */
1286 /* generate shifts */
1287 vpushi(bits
- (bit_pos
+ bit_size
));
1289 vpushi(bits
- bit_size
);
1290 /* NOTE: transformed to SHR if unsigned */
1295 if (is_float(vtop
->type
.t
) &&
1296 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1297 unsigned long offset
;
1298 /* CPUs usually cannot use float constants, so we store them
1299 generically in data segment */
1300 size
= type_size(&vtop
->type
, &align
);
1302 size
= 0, align
= 1;
1303 offset
= section_add(data_section
, size
, align
);
1304 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1306 init_putv(&vtop
->type
, data_section
, offset
);
1309 #ifdef CONFIG_TCC_BCHECK
1310 if (vtop
->r
& VT_MUSTBOUND
)
1314 r
= vtop
->r
& VT_VALMASK
;
1315 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1316 #ifndef TCC_TARGET_ARM64
1319 #ifdef TCC_TARGET_X86_64
1320 else if (rc
== RC_FRET
)
1324 /* need to reload if:
1326 - lvalue (need to dereference pointer)
1327 - already a register, but not in the right class */
1329 || (vtop
->r
& VT_LVAL
)
1330 || !(reg_classes
[r
] & rc
)
1332 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1333 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1335 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1341 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1342 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1344 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1345 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1346 unsigned long long ll
;
1348 int r2
, original_type
;
1349 original_type
= vtop
->type
.t
;
1350 /* two register type load : expand to two words
1353 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1356 vtop
->c
.i
= ll
; /* first word */
1358 vtop
->r
= r
; /* save register value */
1359 vpushi(ll
>> 32); /* second word */
1362 if (vtop
->r
& VT_LVAL
) {
1363 /* We do not want to modifier the long long
1364 pointer here, so the safest (and less
1365 efficient) is to save all the other registers
1366 in the stack. XXX: totally inefficient. */
1370 /* lvalue_save: save only if used further down the stack */
1371 save_reg_upstack(vtop
->r
, 1);
1373 /* load from memory */
1374 vtop
->type
.t
= load_type
;
1377 vtop
[-1].r
= r
; /* save register value */
1378 /* increment pointer to get second word */
1379 vtop
->type
.t
= addr_type
;
1384 vtop
->type
.t
= load_type
;
1386 /* move registers */
1389 vtop
[-1].r
= r
; /* save register value */
1390 vtop
->r
= vtop
[-1].r2
;
1392 /* Allocate second register. Here we rely on the fact that
1393 get_reg() tries first to free r2 of an SValue. */
1397 /* write second register */
1399 vtop
->type
.t
= original_type
;
1400 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1402 /* lvalue of scalar type : need to use lvalue type
1403 because of possible cast */
1406 /* compute memory access type */
1407 if (vtop
->r
& VT_LVAL_BYTE
)
1409 else if (vtop
->r
& VT_LVAL_SHORT
)
1411 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1415 /* restore wanted type */
1418 /* one register type load */
1423 #ifdef TCC_TARGET_C67
1424 /* uses register pairs for doubles */
1425 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1432 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1433 ST_FUNC
void gv2(int rc1
, int rc2
)
1437 /* generate more generic register first. But VT_JMP or VT_CMP
1438 values must be generated first in all cases to avoid possible
1440 v
= vtop
[0].r
& VT_VALMASK
;
1441 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1446 /* test if reload is needed for first register */
1447 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1457 /* test if reload is needed for first register */
1458 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1464 #ifndef TCC_TARGET_ARM64
1465 /* wrapper around RC_FRET to return a register by type */
1466 static int rc_fret(int t
)
1468 #ifdef TCC_TARGET_X86_64
1469 if (t
== VT_LDOUBLE
) {
1477 /* wrapper around REG_FRET to return a register by type */
1478 static int reg_fret(int t
)
1480 #ifdef TCC_TARGET_X86_64
1481 if (t
== VT_LDOUBLE
) {
1489 /* expand 64bit on stack in two ints */
1490 static void lexpand(void)
1493 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1494 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1495 if (v
== VT_CONST
) {
1498 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1504 vtop
[0].r
= vtop
[-1].r2
;
1505 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1507 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1511 #ifdef TCC_TARGET_ARM
1512 /* expand long long on stack */
1513 ST_FUNC
void lexpand_nr(void)
1517 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1519 vtop
->r2
= VT_CONST
;
1520 vtop
->type
.t
= VT_INT
| u
;
1521 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1522 if (v
== VT_CONST
) {
1523 vtop
[-1].c
.i
= vtop
->c
.i
;
1524 vtop
->c
.i
= vtop
->c
.i
>> 32;
1526 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1528 vtop
->r
= vtop
[-1].r
;
1529 } else if (v
> VT_CONST
) {
1533 vtop
->r
= vtop
[-1].r2
;
1534 vtop
[-1].r2
= VT_CONST
;
1535 vtop
[-1].type
.t
= VT_INT
| u
;
1540 /* build a long long from two ints */
1541 static void lbuild(int t
)
1543 gv2(RC_INT
, RC_INT
);
1544 vtop
[-1].r2
= vtop
[0].r
;
1545 vtop
[-1].type
.t
= t
;
1550 /* convert stack entry to register and duplicate its value in another
1552 static void gv_dup(void)
1559 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1560 if (t
& VT_BITFIELD
) {
1570 /* stack: H L L1 H1 */
1580 /* duplicate value */
1585 #ifdef TCC_TARGET_X86_64
1586 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1596 load(r1
, &sv
); /* move r to r1 */
1598 /* duplicates value */
1604 /* Generate value test
1606 * Generate a test for any value (jump, comparison and integers) */
1607 ST_FUNC
int gvtst(int inv
, int t
)
1609 int v
= vtop
->r
& VT_VALMASK
;
1610 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1614 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1615 /* constant jmp optimization */
1616 if ((vtop
->c
.i
!= 0) != inv
)
1621 return gtst(inv
, t
);
1625 /* generate CPU independent (unsigned) long long operations */
1626 static void gen_opl(int op
)
1628 int t
, a
, b
, op1
, c
, i
;
1630 unsigned short reg_iret
= REG_IRET
;
1631 unsigned short reg_lret
= REG_LRET
;
1637 func
= TOK___divdi3
;
1640 func
= TOK___udivdi3
;
1643 func
= TOK___moddi3
;
1646 func
= TOK___umoddi3
;
1653 /* call generic long long function */
1654 vpush_global_sym(&func_old_type
, func
);
1659 vtop
->r2
= reg_lret
;
1667 //pv("gen_opl A",0,2);
1673 /* stack: L1 H1 L2 H2 */
1678 vtop
[-2] = vtop
[-3];
1681 /* stack: H1 H2 L1 L2 */
1682 //pv("gen_opl B",0,4);
1688 /* stack: H1 H2 L1 L2 ML MH */
1691 /* stack: ML MH H1 H2 L1 L2 */
1695 /* stack: ML MH H1 L2 H2 L1 */
1700 /* stack: ML MH M1 M2 */
1703 } else if (op
== '+' || op
== '-') {
1704 /* XXX: add non carry method too (for MIPS or alpha) */
1710 /* stack: H1 H2 (L1 op L2) */
1713 gen_op(op1
+ 1); /* TOK_xxxC2 */
1716 /* stack: H1 H2 (L1 op L2) */
1719 /* stack: (L1 op L2) H1 H2 */
1721 /* stack: (L1 op L2) (H1 op H2) */
1729 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1730 t
= vtop
[-1].type
.t
;
1734 /* stack: L H shift */
1736 /* constant: simpler */
1737 /* NOTE: all comments are for SHL. the other cases are
1738 done by swapping words */
1749 if (op
!= TOK_SAR
) {
1782 /* XXX: should provide a faster fallback on x86 ? */
1785 func
= TOK___ashrdi3
;
1788 func
= TOK___lshrdi3
;
1791 func
= TOK___ashldi3
;
1797 /* compare operations */
1803 /* stack: L1 H1 L2 H2 */
1805 vtop
[-1] = vtop
[-2];
1807 /* stack: L1 L2 H1 H2 */
1810 /* when values are equal, we need to compare low words. since
1811 the jump is inverted, we invert the test too. */
1814 else if (op1
== TOK_GT
)
1816 else if (op1
== TOK_ULT
)
1818 else if (op1
== TOK_UGT
)
1828 /* generate non equal test */
1834 /* compare low. Always unsigned */
1838 else if (op1
== TOK_LE
)
1840 else if (op1
== TOK_GT
)
1842 else if (op1
== TOK_GE
)
/* Emulate signed 64-bit division on unsigned operands: divide the
   magnitudes, then give the quotient the sign the signed result
   would have (negative exactly when the operand signs differ).
   The caller guarantees b != 0. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t abs_a = (a >> 63) ? -a : a;
    uint64_t abs_b = (b >> 63) ? -b : b;
    uint64_t quot = abs_a / abs_b;
    return ((a ^ b) >> 63) ? -quot : quot;
}
/* Signed 64-bit "less than" computed on unsigned operands: XOR-ing
   the sign bit maps the signed number line onto the unsigned one
   (INT64_MIN -> 0, ..., INT64_MAX -> UINT64_MAX), so an unsigned
   compare of the biased values gives the signed ordering. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
1864 /* handle integer constant optimizations and various machine
1866 static void gen_opic(int op
)
1868 SValue
*v1
= vtop
- 1;
1870 int t1
= v1
->type
.t
& VT_BTYPE
;
1871 int t2
= v2
->type
.t
& VT_BTYPE
;
1872 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1873 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1874 uint64_t l1
= c1
? v1
->c
.i
: 0;
1875 uint64_t l2
= c2
? v2
->c
.i
: 0;
1876 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1878 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1879 l1
= ((uint32_t)l1
|
1880 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1881 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1882 l2
= ((uint32_t)l2
|
1883 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1887 case '+': l1
+= l2
; break;
1888 case '-': l1
-= l2
; break;
1889 case '&': l1
&= l2
; break;
1890 case '^': l1
^= l2
; break;
1891 case '|': l1
|= l2
; break;
1892 case '*': l1
*= l2
; break;
1899 /* if division by zero, generate explicit division */
1902 tcc_error("division by zero in constant");
1906 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1907 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1908 case TOK_UDIV
: l1
= l1
/ l2
; break;
1909 case TOK_UMOD
: l1
= l1
% l2
; break;
1912 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1913 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1915 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1918 case TOK_ULT
: l1
= l1
< l2
; break;
1919 case TOK_UGE
: l1
= l1
>= l2
; break;
1920 case TOK_EQ
: l1
= l1
== l2
; break;
1921 case TOK_NE
: l1
= l1
!= l2
; break;
1922 case TOK_ULE
: l1
= l1
<= l2
; break;
1923 case TOK_UGT
: l1
= l1
> l2
; break;
1924 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1925 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1926 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1927 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1929 case TOK_LAND
: l1
= l1
&& l2
; break;
1930 case TOK_LOR
: l1
= l1
|| l2
; break;
1934 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1935 l1
= ((uint32_t)l1
|
1936 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1940 /* if commutative ops, put c2 as constant */
1941 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1942 op
== '|' || op
== '*')) {
1944 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1945 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1947 if (!const_wanted
&&
1949 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1950 (l1
== -1 && op
== TOK_SAR
))) {
1951 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1953 } else if (!const_wanted
&&
1954 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1956 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
1957 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1958 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1963 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1966 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1967 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1970 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
1971 /* filter out NOP operations like x*1, x-0, x&-1... */
1973 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1974 /* try to use shifts instead of muls or divs */
1975 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1984 else if (op
== TOK_PDIV
)
1990 } else if (c2
&& (op
== '+' || op
== '-') &&
1991 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1992 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1993 /* symbol + constant case */
1997 /* The backends can't always deal with addends to symbols
1998 larger than +-1<<31. Don't construct such. */
2005 /* call low level op generator */
2006 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2007 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2015 /* generate a floating point operation with constant propagation */
2016 static void gen_opif(int op
)
2020 #if defined _MSC_VER && defined _AMD64_
2021 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2028 /* currently, we cannot do computations with forward symbols */
2029 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2030 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2032 if (v1
->type
.t
== VT_FLOAT
) {
2035 } else if (v1
->type
.t
== VT_DOUBLE
) {
2043 /* NOTE: we only do constant propagation if finite number (not
2044 NaN or infinity) (ANSI spec) */
2045 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2049 case '+': f1
+= f2
; break;
2050 case '-': f1
-= f2
; break;
2051 case '*': f1
*= f2
; break;
2054 /* If not in initializer we need to potentially generate
2055 FP exceptions at runtime, otherwise we want to fold. */
2061 /* XXX: also handles tests ? */
2065 /* XXX: overflow test ? */
2066 if (v1
->type
.t
== VT_FLOAT
) {
2068 } else if (v1
->type
.t
== VT_DOUBLE
) {
2080 static int pointed_size(CType
*type
)
2083 return type_size(pointed_type(type
), &align
);
2086 static void vla_runtime_pointed_size(CType
*type
)
2089 vla_runtime_type_size(pointed_type(type
), &align
);
2092 static inline int is_null_pointer(SValue
*p
)
2094 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2096 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2097 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2098 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2099 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
2102 static inline int is_integer_btype(int bt
)
2104 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2105 bt
== VT_INT
|| bt
== VT_LLONG
);
2108 /* check types for comparison or subtraction of pointers */
2109 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2111 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2114 /* null pointers are accepted for all comparisons as gcc */
2115 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2119 bt1
= type1
->t
& VT_BTYPE
;
2120 bt2
= type2
->t
& VT_BTYPE
;
2121 /* accept comparison between pointer and integer with a warning */
2122 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2123 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2124 tcc_warning("comparison between pointer and integer");
2128 /* both must be pointers or implicit function pointers */
2129 if (bt1
== VT_PTR
) {
2130 type1
= pointed_type(type1
);
2131 } else if (bt1
!= VT_FUNC
)
2132 goto invalid_operands
;
2134 if (bt2
== VT_PTR
) {
2135 type2
= pointed_type(type2
);
2136 } else if (bt2
!= VT_FUNC
) {
2138 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2140 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2141 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2145 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2146 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2147 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2148 /* gcc-like error if '-' is used */
2150 goto invalid_operands
;
2152 tcc_warning("comparison of distinct pointer types lacks a cast");
2156 /* generic gen_op: handles types problems */
2157 ST_FUNC
void gen_op(int op
)
2159 int u
, t1
, t2
, bt1
, bt2
, t
;
2163 t1
= vtop
[-1].type
.t
;
2164 t2
= vtop
[0].type
.t
;
2165 bt1
= t1
& VT_BTYPE
;
2166 bt2
= t2
& VT_BTYPE
;
2168 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2169 tcc_error("operation on a struct");
2170 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2171 if (bt2
== VT_FUNC
) {
2172 mk_pointer(&vtop
->type
);
2175 if (bt1
== VT_FUNC
) {
2177 mk_pointer(&vtop
->type
);
2182 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2183 /* at least one operand is a pointer */
2184 /* relational op: must be both pointers */
2185 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2186 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2187 /* pointers are handled are unsigned */
2189 t
= VT_LLONG
| VT_UNSIGNED
;
2191 t
= VT_INT
| VT_UNSIGNED
;
2195 /* if both pointers, then it must be the '-' op */
2196 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2198 tcc_error("cannot use pointers here");
2199 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2200 /* XXX: check that types are compatible */
2201 if (vtop
[-1].type
.t
& VT_VLA
) {
2202 vla_runtime_pointed_size(&vtop
[-1].type
);
2204 vpushi(pointed_size(&vtop
[-1].type
));
2208 vtop
->type
.t
= ptrdiff_type
.t
;
2212 /* exactly one pointer : must be '+' or '-'. */
2213 if (op
!= '-' && op
!= '+')
2214 tcc_error("cannot use pointers here");
2215 /* Put pointer as first operand */
2216 if (bt2
== VT_PTR
) {
2218 t
= t1
, t1
= t2
, t2
= t
;
2221 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2222 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2225 type1
= vtop
[-1].type
;
2226 type1
.t
&= ~VT_ARRAY
;
2227 if (vtop
[-1].type
.t
& VT_VLA
)
2228 vla_runtime_pointed_size(&vtop
[-1].type
);
2230 u
= pointed_size(&vtop
[-1].type
);
2232 tcc_error("unknown array element size");
2236 /* XXX: cast to int ? (long long case) */
2242 /* #ifdef CONFIG_TCC_BCHECK
2243 The main reason to removing this code:
2250 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2251 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2253 When this code is on. then the output looks like
2255 v+(i-j) = 0xbff84000
2257 /* if evaluating constant expression, no code should be
2258 generated, so no bound check */
2259 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2260 /* if bounded pointers, we generate a special code to
2267 gen_bounded_ptr_add();
2273 /* put again type if gen_opic() swaped operands */
2276 } else if (is_float(bt1
) || is_float(bt2
)) {
2277 /* compute bigger type and do implicit casts */
2278 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2280 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2285 /* floats can only be used for a few operations */
2286 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2287 (op
< TOK_ULT
|| op
> TOK_GT
))
2288 tcc_error("invalid operands for binary operation");
2290 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2291 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2292 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2294 t
|= (VT_LONG
& t1
);
2296 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2297 /* cast to biggest op */
2298 t
= VT_LLONG
| VT_LONG
;
2299 if (bt1
== VT_LLONG
)
2301 if (bt2
== VT_LLONG
)
2303 /* convert to unsigned if it does not fit in a long long */
2304 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2305 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2309 /* integer operations */
2310 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2311 /* convert to unsigned if it does not fit in an integer */
2312 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2313 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2316 /* XXX: currently, some unsigned operations are explicit, so
2317 we modify them here */
2318 if (t
& VT_UNSIGNED
) {
2325 else if (op
== TOK_LT
)
2327 else if (op
== TOK_GT
)
2329 else if (op
== TOK_LE
)
2331 else if (op
== TOK_GE
)
2339 /* special case for shifts and long long: we keep the shift as
2341 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2348 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2349 /* relational op: the result is an int */
2350 vtop
->type
.t
= VT_INT
;
2355 // Make sure that we have converted to an rvalue:
2356 if (vtop
->r
& VT_LVAL
)
2357 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2360 #ifndef TCC_TARGET_ARM
2361 /* generic itof for unsigned long long case */
2362 static void gen_cvt_itof1(int t
)
2364 #ifdef TCC_TARGET_ARM64
2367 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2368 (VT_LLONG
| VT_UNSIGNED
)) {
2371 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2372 #if LDOUBLE_SIZE != 8
2373 else if (t
== VT_LDOUBLE
)
2374 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2377 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2381 vtop
->r
= reg_fret(t
);
2389 /* generic ftoi for unsigned long long case */
2390 static void gen_cvt_ftoi1(int t
)
2392 #ifdef TCC_TARGET_ARM64
2397 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2398 /* not handled natively */
2399 st
= vtop
->type
.t
& VT_BTYPE
;
2401 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2402 #if LDOUBLE_SIZE != 8
2403 else if (st
== VT_LDOUBLE
)
2404 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2407 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2412 vtop
->r2
= REG_LRET
;
2419 /* force char or short cast */
2420 static void force_charshort_cast(int t
)
2424 /* cannot cast static initializers */
2425 if (STATIC_DATA_WANTED
)
2429 /* XXX: add optimization if lvalue : just change type and offset */
2434 if (t
& VT_UNSIGNED
) {
2435 vpushi((1 << bits
) - 1);
2438 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2444 /* result must be signed or the SAR is converted to an SHL
2445 This was not the case when "t" was a signed short
2446 and the last value on the stack was an unsigned int */
2447 vtop
->type
.t
&= ~VT_UNSIGNED
;
2453 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2454 static void gen_cast_s(int t
)
2462 static void gen_cast(CType
*type
)
2464 int sbt
, dbt
, sf
, df
, c
, p
;
2466 /* special delayed cast for char/short */
2467 /* XXX: in some cases (multiple cascaded casts), it may still
2469 if (vtop
->r
& VT_MUSTCAST
) {
2470 vtop
->r
&= ~VT_MUSTCAST
;
2471 force_charshort_cast(vtop
->type
.t
);
2474 /* bitfields first get cast to ints */
2475 if (vtop
->type
.t
& VT_BITFIELD
) {
2479 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2480 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2485 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2486 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2487 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2488 c
&= dbt
!= VT_LDOUBLE
;
2491 /* constant case: we can do it now */
2492 /* XXX: in ISOC, cannot do it if error in convert */
2493 if (sbt
== VT_FLOAT
)
2494 vtop
->c
.ld
= vtop
->c
.f
;
2495 else if (sbt
== VT_DOUBLE
)
2496 vtop
->c
.ld
= vtop
->c
.d
;
2499 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2500 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2501 vtop
->c
.ld
= vtop
->c
.i
;
2503 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2505 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2506 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2508 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2511 if (dbt
== VT_FLOAT
)
2512 vtop
->c
.f
= (float)vtop
->c
.ld
;
2513 else if (dbt
== VT_DOUBLE
)
2514 vtop
->c
.d
= (double)vtop
->c
.ld
;
2515 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2516 vtop
->c
.i
= vtop
->c
.ld
;
2517 } else if (sf
&& dbt
== VT_BOOL
) {
2518 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2521 vtop
->c
.i
= vtop
->c
.ld
;
2522 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2524 else if (sbt
& VT_UNSIGNED
)
2525 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2527 else if (sbt
== VT_PTR
)
2530 else if (sbt
!= VT_LLONG
)
2531 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2532 -(vtop
->c
.i
& 0x80000000));
2534 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2536 else if (dbt
== VT_BOOL
)
2537 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2539 else if (dbt
== VT_PTR
)
2542 else if (dbt
!= VT_LLONG
) {
2543 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2544 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2547 if (!(dbt
& VT_UNSIGNED
))
2548 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2551 } else if (p
&& dbt
== VT_BOOL
) {
2555 /* non constant case: generate code */
2557 /* convert from fp to fp */
2560 /* convert int to fp */
2563 /* convert fp to int */
2564 if (dbt
== VT_BOOL
) {
2568 /* we handle char/short/etc... with generic code */
2569 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2570 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2574 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2575 /* additional cast for char/short... */
2581 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2582 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2583 /* scalar to long long */
2584 /* machine independent conversion */
2586 /* generate high word */
2587 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2591 if (sbt
== VT_PTR
) {
2592 /* cast from pointer to int before we apply
2593 shift operation, which pointers don't support*/
2600 /* patch second register */
2601 vtop
[-1].r2
= vtop
->r
;
2605 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2606 (dbt
& VT_BTYPE
) == VT_PTR
||
2607 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2608 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2609 (sbt
& VT_BTYPE
) != VT_PTR
&&
2610 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2611 /* need to convert from 32bit to 64bit */
2613 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2614 #if defined(TCC_TARGET_ARM64)
2616 #elif defined(TCC_TARGET_X86_64)
2618 /* x86_64 specific: movslq */
2620 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2627 } else if (dbt
== VT_BOOL
) {
2628 /* scalar to bool */
2631 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2632 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2633 if (sbt
== VT_PTR
) {
2634 vtop
->type
.t
= VT_INT
;
2635 tcc_warning("nonportable conversion from pointer to char/short");
2637 force_charshort_cast(dbt
);
2638 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2640 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2642 /* from long long: just take low order word */
2647 vtop
->type
.t
|= VT_UNSIGNED
;
2651 /* if lvalue and single word type, nothing to do because
2652 the lvalue already contains the real type size (see
2653 VT_LVAL_xxx constants) */
2656 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2657 /* if we are casting between pointer types,
2658 we must update the VT_LVAL_xxx size */
2659 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2660 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2665 /* return type size as known at compile time. Put alignment at 'a' */
2666 ST_FUNC
int type_size(CType
*type
, int *a
)
2671 bt
= type
->t
& VT_BTYPE
;
2672 if (bt
== VT_STRUCT
) {
2677 } else if (bt
== VT_PTR
) {
2678 if (type
->t
& VT_ARRAY
) {
2682 ts
= type_size(&s
->type
, a
);
2684 if (ts
< 0 && s
->c
< 0)
2692 } else if (IS_ENUM(type
->t
) && type
->ref
->c
== -1) {
2693 return -1; /* incomplete enum */
2694 } else if (bt
== VT_LDOUBLE
) {
2696 return LDOUBLE_SIZE
;
2697 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2698 #ifdef TCC_TARGET_I386
2699 #ifdef TCC_TARGET_PE
2704 #elif defined(TCC_TARGET_ARM)
2714 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2717 } else if (bt
== VT_SHORT
) {
2720 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2724 /* char, void, function, _Bool */
2730 /* push type size as known at runtime time on top of value stack. Put
2732 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2734 if (type
->t
& VT_VLA
) {
2735 type_size(&type
->ref
->type
, a
);
2736 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2738 vpushi(type_size(type
, a
));
2742 static void vla_sp_restore(void) {
2743 if (vlas_in_scope
) {
2744 gen_vla_sp_restore(vla_sp_loc
);
2748 static void vla_sp_restore_root(void) {
2749 if (vlas_in_scope
) {
2750 gen_vla_sp_restore(vla_sp_root_loc
);
2754 /* return the pointed type of t */
2755 static inline CType
*pointed_type(CType
*type
)
2757 return &type
->ref
->type
;
2760 /* modify type so that its it is a pointer to type. */
2761 ST_FUNC
void mk_pointer(CType
*type
)
2764 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2765 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2769 /* compare function types. OLD functions match any new functions */
2770 static int is_compatible_func(CType
*type1
, CType
*type2
)
2776 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2778 /* check func_call */
2779 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2781 /* XXX: not complete */
2782 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2784 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2786 while (s1
!= NULL
) {
2789 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2799 /* return true if type1 and type2 are the same. If unqualified is
2800 true, qualifiers on the types are ignored.
2802 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2806 t1
= type1
->t
& VT_TYPE
;
2807 t2
= type2
->t
& VT_TYPE
;
2809 /* strip qualifiers before comparing */
2810 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2811 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2814 /* Default Vs explicit signedness only matters for char */
2815 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2819 /* XXX: bitfields ? */
2822 /* test more complicated cases */
2823 bt1
= t1
& (VT_BTYPE
| (unqualified
? 0 : VT_ARRAY
) );
2824 if (bt1
== VT_PTR
) {
2825 type1
= pointed_type(type1
);
2826 type2
= pointed_type(type2
);
2827 return is_compatible_types(type1
, type2
);
2828 } else if (bt1
& VT_ARRAY
) {
2829 return type1
->ref
->c
< 0 || type2
->ref
->c
< 0
2830 || type1
->ref
->c
== type2
->ref
->c
;
2831 } else if (bt1
== VT_STRUCT
) {
2832 return (type1
->ref
== type2
->ref
);
2833 } else if (bt1
== VT_FUNC
) {
2834 return is_compatible_func(type1
, type2
);
2835 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
2836 return type1
->ref
== type2
->ref
;
2842 /* return true if type1 and type2 are exactly the same (including
2845 static int is_compatible_types(CType
*type1
, CType
*type2
)
2847 return compare_types(type1
,type2
,0);
2850 /* return true if type1 and type2 are the same (ignoring qualifiers).
2852 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2854 return compare_types(type1
,type2
,1);
2857 /* print a type. If 'varstr' is not NULL, then the variable is also
2858 printed in the type */
2860 /* XXX: add array and function pointers */
2861 static void type_to_str(char *buf
, int buf_size
,
2862 CType
*type
, const char *varstr
)
2874 pstrcat(buf
, buf_size
, "extern ");
2876 pstrcat(buf
, buf_size
, "static ");
2878 pstrcat(buf
, buf_size
, "typedef ");
2880 pstrcat(buf
, buf_size
, "inline ");
2881 if (t
& VT_VOLATILE
)
2882 pstrcat(buf
, buf_size
, "volatile ");
2883 if (t
& VT_CONSTANT
)
2884 pstrcat(buf
, buf_size
, "const ");
2886 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2887 || ((t
& VT_UNSIGNED
)
2888 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2891 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2893 buf_size
-= strlen(buf
);
2928 tstr
= "long double";
2930 pstrcat(buf
, buf_size
, tstr
);
2937 pstrcat(buf
, buf_size
, tstr
);
2938 v
= type
->ref
->v
& ~SYM_STRUCT
;
2939 if (v
>= SYM_FIRST_ANOM
)
2940 pstrcat(buf
, buf_size
, "<anonymous>");
2942 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2947 if (varstr
&& '*' == *varstr
) {
2948 pstrcat(buf1
, sizeof(buf1
), "(");
2949 pstrcat(buf1
, sizeof(buf1
), varstr
);
2950 pstrcat(buf1
, sizeof(buf1
), ")");
2952 pstrcat(buf1
, buf_size
, "(");
2954 while (sa
!= NULL
) {
2956 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2957 pstrcat(buf1
, sizeof(buf1
), buf2
);
2960 pstrcat(buf1
, sizeof(buf1
), ", ");
2962 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2963 pstrcat(buf1
, sizeof(buf1
), ", ...");
2964 pstrcat(buf1
, sizeof(buf1
), ")");
2965 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2970 if (varstr
&& '*' == *varstr
)
2971 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2973 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2974 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2977 pstrcpy(buf1
, sizeof(buf1
), "*");
2978 if (t
& VT_CONSTANT
)
2979 pstrcat(buf1
, buf_size
, "const ");
2980 if (t
& VT_VOLATILE
)
2981 pstrcat(buf1
, buf_size
, "volatile ");
2983 pstrcat(buf1
, sizeof(buf1
), varstr
);
2984 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2988 pstrcat(buf
, buf_size
, " ");
2989 pstrcat(buf
, buf_size
, varstr
);
2994 /* verify type compatibility to store vtop in 'dt' type, and generate
2996 static void gen_assign_cast(CType
*dt
)
2998 CType
*st
, *type1
, *type2
;
2999 char buf1
[256], buf2
[256];
3000 int dbt
, sbt
, qualwarn
, lvl
;
3002 st
= &vtop
->type
; /* source type */
3003 dbt
= dt
->t
& VT_BTYPE
;
3004 sbt
= st
->t
& VT_BTYPE
;
3005 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3006 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3007 ; /* It is Ok if both are void */
3009 tcc_error("cannot cast from/to void");
3011 if (dt
->t
& VT_CONSTANT
)
3012 tcc_warning("assignment of read-only location");
3015 /* special cases for pointers */
3016 /* '0' can also be a pointer */
3017 if (is_null_pointer(vtop
))
3019 /* accept implicit pointer to integer cast with warning */
3020 if (is_integer_btype(sbt
)) {
3021 tcc_warning("assignment makes pointer from integer without a cast");
3024 type1
= pointed_type(dt
);
3026 type2
= pointed_type(st
);
3027 else if (sbt
== VT_FUNC
)
3028 type2
= st
; /* a function is implicitly a function pointer */
3031 if (is_compatible_types(type1
, type2
))
3033 for (qualwarn
= lvl
= 0;; ++lvl
) {
3034 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3035 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3037 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3038 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3039 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3041 type1
= pointed_type(type1
);
3042 type2
= pointed_type(type2
);
3044 if (!is_compatible_unqualified_types(type1
, type2
)) {
3045 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3046 /* void * can match anything */
3047 } else if (dbt
== sbt
3048 && is_integer_btype(sbt
& VT_BTYPE
)
3049 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3050 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3051 /* Like GCC don't warn by default for merely changes
3052 in pointer target signedness. Do warn for different
3053 base types, though, in particular for unsigned enums
3054 and signed int targets. */
3056 tcc_warning("assignment from incompatible pointer type");
3061 tcc_warning("assignment discards qualifiers from pointer target type");
3067 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3068 tcc_warning("assignment makes integer from pointer without a cast");
3069 } else if (sbt
== VT_STRUCT
) {
3070 goto case_VT_STRUCT
;
3072 /* XXX: more tests */
3076 if (!is_compatible_unqualified_types(dt
, st
)) {
3078 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3079 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3080 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3087 /* store vtop in lvalue pushed on stack */
3088 ST_FUNC
void vstore(void)
3090 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3092 ft
= vtop
[-1].type
.t
;
3093 sbt
= vtop
->type
.t
& VT_BTYPE
;
3094 dbt
= ft
& VT_BTYPE
;
3095 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3096 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3097 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3098 /* optimize char/short casts */
3099 delayed_cast
= VT_MUSTCAST
;
3100 vtop
->type
.t
= ft
& VT_TYPE
;
3101 /* XXX: factorize */
3102 if (ft
& VT_CONSTANT
)
3103 tcc_warning("assignment of read-only location");
3106 if (!(ft
& VT_BITFIELD
))
3107 gen_assign_cast(&vtop
[-1].type
);
3110 if (sbt
== VT_STRUCT
) {
3111 /* if structure, only generate pointer */
3112 /* structure assignment : generate memcpy */
3113 /* XXX: optimize if small size */
3114 size
= type_size(&vtop
->type
, &align
);
3118 vtop
->type
.t
= VT_PTR
;
3121 /* address of memcpy() */
3124 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3125 else if(!(align
& 3))
3126 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3129 /* Use memmove, rather than memcpy, as dest and src may be same: */
3130 vpush_global_sym(&func_old_type
, TOK_memmove
);
3135 vtop
->type
.t
= VT_PTR
;
3141 /* leave source on stack */
3142 } else if (ft
& VT_BITFIELD
) {
3143 /* bitfield store handling */
3145 /* save lvalue as expression result (example: s.b = s.a = n;) */
3146 vdup(), vtop
[-1] = vtop
[-2];
3148 bit_pos
= BIT_POS(ft
);
3149 bit_size
= BIT_SIZE(ft
);
3150 /* remove bit field info to avoid loops */
3151 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3153 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3154 gen_cast(&vtop
[-1].type
);
3155 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3158 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3159 if (r
== VT_STRUCT
) {
3160 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3161 store_packed_bf(bit_pos
, bit_size
);
3163 unsigned long long mask
= (1ULL << bit_size
) - 1;
3164 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3166 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3169 vpushi((unsigned)mask
);
3176 /* duplicate destination */
3179 /* load destination, mask and or with source */
3180 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3181 vpushll(~(mask
<< bit_pos
));
3183 vpushi(~((unsigned)mask
<< bit_pos
));
3188 /* ... and discard */
3191 } else if (dbt
== VT_VOID
) {
3194 #ifdef CONFIG_TCC_BCHECK
3195 /* bound check case */
3196 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3205 #ifdef TCC_TARGET_X86_64
3206 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3208 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3213 r
= gv(rc
); /* generate value */
3214 /* if lvalue was saved on stack, must read it */
3215 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3217 t
= get_reg(RC_INT
);
3223 sv
.r
= VT_LOCAL
| VT_LVAL
;
3224 sv
.c
.i
= vtop
[-1].c
.i
;
3226 vtop
[-1].r
= t
| VT_LVAL
;
3228 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3230 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3231 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3233 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3234 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3236 vtop
[-1].type
.t
= load_type
;
3239 /* convert to int to increment easily */
3240 vtop
->type
.t
= addr_type
;
3246 vtop
[-1].type
.t
= load_type
;
3247 /* XXX: it works because r2 is spilled last ! */
3248 store(vtop
->r2
, vtop
- 1);
3254 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3255 vtop
->r
|= delayed_cast
;
/* post defines POST/PRE add. c is the token ++ or -- */
/* Apply ++/-- to the lvalue currently on top of the value stack.
   NOTE(review): this extraction elides several original lines (the
   lvalue test, the vrot* reshuffling for the post case, the gen_op('+')
   that applies the delta, and the if(post) guards); only the surviving
   statements appear below. */
ST_FUNC void inc(int post, int c)
{
    vdup(); /* save lvalue */
    gv_dup(); /* duplicate value */
    /* TOK_MID presumably sits midway between TOK_INC and TOK_DEC, so this
       pushes +1 for '++' and -1 for '--' — confirm against tcc.h */
    vpushi(c - TOK_MID);
    vstore(); /* store value */
    vpop(); /* if post op, return saved value */
}
/* Concatenate one or more adjacent string-literal tokens into ASTR and
   NUL-terminate the result.  MSG is presumably the error message used when
   no string token is present — that check, the cstr_new(astr) initialisation
   and the next() advancing inside the loop are elided in this extraction. */
ST_FUNC void parse_mult_str (CString *astr, const char *msg)
{
    /* read the string */
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        /* -1 length: cstr_cat derives the length from the token data itself */
        cstr_cat(astr, tokc.str.data, -1);
    }
    cstr_ccat(astr, '\0');  /* ensure the accumulated string is NUL-terminated */
}
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.
   (For any positive I this is the 1-based index of the highest set bit.)
   NOTE: the loop body and the 4/2/1-bit refinement steps were truncated in
   this extraction; restored here to match the contract stated above. */
static int exact_log2p1(int i)
{
    int ret;
    if (!i)
        return 0;
    /* locate the highest set bit, narrowing 8, then 4, 2 and 1 bits at a time */
    for (ret = 1; i >= 1 << 8; ret += 8)
        i >>= 8;
    if (i >= 1 << 4)
        ret += 4, i >>= 4;
    if (i >= 1 << 2)
        ret += 2, i >>= 2;
    if (i >= 1 << 1)
        ret++;
    return ret;
}
3309 /* Parse __attribute__((...)) GNUC extension. */
3310 static void parse_attribute(AttributeDef
*ad
)
3316 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3321 while (tok
!= ')') {
3322 if (tok
< TOK_IDENT
)
3323 expect("attribute name");
3330 parse_mult_str(&astr
, "section name");
3331 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3338 parse_mult_str(&astr
, "alias(\"target\")");
3339 ad
->alias_target
= /* save string as token, for later */
3340 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3344 case TOK_VISIBILITY1
:
3345 case TOK_VISIBILITY2
:
3347 parse_mult_str(&astr
,
3348 "visibility(\"default|hidden|internal|protected\")");
3349 if (!strcmp (astr
.data
, "default"))
3350 ad
->a
.visibility
= STV_DEFAULT
;
3351 else if (!strcmp (astr
.data
, "hidden"))
3352 ad
->a
.visibility
= STV_HIDDEN
;
3353 else if (!strcmp (astr
.data
, "internal"))
3354 ad
->a
.visibility
= STV_INTERNAL
;
3355 else if (!strcmp (astr
.data
, "protected"))
3356 ad
->a
.visibility
= STV_PROTECTED
;
3358 expect("visibility(\"default|hidden|internal|protected\")");
3367 if (n
<= 0 || (n
& (n
- 1)) != 0)
3368 tcc_error("alignment must be a positive power of two");
3373 ad
->a
.aligned
= exact_log2p1(n
);
3374 if (n
!= 1 << (ad
->a
.aligned
- 1))
3375 tcc_error("alignment of %d is larger than implemented", n
);
3387 /* currently, no need to handle it because tcc does not
3388 track unused objects */
3392 /* currently, no need to handle it because tcc does not
3393 track unused objects */
3398 ad
->f
.func_call
= FUNC_CDECL
;
3403 ad
->f
.func_call
= FUNC_STDCALL
;
3405 #ifdef TCC_TARGET_I386
3415 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3421 ad
->f
.func_call
= FUNC_FASTCALLW
;
3428 ad
->attr_mode
= VT_LLONG
+ 1;
3431 ad
->attr_mode
= VT_BYTE
+ 1;
3434 ad
->attr_mode
= VT_SHORT
+ 1;
3438 ad
->attr_mode
= VT_INT
+ 1;
3441 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3448 ad
->a
.dllexport
= 1;
3450 case TOK_NODECORATE
:
3451 ad
->a
.nodecorate
= 1;
3454 ad
->a
.dllimport
= 1;
3457 if (tcc_state
->warn_unsupported
)
3458 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3459 /* skip parameters */
3461 int parenthesis
= 0;
3465 else if (tok
== ')')
3468 } while (parenthesis
&& tok
!= -1);
/* Look up field V inside struct/union TYPE, descending into anonymous
   sub-structs/unions so their members behave as direct members.
   NOTE(review): the cursor initialisation (presumably s = type->ref and
   v |= SYM_FIELD), the handling of a successful recursive hit, the direct
   s->v == v match and the final return are elided in this extraction. */
static Sym * find_field (CType *type, int v)
{
    while ((s = s->next) != NULL) {
        /* an anonymous member carries an anonymous symbol number and a
           struct/union type: recurse to search its fields */
        if ((s->v & SYM_FIELD) &&
            (s->type.t & VT_BTYPE) == VT_STRUCT &&
            (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
            Sym *ret = find_field (&s->type, v);
        }
    }
}
/* Rebase the byte offsets of all members of struct symbol list S by OFFSET,
   recursing into anonymous struct/union members.  Used after laying out an
   anonymous aggregate so its fields reflect their position inside the
   containing struct.
   NOTE(review): the non-anonymous branch (presumably s->c += offset) is
   elided in this extraction. */
static void struct_add_offset (Sym *s, int offset)
{
    while ((s = s->next) != NULL) {
        /* anonymous struct/union member: rebase its own fields recursively */
        if ((s->v & SYM_FIELD) &&
            (s->type.t & VT_BTYPE) == VT_STRUCT &&
            (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
            struct_add_offset(s->type.ref, offset);
        }
    }
}
3511 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3513 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3514 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3515 int pcc
= !tcc_state
->ms_bitfields
;
3516 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3523 prevbt
= VT_STRUCT
; /* make it never match */
3528 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3529 if (f
->type
.t
& VT_BITFIELD
)
3530 bit_size
= BIT_SIZE(f
->type
.t
);
3533 size
= type_size(&f
->type
, &align
);
3534 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3537 if (pcc
&& bit_size
== 0) {
3538 /* in pcc mode, packing does not affect zero-width bitfields */
3541 /* in pcc mode, attribute packed overrides if set. */
3542 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3545 /* pragma pack overrides align if lesser and packs bitfields always */
3548 if (pragma_pack
< align
)
3549 align
= pragma_pack
;
3550 /* in pcc mode pragma pack also overrides individual align */
3551 if (pcc
&& pragma_pack
< a
)
3555 /* some individual align was specified */
3559 if (type
->ref
->type
.t
== VT_UNION
) {
3560 if (pcc
&& bit_size
>= 0)
3561 size
= (bit_size
+ 7) >> 3;
3566 } else if (bit_size
< 0) {
3568 c
+= (bit_pos
+ 7) >> 3;
3569 c
= (c
+ align
- 1) & -align
;
3578 /* A bit-field. Layout is more complicated. There are two
3579 options: PCC (GCC) compatible and MS compatible */
3581 /* In PCC layout a bit-field is placed adjacent to the
3582 preceding bit-fields, except if:
3584 - an individual alignment was given
3585 - it would overflow its base type container and
3586 there is no packing */
3587 if (bit_size
== 0) {
3589 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3591 } else if (f
->a
.aligned
) {
3593 } else if (!packed
) {
3595 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3596 if (ofs
> size
/ align
)
3600 /* in pcc mode, long long bitfields have type int if they fit */
3601 if (size
== 8 && bit_size
<= 32)
3602 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3604 while (bit_pos
>= align
* 8)
3605 c
+= align
, bit_pos
-= align
* 8;
3608 /* In PCC layout named bit-fields influence the alignment
3609 of the containing struct using the base types alignment,
3610 except for packed fields (which here have correct align). */
3611 if (f
->v
& SYM_FIRST_ANOM
3612 // && bit_size // ??? gcc on ARM/rpi does that
3617 bt
= f
->type
.t
& VT_BTYPE
;
3618 if ((bit_pos
+ bit_size
> size
* 8)
3619 || (bit_size
> 0) == (bt
!= prevbt
)
3621 c
= (c
+ align
- 1) & -align
;
3624 /* In MS bitfield mode a bit-field run always uses
3625 at least as many bits as the underlying type.
3626 To start a new run it's also required that this
3627 or the last bit-field had non-zero width. */
3628 if (bit_size
|| prev_bit_size
)
3631 /* In MS layout the records alignment is normally
3632 influenced by the field, except for a zero-width
3633 field at the start of a run (but by further zero-width
3634 fields it is again). */
3635 if (bit_size
== 0 && prevbt
!= bt
)
3638 prev_bit_size
= bit_size
;
3641 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3642 | (bit_pos
<< VT_STRUCT_SHIFT
);
3643 bit_pos
+= bit_size
;
3645 if (align
> maxalign
)
3649 printf("set field %s offset %-2d size %-2d align %-2d",
3650 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3651 if (f
->type
.t
& VT_BITFIELD
) {
3652 printf(" pos %-2d bits %-2d",
3660 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3662 /* An anonymous struct/union. Adjust member offsets
3663 to reflect the real offset of our containing struct.
3664 Also set the offset of this anon member inside
3665 the outer struct to be zero. Via this it
3666 works when accessing the field offset directly
3667 (from base object), as well as when recursing
3668 members in initializer handling. */
3669 int v2
= f
->type
.ref
->v
;
3670 if (!(v2
& SYM_FIELD
) &&
3671 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3673 /* This happens only with MS extensions. The
3674 anon member has a named struct type, so it
3675 potentially is shared with other references.
3676 We need to unshare members so we can modify
3679 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3680 &f
->type
.ref
->type
, 0,
3682 pps
= &f
->type
.ref
->next
;
3683 while ((ass
= ass
->next
) != NULL
) {
3684 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3685 pps
= &((*pps
)->next
);
3689 struct_add_offset(f
->type
.ref
, offset
);
3699 c
+= (bit_pos
+ 7) >> 3;
3701 /* store size and alignment */
3702 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3706 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3707 /* can happen if individual align for some member was given. In
3708 this case MSVC ignores maxalign when aligning the size */
3713 c
= (c
+ a
- 1) & -a
;
3717 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3720 /* check whether we can access bitfields by their type */
3721 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3725 if (0 == (f
->type
.t
& VT_BITFIELD
))
3729 bit_size
= BIT_SIZE(f
->type
.t
);
3732 bit_pos
= BIT_POS(f
->type
.t
);
3733 size
= type_size(&f
->type
, &align
);
3734 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3737 /* try to access the field using a different type */
3738 c0
= -1, s
= align
= 1;
3740 px
= f
->c
* 8 + bit_pos
;
3741 cx
= (px
>> 3) & -align
;
3742 px
= px
- (cx
<< 3);
3745 s
= (px
+ bit_size
+ 7) >> 3;
3755 s
= type_size(&t
, &align
);
3759 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3760 /* update offset and bit position */
3763 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3764 | (bit_pos
<< VT_STRUCT_SHIFT
);
3768 printf("FIX field %s offset %-2d size %-2d align %-2d "
3769 "pos %-2d bits %-2d\n",
3770 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3771 cx
, s
, align
, px
, bit_size
);
3774 /* fall back to load/store single-byte wise */
3775 f
->auxtype
= VT_STRUCT
;
3777 printf("FIX field %s : load byte-wise\n",
3778 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3784 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3785 static void struct_decl(CType
*type
, int u
)
3787 int v
, c
, size
, align
, flexible
;
3788 int bit_size
, bsize
, bt
;
3790 AttributeDef ad
, ad1
;
3793 memset(&ad
, 0, sizeof ad
);
3795 parse_attribute(&ad
);
3799 /* struct already defined ? return it */
3801 expect("struct/union/enum name");
3803 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3806 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3808 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3813 /* Record the original enum/struct/union token. */
3814 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3816 /* we put an undefined size for struct/union */
3817 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3818 s
->r
= 0; /* default alignment is zero as gcc */
3820 type
->t
= s
->type
.t
;
3826 tcc_error("struct/union/enum already defined");
3827 /* cannot be empty */
3828 /* non empty enums are not allowed */
3831 long long ll
= 0, pl
= 0, nl
= 0;
3834 /* enum symbols have static storage */
3835 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3839 expect("identifier");
3841 if (ss
&& !local_stack
)
3842 tcc_error("redefinition of enumerator '%s'",
3843 get_tok_str(v
, NULL
));
3847 ll
= expr_const64();
3849 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3851 *ps
= ss
, ps
= &ss
->next
;
3860 /* NOTE: we accept a trailing comma */
3865 /* set integral type of the enum */
3868 if (pl
!= (unsigned)pl
)
3869 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3871 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
3872 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3873 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
3875 /* set type for enum members */
3876 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
3878 if (ll
== (int)ll
) /* default is int if it fits */
3880 if (t
.t
& VT_UNSIGNED
) {
3881 ss
->type
.t
|= VT_UNSIGNED
;
3882 if (ll
== (unsigned)ll
)
3885 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
3886 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3891 while (tok
!= '}') {
3892 if (!parse_btype(&btype
, &ad1
)) {
3898 tcc_error("flexible array member '%s' not at the end of struct",
3899 get_tok_str(v
, NULL
));
3905 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
3907 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3908 expect("identifier");
3910 int v
= btype
.ref
->v
;
3911 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3912 if (tcc_state
->ms_extensions
== 0)
3913 expect("identifier");
3917 if (type_size(&type1
, &align
) < 0) {
3918 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3921 tcc_error("field '%s' has incomplete type",
3922 get_tok_str(v
, NULL
));
3924 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3925 (type1
.t
& VT_BTYPE
) == VT_VOID
||
3926 (type1
.t
& VT_STORAGE
))
3927 tcc_error("invalid type for '%s'",
3928 get_tok_str(v
, NULL
));
3932 bit_size
= expr_const();
3933 /* XXX: handle v = 0 case for messages */
3935 tcc_error("negative width in bit-field '%s'",
3936 get_tok_str(v
, NULL
));
3937 if (v
&& bit_size
== 0)
3938 tcc_error("zero width for bit-field '%s'",
3939 get_tok_str(v
, NULL
));
3940 parse_attribute(&ad1
);
3942 size
= type_size(&type1
, &align
);
3943 if (bit_size
>= 0) {
3944 bt
= type1
.t
& VT_BTYPE
;
3950 tcc_error("bitfields must have scalar type");
3952 if (bit_size
> bsize
) {
3953 tcc_error("width of '%s' exceeds its type",
3954 get_tok_str(v
, NULL
));
3955 } else if (bit_size
== bsize
3956 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
3957 /* no need for bit fields */
3959 } else if (bit_size
== 64) {
3960 tcc_error("field width 64 not implemented");
3962 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
3964 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
3967 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3968 /* Remember we've seen a real field to check
3969 for placement of flexible array member. */
3972 /* If member is a struct or bit-field, enforce
3973 placing into the struct (as anonymous). */
3975 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
3980 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
3985 if (tok
== ';' || tok
== TOK_EOF
)
3992 parse_attribute(&ad
);
3993 struct_layout(type
, &ad
);
/* Merge attributes recorded on symbol S into AD, without overwriting
   anything the current declaration has already specified explicitly
   (existing non-zero fields of AD win). */
static void sym_to_attr(AttributeDef *ad, Sym *s)
{
    if (s->a.aligned && 0 == ad->a.aligned)
        ad->a.aligned = s->a.aligned;
    if (s->f.func_call && 0 == ad->f.func_call)
        ad->f.func_call = s->f.func_call;
    if (s->f.func_type && 0 == ad->f.func_type)
        ad->f.func_type = s->f.func_type;
}
/* Add type qualifiers to a type. If the type is an array then the qualifiers
   are added to the element type, copied because it could be a typedef. */
static void parse_btype_qualify(CType *type, int qualifiers)
{
    while (type->t & VT_ARRAY) {
        /* clone the array Sym so a shared (typedef'd) element type is not
           modified in place; sym_push copies the type and element count (->c) */
        type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
        type = &type->ref->type;
    }
    type->t |= qualifiers;
}
4021 /* return 0 if no type declaration. otherwise, return the basic type
4024 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4026 int t
, u
, bt
, st
, type_found
, typespec_found
, g
;
4030 memset(ad
, 0, sizeof(AttributeDef
));
4040 /* currently, we really ignore extension */
4050 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4051 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4052 tmbt
: tcc_error("too many basic types");
4055 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4060 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4073 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4074 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4075 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4076 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4083 #ifdef TCC_TARGET_ARM64
4085 /* GCC's __uint128_t appears in some Linux header files. Make it a
4086 synonym for long double to get the size and alignment right. */
4097 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4098 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4106 struct_decl(&type1
, VT_ENUM
);
4109 type
->ref
= type1
.ref
;
4112 struct_decl(&type1
, VT_STRUCT
);
4115 struct_decl(&type1
, VT_UNION
);
4118 /* type modifiers */
4123 parse_btype_qualify(type
, VT_CONSTANT
);
4131 parse_btype_qualify(type
, VT_VOLATILE
);
4138 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4139 tcc_error("signed and unsigned modifier");
4152 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4153 tcc_error("signed and unsigned modifier");
4154 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4170 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4171 tcc_error("multiple storage classes");
4182 /* GNUC attribute */
4183 case TOK_ATTRIBUTE1
:
4184 case TOK_ATTRIBUTE2
:
4185 parse_attribute(ad
);
4186 if (ad
->attr_mode
) {
4187 u
= ad
->attr_mode
-1;
4188 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4196 parse_expr_type(&type1
);
4197 /* remove all storage modifiers except typedef */
4198 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4200 sym_to_attr(ad
, type1
.ref
);
4206 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4208 t
&= ~(VT_BTYPE
|VT_LONG
);
4209 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4210 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4211 type
->ref
= s
->type
.ref
;
4213 parse_btype_qualify(type
, t
);
4215 /* get attributes from typedef */
4225 if (tcc_state
->char_is_unsigned
) {
4226 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4229 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4230 bt
= t
& (VT_BTYPE
|VT_LONG
);
4232 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4233 #ifdef TCC_TARGET_PE
4234 if (bt
== VT_LDOUBLE
)
4235 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
/* convert a function parameter type (array to pointer and function to
   function pointer) */
static inline void convert_parameter_type(CType *pt)
{
    /* remove const and volatile qualifiers (XXX: const could be used
       to indicate a const function parameter */
    pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
    /* array must be transformed to pointer according to ANSI C */
    /* NOTE(review): the line clearing VT_ARRAY and the body turning a
       function type into a pointer (presumably mk_pointer(pt)) are elided
       in this extraction */
    if ((pt->t & VT_BTYPE) == VT_FUNC) {
    }
}
/* Parse a (possibly concatenated) asm string into ASTR.
   NOTE(review): the skip('(') before the string is elided in this
   extraction. */
ST_FUNC void parse_asm_str(CString *astr)
{
    parse_mult_str(astr, "string constant");
}
/* Parse an asm label and return the token */
/* NOTE(review): local declarations, the token advancing/paren skipping and
   the cstr_free/return of V are elided in this extraction; the printf is
   presumably inside an ASM_DEBUG-style conditional — confirm upstream. */
static int asm_label_instr(void)
{
    parse_asm_str(&astr);
    printf("asm_alias: \"%s\"\n", (char *)astr.data);
    /* intern the label text (minus the trailing NUL) as a token */
    v = tok_alloc(astr.data, astr.size - 1)->tok;
}
4278 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4280 int n
, l
, t1
, arg_size
, align
;
4281 Sym
**plast
, *s
, *first
;
4286 /* function type, or recursive declarator (return if so) */
4288 if (td
&& !(td
& TYPE_ABSTRACT
))
4292 else if (parse_btype(&pt
, &ad1
))
4303 /* read param name and compute offset */
4304 if (l
!= FUNC_OLD
) {
4305 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4307 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4308 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4309 tcc_error("parameter declared as void");
4313 expect("identifier");
4314 pt
.t
= VT_VOID
; /* invalid type */
4317 convert_parameter_type(&pt
);
4318 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4319 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4325 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4330 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4331 tcc_error("invalid type");
4334 /* if no parameters, then old type prototype */
4337 /* NOTE: const is ignored in returned type as it has a special
4338 meaning in gcc / C++ */
4339 type
->t
&= ~VT_CONSTANT
;
4340 /* some ancient pre-K&R C allows a function to return an array
4341 and the array brackets to be put after the arguments, such
4342 that "int c()[]" means something like "int[] c()" */
4345 skip(']'); /* only handle simple "[]" */
4348 /* we push a anonymous symbol which will contain the function prototype */
4349 ad
->f
.func_args
= arg_size
;
4350 ad
->f
.func_type
= l
;
4351 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4357 } else if (tok
== '[') {
4358 int saved_nocode_wanted
= nocode_wanted
;
4359 /* array definition */
4362 /* XXX The optional type-quals and static should only be accepted
4363 in parameter decls. The '*' as well, and then even only
4364 in prototypes (not function defs). */
4366 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4381 if (!local_stack
|| (storage
& VT_STATIC
))
4382 vpushi(expr_const());
4384 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4385 length must always be evaluated, even under nocode_wanted,
4386 so that its size slot is initialized (e.g. under sizeof
4391 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4394 tcc_error("invalid array size");
4396 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4397 tcc_error("size of variable length array should be an integer");
4402 /* parse next post type */
4403 post_type(type
, ad
, storage
, 0);
4404 if (type
->t
== VT_FUNC
)
4405 tcc_error("declaration of an array of functions");
4406 t1
|= type
->t
& VT_VLA
;
4409 loc
-= type_size(&int_type
, &align
);
4413 vla_runtime_type_size(type
, &align
);
4415 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4421 nocode_wanted
= saved_nocode_wanted
;
4423 /* we push an anonymous symbol which will contain the array
4425 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4426 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4432 /* Parse a type declarator (except basic type), and return the type
4433 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4434 expected. 'type' should contain the basic type. 'ad' is the
4435 attribute definition of the basic type. It can be modified by
4436 type_decl(). If this (possibly abstract) declarator is a pointer chain
4437 it returns the innermost pointed to type (equals *type, but is a different
4438 pointer), otherwise returns type itself, that's used for recursive calls. */
4439 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4442 int qualifiers
, storage
;
4444 /* recursive type, remove storage bits first, apply them later again */
4445 storage
= type
->t
& VT_STORAGE
;
4446 type
->t
&= ~VT_STORAGE
;
4449 while (tok
== '*') {
4457 qualifiers
|= VT_CONSTANT
;
4462 qualifiers
|= VT_VOLATILE
;
4468 /* XXX: clarify attribute handling */
4469 case TOK_ATTRIBUTE1
:
4470 case TOK_ATTRIBUTE2
:
4471 parse_attribute(ad
);
4475 type
->t
|= qualifiers
;
4477 /* innermost pointed to type is the one for the first derivation */
4478 ret
= pointed_type(type
);
4482 /* This is possibly a parameter type list for abstract declarators
4483 ('int ()'), use post_type for testing this. */
4484 if (!post_type(type
, ad
, 0, td
)) {
4485 /* It's not, so it's a nested declarator, and the post operations
4486 apply to the innermost pointed to type (if any). */
4487 /* XXX: this is not correct to modify 'ad' at this point, but
4488 the syntax is not clear */
4489 parse_attribute(ad
);
4490 post
= type_decl(type
, ad
, v
, td
);
4493 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4494 /* type identifier */
4498 if (!(td
& TYPE_ABSTRACT
))
4499 expect("identifier");
4502 post_type(post
, ad
, storage
, 0);
4503 parse_attribute(ad
);
/* compute the lvalue VT_LVAL_xxx needed to match type t. */
/* NOTE(review): the declarations/initialisation of r and bt and the
   VT_LVAL_BYTE / VT_LVAL_SHORT assignments in the branches are elided in
   this extraction. */
ST_FUNC int lvalue_type(int t)
{
    if (bt == VT_BYTE || bt == VT_BOOL)
    else if (bt == VT_SHORT)
    /* sub-word unsigned loads must zero-extend rather than sign-extend */
    if (t & VT_UNSIGNED)
        r |= VT_LVAL_UNSIGNED;
}
/* indirection with full error checking and bound check */
ST_FUNC void indir(void)
{
    if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
        /* dereferencing a function designator is a no-op in C (*f == f);
           NOTE(review): the return and the expect("pointer") error path
           are elided in this extraction */
        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
    }
    /* the pointer value itself must be materialised before the slot can be
       reused as an lvalue of the pointed-to type;
       NOTE(review): the gv(RC_INT) call in the then-branch is elided */
    if (vtop->r & VT_LVAL)
    vtop->type = *pointed_type(&vtop->type);
    /* Arrays and functions are never lvalues */
    if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
        && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
        vtop->r |= lvalue_type(vtop->type.t);
        /* if bound checking, the referenced pointer must be checked */
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check)
            vtop->r |= VT_MUSTBOUND;
#endif
    }
}
/* pass a parameter to a function and do type checking and casting */
/* NOTE(review): local declarations (func_type, type) and several interior
   lines are elided in this extraction; elisions are flagged inline. */
static void gfunc_param_typed(Sym *func, Sym *arg)
{
    func_type = func->f.func_type;
    /* unprototyped function, or variadic tail: apply default argument
       promotions instead of casting to a declared parameter type */
    if (func_type == FUNC_OLD ||
        (func_type == FUNC_ELLIPSIS && arg == NULL)) {
        /* default casting : only need to convert float to double */
        if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
            gen_cast_s(VT_DOUBLE);
        } else if (vtop->type.t & VT_BITFIELD) {
            /* widen a bit-field to its underlying (possibly unsigned) base
               type before passing it;
               NOTE(review): the gen_cast(&type) applying this is elided */
            type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
            type.ref = vtop->type.ref;
        }
    } else if (arg == NULL) {
        tcc_error("too many arguments to function");
    }
    /* prototyped parameter: cast the argument to the declared type;
       NOTE(review): the type = arg->type copy preceding these lines is
       elided in this extraction */
    type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
    gen_assign_cast(&type);
}
/* parse an expression and return its type without any side effect. */
/* NOTE(review): the entire body is elided in this extraction — presumably
   it suppresses code generation around a call to expr_fn, copies
   vtop->type into *type and pops the value; confirm upstream. */
static void expr_type(CType *type, void (*expr_fn)(void))
/* parse an expression of the form '(type)' or '(expr)' and return its
   type (continuation of this comment elided in the extraction).
   NOTE(review): local declarations and the surrounding skip('(') /
   skip(')') calls, plus the 'else' joining the two branches, are elided. */
static void parse_expr_type(CType *type)
{
    if (parse_btype(type, &ad)) {
        /* it was a type name: parse the abstract declarator part */
        type_decl(type, &ad, &n, TYPE_ABSTRACT);
        /* otherwise evaluate a full expression for its type only */
        expr_type(type, gexpr);
}
/* Parse a complete type name (basic type plus abstract declarator) into
   TYPE.
   NOTE(review): local declarations and the error branch inside the if
   (presumably expect("type")) are elided in this extraction. */
static void parse_type(CType *type)
{
    if (!parse_btype(type, &ad)) {
    }
    type_decl(type, &ad, &n, TYPE_ABSTRACT);
}
/* Parse a builtin's argument list driven by the format string ARGS:
   'e' = assignment-expression, 't' = type name.  NC presumably suppresses
   code generation while parsing (for builtins that only inspect types) —
   confirm upstream.
   NOTE(review): local declarations, the nocode_wanted bookkeeping, the
   separator skip() calls and the enclosing switch statement are elided in
   this extraction. */
static void parse_builtin_params(int nc, const char *args)
{
    while ((c = *args++)) {
        case 'e': expr_eq(); continue;
        case 't': parse_type(&t); vpush(&t); continue;
        default: tcc_error("internal error"); break;
    }
}
4632 ST_FUNC
void unary(void)
4634 int n
, t
, align
, size
, r
, sizeof_caller
;
4639 sizeof_caller
= in_sizeof
;
4642 /* XXX: GCC 2.95.3 does not generate a table although it should be
4650 #ifdef TCC_TARGET_PE
4651 t
= VT_SHORT
|VT_UNSIGNED
;
4659 vsetc(&type
, VT_CONST
, &tokc
);
4663 t
= VT_INT
| VT_UNSIGNED
;
4669 t
= VT_LLONG
| VT_UNSIGNED
;
4681 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4684 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4686 case TOK___FUNCTION__
:
4688 goto tok_identifier
;
4694 /* special function name identifier */
4695 len
= strlen(funcname
) + 1;
4696 /* generate char[len] type */
4701 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4702 if (!NODATA_WANTED
) {
4703 ptr
= section_ptr_add(data_section
, len
);
4704 memcpy(ptr
, funcname
, len
);
4710 #ifdef TCC_TARGET_PE
4711 t
= VT_SHORT
| VT_UNSIGNED
;
4717 /* string parsing */
4719 if (tcc_state
->char_is_unsigned
)
4720 t
= VT_BYTE
| VT_UNSIGNED
;
4722 if (tcc_state
->warn_write_strings
)
4727 memset(&ad
, 0, sizeof(AttributeDef
));
4728 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4733 if (parse_btype(&type
, &ad
)) {
4734 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4736 /* check ISOC99 compound literal */
4738 /* data is allocated locally by default */
4743 /* all except arrays are lvalues */
4744 if (!(type
.t
& VT_ARRAY
))
4745 r
|= lvalue_type(type
.t
);
4746 memset(&ad
, 0, sizeof(AttributeDef
));
4747 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4749 if (sizeof_caller
) {
4756 } else if (tok
== '{') {
4757 int saved_nocode_wanted
= nocode_wanted
;
4759 tcc_error("expected constant");
4760 /* save all registers */
4762 /* statement expression : we do not accept break/continue
4763 inside as GCC does. We do retain the nocode_wanted state,
4764 as statement expressions can't ever be entered from the
4765 outside, so any reactivation of code emission (from labels
4766 or loop heads) can be disabled again after the end of it. */
4767 block(NULL
, NULL
, 1);
4768 nocode_wanted
= saved_nocode_wanted
;
4783 /* functions names must be treated as function pointers,
4784 except for unary '&' and sizeof. Since we consider that
4785 functions are not lvalues, we only have to handle it
4786 there and in function calls. */
4787 /* arrays can also be used although they are not lvalues */
4788 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4789 !(vtop
->type
.t
& VT_ARRAY
))
4791 mk_pointer(&vtop
->type
);
4797 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4798 gen_cast_s(VT_BOOL
);
4799 vtop
->c
.i
= !vtop
->c
.i
;
4800 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4804 vseti(VT_JMP
, gvtst(1, 0));
4816 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4817 tcc_error("pointer not accepted for unary plus");
4818 /* In order to force cast, we add zero, except for floating point
4819 where we really need an noop (otherwise -0.0 will be transformed
4821 if (!is_float(vtop
->type
.t
)) {
4832 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
4833 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
4834 size
= type_size(&type
, &align
);
4835 if (s
&& s
->a
.aligned
)
4836 align
= 1 << (s
->a
.aligned
- 1);
4837 if (t
== TOK_SIZEOF
) {
4838 if (!(type
.t
& VT_VLA
)) {
4840 tcc_error("sizeof applied to an incomplete type");
4843 vla_runtime_type_size(&type
, &align
);
4848 vtop
->type
.t
|= VT_UNSIGNED
;
4851 case TOK_builtin_expect
:
4852 /* __builtin_expect is a no-op for now */
4853 parse_builtin_params(0, "ee");
4856 case TOK_builtin_types_compatible_p
:
4857 parse_builtin_params(0, "tt");
4858 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4859 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4860 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4864 case TOK_builtin_choose_expr
:
4891 case TOK_builtin_constant_p
:
4892 parse_builtin_params(1, "e");
4893 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4897 case TOK_builtin_frame_address
:
4898 case TOK_builtin_return_address
:
4904 if (tok
!= TOK_CINT
) {
4905 tcc_error("%s only takes positive integers",
4906 tok1
== TOK_builtin_return_address
?
4907 "__builtin_return_address" :
4908 "__builtin_frame_address");
4910 level
= (uint32_t)tokc
.i
;
4915 vset(&type
, VT_LOCAL
, 0); /* local frame */
4917 mk_pointer(&vtop
->type
);
4918 indir(); /* -> parent frame */
4920 if (tok1
== TOK_builtin_return_address
) {
4921 // assume return address is just above frame pointer on stack
4924 mk_pointer(&vtop
->type
);
4929 #ifdef TCC_TARGET_X86_64
4930 #ifdef TCC_TARGET_PE
4931 case TOK_builtin_va_start
:
4932 parse_builtin_params(0, "ee");
4933 r
= vtop
->r
& VT_VALMASK
;
4937 tcc_error("__builtin_va_start expects a local variable");
4939 vtop
->type
= char_pointer_type
;
4944 case TOK_builtin_va_arg_types
:
4945 parse_builtin_params(0, "t");
4946 vpushi(classify_x86_64_va_arg(&vtop
->type
));
4953 #ifdef TCC_TARGET_ARM64
4954 case TOK___va_start
: {
4955 parse_builtin_params(0, "ee");
4959 vtop
->type
.t
= VT_VOID
;
4962 case TOK___va_arg
: {
4963 parse_builtin_params(0, "et");
4971 case TOK___arm64_clear_cache
: {
4972 parse_builtin_params(0, "ee");
4975 vtop
->type
.t
= VT_VOID
;
4979 /* pre operations */
4990 t
= vtop
->type
.t
& VT_BTYPE
;
4992 /* In IEEE negate(x) isn't subtract(0,x), but rather
4996 vtop
->c
.f
= -1.0 * 0.0;
4997 else if (t
== VT_DOUBLE
)
4998 vtop
->c
.d
= -1.0 * 0.0;
5000 vtop
->c
.ld
= -1.0 * 0.0;
5008 goto tok_identifier
;
5010 /* allow to take the address of a label */
5011 if (tok
< TOK_UIDENT
)
5012 expect("label identifier");
5013 s
= label_find(tok
);
5015 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5017 if (s
->r
== LABEL_DECLARED
)
5018 s
->r
= LABEL_FORWARD
;
5021 s
->type
.t
= VT_VOID
;
5022 mk_pointer(&s
->type
);
5023 s
->type
.t
|= VT_STATIC
;
5025 vpushsym(&s
->type
, s
);
5031 CType controlling_type
;
5032 int has_default
= 0;
5035 TokenString
*str
= NULL
;
5036 int saved_const_wanted
= const_wanted
;
5041 expr_type(&controlling_type
, expr_eq
);
5042 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5043 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5044 mk_pointer(&controlling_type
);
5045 const_wanted
= saved_const_wanted
;
5049 if (tok
== TOK_DEFAULT
) {
5051 tcc_error("too many 'default'");
5057 AttributeDef ad_tmp
;
5060 parse_btype(&cur_type
, &ad_tmp
);
5061 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5062 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5064 tcc_error("type match twice");
5074 skip_or_save_block(&str
);
5076 skip_or_save_block(NULL
);
5083 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5084 tcc_error("type '%s' does not match any association", buf
);
5086 begin_macro(str
, 1);
5095 // special qnan , snan and infinity values
5100 vtop
->type
.t
= VT_FLOAT
;
5105 goto special_math_val
;
5108 goto special_math_val
;
5115 expect("identifier");
5117 if (!s
|| IS_ASM_SYM(s
)) {
5118 const char *name
= get_tok_str(t
, NULL
);
5120 tcc_error("'%s' undeclared", name
);
5121 /* for simple function calls, we tolerate undeclared
5122 external reference to int() function */
5123 if (tcc_state
->warn_implicit_function_declaration
5124 #ifdef TCC_TARGET_PE
5125 /* people must be warned about using undeclared WINAPI functions
5126 (which usually start with uppercase letter) */
5127 || (name
[0] >= 'A' && name
[0] <= 'Z')
5130 tcc_warning("implicit declaration of function '%s'", name
);
5131 s
= external_global_sym(t
, &func_old_type
, 0);
5135 /* A symbol that has a register is a local register variable,
5136 which starts out as VT_LOCAL value. */
5137 if ((r
& VT_VALMASK
) < VT_CONST
)
5138 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5140 vset(&s
->type
, r
, s
->c
);
5141 /* Point to s as backpointer (even without r&VT_SYM).
5142 Will be used by at least the x86 inline asm parser for
5148 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5149 vtop
->c
.i
= s
->enum_val
;
5154 /* post operations */
5156 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5159 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5162 if (tok
== TOK_ARROW
)
5164 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5167 /* expect pointer on structure */
5168 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5169 expect("struct or union");
5170 if (tok
== TOK_CDOUBLE
)
5171 expect("field name");
5173 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5174 expect("field name");
5175 s
= find_field(&vtop
->type
, tok
);
5177 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5178 /* add field offset to pointer */
5179 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5182 /* change type to field type, and set to lvalue */
5183 vtop
->type
= s
->type
;
5184 vtop
->type
.t
|= qualifiers
;
5185 /* an array is never an lvalue */
5186 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5187 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5188 #ifdef CONFIG_TCC_BCHECK
5189 /* if bound checking, the referenced pointer must be checked */
5190 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5191 vtop
->r
|= VT_MUSTBOUND
;
5195 } else if (tok
== '[') {
5201 } else if (tok
== '(') {
5204 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5207 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5208 /* pointer test (no array accepted) */
5209 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5210 vtop
->type
= *pointed_type(&vtop
->type
);
5211 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5215 expect("function pointer");
5218 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5220 /* get return type */
5223 sa
= s
->next
; /* first parameter */
5224 nb_args
= regsize
= 0;
5226 /* compute first implicit argument if a structure is returned */
5227 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5228 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5229 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5230 &ret_align
, ®size
);
5232 /* get some space for the returned structure */
5233 size
= type_size(&s
->type
, &align
);
5234 #ifdef TCC_TARGET_ARM64
5235 /* On arm64, a small struct is return in registers.
5236 It is much easier to write it to memory if we know
5237 that we are allowed to write some extra bytes, so
5238 round the allocated space up to a power of 2: */
5240 while (size
& (size
- 1))
5241 size
= (size
| (size
- 1)) + 1;
5243 loc
= (loc
- size
) & -align
;
5245 ret
.r
= VT_LOCAL
| VT_LVAL
;
5246 /* pass it as 'int' to avoid structure arg passing
5248 vseti(VT_LOCAL
, loc
);
5258 /* return in register */
5259 if (is_float(ret
.type
.t
)) {
5260 ret
.r
= reg_fret(ret
.type
.t
);
5261 #ifdef TCC_TARGET_X86_64
5262 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5266 #ifndef TCC_TARGET_ARM64
5267 #ifdef TCC_TARGET_X86_64
5268 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5270 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5281 gfunc_param_typed(s
, sa
);
5291 tcc_error("too few arguments to function");
5293 gfunc_call(nb_args
);
5296 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5297 vsetc(&ret
.type
, r
, &ret
.c
);
5298 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5301 /* handle packed struct return */
5302 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5305 size
= type_size(&s
->type
, &align
);
5306 /* We're writing whole regs often, make sure there's enough
5307 space. Assume register size is power of 2. */
5308 if (regsize
> align
)
5310 loc
= (loc
- size
) & -align
;
5314 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5318 if (--ret_nregs
== 0)
5322 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5330 ST_FUNC
void expr_prod(void)
5335 while (tok
== '*' || tok
== '/' || tok
== '%') {
5343 ST_FUNC
void expr_sum(void)
5348 while (tok
== '+' || tok
== '-') {
5356 static void expr_shift(void)
5361 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5369 static void expr_cmp(void)
5374 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5375 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5383 static void expr_cmpeq(void)
5388 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5396 static void expr_and(void)
5399 while (tok
== '&') {
5406 static void expr_xor(void)
5409 while (tok
== '^') {
5416 static void expr_or(void)
5419 while (tok
== '|') {
5426 static void expr_land(void)
5429 if (tok
== TOK_LAND
) {
5432 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5433 gen_cast_s(VT_BOOL
);
5438 while (tok
== TOK_LAND
) {
5454 if (tok
!= TOK_LAND
) {
5467 static void expr_lor(void)
5470 if (tok
== TOK_LOR
) {
5473 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5474 gen_cast_s(VT_BOOL
);
5479 while (tok
== TOK_LOR
) {
5495 if (tok
!= TOK_LOR
) {
5508 /* Assuming vtop is a value used in a conditional context
5509 (i.e. compared with zero) return 0 if it's false, 1 if
5510 true and -1 if it can't be statically determined. */
5511 static int condition_3way(void)
5514 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5515 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5517 gen_cast_s(VT_BOOL
);
5524 static void expr_cond(void)
5526 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5528 CType type
, type1
, type2
;
5533 c
= condition_3way();
5534 g
= (tok
== ':' && gnu_ext
);
5536 /* needed to avoid having different registers saved in
5538 if (is_float(vtop
->type
.t
)) {
5540 #ifdef TCC_TARGET_X86_64
5541 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5566 sv
= *vtop
; /* save value to handle it later */
5567 vtop
--; /* no vpop so that FP stack is not flushed */
5585 bt1
= t1
& VT_BTYPE
;
5587 bt2
= t2
& VT_BTYPE
;
5590 /* cast operands to correct type according to ISOC rules */
5591 if (is_float(bt1
) || is_float(bt2
)) {
5592 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5593 type
.t
= VT_LDOUBLE
;
5595 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5600 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5601 /* cast to biggest op */
5602 type
.t
= VT_LLONG
| VT_LONG
;
5603 if (bt1
== VT_LLONG
)
5605 if (bt2
== VT_LLONG
)
5607 /* convert to unsigned if it does not fit in a long long */
5608 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5609 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5610 type
.t
|= VT_UNSIGNED
;
5611 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5612 /* If one is a null ptr constant the result type
5614 if (is_null_pointer (vtop
))
5616 else if (is_null_pointer (&sv
))
5618 /* XXX: test pointer compatibility, C99 has more elaborate
5622 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
5623 /* XXX: test function pointer compatibility */
5624 type
= bt1
== VT_FUNC
? type1
: type2
;
5625 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5626 /* XXX: test structure compatibility */
5627 type
= bt1
== VT_STRUCT
? type1
: type2
;
5628 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5629 /* NOTE: as an extension, we accept void on only one side */
5632 /* integer operations */
5633 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5634 /* convert to unsigned if it does not fit in an integer */
5635 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5636 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5637 type
.t
|= VT_UNSIGNED
;
5639 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5640 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5641 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5644 /* now we convert second operand */
5648 mk_pointer(&vtop
->type
);
5650 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5655 if (is_float(type
.t
)) {
5657 #ifdef TCC_TARGET_X86_64
5658 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5662 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5663 /* for long longs, we use fixed registers to avoid having
5664 to handle a complicated move */
5675 /* this is horrible, but we must also convert first
5681 mk_pointer(&vtop
->type
);
5683 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5689 move_reg(r2
, r1
, type
.t
);
5699 static void expr_eq(void)
5705 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5706 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5707 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5722 ST_FUNC
void gexpr(void)
5733 /* parse a constant expression and return value in vtop. */
5734 static void expr_const1(void)
5743 /* parse an integer constant and return its value. */
5744 static inline int64_t expr_const64(void)
5748 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5749 expect("constant expression");
5755 /* parse an integer constant and return its value.
5756 Complain if it doesn't fit 32bit (signed or unsigned). */
5757 ST_FUNC
int expr_const(void)
5760 int64_t wc
= expr_const64();
5762 if (c
!= wc
&& (unsigned)c
!= wc
)
5763 tcc_error("constant exceeds 32 bit");
5767 /* return the label token if current token is a label, otherwise
5769 static int is_label(void)
5773 /* fast test first */
5774 if (tok
< TOK_UIDENT
)
5776 /* no need to save tokc because tok is an identifier */
5782 unget_tok(last_tok
);
5787 #ifndef TCC_TARGET_ARM64
5788 static void gfunc_return(CType
*func_type
)
5790 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5791 CType type
, ret_type
;
5792 int ret_align
, ret_nregs
, regsize
;
5793 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5794 &ret_align
, ®size
);
5795 if (0 == ret_nregs
) {
5796 /* if returning structure, must copy it to implicit
5797 first pointer arg location */
5800 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5803 /* copy structure value to pointer */
5806 /* returning structure packed into registers */
5807 int r
, size
, addr
, align
;
5808 size
= type_size(func_type
,&align
);
5809 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5810 (vtop
->c
.i
& (ret_align
-1)))
5811 && (align
& (ret_align
-1))) {
5812 loc
= (loc
- size
) & -ret_align
;
5815 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5819 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5821 vtop
->type
= ret_type
;
5822 if (is_float(ret_type
.t
))
5823 r
= rc_fret(ret_type
.t
);
5834 if (--ret_nregs
== 0)
5836 /* We assume that when a structure is returned in multiple
5837 registers, their classes are consecutive values of the
5840 vtop
->c
.i
+= regsize
;
5844 } else if (is_float(func_type
->t
)) {
5845 gv(rc_fret(func_type
->t
));
5849 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5853 static int case_cmp(const void *pa
, const void *pb
)
5855 int64_t a
= (*(struct case_t
**) pa
)->v1
;
5856 int64_t b
= (*(struct case_t
**) pb
)->v1
;
5857 return a
< b
? -1 : a
> b
;
5860 static void gcase(struct case_t
**base
, int len
, int *bsym
)
5864 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
5882 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
5884 gcase(base
, len
/2, bsym
);
5885 if (cur_switch
->def_sym
)
5886 gjmp_addr(cur_switch
->def_sym
);
5888 *bsym
= gjmp(*bsym
);
5892 base
+= e
; len
-= e
;
5902 if (p
->v1
== p
->v2
) {
5904 gtst_addr(0, p
->sym
);
5914 gtst_addr(0, p
->sym
);
5920 static void block(int *bsym
, int *csym
, int is_expr
)
5922 int a
, b
, c
, d
, cond
;
5925 /* generate line number info */
5926 if (tcc_state
->do_debug
)
5927 tcc_debug_line(tcc_state
);
5930 /* default return value is (void) */
5932 vtop
->type
.t
= VT_VOID
;
5935 if (tok
== TOK_IF
) {
5937 int saved_nocode_wanted
= nocode_wanted
;
5942 cond
= condition_3way();
5948 nocode_wanted
|= 0x20000000;
5949 block(bsym
, csym
, 0);
5951 nocode_wanted
= saved_nocode_wanted
;
5953 if (c
== TOK_ELSE
) {
5958 nocode_wanted
|= 0x20000000;
5959 block(bsym
, csym
, 0);
5960 gsym(d
); /* patch else jmp */
5962 nocode_wanted
= saved_nocode_wanted
;
5965 } else if (tok
== TOK_WHILE
) {
5966 int saved_nocode_wanted
;
5967 nocode_wanted
&= ~0x20000000;
5977 saved_nocode_wanted
= nocode_wanted
;
5979 nocode_wanted
= saved_nocode_wanted
;
5984 } else if (tok
== '{') {
5986 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
5989 /* record local declaration stack position */
5991 llabel
= local_label_stack
;
5994 /* handle local labels declarations */
5995 if (tok
== TOK_LABEL
) {
5998 if (tok
< TOK_UIDENT
)
5999 expect("label identifier");
6000 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6010 while (tok
!= '}') {
6011 if ((a
= is_label()))
6018 block(bsym
, csym
, is_expr
);
6021 /* pop locally defined labels */
6022 label_pop(&local_label_stack
, llabel
, is_expr
);
6023 /* pop locally defined symbols */
6025 /* In the is_expr case (a statement expression is finished here),
6026 vtop might refer to symbols on the local_stack. Either via the
6027 type or via vtop->sym. We can't pop those nor any that in turn
6028 might be referred to. To make it easier we don't roll back
6029 any symbols in that case; some upper level call to block() will
6030 do that. We do have to remove such symbols from the lookup
6031 tables, though. sym_pop will do that. */
6032 sym_pop(&local_stack
, s
, is_expr
);
6034 /* Pop VLA frames and restore stack pointer if required */
6035 if (vlas_in_scope
> saved_vlas_in_scope
) {
6036 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
6039 vlas_in_scope
= saved_vlas_in_scope
;
6042 } else if (tok
== TOK_RETURN
) {
6046 gen_assign_cast(&func_vt
);
6047 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6050 gfunc_return(&func_vt
);
6053 /* jump unless last stmt in top-level block */
6054 if (tok
!= '}' || local_scope
!= 1)
6056 nocode_wanted
|= 0x20000000;
6057 } else if (tok
== TOK_BREAK
) {
6060 tcc_error("cannot break");
6061 *bsym
= gjmp(*bsym
);
6064 nocode_wanted
|= 0x20000000;
6065 } else if (tok
== TOK_CONTINUE
) {
6068 tcc_error("cannot continue");
6069 vla_sp_restore_root();
6070 *csym
= gjmp(*csym
);
6073 } else if (tok
== TOK_FOR
) {
6075 int saved_nocode_wanted
;
6076 nocode_wanted
&= ~0x20000000;
6082 /* c99 for-loop init decl? */
6083 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6084 /* no, regular for-loop init expr */
6110 saved_nocode_wanted
= nocode_wanted
;
6112 nocode_wanted
= saved_nocode_wanted
;
6117 sym_pop(&local_stack
, s
, 0);
6120 if (tok
== TOK_DO
) {
6121 int saved_nocode_wanted
;
6122 nocode_wanted
&= ~0x20000000;
6128 saved_nocode_wanted
= nocode_wanted
;
6134 nocode_wanted
= saved_nocode_wanted
;
6138 nocode_wanted
= saved_nocode_wanted
;
6143 if (tok
== TOK_SWITCH
) {
6144 struct switch_t
*saved
, sw
;
6145 int saved_nocode_wanted
= nocode_wanted
;
6151 switchval
= *vtop
--;
6153 b
= gjmp(0); /* jump to first case */
6154 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6158 nocode_wanted
= saved_nocode_wanted
;
6159 a
= gjmp(a
); /* add implicit break */
6162 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6163 for (b
= 1; b
< sw
.n
; b
++)
6164 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6165 tcc_error("duplicate case value");
6166 /* Our switch table sorting is signed, so the compared
6167 value needs to be as well when it's 64bit. */
6168 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6169 switchval
.type
.t
&= ~VT_UNSIGNED
;
6171 gcase(sw
.p
, sw
.n
, &a
);
6174 gjmp_addr(sw
.def_sym
);
6175 dynarray_reset(&sw
.p
, &sw
.n
);
6180 if (tok
== TOK_CASE
) {
6181 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6184 nocode_wanted
&= ~0x20000000;
6186 cr
->v1
= cr
->v2
= expr_const64();
6187 if (gnu_ext
&& tok
== TOK_DOTS
) {
6189 cr
->v2
= expr_const64();
6190 if (cr
->v2
< cr
->v1
)
6191 tcc_warning("empty case range");
6194 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6197 goto block_after_label
;
6199 if (tok
== TOK_DEFAULT
) {
6204 if (cur_switch
->def_sym
)
6205 tcc_error("too many 'default'");
6206 cur_switch
->def_sym
= ind
;
6208 goto block_after_label
;
6210 if (tok
== TOK_GOTO
) {
6212 if (tok
== '*' && gnu_ext
) {
6216 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6219 } else if (tok
>= TOK_UIDENT
) {
6220 s
= label_find(tok
);
6221 /* put forward definition if needed */
6223 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6225 if (s
->r
== LABEL_DECLARED
)
6226 s
->r
= LABEL_FORWARD
;
6228 vla_sp_restore_root();
6229 if (s
->r
& LABEL_FORWARD
)
6230 s
->jnext
= gjmp(s
->jnext
);
6232 gjmp_addr(s
->jnext
);
6235 expect("label identifier");
6238 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
6247 if (s
->r
== LABEL_DEFINED
)
6248 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6250 s
->r
= LABEL_DEFINED
;
6252 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
6256 /* we accept this, but it is a mistake */
6258 nocode_wanted
&= ~0x20000000;
6260 tcc_warning("deprecated use of label at end of compound statement");
6264 block(bsym
, csym
, is_expr
);
6267 /* expression case */
6282 /* This skips over a stream of tokens containing balanced {} and ()
6283 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6284 with a '{'). If STR then allocates and stores the skipped tokens
6285 in *STR. This doesn't check if () and {} are nested correctly,
6286 i.e. "({)}" is accepted. */
6287 static void skip_or_save_block(TokenString
**str
)
6289 int braces
= tok
== '{';
6292 *str
= tok_str_alloc();
6294 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6296 if (tok
== TOK_EOF
) {
6297 if (str
|| level
> 0)
6298 tcc_error("unexpected end of file");
6303 tok_str_add_tok(*str
);
6306 if (t
== '{' || t
== '(') {
6308 } else if (t
== '}' || t
== ')') {
6310 if (level
== 0 && braces
&& t
== '}')
6315 tok_str_add(*str
, -1);
6316 tok_str_add(*str
, 0);
6320 #define EXPR_CONST 1
6323 static void parse_init_elem(int expr_type
)
6325 int saved_global_expr
;
6328 /* compound literals must be allocated globally in this case */
6329 saved_global_expr
= global_expr
;
6332 global_expr
= saved_global_expr
;
6333 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6334 (compound literals). */
6335 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6336 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6337 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6338 #ifdef TCC_TARGET_PE
6339 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6342 tcc_error("initializer element is not constant");
6350 /* put zeros for variable based init */
6351 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6354 /* nothing to do because globals are already set to zero */
6356 vpush_global_sym(&func_old_type
, TOK_memset
);
6358 #ifdef TCC_TARGET_ARM
6369 /* t is the array or struct type. c is the array or struct
6370 address. cur_field is the pointer to the current
6371 field, for arrays the 'c' member contains the current start
6372 index. 'size_only' is true if only size info is needed (only used
6373 in arrays). al contains the already initialized length of the
6374 current container (starting at c). This returns the new length of that. */
6375 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6376 Sym
**cur_field
, int size_only
, int al
)
6379 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6380 unsigned long corig
= c
;
6384 if (gnu_ext
&& (l
= is_label()) != 0)
6386 /* NOTE: we only support ranges for last designator */
6387 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6389 if (!(type
->t
& VT_ARRAY
))
6390 expect("array type");
6392 index
= index_last
= expr_const();
6393 if (tok
== TOK_DOTS
&& gnu_ext
) {
6395 index_last
= expr_const();
6399 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6401 tcc_error("invalid index");
6403 (*cur_field
)->c
= index_last
;
6404 type
= pointed_type(type
);
6405 elem_size
= type_size(type
, &align
);
6406 c
+= index
* elem_size
;
6407 nb_elems
= index_last
- index
+ 1;
6413 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6414 expect("struct/union type");
6415 f
= find_field(type
, l
);
6428 } else if (!gnu_ext
) {
6432 if (type
->t
& VT_ARRAY
) {
6433 index
= (*cur_field
)->c
;
6434 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6435 tcc_error("index too large");
6436 type
= pointed_type(type
);
6437 c
+= index
* type_size(type
, &align
);
6440 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6441 *cur_field
= f
= f
->next
;
6443 tcc_error("too many field init");
6448 /* must put zero in holes (note that doing it that way
6449 ensures that it even works with designators) */
6450 if (!size_only
&& c
- corig
> al
)
6451 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6452 decl_initializer(type
, sec
, c
, 0, size_only
);
6454 /* XXX: make it more general */
6455 if (!size_only
&& nb_elems
> 1) {
6456 unsigned long c_end
;
6461 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6462 for (i
= 1; i
< nb_elems
; i
++) {
6463 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6468 } else if (!NODATA_WANTED
) {
6469 c_end
= c
+ nb_elems
* elem_size
;
6470 if (c_end
> sec
->data_allocated
)
6471 section_realloc(sec
, c_end
);
6472 src
= sec
->data
+ c
;
6474 for(i
= 1; i
< nb_elems
; i
++) {
6476 memcpy(dst
, src
, elem_size
);
6480 c
+= nb_elems
* type_size(type
, &align
);
6486 /* store a value or an expression directly in global data or in local array */
6487 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6494 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6498 /* XXX: not portable */
6499 /* XXX: generate error if incorrect relocation */
6500 gen_assign_cast(&dtype
);
6501 bt
= type
->t
& VT_BTYPE
;
6503 if ((vtop
->r
& VT_SYM
)
6506 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6507 || (type
->t
& VT_BITFIELD
))
6508 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6510 tcc_error("initializer element is not computable at load time");
6512 if (NODATA_WANTED
) {
6517 size
= type_size(type
, &align
);
6518 section_reserve(sec
, c
+ size
);
6519 ptr
= sec
->data
+ c
;
6521 /* XXX: make code faster ? */
6522 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6523 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6524 /* XXX This rejects compound literals like
6525 '(void *){ptr}'. The problem is that '&sym' is
6526 represented the same way, which would be ruled out
6527 by the SYM_FIRST_ANOM check above, but also '"string"'
6528 in 'char *p = "string"' is represented the same
6529 with the type being VT_PTR and the symbol being an
6530 anonymous one. That is, there's no difference in vtop
6531 between '(void *){x}' and '&(void *){x}'. Ignore
6532 pointer typed entities here. Hopefully no real code
6533 will every use compound literals with scalar type. */
6534 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6535 /* These come from compound literals, memcpy stuff over. */
6539 esym
= elfsym(vtop
->sym
);
6540 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6541 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6543 /* We need to copy over all memory contents, and that
6544 includes relocations. Use the fact that relocs are
6545 created it order, so look from the end of relocs
6546 until we hit one before the copied region. */
6547 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6548 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6549 while (num_relocs
--) {
6551 if (rel
->r_offset
>= esym
->st_value
+ size
)
6553 if (rel
->r_offset
< esym
->st_value
)
6555 /* Note: if the same fields are initialized multiple
6556 times (possible with designators) then we possibly
6557 add multiple relocations for the same offset here.
6558 That would lead to wrong code, the last reloc needs
6559 to win. We clean this up later after the whole
6560 initializer is parsed. */
6561 put_elf_reloca(symtab_section
, sec
,
6562 c
+ rel
->r_offset
- esym
->st_value
,
6563 ELFW(R_TYPE
)(rel
->r_info
),
6564 ELFW(R_SYM
)(rel
->r_info
),
6574 if (type
->t
& VT_BITFIELD
) {
6575 int bit_pos
, bit_size
, bits
, n
;
6576 unsigned char *p
, v
, m
;
6577 bit_pos
= BIT_POS(vtop
->type
.t
);
6578 bit_size
= BIT_SIZE(vtop
->type
.t
);
6579 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6580 bit_pos
&= 7, bits
= 0;
6585 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6586 m
= ((1 << n
) - 1) << bit_pos
;
6587 *p
= (*p
& ~m
) | (v
& m
);
6588 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6592 /* XXX: when cross-compiling we assume that each type has the
6593 same representation on host and target, which is likely to
6594 be wrong in the case of long double */
6596 vtop
->c
.i
= vtop
->c
.i
!= 0;
6598 *(char *)ptr
|= vtop
->c
.i
;
6601 *(short *)ptr
|= vtop
->c
.i
;
6604 *(float*)ptr
= vtop
->c
.f
;
6607 *(double *)ptr
= vtop
->c
.d
;
6610 #if defined TCC_IS_NATIVE_387
6611 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6612 memcpy(ptr
, &vtop
->c
.ld
, 10);
6614 else if (sizeof (long double) == sizeof (double))
6615 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
6617 else if (vtop
->c
.ld
== 0.0)
6621 if (sizeof(long double) == LDOUBLE_SIZE
)
6622 *(long double*)ptr
= vtop
->c
.ld
;
6623 else if (sizeof(double) == LDOUBLE_SIZE
)
6624 *(double *)ptr
= (double)vtop
->c
.ld
;
6626 tcc_error("can't cross compile long double constants");
6630 *(long long *)ptr
|= vtop
->c
.i
;
6637 addr_t val
= vtop
->c
.i
;
6639 if (vtop
->r
& VT_SYM
)
6640 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6642 *(addr_t
*)ptr
|= val
;
6644 if (vtop
->r
& VT_SYM
)
6645 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6646 *(addr_t
*)ptr
|= val
;
6652 int val
= vtop
->c
.i
;
6654 if (vtop
->r
& VT_SYM
)
6655 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6659 if (vtop
->r
& VT_SYM
)
6660 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6669 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6676 /* 't' contains the type and storage info. 'c' is the offset of the
6677 object in section 'sec'. If 'sec' is NULL, it means stack based
6678 allocation. 'first' is true if array '{' must be read (multi
6679 dimension implicit array init handling). 'size_only' is true if
6680 size only evaluation is wanted (only for arrays). */
/* NOTE(review): this chunk is a partial extraction of decl_initializer();
   several original lines (braces, break statements, #else/#endif arms)
   are absent from this view, so the fragments below are not a complete
   body. Comments only were added; every code token is untouched. */
6681 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6682 int first
, int size_only
)
6684 int len
, n
, no_oblock
, nb
, i
;
6691 /* If we currently are at an '}' or ',' we have read an initializer
6692 element in one of our callers, and not yet consumed it. */
6693 have_elem
= tok
== '}' || tok
== ',';
/* Parse one initializer element now unless it is a brace-enclosed
   sub-initializer or a string literal (strings get array-specific
   handling below). */
6694 if (!have_elem
&& tok
!= '{' &&
6695 /* In case of strings we have special handling for arrays, so
6696 don't consume them as initializer value (which would commit them
6697 to some anonymous symbol). */
6698 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6700 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
/* Scalar (non-array) target whose type matches the parsed value:
   store it directly at offset c. */
6705 !(type
->t
& VT_ARRAY
) &&
6706 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6707 The source type might have VT_CONSTANT set, which is
6708 of course assignable to non-const elements. */
6709 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6710 init_putv(type
, sec
, c
);
/* Array initializer: compute element type/size first. */
6711 } else if (type
->t
& VT_ARRAY
) {
6714 t1
= pointed_type(type
);
6715 size1
= type_size(t1
, &align1
);
6718 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6721 tcc_error("character array initializer must be a literal,"
6722 " optionally enclosed in braces");
6727 /* only parse strings here if correct type (otherwise: handle
6728 them as ((w)char *) expressions */
6729 if ((tok
== TOK_LSTR
&&
6730 #ifdef TCC_TARGET_PE
6731 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6733 (t1
->t
& VT_BTYPE
) == VT_INT
6735 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
/* Consume a sequence of adjacent (wide) string literals. */
6737 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6740 /* compute maximum number of chars wanted */
6742 cstr_len
= tokc
.str
.size
;
6744 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6747 if (n
>= 0 && nb
> (n
- len
))
6751 tcc_warning("initializer-string for array is too long");
6752 /* in order to go faster for common case (char
6753 string in global variable), we handle it specially */
6755 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6757 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
6761 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6763 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6765 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
6772 /* only add trailing zero if enough storage (no
6773 warning in this case since it is standard) */
6774 if (n
< 0 || len
< n
) {
6777 init_putv(t1
, sec
, c
+ (len
* size1
));
/* Generic brace-list path: iterate designators/elements until '}'. */
6788 while (tok
!= '}' || have_elem
) {
6789 len
= decl_designator(type
, sec
, c
, &f
, size_only
, len
);
6791 if (type
->t
& VT_ARRAY
) {
6793 /* special test for multi dimensional arrays (may not
6794 be strictly correct if designators are used at the
same level) */
6796 if (no_oblock
&& len
>= n
*size1
)
6799 if (s
->type
.t
== VT_UNION
)
6803 if (no_oblock
&& f
== NULL
)
6812 /* put zeros at the end */
6813 if (!size_only
&& len
< n
*size1
)
6814 init_putz(sec
, c
+ len
, n
*size1
- len
);
6817 /* patch type size if needed, which happens only for array types */
6819 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
6820 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6823 if (first
|| tok
== '{') {
6831 } else if (tok
== '{') {
/* redundant braces around a scalar: recurse once */
6833 decl_initializer(type
, sec
, c
, first
, size_only
);
6835 } else if (size_only
) {
6836 /* If we supported only ISO C we wouldn't have to accept calling
6837 this on anything than an array size_only==1 (and even then
6838 only on the outermost level, so no recursion would be needed),
6839 because initializing a flex array member isn't supported.
6840 But GNU C supports it, so we need to recurse even into
6841 subfields of structs and arrays when size_only is set. */
6842 /* just skip expression */
6843 skip_or_save_block(NULL
);
6846 /* This should happen only when we haven't parsed
6847 the init element above for fear of committing a
6848 string constant to memory too early. */
6849 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
6850 expect("string constant");
6851 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6853 init_putv(type
, sec
, c
);
6857 /* parse an initializer for type 't' if 'has_init' is non zero, and
6858 allocate space in local or global data space ('r' is either
6859 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6860 variable 'v' of scope 'scope' is declared before initializers
6861 are parsed. If 'v' is zero, then a reference to the new object
6862 is put in the value stack. If 'has_init' is 2, a special parsing
6863 is done to handle string constants. */
/* NOTE(review): partial extraction of decl_initializer_alloc(); missing
   original lines (braces, #endif's, error paths) mean the fragments are
   not complete on their own. Comments only were added. */
6864 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
6865 int has_init
, int v
, int scope
)
6867 int size
, align
, addr
;
6868 TokenString
*init_str
= NULL
;
6871 Sym
*flexible_array
;
6873 int saved_nocode_wanted
= nocode_wanted
;
6874 #ifdef CONFIG_TCC_BCHECK
6878 /* Always allocate static or global variables */
6879 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
6880 nocode_wanted
|= 0x80000000;
6882 #ifdef CONFIG_TCC_BCHECK
6883 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
/* Detect a trailing flexible array member (negative element count)
   in a struct type, so its size can be accounted for after parsing
   the initializer. */
6886 flexible_array
= NULL
;
6887 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6888 Sym
*field
= type
->ref
->next
;
6891 field
= field
->next
;
6892 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
6893 flexible_array
= field
;
6897 size
= type_size(type
, &align
);
6898 /* If unknown size, we must evaluate it before
6899 evaluating initializers because
6900 initializers can generate global data too
6901 (e.g. string pointers or ISOC99 compound
6902 literals). It also simplifies local
6903 initializers handling */
6904 if (size
< 0 || (flexible_array
&& has_init
)) {
6906 tcc_error("unknown type size");
6907 /* get all init string */
6908 if (has_init
== 2) {
6909 init_str
= tok_str_alloc();
6910 /* only get strings */
6911 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6912 tok_str_add_tok(init_str
);
6915 tok_str_add(init_str
, -1);
6916 tok_str_add(init_str
, 0);
6918 skip_or_save_block(&init_str
);
/* first pass over the saved tokens: size-only evaluation to fix
   the unknown array/flex size, then rewind for the real parse */
6923 begin_macro(init_str
, 1);
6925 decl_initializer(type
, NULL
, 0, 1, 1);
6926 /* prepare second initializer parsing */
6927 macro_ptr
= init_str
->str
;
6930 /* if still unknown size, error */
6931 size
= type_size(type
, &align
);
6933 tcc_error("unknown type size");
6935 /* If there's a flex member and it was used in the initializer
adjust size accordingly. */
6937 if (flexible_array
&&
6938 flexible_array
->type
.ref
->c
> 0)
6939 size
+= flexible_array
->type
.ref
->c
6940 * pointed_size(&flexible_array
->type
);
6941 /* take into account specified alignment if bigger */
6942 if (ad
->a
.aligned
) {
6943 int speca
= 1 << (ad
->a
.aligned
- 1);
6946 } else if (ad
->a
.packed
) {
6950 if (!v
&& NODATA_WANTED
)
6951 size
= 0, align
= 1;
/* Stack (automatic) allocation path. */
6953 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
6955 #ifdef CONFIG_TCC_BCHECK
6956 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
/* grow the local frame downwards, keeping the requested alignment */
6960 loc
= (loc
- size
) & -align
;
6962 #ifdef CONFIG_TCC_BCHECK
6963 /* handles bounds */
6964 /* XXX: currently, since we do only one pass, we cannot track
6965 '&' operators, so we add only arrays */
6966 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
6968 /* add padding between regions */
6970 /* then add local bound info */
6971 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
6972 bounds_ptr
[0] = addr
;
6973 bounds_ptr
[1] = size
;
6977 /* local variable */
6978 #ifdef CONFIG_TCC_ASM
6979 if (ad
->asm_label
) {
6980 int reg
= asm_parse_regvar(ad
->asm_label
);
6982 r
= (r
& ~VT_VALMASK
) | reg
;
6985 sym
= sym_push(v
, type
, r
, addr
);
6988 /* push local reference */
6989 vset(type
, r
, addr
);
/* Global / static allocation path. */
6992 if (v
&& scope
== VT_CONST
) {
6993 /* see if the symbol was already defined */
6996 patch_storage(sym
, ad
, type
);
6997 /* we accept several definitions of the same global variable. */
6998 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7003 /* allocate symbol in corresponding section */
7008 else if (tcc_state
->nocommon
)
7013 addr
= section_add(sec
, size
, align
);
7014 #ifdef CONFIG_TCC_BCHECK
7015 /* add padding if bound check */
7017 section_add(sec
, 1, 1);
7020 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7021 sec
= common_section
;
7026 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7027 patch_storage(sym
, ad
, NULL
);
7029 /* Local statics have a scope until now (for
7030 warnings), remove it here. */
7032 /* update symbol definition */
7033 put_extern_sym(sym
, sec
, addr
, size
);
7035 /* push global reference */
7036 sym
= get_sym_ref(type
, sec
, addr
, size
);
7037 vpushsym(type
, sym
);
7041 #ifdef CONFIG_TCC_BCHECK
7042 /* handles bounds now because the symbol must be defined
7043 before for the relocation */
7047 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7048 /* then add global bound info */
7049 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7050 bounds_ptr
[0] = 0; /* relocated */
7051 bounds_ptr
[1] = size
;
/* VLA: allocate at runtime and remember the stack pointer so the
   enclosing scope can restore it. */
7056 if (type
->t
& VT_VLA
) {
7062 /* save current stack pointer */
7063 if (vlas_in_scope
== 0) {
7064 if (vla_sp_root_loc
== -1)
7065 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
7066 gen_vla_sp_save(vla_sp_root_loc
);
7069 vla_runtime_type_size(type
, &a
);
7070 gen_vla_alloc(type
, a
);
7071 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7072 /* on _WIN64, because of the function args scratch area, the
7073 result of alloca differs from RSP and is returned in RAX. */
7074 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7076 gen_vla_sp_save(addr
);
/* Non-VLA with initializer: parse it, then deduplicate relocations
   added for fields initialized more than once (designators). */
7080 } else if (has_init
) {
7081 size_t oldreloc_offset
= 0;
7082 if (sec
&& sec
->reloc
)
7083 oldreloc_offset
= sec
->reloc
->data_offset
;
7084 decl_initializer(type
, sec
, addr
, 1, 0);
7085 if (sec
&& sec
->reloc
)
7086 squeeze_multi_relocs(sec
, oldreloc_offset
);
7087 /* patch flexible array member size back to -1, */
7088 /* for possible subsequent similar declarations */
7090 flexible_array
->type
.ref
->c
= -1;
7094 /* restore parse state if needed */
7100 nocode_wanted
= saved_nocode_wanted
;
7103 /* parse a function defined by symbol 'sym' and generate its code in
7104 'cur_text_section' */
/* NOTE(review): partial extraction of gen_function(); several original
   lines are missing from this view. Comments only were added. */
7105 static void gen_function(Sym
*sym
)
/* start code emission at the current end of the text section,
   aligning with NOP fill if the function requests alignment */
7108 ind
= cur_text_section
->data_offset
;
7109 if (sym
->a
.aligned
) {
7110 size_t newoff
= section_add(cur_text_section
, 0,
7111 1 << (sym
->a
.aligned
- 1));
7112 gen_fill_nops(newoff
- ind
);
7114 /* NOTE: we patch the symbol size later */
7115 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7116 funcname
= get_tok_str(sym
->v
, NULL
);
7118 /* Initialize VLA state */
7120 vla_sp_root_loc
= -1;
7121 /* put debug symbol */
7122 tcc_debug_funcstart(tcc_state
, sym
);
7123 /* push a dummy symbol to enable local sym storage */
7124 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7125 local_scope
= 1; /* for function parameters */
7126 gfunc_prolog(&sym
->type
);
/* compile the function body */
7129 block(NULL
, NULL
, 0);
/* C99 5.1.2.2.3: main() falling off the end returns 0 implicitly */
7130 if (!(nocode_wanted
& 0x20000000)
7131 && ((func_vt
.t
& VT_BTYPE
) == VT_INT
)
7132 && !strcmp (funcname
, "main"))
7136 gen_assign_cast(&func_vt
);
7137 gfunc_return(&func_vt
);
/* finalize: record the emitted size and tear down function state */
7142 cur_text_section
->data_offset
= ind
;
7143 label_pop(&global_label_stack
, NULL
, 0);
7144 /* reset local stack */
7146 sym_pop(&local_stack
, NULL
, 0);
7147 /* end of function */
7148 /* patch symbol size */
7149 elfsym(sym
)->st_size
= ind
- func_ind
;
7150 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7151 /* It's better to crash than to generate wrong code */
7152 cur_text_section
= NULL
;
7153 funcname
= ""; /* for safety */
7154 func_vt
.t
= VT_VOID
; /* for safety */
7155 func_var
= 0; /* for safety */
7156 ind
= 0; /* for safety */
7157 nocode_wanted
= 0x80000000;
/* Emit code for every recorded static-inline function that was actually
   referenced, repeating until a fixpoint (generating one inline function
   may reference another).  NOTE(review): partial extraction; some
   original lines are missing. Comments only were added. */
7161 static void gen_inline_functions(TCCState
*s
)
7164 int inline_generated
, i
, ln
;
7165 struct InlineFunc
*fn
;
/* preserve the current file line number across macro replays */
7167 ln
= file
->line_num
;
7168 /* iterate while inline function are referenced */
7170 inline_generated
= 0;
7171 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7172 fn
= s
->inline_fns
[i
];
/* sym->c non-zero means the function symbol was referenced */
7174 if (sym
&& sym
->c
) {
7175 /* the function was used: generate its code and
7176 convert it to a normal function */
7179 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7180 sym
->type
.t
&= ~VT_INLINE
;
/* replay the saved token string of the function body */
7182 begin_macro(fn
->func_str
, 1);
7184 cur_text_section
= text_section
;
7188 inline_generated
= 1;
7191 } while (inline_generated
);
7192 file
->line_num
= ln
;
/* Release the token strings of inline functions that were never emitted,
   then free the inline function array itself.  NOTE(review): partial
   extraction; some original lines are missing. Comments only added. */
7195 ST_FUNC
void free_inline_functions(TCCState
*s
)
7198 /* free tokens of unused inline functions */
7199 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7200 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7202 tok_str_free(fn
->func_str
);
7204 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7207 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7208 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* NOTE(review): partial extraction of decl0(), the top-level declaration
   parser; many original lines (braces, loop control, error paths) are
   missing from this view.  Comments only were added; code untouched. */
7209 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
/* no base type could be parsed: handle stray ';', global asm blocks,
   and implicit-int K&R declarations */
7217 if (!parse_btype(&btype
, &ad
)) {
7218 if (is_for_loop_init
)
7220 /* skip redundant ';' if not in old parameter decl scope */
7221 if (tok
== ';' && l
!= VT_CMP
) {
7227 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7228 /* global asm block */
7232 if (tok
>= TOK_UIDENT
) {
7233 /* special test for old K&R protos without explicit int
7234 type. Only accepted when defining global data */
7238 expect("declaration");
/* base type parsed but no declarator follows: struct/union/enum tag
   declarations */
7243 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7244 int v
= btype
.ref
->v
;
7245 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7246 tcc_warning("unnamed struct/union that defines no instances");
7250 if (IS_ENUM(btype
.t
)) {
7255 while (1) { /* iterate thru each declaration */
7257 /* If the base type itself was an array type of unspecified
7258 size (like in 'typedef int arr[]; arr x = {1};') then
7259 we will overwrite the unknown size by the real one for
7260 this decl. We need to unshare the ref symbol holding
that size. */
7262 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7263 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7265 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed declarator */
7269 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7270 printf("type = '%s'\n", buf
);
7273 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7274 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
7275 tcc_error("function without file scope cannot be static");
7277 /* if old style function prototype, we accept a
declaration list */
7280 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7281 decl0(VT_CMP
, 0, sym
);
7284 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7285 ad
.asm_label
= asm_label_instr();
7286 /* parse one last attribute list, after asm label */
7287 parse_attribute(&ad
);
7292 #ifdef TCC_TARGET_PE
7293 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7294 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7295 tcc_error("cannot have dll linkage with static or typedef");
7296 if (ad
.a
.dllimport
) {
7297 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7300 type
.t
|= VT_EXTERN
;
/* function DEFINITION (body follows) */
7306 tcc_error("cannot use local functions");
7307 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7308 expect("function definition");
7310 /* reject abstract declarators in function definition
7311 make old style params without decl have int type */
7313 while ((sym
= sym
->next
) != NULL
) {
7314 if (!(sym
->v
& ~SYM_FIELD
))
7315 expect("identifier");
7316 if (sym
->type
.t
== VT_VOID
)
7317 sym
->type
= int_type
;
7320 /* XXX: cannot do better now: convert extern line to static inline */
7321 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7322 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7324 /* put function symbol */
7325 sym
= external_global_sym(v
, &type
, 0);
7326 type
.t
&= ~VT_EXTERN
;
7327 patch_storage(sym
, &ad
, &type
);
7329 /* static inline functions are just recorded as a kind
7330 of macro. Their code will be emitted at the end of
7331 the compilation unit only if they are used */
7332 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7333 (VT_INLINE
| VT_STATIC
)) {
7334 struct InlineFunc
*fn
;
7335 const char *filename
;
7337 filename
= file
? file
->filename
: "";
7338 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7339 strcpy(fn
->filename
, filename
);
7341 skip_or_save_block(&fn
->func_str
);
7342 dynarray_add(&tcc_state
->inline_fns
,
7343 &tcc_state
->nb_inline_fns
, fn
);
7345 /* compute text section */
7346 cur_text_section
= ad
.section
;
7347 if (!cur_text_section
)
7348 cur_text_section
= text_section
;
/* old-style (K&R) parameter declaration list: match each declared
   name against the function's parameter symbols */
7354 /* find parameter in function parameter list */
7355 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7356 if ((sym
->v
& ~SYM_FIELD
) == v
)
7358 tcc_error("declaration for parameter '%s' but no such parameter",
7359 get_tok_str(v
, NULL
));
7361 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7362 tcc_error("storage class specified for '%s'",
7363 get_tok_str(v
, NULL
));
7364 if (sym
->type
.t
!= VT_VOID
)
7365 tcc_error("redefinition of parameter '%s'",
7366 get_tok_str(v
, NULL
));
7367 convert_parameter_type(&type
);
7369 } else if (type
.t
& VT_TYPEDEF
) {
7370 /* save typedefed type */
7371 /* XXX: test storage specifiers ? */
7373 if (sym
&& sym
->sym_scope
== local_scope
) {
7374 if (!is_compatible_types(&sym
->type
, &type
)
7375 || !(sym
->type
.t
& VT_TYPEDEF
))
7376 tcc_error("incompatible redefinition of '%s'",
7377 get_tok_str(v
, NULL
));
7380 sym
= sym_push(v
, &type
, 0, 0);
7384 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7385 && !(type
.t
& VT_EXTERN
)) {
7386 tcc_error("declaration of void object");
/* ordinary object or function declaration */
7389 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7390 /* external function definition */
7391 /* specific case for func_call attribute */
7393 } else if (!(type
.t
& VT_ARRAY
)) {
7394 /* not lvalue if array */
7395 r
|= lvalue_type(type
.t
);
7397 has_init
= (tok
== '=');
7398 if (has_init
&& (type
.t
& VT_VLA
))
7399 tcc_error("variable length array cannot be initialized");
7400 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
7401 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7402 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7403 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7404 /* external variable or function */
7405 /* NOTE: as GCC, uninitialized global static
7406 arrays of null size are considered as
extern */
7408 type
.t
|= VT_EXTERN
;
7409 sym
= external_sym(v
, &type
, r
, &ad
);
7410 if (ad
.alias_target
) {
7413 alias_target
= sym_find(ad
.alias_target
);
7414 esym
= elfsym(alias_target
);
7416 tcc_error("unsupported forward __alias__ attribute");
7417 /* Local statics have a scope until now (for
7418 warnings), remove it here. */
7420 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7423 if (type
.t
& VT_STATIC
)
7429 else if (l
== VT_CONST
)
7430 /* uninitialized global variables may be overridden */
7431 type
.t
|= VT_EXTERN
;
/* allocate storage and parse the initializer, if any */
7432 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7436 if (is_for_loop_init
)
7449 static void decl(int l
)
7454 /* ------------------------------------------------------------------------- */