/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   rsym: return symbol
   anon_sym: anonymous symbol index
*/
30 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
32 ST_DATA Sym
*sym_free_first
;
33 ST_DATA
void **sym_pools
;
34 ST_DATA
int nb_sym_pools
;
36 ST_DATA Sym
*global_stack
;
37 ST_DATA Sym
*local_stack
;
38 ST_DATA Sym
*define_stack
;
39 ST_DATA Sym
*global_label_stack
;
40 ST_DATA Sym
*local_label_stack
;
41 static int local_scope
;
43 static int section_sym
;
45 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
46 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
47 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
49 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
51 ST_DATA
int const_wanted
; /* true if constant wanted */
52 ST_DATA
int nocode_wanted
; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
56 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
57 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
59 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
60 ST_DATA
const char *funcname
;
63 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
65 ST_DATA
struct switch_t
{
69 } **p
; int n
; /* list of case ranges */
70 int def_sym
; /* default symbol */
71 } *cur_switch
; /* current switch */
73 /* ------------------------------------------------------------------------- */
75 static void gen_cast(CType
*type
);
76 static void gen_cast_s(int t
);
77 static inline CType
*pointed_type(CType
*type
);
78 static int is_compatible_types(CType
*type1
, CType
*type2
);
79 static int parse_btype(CType
*type
, AttributeDef
*ad
);
80 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
81 static void parse_expr_type(CType
*type
);
82 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
83 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
84 static void block(int *bsym
, int *csym
, int is_expr
);
85 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
86 static void decl(int l
);
87 static int decl0(int l
, int is_for_loop_init
, Sym
*);
88 static void expr_eq(void);
89 static void vla_runtime_type_size(CType
*type
, int *a
);
90 static void vla_sp_restore(void);
91 static void vla_sp_restore_root(void);
92 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
93 static inline int64_t expr_const64(void);
94 static void vpush64(int ty
, unsigned long long v
);
95 static void vpush(CType
*type
);
96 static int gvtst(int inv
, int t
);
97 static void gen_inline_functions(TCCState
*s
);
98 static void skip_or_save_block(TokenString
**str
);
99 static void gv_dup(void);
101 ST_INLN
int is_float(int t
)
105 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
108 /* we use our own 'finite' function to avoid potential problems with
109 non standard math libs */
110 /* XXX: endianness dependent */
111 ST_FUNC
int ieee_finite(double d
)
114 memcpy(p
, &d
, sizeof(double));
115 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
118 /* compiling intel long double natively */
119 #if (defined __i386__ || defined __x86_64__) \
120 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
121 # define TCC_IS_NATIVE_387
124 ST_FUNC
void test_lvalue(void)
126 if (!(vtop
->r
& VT_LVAL
))
130 ST_FUNC
void check_vstack(void)
133 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
/* ------------------------------------------------------------------------- */
/* vstack debugging aid */
#if 0
/* Dump 'b' vstack entries starting 'a' below the top, labelled 'lbl'.
   NOTE(review): the #if 0 guard and dropped braces were reconstructed. */
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
            lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
151 /* ------------------------------------------------------------------------- */
152 /* start of translation unit info */
153 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
158 /* file info: full path + filename */
159 section_sym
= put_elf_sym(symtab_section
, 0, 0,
160 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
161 text_section
->sh_num
, NULL
);
162 getcwd(buf
, sizeof(buf
));
164 normalize_slashes(buf
);
166 pstrcat(buf
, sizeof(buf
), "/");
167 put_stabs_r(buf
, N_SO
, 0, 0,
168 text_section
->data_offset
, text_section
, section_sym
);
169 put_stabs_r(file
->filename
, N_SO
, 0, 0,
170 text_section
->data_offset
, text_section
, section_sym
);
175 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
176 symbols can be safely used */
177 put_elf_sym(symtab_section
, 0, 0,
178 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
179 SHN_ABS
, file
->filename
);
182 /* put end of translation unit info */
183 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
187 put_stabs_r(NULL
, N_SO
, 0, 0,
188 text_section
->data_offset
, text_section
, section_sym
);
192 /* generate line number info */
193 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
197 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
198 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
200 last_line_num
= file
->line_num
;
204 /* put function symbol */
205 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
213 /* XXX: we put here a dummy type */
214 snprintf(buf
, sizeof(buf
), "%s:%c1",
215 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
216 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
217 cur_text_section
, sym
->c
);
218 /* //gr gdb wants a line at the function */
219 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
225 /* put function size */
226 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
230 put_stabn(N_FUN
, 0, 0, size
);
233 /* ------------------------------------------------------------------------- */
234 ST_FUNC
int tccgen_compile(TCCState
*s1
)
236 cur_text_section
= NULL
;
238 anon_sym
= SYM_FIRST_ANOM
;
241 nocode_wanted
= 0x80000000;
243 /* define some often used types */
245 char_pointer_type
.t
= VT_BYTE
;
246 mk_pointer(&char_pointer_type
);
248 size_type
.t
= VT_INT
| VT_UNSIGNED
;
249 ptrdiff_type
.t
= VT_INT
;
251 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
252 ptrdiff_type
.t
= VT_LLONG
;
254 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
255 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
257 func_old_type
.t
= VT_FUNC
;
258 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
259 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
260 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
264 #ifdef TCC_TARGET_ARM
269 printf("%s: **** new file\n", file
->filename
);
272 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
275 gen_inline_functions(s1
);
277 /* end of translation unit info */
282 /* ------------------------------------------------------------------------- */
283 ST_FUNC ElfSym
*elfsym(Sym
*s
)
287 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
290 /* apply storage attributes to Elf symbol */
291 ST_FUNC
void update_storage(Sym
*sym
)
294 int sym_bind
, old_sym_bind
;
300 if (sym
->a
.visibility
)
301 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
304 if (sym
->type
.t
& VT_STATIC
)
305 sym_bind
= STB_LOCAL
;
306 else if (sym
->a
.weak
)
309 sym_bind
= STB_GLOBAL
;
310 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
311 if (sym_bind
!= old_sym_bind
) {
312 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
316 if (sym
->a
.dllimport
)
317 esym
->st_other
|= ST_PE_IMPORT
;
318 if (sym
->a
.dllexport
)
319 esym
->st_other
|= ST_PE_EXPORT
;
323 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
324 get_tok_str(sym
->v
, NULL
),
325 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
333 /* ------------------------------------------------------------------------- */
334 /* update sym->c so that it points to an external symbol in section
335 'section' with value 'value' */
337 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
338 addr_t value
, unsigned long size
,
339 int can_add_underscore
)
341 int sym_type
, sym_bind
, info
, other
, t
;
345 #ifdef CONFIG_TCC_BCHECK
350 name
= get_tok_str(sym
->v
, NULL
);
351 #ifdef CONFIG_TCC_BCHECK
352 if (tcc_state
->do_bounds_check
) {
353 /* XXX: avoid doing that for statics ? */
354 /* if bound checking is activated, we change some function
355 names by adding the "__bound" prefix */
358 /* XXX: we rely only on malloc hooks */
371 strcpy(buf
, "__bound_");
379 if ((t
& VT_BTYPE
) == VT_FUNC
) {
381 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
382 sym_type
= STT_NOTYPE
;
384 sym_type
= STT_OBJECT
;
387 sym_bind
= STB_LOCAL
;
389 sym_bind
= STB_GLOBAL
;
392 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
393 Sym
*ref
= sym
->type
.ref
;
394 if (ref
->a
.nodecorate
) {
395 can_add_underscore
= 0;
397 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
398 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
400 other
|= ST_PE_STDCALL
;
401 can_add_underscore
= 0;
405 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
407 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
411 name
= get_tok_str(sym
->asm_label
, NULL
);
412 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
413 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
416 esym
->st_value
= value
;
417 esym
->st_size
= size
;
418 esym
->st_shndx
= sh_num
;
423 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
424 addr_t value
, unsigned long size
)
426 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
427 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
430 /* add a new relocation entry to symbol 'sym' in section 's' */
431 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
436 if (nocode_wanted
&& s
== cur_text_section
)
441 put_extern_sym(sym
, NULL
, 0, 0);
445 /* now we can add ELF relocation info */
446 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
450 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
452 greloca(s
, sym
, offset
, type
, 0);
456 /* ------------------------------------------------------------------------- */
457 /* symbol allocator */
458 static Sym
*__sym_malloc(void)
460 Sym
*sym_pool
, *sym
, *last_sym
;
463 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
464 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
466 last_sym
= sym_free_first
;
468 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
469 sym
->next
= last_sym
;
473 sym_free_first
= last_sym
;
477 static inline Sym
*sym_malloc(void)
481 sym
= sym_free_first
;
483 sym
= __sym_malloc();
484 sym_free_first
= sym
->next
;
487 sym
= tcc_malloc(sizeof(Sym
));
492 ST_INLN
void sym_free(Sym
*sym
)
495 sym
->next
= sym_free_first
;
496 sym_free_first
= sym
;
502 /* push, without hashing */
503 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
508 memset(s
, 0, sizeof *s
);
518 /* find a symbol and return its associated structure. 's' is the top
519 of the symbol stack */
520 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
532 /* structure lookup */
533 ST_INLN Sym
*struct_find(int v
)
536 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
538 return table_ident
[v
]->sym_struct
;
541 /* find an identifier */
542 ST_INLN Sym
*sym_find(int v
)
545 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
547 return table_ident
[v
]->sym_identifier
;
550 /* push a given symbol on the symbol stack */
551 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
560 s
= sym_push2(ps
, v
, type
->t
, c
);
561 s
->type
.ref
= type
->ref
;
563 /* don't record fields or anonymous symbols */
565 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
566 /* record symbol in token array */
567 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
569 ps
= &ts
->sym_struct
;
571 ps
= &ts
->sym_identifier
;
574 s
->sym_scope
= local_scope
;
575 if (s
->prev_tok
&& s
->prev_tok
->sym_scope
== s
->sym_scope
)
576 tcc_error("redeclaration of '%s'",
577 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
582 /* push a global identifier */
583 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
586 s
= sym_push2(&global_stack
, v
, t
, c
);
587 /* don't record anonymous symbol */
588 if (v
< SYM_FIRST_ANOM
) {
589 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
590 /* modify the top most local identifier, so that
591 sym_identifier will point to 's' when popped */
592 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
593 ps
= &(*ps
)->prev_tok
;
600 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
601 pop them yet from the list, but do remove them from the token array. */
602 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
612 /* remove symbol in token array */
614 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
615 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
617 ps
= &ts
->sym_struct
;
619 ps
= &ts
->sym_identifier
;
630 /* ------------------------------------------------------------------------- */
632 static void vsetc(CType
*type
, int r
, CValue
*vc
)
636 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
637 tcc_error("memory full (vstack)");
638 /* cannot let cpu flags if other instruction are generated. Also
639 avoid leaving VT_JMP anywhere except on the top of the stack
640 because it would complicate the code generator.
642 Don't do this when nocode_wanted. vtop might come from
643 !nocode_wanted regions (see 88_codeopt.c) and transforming
644 it to a register without actually generating code is wrong
645 as their value might still be used for real. All values
646 we push under nocode_wanted will eventually be popped
647 again, so that the VT_CMP/VT_JMP value will be in vtop
648 when code is unsuppressed again.
650 Same logic below in vswap(); */
651 if (vtop
>= vstack
&& !nocode_wanted
) {
652 v
= vtop
->r
& VT_VALMASK
;
653 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
665 ST_FUNC
void vswap(void)
668 /* cannot vswap cpu flags. See comment at vsetc() above */
669 if (vtop
>= vstack
&& !nocode_wanted
) {
670 int v
= vtop
->r
& VT_VALMASK
;
671 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
679 /* pop stack value */
680 ST_FUNC
void vpop(void)
683 v
= vtop
->r
& VT_VALMASK
;
684 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
685 /* for x86, we need to pop the FP stack */
687 o(0xd8dd); /* fstp %st(0) */
690 if (v
== VT_JMP
|| v
== VT_JMPI
) {
691 /* need to put correct jump if && or || without test */
697 /* push constant of type "type" with useless value */
698 ST_FUNC
void vpush(CType
*type
)
700 vset(type
, VT_CONST
, 0);
703 /* push integer constant */
704 ST_FUNC
void vpushi(int v
)
708 vsetc(&int_type
, VT_CONST
, &cval
);
711 /* push a pointer sized constant */
712 static void vpushs(addr_t v
)
716 vsetc(&size_type
, VT_CONST
, &cval
);
719 /* push arbitrary 64bit constant */
720 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
727 vsetc(&ctype
, VT_CONST
, &cval
);
730 /* push long long constant */
731 static inline void vpushll(long long v
)
733 vpush64(VT_LLONG
, v
);
736 ST_FUNC
void vset(CType
*type
, int r
, int v
)
741 vsetc(type
, r
, &cval
);
744 static void vseti(int r
, int v
)
752 ST_FUNC
void vpushv(SValue
*v
)
754 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
755 tcc_error("memory full (vstack)");
760 static void vdup(void)
765 /* rotate n first stack elements to the bottom
766 I1 ... In -> I2 ... In I1 [top is right]
768 ST_FUNC
void vrotb(int n
)
779 /* rotate the n elements before entry e towards the top
780 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
782 ST_FUNC
void vrote(SValue
*e
, int n
)
788 for(i
= 0;i
< n
- 1; i
++)
793 /* rotate n first stack elements to the top
794 I1 ... In -> In I1 ... I(n-1) [top is right]
796 ST_FUNC
void vrott(int n
)
801 /* push a symbol value of TYPE */
802 static inline void vpushsym(CType
*type
, Sym
*sym
)
806 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
810 /* Return a static symbol pointing to a section */
811 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
817 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
818 sym
->type
.ref
= type
->ref
;
819 sym
->r
= VT_CONST
| VT_SYM
;
820 put_extern_sym(sym
, sec
, offset
, size
);
824 /* push a reference to a section offset by adding a dummy symbol */
825 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
827 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
830 /* define a new external reference to a symbol 'v' of type 'u' */
831 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
837 /* push forward reference */
838 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
839 s
->type
.ref
= type
->ref
;
840 s
->r
= r
| VT_CONST
| VT_SYM
;
841 } else if (IS_ASM_SYM(s
)) {
842 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
843 s
->type
.ref
= type
->ref
;
849 /* Merge some type attributes. */
850 static void patch_type(Sym
*sym
, CType
*type
)
852 if (!(type
->t
& VT_EXTERN
)) {
853 if (!(sym
->type
.t
& VT_EXTERN
))
854 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
855 sym
->type
.t
&= ~VT_EXTERN
;
858 if (IS_ASM_SYM(sym
)) {
859 /* stay static if both are static */
860 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
861 sym
->type
.ref
= type
->ref
;
864 if (!is_compatible_types(&sym
->type
, type
)) {
865 tcc_error("incompatible types for redefinition of '%s'",
866 get_tok_str(sym
->v
, NULL
));
868 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
869 int static_proto
= sym
->type
.t
& VT_STATIC
;
870 /* warn if static follows non-static function declaration */
871 if ((type
->t
& VT_STATIC
) && !static_proto
&& !(type
->t
& VT_INLINE
))
872 tcc_warning("static storage ignored for redefinition of '%s'",
873 get_tok_str(sym
->v
, NULL
));
875 if (0 == (type
->t
& VT_EXTERN
)) {
876 /* put complete type, use static from prototype */
877 sym
->type
.t
= (type
->t
& ~VT_STATIC
) | static_proto
;
878 if (type
->t
& VT_INLINE
)
879 sym
->type
.t
= type
->t
;
880 sym
->type
.ref
= type
->ref
;
884 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
885 /* set array size if it was omitted in extern declaration */
886 if (sym
->type
.ref
->c
< 0)
887 sym
->type
.ref
->c
= type
->ref
->c
;
888 else if (sym
->type
.ref
->c
!= type
->ref
->c
)
889 tcc_error("conflicting type for '%s'", get_tok_str(sym
->v
, NULL
));
891 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
892 tcc_warning("storage mismatch for redefinition of '%s'",
893 get_tok_str(sym
->v
, NULL
));
898 /* Merge some storage attributes. */
899 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
902 patch_type(sym
, type
);
905 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
906 tcc_error("incompatible dll linkage for redefinition of '%s'",
907 get_tok_str(sym
->v
, NULL
));
908 sym
->a
.dllexport
|= ad
->a
.dllexport
;
910 sym
->a
.weak
|= ad
->a
.weak
;
911 if (ad
->a
.visibility
) {
912 int vis
= sym
->a
.visibility
;
913 int vis2
= ad
->a
.visibility
;
914 if (vis
== STV_DEFAULT
)
916 else if (vis2
!= STV_DEFAULT
)
917 vis
= (vis
< vis2
) ? vis
: vis2
;
918 sym
->a
.visibility
= vis
;
921 sym
->a
.aligned
= ad
->a
.aligned
;
923 sym
->asm_label
= ad
->asm_label
;
927 /* define a new external reference to a symbol 'v' */
928 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
933 /* push forward reference */
934 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
935 s
->type
.t
|= VT_EXTERN
;
939 if (s
->type
.ref
== func_old_type
.ref
) {
940 s
->type
.ref
= type
->ref
;
941 s
->r
= r
| VT_CONST
| VT_SYM
;
942 s
->type
.t
|= VT_EXTERN
;
944 patch_storage(s
, ad
, type
);
949 /* push a reference to global symbol v */
950 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
952 vpushsym(type
, external_global_sym(v
, type
, 0));
955 /* save registers up to (vtop - n) stack entry */
956 ST_FUNC
void save_regs(int n
)
959 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
963 /* save r to the memory stack, and mark it as being free */
964 ST_FUNC
void save_reg(int r
)
966 save_reg_upstack(r
, 0);
969 /* save r to the memory stack, and mark it as being free,
970 if seen up to (vtop - n) stack entry */
971 ST_FUNC
void save_reg_upstack(int r
, int n
)
973 int l
, saved
, size
, align
;
977 if ((r
&= VT_VALMASK
) >= VT_CONST
)
982 /* modify all stack values */
985 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
986 if ((p
->r
& VT_VALMASK
) == r
||
987 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
988 /* must save value on stack if not already done */
990 /* NOTE: must reload 'r' because r might be equal to r2 */
991 r
= p
->r
& VT_VALMASK
;
992 /* store register in the stack */
994 if ((p
->r
& VT_LVAL
) ||
995 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
997 type
= &char_pointer_type
;
1001 size
= type_size(type
, &align
);
1002 loc
= (loc
- size
) & -align
;
1003 sv
.type
.t
= type
->t
;
1004 sv
.r
= VT_LOCAL
| VT_LVAL
;
1007 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1008 /* x86 specific: need to pop fp register ST0 if saved */
1009 if (r
== TREG_ST0
) {
1010 o(0xd8dd); /* fstp %st(0) */
1014 /* special long long case */
1015 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
1023 /* mark that stack entry as being saved on the stack */
1024 if (p
->r
& VT_LVAL
) {
1025 /* also clear the bounded flag because the
1026 relocation address of the function was stored in
1028 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1030 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1038 #ifdef TCC_TARGET_ARM
1039 /* find a register of class 'rc2' with at most one reference on stack.
1040 * If none, call get_reg(rc) */
1041 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1046 for(r
=0;r
<NB_REGS
;r
++) {
1047 if (reg_classes
[r
] & rc2
) {
1050 for(p
= vstack
; p
<= vtop
; p
++) {
1051 if ((p
->r
& VT_VALMASK
) == r
||
1052 (p
->r2
& VT_VALMASK
) == r
)
1063 /* find a free register of class 'rc'. If none, save one register */
1064 ST_FUNC
int get_reg(int rc
)
1069 /* find a free register */
1070 for(r
=0;r
<NB_REGS
;r
++) {
1071 if (reg_classes
[r
] & rc
) {
1074 for(p
=vstack
;p
<=vtop
;p
++) {
1075 if ((p
->r
& VT_VALMASK
) == r
||
1076 (p
->r2
& VT_VALMASK
) == r
)
1084 /* no register left : free the first one on the stack (VERY
1085 IMPORTANT to start from the bottom to ensure that we don't
1086 spill registers used in gen_opi()) */
1087 for(p
=vstack
;p
<=vtop
;p
++) {
1088 /* look at second register (if long long) */
1089 r
= p
->r2
& VT_VALMASK
;
1090 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1092 r
= p
->r
& VT_VALMASK
;
1093 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1099 /* Should never comes here */
1103 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1105 static void move_reg(int r
, int s
, int t
)
1119 /* get address of vtop (vtop MUST BE an lvalue) */
1120 ST_FUNC
void gaddrof(void)
1122 vtop
->r
&= ~VT_LVAL
;
1123 /* tricky: if saved lvalue, then we can go back to lvalue */
1124 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1125 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1130 #ifdef CONFIG_TCC_BCHECK
1131 /* generate lvalue bound code */
1132 static void gbound(void)
1137 vtop
->r
&= ~VT_MUSTBOUND
;
1138 /* if lvalue, then use checking code before dereferencing */
1139 if (vtop
->r
& VT_LVAL
) {
1140 /* if not VT_BOUNDED value, then make one */
1141 if (!(vtop
->r
& VT_BOUNDED
)) {
1142 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1143 /* must save type because we must set it to int to get pointer */
1145 vtop
->type
.t
= VT_PTR
;
1148 gen_bounded_ptr_add();
1149 vtop
->r
|= lval_type
;
1152 /* then check for dereferencing */
1153 gen_bounded_ptr_deref();
1158 static void incr_bf_adr(int o
)
1160 vtop
->type
= char_pointer_type
;
1164 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1165 | (VT_BYTE
|VT_UNSIGNED
);
1166 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1167 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1170 /* single-byte load mode for packed or otherwise unaligned bitfields */
1171 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1174 save_reg_upstack(vtop
->r
, 1);
1175 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1176 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1185 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1187 vpushi((1 << n
) - 1), gen_op('&');
1190 vpushi(bits
), gen_op(TOK_SHL
);
1193 bits
+= n
, bit_size
-= n
, o
= 1;
1196 if (!(type
->t
& VT_UNSIGNED
)) {
1197 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1198 vpushi(n
), gen_op(TOK_SHL
);
1199 vpushi(n
), gen_op(TOK_SAR
);
1203 /* single-byte store mode for packed or otherwise unaligned bitfields */
1204 static void store_packed_bf(int bit_pos
, int bit_size
)
1206 int bits
, n
, o
, m
, c
;
1208 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1210 save_reg_upstack(vtop
->r
, 1);
1211 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1213 incr_bf_adr(o
); // X B
1215 c
? vdup() : gv_dup(); // B V X
1218 vpushi(bits
), gen_op(TOK_SHR
);
1220 vpushi(bit_pos
), gen_op(TOK_SHL
);
1225 m
= ((1 << n
) - 1) << bit_pos
;
1226 vpushi(m
), gen_op('&'); // X B V1
1227 vpushv(vtop
-1); // X B V1 B
1228 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1229 gen_op('&'); // X B V1 B1
1230 gen_op('|'); // X B V2
1232 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1233 vstore(), vpop(); // X B
1234 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1239 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1242 if (0 == sv
->type
.ref
)
1244 t
= sv
->type
.ref
->auxtype
;
1245 if (t
!= -1 && t
!= VT_STRUCT
) {
1246 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1247 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1252 /* store vtop a register belonging to class 'rc'. lvalues are
1253 converted to values. Cannot be used if cannot be converted to
1254 register value (such as structures). */
1255 ST_FUNC
int gv(int rc
)
1257 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1259 /* NOTE: get_reg can modify vstack[] */
1260 if (vtop
->type
.t
& VT_BITFIELD
) {
1263 bit_pos
= BIT_POS(vtop
->type
.t
);
1264 bit_size
= BIT_SIZE(vtop
->type
.t
);
1265 /* remove bit field info to avoid loops */
1266 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1269 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1270 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1271 type
.t
|= VT_UNSIGNED
;
1273 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1275 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1280 if (r
== VT_STRUCT
) {
1281 load_packed_bf(&type
, bit_pos
, bit_size
);
1283 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1284 /* cast to int to propagate signedness in following ops */
1286 /* generate shifts */
1287 vpushi(bits
- (bit_pos
+ bit_size
));
1289 vpushi(bits
- bit_size
);
1290 /* NOTE: transformed to SHR if unsigned */
1295 if (is_float(vtop
->type
.t
) &&
1296 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1297 unsigned long offset
;
1298 /* CPUs usually cannot use float constants, so we store them
1299 generically in data segment */
1300 size
= type_size(&vtop
->type
, &align
);
1302 size
= 0, align
= 1;
1303 offset
= section_add(data_section
, size
, align
);
1304 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1306 init_putv(&vtop
->type
, data_section
, offset
);
1309 #ifdef CONFIG_TCC_BCHECK
1310 if (vtop
->r
& VT_MUSTBOUND
)
1314 r
= vtop
->r
& VT_VALMASK
;
1315 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1316 #ifndef TCC_TARGET_ARM64
1319 #ifdef TCC_TARGET_X86_64
1320 else if (rc
== RC_FRET
)
1324 /* need to reload if:
1326 - lvalue (need to dereference pointer)
1327 - already a register, but not in the right class */
1329 || (vtop
->r
& VT_LVAL
)
1330 || !(reg_classes
[r
] & rc
)
1332 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1333 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1335 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1341 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1342 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1344 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1345 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1346 unsigned long long ll
;
1348 int r2
, original_type
;
1349 original_type
= vtop
->type
.t
;
1350 /* two register type load : expand to two words
1353 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1356 vtop
->c
.i
= ll
; /* first word */
1358 vtop
->r
= r
; /* save register value */
1359 vpushi(ll
>> 32); /* second word */
1362 if (vtop
->r
& VT_LVAL
) {
1363 /* We do not want to modifier the long long
1364 pointer here, so the safest (and less
1365 efficient) is to save all the other registers
1366 in the stack. XXX: totally inefficient. */
1370 /* lvalue_save: save only if used further down the stack */
1371 save_reg_upstack(vtop
->r
, 1);
1373 /* load from memory */
1374 vtop
->type
.t
= load_type
;
1377 vtop
[-1].r
= r
; /* save register value */
1378 /* increment pointer to get second word */
1379 vtop
->type
.t
= addr_type
;
1384 vtop
->type
.t
= load_type
;
1386 /* move registers */
1389 vtop
[-1].r
= r
; /* save register value */
1390 vtop
->r
= vtop
[-1].r2
;
1392 /* Allocate second register. Here we rely on the fact that
1393 get_reg() tries first to free r2 of an SValue. */
1397 /* write second register */
1399 vtop
->type
.t
= original_type
;
1400 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1402 /* lvalue of scalar type : need to use lvalue type
1403 because of possible cast */
1406 /* compute memory access type */
1407 if (vtop
->r
& VT_LVAL_BYTE
)
1409 else if (vtop
->r
& VT_LVAL_SHORT
)
1411 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1415 /* restore wanted type */
1418 /* one register type load */
1423 #ifdef TCC_TARGET_C67
1424 /* uses register pairs for doubles */
1425 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1432 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1433 ST_FUNC
void gv2(int rc1
, int rc2
)
1437 /* generate more generic register first. But VT_JMP or VT_CMP
1438 values must be generated first in all cases to avoid possible
1440 v
= vtop
[0].r
& VT_VALMASK
;
1441 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1446 /* test if reload is needed for first register */
1447 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1457 /* test if reload is needed for first register */
1458 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1464 #ifndef TCC_TARGET_ARM64
1465 /* wrapper around RC_FRET to return a register by type */
1466 static int rc_fret(int t
)
1468 #ifdef TCC_TARGET_X86_64
1469 if (t
== VT_LDOUBLE
) {
1477 /* wrapper around REG_FRET to return a register by type */
1478 static int reg_fret(int t
)
1480 #ifdef TCC_TARGET_X86_64
1481 if (t
== VT_LDOUBLE
) {
1489 /* expand 64bit on stack in two ints */
1490 static void lexpand(void)
1493 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1494 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1495 if (v
== VT_CONST
) {
1498 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1504 vtop
[0].r
= vtop
[-1].r2
;
1505 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1507 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1511 #ifdef TCC_TARGET_ARM
1512 /* expand long long on stack */
1513 ST_FUNC
void lexpand_nr(void)
1517 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1519 vtop
->r2
= VT_CONST
;
1520 vtop
->type
.t
= VT_INT
| u
;
1521 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1522 if (v
== VT_CONST
) {
1523 vtop
[-1].c
.i
= vtop
->c
.i
;
1524 vtop
->c
.i
= vtop
->c
.i
>> 32;
1526 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1528 vtop
->r
= vtop
[-1].r
;
1529 } else if (v
> VT_CONST
) {
1533 vtop
->r
= vtop
[-1].r2
;
1534 vtop
[-1].r2
= VT_CONST
;
1535 vtop
[-1].type
.t
= VT_INT
| u
;
1540 /* build a long long from two ints */
1541 static void lbuild(int t
)
1543 gv2(RC_INT
, RC_INT
);
1544 vtop
[-1].r2
= vtop
[0].r
;
1545 vtop
[-1].type
.t
= t
;
1550 /* convert stack entry to register and duplicate its value in another
1552 static void gv_dup(void)
1559 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1560 if (t
& VT_BITFIELD
) {
1570 /* stack: H L L1 H1 */
1580 /* duplicate value */
1585 #ifdef TCC_TARGET_X86_64
1586 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1596 load(r1
, &sv
); /* move r to r1 */
1598 /* duplicates value */
1604 /* Generate value test
1606 * Generate a test for any value (jump, comparison and integers) */
1607 ST_FUNC
int gvtst(int inv
, int t
)
1609 int v
= vtop
->r
& VT_VALMASK
;
1610 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1614 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1615 /* constant jmp optimization */
1616 if ((vtop
->c
.i
!= 0) != inv
)
1621 return gtst(inv
, t
);
1625 /* generate CPU independent (unsigned) long long operations */
1626 static void gen_opl(int op
)
1628 int t
, a
, b
, op1
, c
, i
;
1630 unsigned short reg_iret
= REG_IRET
;
1631 unsigned short reg_lret
= REG_LRET
;
1637 func
= TOK___divdi3
;
1640 func
= TOK___udivdi3
;
1643 func
= TOK___moddi3
;
1646 func
= TOK___umoddi3
;
1653 /* call generic long long function */
1654 vpush_global_sym(&func_old_type
, func
);
1659 vtop
->r2
= reg_lret
;
1667 //pv("gen_opl A",0,2);
1673 /* stack: L1 H1 L2 H2 */
1678 vtop
[-2] = vtop
[-3];
1681 /* stack: H1 H2 L1 L2 */
1682 //pv("gen_opl B",0,4);
1688 /* stack: H1 H2 L1 L2 ML MH */
1691 /* stack: ML MH H1 H2 L1 L2 */
1695 /* stack: ML MH H1 L2 H2 L1 */
1700 /* stack: ML MH M1 M2 */
1703 } else if (op
== '+' || op
== '-') {
1704 /* XXX: add non carry method too (for MIPS or alpha) */
1710 /* stack: H1 H2 (L1 op L2) */
1713 gen_op(op1
+ 1); /* TOK_xxxC2 */
1716 /* stack: H1 H2 (L1 op L2) */
1719 /* stack: (L1 op L2) H1 H2 */
1721 /* stack: (L1 op L2) (H1 op H2) */
1729 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1730 t
= vtop
[-1].type
.t
;
1734 /* stack: L H shift */
1736 /* constant: simpler */
1737 /* NOTE: all comments are for SHL. the other cases are
1738 done by swapping words */
1749 if (op
!= TOK_SAR
) {
1782 /* XXX: should provide a faster fallback on x86 ? */
1785 func
= TOK___ashrdi3
;
1788 func
= TOK___lshrdi3
;
1791 func
= TOK___ashldi3
;
1797 /* compare operations */
1803 /* stack: L1 H1 L2 H2 */
1805 vtop
[-1] = vtop
[-2];
1807 /* stack: L1 L2 H1 H2 */
1810 /* when values are equal, we need to compare low words. since
1811 the jump is inverted, we invert the test too. */
1814 else if (op1
== TOK_GT
)
1816 else if (op1
== TOK_ULT
)
1818 else if (op1
== TOK_UGT
)
1828 /* generate non equal test */
1834 /* compare low. Always unsigned */
1838 else if (op1
== TOK_LE
)
1840 else if (op1
== TOK_GT
)
1842 else if (op1
== TOK_GE
)
1853 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
1855 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
1856 return (a
^ b
) >> 63 ? -x
: x
;
1859 static int gen_opic_lt(uint64_t a
, uint64_t b
)
1861 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
1864 /* handle integer constant optimizations and various machine
1866 static void gen_opic(int op
)
1868 SValue
*v1
= vtop
- 1;
1870 int t1
= v1
->type
.t
& VT_BTYPE
;
1871 int t2
= v2
->type
.t
& VT_BTYPE
;
1872 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1873 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1874 uint64_t l1
= c1
? v1
->c
.i
: 0;
1875 uint64_t l2
= c2
? v2
->c
.i
: 0;
1876 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1878 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1879 l1
= ((uint32_t)l1
|
1880 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1881 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1882 l2
= ((uint32_t)l2
|
1883 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1887 case '+': l1
+= l2
; break;
1888 case '-': l1
-= l2
; break;
1889 case '&': l1
&= l2
; break;
1890 case '^': l1
^= l2
; break;
1891 case '|': l1
|= l2
; break;
1892 case '*': l1
*= l2
; break;
1899 /* if division by zero, generate explicit division */
1902 tcc_error("division by zero in constant");
1906 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1907 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1908 case TOK_UDIV
: l1
= l1
/ l2
; break;
1909 case TOK_UMOD
: l1
= l1
% l2
; break;
1912 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1913 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1915 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1918 case TOK_ULT
: l1
= l1
< l2
; break;
1919 case TOK_UGE
: l1
= l1
>= l2
; break;
1920 case TOK_EQ
: l1
= l1
== l2
; break;
1921 case TOK_NE
: l1
= l1
!= l2
; break;
1922 case TOK_ULE
: l1
= l1
<= l2
; break;
1923 case TOK_UGT
: l1
= l1
> l2
; break;
1924 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1925 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1926 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1927 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1929 case TOK_LAND
: l1
= l1
&& l2
; break;
1930 case TOK_LOR
: l1
= l1
|| l2
; break;
1934 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1935 l1
= ((uint32_t)l1
|
1936 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1940 /* if commutative ops, put c2 as constant */
1941 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1942 op
== '|' || op
== '*')) {
1944 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1945 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1947 if (!const_wanted
&&
1949 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1950 (l1
== -1 && op
== TOK_SAR
))) {
1951 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1953 } else if (!const_wanted
&&
1954 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1956 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
1957 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1958 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1963 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1966 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1967 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1970 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
1971 /* filter out NOP operations like x*1, x-0, x&-1... */
1973 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1974 /* try to use shifts instead of muls or divs */
1975 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1984 else if (op
== TOK_PDIV
)
1990 } else if (c2
&& (op
== '+' || op
== '-') &&
1991 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1992 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1993 /* symbol + constant case */
1997 /* The backends can't always deal with addends to symbols
1998 larger than +-1<<31. Don't construct such. */
2005 /* call low level op generator */
2006 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2007 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2015 /* generate a floating point operation with constant propagation */
2016 static void gen_opif(int op
)
2020 #if defined _MSC_VER && defined _AMD64_
2021 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2028 /* currently, we cannot do computations with forward symbols */
2029 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2030 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2032 if (v1
->type
.t
== VT_FLOAT
) {
2035 } else if (v1
->type
.t
== VT_DOUBLE
) {
2043 /* NOTE: we only do constant propagation if finite number (not
2044 NaN or infinity) (ANSI spec) */
2045 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2049 case '+': f1
+= f2
; break;
2050 case '-': f1
-= f2
; break;
2051 case '*': f1
*= f2
; break;
2054 /* If not in initializer we need to potentially generate
2055 FP exceptions at runtime, otherwise we want to fold. */
2061 /* XXX: also handles tests ? */
2065 /* XXX: overflow test ? */
2066 if (v1
->type
.t
== VT_FLOAT
) {
2068 } else if (v1
->type
.t
== VT_DOUBLE
) {
2080 static int pointed_size(CType
*type
)
2083 return type_size(pointed_type(type
), &align
);
2086 static void vla_runtime_pointed_size(CType
*type
)
2089 vla_runtime_type_size(pointed_type(type
), &align
);
2092 static inline int is_null_pointer(SValue
*p
)
2094 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2096 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2097 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2098 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2099 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
2102 static inline int is_integer_btype(int bt
)
2104 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2105 bt
== VT_INT
|| bt
== VT_LLONG
);
2108 /* check types for comparison or subtraction of pointers */
2109 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2111 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2114 /* null pointers are accepted for all comparisons as gcc */
2115 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2119 bt1
= type1
->t
& VT_BTYPE
;
2120 bt2
= type2
->t
& VT_BTYPE
;
2121 /* accept comparison between pointer and integer with a warning */
2122 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2123 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2124 tcc_warning("comparison between pointer and integer");
2128 /* both must be pointers or implicit function pointers */
2129 if (bt1
== VT_PTR
) {
2130 type1
= pointed_type(type1
);
2131 } else if (bt1
!= VT_FUNC
)
2132 goto invalid_operands
;
2134 if (bt2
== VT_PTR
) {
2135 type2
= pointed_type(type2
);
2136 } else if (bt2
!= VT_FUNC
) {
2138 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2140 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2141 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2145 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2146 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2147 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2148 /* gcc-like error if '-' is used */
2150 goto invalid_operands
;
2152 tcc_warning("comparison of distinct pointer types lacks a cast");
2156 /* generic gen_op: handles types problems */
2157 ST_FUNC
void gen_op(int op
)
2159 int u
, t1
, t2
, bt1
, bt2
, t
;
2163 t1
= vtop
[-1].type
.t
;
2164 t2
= vtop
[0].type
.t
;
2165 bt1
= t1
& VT_BTYPE
;
2166 bt2
= t2
& VT_BTYPE
;
2168 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2169 tcc_error("operation on a struct");
2170 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2171 if (bt2
== VT_FUNC
) {
2172 mk_pointer(&vtop
->type
);
2175 if (bt1
== VT_FUNC
) {
2177 mk_pointer(&vtop
->type
);
2182 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2183 /* at least one operand is a pointer */
2184 /* relational op: must be both pointers */
2185 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2186 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2187 /* pointers are handled are unsigned */
2189 t
= VT_LLONG
| VT_UNSIGNED
;
2191 t
= VT_INT
| VT_UNSIGNED
;
2195 /* if both pointers, then it must be the '-' op */
2196 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2198 tcc_error("cannot use pointers here");
2199 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2200 /* XXX: check that types are compatible */
2201 if (vtop
[-1].type
.t
& VT_VLA
) {
2202 vla_runtime_pointed_size(&vtop
[-1].type
);
2204 vpushi(pointed_size(&vtop
[-1].type
));
2208 vtop
->type
.t
= ptrdiff_type
.t
;
2212 /* exactly one pointer : must be '+' or '-'. */
2213 if (op
!= '-' && op
!= '+')
2214 tcc_error("cannot use pointers here");
2215 /* Put pointer as first operand */
2216 if (bt2
== VT_PTR
) {
2218 t
= t1
, t1
= t2
, t2
= t
;
2221 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2222 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2225 type1
= vtop
[-1].type
;
2226 type1
.t
&= ~VT_ARRAY
;
2227 if (vtop
[-1].type
.t
& VT_VLA
)
2228 vla_runtime_pointed_size(&vtop
[-1].type
);
2230 u
= pointed_size(&vtop
[-1].type
);
2232 tcc_error("unknown array element size");
2236 /* XXX: cast to int ? (long long case) */
2242 /* #ifdef CONFIG_TCC_BCHECK
2243 The main reason to removing this code:
2250 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2251 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2253 When this code is on. then the output looks like
2255 v+(i-j) = 0xbff84000
2257 /* if evaluating constant expression, no code should be
2258 generated, so no bound check */
2259 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2260 /* if bounded pointers, we generate a special code to
2267 gen_bounded_ptr_add();
2273 /* put again type if gen_opic() swaped operands */
2276 } else if (is_float(bt1
) || is_float(bt2
)) {
2277 /* compute bigger type and do implicit casts */
2278 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2280 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2285 /* floats can only be used for a few operations */
2286 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2287 (op
< TOK_ULT
|| op
> TOK_GT
))
2288 tcc_error("invalid operands for binary operation");
2290 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2291 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2292 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2294 t
|= (VT_LONG
& t1
);
2296 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2297 /* cast to biggest op */
2298 t
= VT_LLONG
| VT_LONG
;
2299 if (bt1
== VT_LLONG
)
2301 if (bt2
== VT_LLONG
)
2303 /* convert to unsigned if it does not fit in a long long */
2304 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2305 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2309 /* integer operations */
2310 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2311 /* convert to unsigned if it does not fit in an integer */
2312 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2313 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2316 /* XXX: currently, some unsigned operations are explicit, so
2317 we modify them here */
2318 if (t
& VT_UNSIGNED
) {
2325 else if (op
== TOK_LT
)
2327 else if (op
== TOK_GT
)
2329 else if (op
== TOK_LE
)
2331 else if (op
== TOK_GE
)
2339 /* special case for shifts and long long: we keep the shift as
2341 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2348 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2349 /* relational op: the result is an int */
2350 vtop
->type
.t
= VT_INT
;
2355 // Make sure that we have converted to an rvalue:
2356 if (vtop
->r
& VT_LVAL
)
2357 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2360 #ifndef TCC_TARGET_ARM
2361 /* generic itof for unsigned long long case */
2362 static void gen_cvt_itof1(int t
)
2364 #ifdef TCC_TARGET_ARM64
2367 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2368 (VT_LLONG
| VT_UNSIGNED
)) {
2371 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2372 #if LDOUBLE_SIZE != 8
2373 else if (t
== VT_LDOUBLE
)
2374 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2377 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2381 vtop
->r
= reg_fret(t
);
2389 /* generic ftoi for unsigned long long case */
2390 static void gen_cvt_ftoi1(int t
)
2392 #ifdef TCC_TARGET_ARM64
2397 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2398 /* not handled natively */
2399 st
= vtop
->type
.t
& VT_BTYPE
;
2401 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2402 #if LDOUBLE_SIZE != 8
2403 else if (st
== VT_LDOUBLE
)
2404 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2407 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2412 vtop
->r2
= REG_LRET
;
2419 /* force char or short cast */
2420 static void force_charshort_cast(int t
)
2424 /* cannot cast static initializers */
2425 if (STATIC_DATA_WANTED
)
2429 /* XXX: add optimization if lvalue : just change type and offset */
2434 if (t
& VT_UNSIGNED
) {
2435 vpushi((1 << bits
) - 1);
2438 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2444 /* result must be signed or the SAR is converted to an SHL
2445 This was not the case when "t" was a signed short
2446 and the last value on the stack was an unsigned int */
2447 vtop
->type
.t
&= ~VT_UNSIGNED
;
2453 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2454 static void gen_cast_s(int t
)
2462 static void gen_cast(CType
*type
)
2464 int sbt
, dbt
, sf
, df
, c
, p
;
2466 /* special delayed cast for char/short */
2467 /* XXX: in some cases (multiple cascaded casts), it may still
2469 if (vtop
->r
& VT_MUSTCAST
) {
2470 vtop
->r
&= ~VT_MUSTCAST
;
2471 force_charshort_cast(vtop
->type
.t
);
2474 /* bitfields first get cast to ints */
2475 if (vtop
->type
.t
& VT_BITFIELD
) {
2479 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2480 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2485 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2486 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2487 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2488 c
&= dbt
!= VT_LDOUBLE
;
2491 /* constant case: we can do it now */
2492 /* XXX: in ISOC, cannot do it if error in convert */
2493 if (sbt
== VT_FLOAT
)
2494 vtop
->c
.ld
= vtop
->c
.f
;
2495 else if (sbt
== VT_DOUBLE
)
2496 vtop
->c
.ld
= vtop
->c
.d
;
2499 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2500 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2501 vtop
->c
.ld
= vtop
->c
.i
;
2503 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2505 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2506 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2508 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2511 if (dbt
== VT_FLOAT
)
2512 vtop
->c
.f
= (float)vtop
->c
.ld
;
2513 else if (dbt
== VT_DOUBLE
)
2514 vtop
->c
.d
= (double)vtop
->c
.ld
;
2515 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2516 vtop
->c
.i
= vtop
->c
.ld
;
2517 } else if (sf
&& dbt
== VT_BOOL
) {
2518 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2521 vtop
->c
.i
= vtop
->c
.ld
;
2522 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2524 else if (sbt
& VT_UNSIGNED
)
2525 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2527 else if (sbt
== VT_PTR
)
2530 else if (sbt
!= VT_LLONG
)
2531 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2532 -(vtop
->c
.i
& 0x80000000));
2534 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2536 else if (dbt
== VT_BOOL
)
2537 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2539 else if (dbt
== VT_PTR
)
2542 else if (dbt
!= VT_LLONG
) {
2543 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2544 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2547 if (!(dbt
& VT_UNSIGNED
))
2548 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2551 } else if (p
&& dbt
== VT_BOOL
) {
2555 /* non constant case: generate code */
2557 /* convert from fp to fp */
2560 /* convert int to fp */
2563 /* convert fp to int */
2564 if (dbt
== VT_BOOL
) {
2568 /* we handle char/short/etc... with generic code */
2569 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2570 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2574 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2575 /* additional cast for char/short... */
2581 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2582 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2583 /* scalar to long long */
2584 /* machine independent conversion */
2586 /* generate high word */
2587 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2591 if (sbt
== VT_PTR
) {
2592 /* cast from pointer to int before we apply
2593 shift operation, which pointers don't support*/
2600 /* patch second register */
2601 vtop
[-1].r2
= vtop
->r
;
2605 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2606 (dbt
& VT_BTYPE
) == VT_PTR
||
2607 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2608 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2609 (sbt
& VT_BTYPE
) != VT_PTR
&&
2610 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2611 /* need to convert from 32bit to 64bit */
2613 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2614 #if defined(TCC_TARGET_ARM64)
2616 #elif defined(TCC_TARGET_X86_64)
2618 /* x86_64 specific: movslq */
2620 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2627 } else if (dbt
== VT_BOOL
) {
2628 /* scalar to bool */
2631 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2632 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2633 if (sbt
== VT_PTR
) {
2634 vtop
->type
.t
= VT_INT
;
2635 tcc_warning("nonportable conversion from pointer to char/short");
2637 force_charshort_cast(dbt
);
2638 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2640 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2642 /* from long long: just take low order word */
2647 vtop
->type
.t
|= VT_UNSIGNED
;
2651 /* if lvalue and single word type, nothing to do because
2652 the lvalue already contains the real type size (see
2653 VT_LVAL_xxx constants) */
2656 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2657 /* if we are casting between pointer types,
2658 we must update the VT_LVAL_xxx size */
2659 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2660 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2665 /* return type size as known at compile time. Put alignment at 'a' */
2666 ST_FUNC
int type_size(CType
*type
, int *a
)
2671 bt
= type
->t
& VT_BTYPE
;
2672 if (bt
== VT_STRUCT
) {
2677 } else if (bt
== VT_PTR
) {
2678 if (type
->t
& VT_ARRAY
) {
2682 ts
= type_size(&s
->type
, a
);
2684 if (ts
< 0 && s
->c
< 0)
2692 } else if (IS_ENUM(type
->t
) && type
->ref
->c
== -1) {
2693 return -1; /* incomplete enum */
2694 } else if (bt
== VT_LDOUBLE
) {
2696 return LDOUBLE_SIZE
;
2697 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2698 #ifdef TCC_TARGET_I386
2699 #ifdef TCC_TARGET_PE
2704 #elif defined(TCC_TARGET_ARM)
2714 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2717 } else if (bt
== VT_SHORT
) {
2720 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2724 /* char, void, function, _Bool */
2730 /* push type size as known at runtime time on top of value stack. Put
2732 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2734 if (type
->t
& VT_VLA
) {
2735 type_size(&type
->ref
->type
, a
);
2736 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2738 vpushi(type_size(type
, a
));
2742 static void vla_sp_restore(void) {
2743 if (vlas_in_scope
) {
2744 gen_vla_sp_restore(vla_sp_loc
);
2748 static void vla_sp_restore_root(void) {
2749 if (vlas_in_scope
) {
2750 gen_vla_sp_restore(vla_sp_root_loc
);
2754 /* return the pointed type of t */
2755 static inline CType
*pointed_type(CType
*type
)
2757 return &type
->ref
->type
;
2760 /* modify type so that its it is a pointer to type. */
2761 ST_FUNC
void mk_pointer(CType
*type
)
2764 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2765 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2769 /* compare function types. OLD functions match any new functions */
2770 static int is_compatible_func(CType
*type1
, CType
*type2
)
2776 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2778 /* check func_call */
2779 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2781 /* XXX: not complete */
2782 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2784 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2786 while (s1
!= NULL
) {
2789 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2799 /* return true if type1 and type2 are the same. If unqualified is
2800 true, qualifiers on the types are ignored.
2802 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2806 t1
= type1
->t
& VT_TYPE
;
2807 t2
= type2
->t
& VT_TYPE
;
2809 /* strip qualifiers before comparing */
2810 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2811 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2814 /* Default Vs explicit signedness only matters for char */
2815 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2819 /* XXX: bitfields ? */
2822 /* test more complicated cases */
2823 bt1
= t1
& VT_BTYPE
;
2824 if (bt1
== VT_PTR
) {
2825 type1
= pointed_type(type1
);
2826 type2
= pointed_type(type2
);
2827 return is_compatible_types(type1
, type2
);
2828 } else if (bt1
== VT_STRUCT
) {
2829 return (type1
->ref
== type2
->ref
);
2830 } else if (bt1
== VT_FUNC
) {
2831 return is_compatible_func(type1
, type2
);
2832 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
2833 return type1
->ref
== type2
->ref
;
2839 /* return true if type1 and type2 are exactly the same (including
2842 static int is_compatible_types(CType
*type1
, CType
*type2
)
2844 return compare_types(type1
,type2
,0);
2847 /* return true if type1 and type2 are the same (ignoring qualifiers).
2849 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2851 return compare_types(type1
,type2
,1);
2854 /* print a type. If 'varstr' is not NULL, then the variable is also
2855 printed in the type */
2857 /* XXX: add array and function pointers */
2858 static void type_to_str(char *buf
, int buf_size
,
2859 CType
*type
, const char *varstr
)
2871 pstrcat(buf
, buf_size
, "extern ");
2873 pstrcat(buf
, buf_size
, "static ");
2875 pstrcat(buf
, buf_size
, "typedef ");
2877 pstrcat(buf
, buf_size
, "inline ");
2878 if (t
& VT_VOLATILE
)
2879 pstrcat(buf
, buf_size
, "volatile ");
2880 if (t
& VT_CONSTANT
)
2881 pstrcat(buf
, buf_size
, "const ");
2883 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2884 || ((t
& VT_UNSIGNED
)
2885 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2888 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2890 buf_size
-= strlen(buf
);
2925 tstr
= "long double";
2927 pstrcat(buf
, buf_size
, tstr
);
2934 pstrcat(buf
, buf_size
, tstr
);
2935 v
= type
->ref
->v
& ~SYM_STRUCT
;
2936 if (v
>= SYM_FIRST_ANOM
)
2937 pstrcat(buf
, buf_size
, "<anonymous>");
2939 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2944 if (varstr
&& '*' == *varstr
) {
2945 pstrcat(buf1
, sizeof(buf1
), "(");
2946 pstrcat(buf1
, sizeof(buf1
), varstr
);
2947 pstrcat(buf1
, sizeof(buf1
), ")");
2949 pstrcat(buf1
, buf_size
, "(");
2951 while (sa
!= NULL
) {
2953 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2954 pstrcat(buf1
, sizeof(buf1
), buf2
);
2957 pstrcat(buf1
, sizeof(buf1
), ", ");
2959 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2960 pstrcat(buf1
, sizeof(buf1
), ", ...");
2961 pstrcat(buf1
, sizeof(buf1
), ")");
2962 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2967 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2968 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2971 pstrcpy(buf1
, sizeof(buf1
), "*");
2972 if (t
& VT_CONSTANT
)
2973 pstrcat(buf1
, buf_size
, "const ");
2974 if (t
& VT_VOLATILE
)
2975 pstrcat(buf1
, buf_size
, "volatile ");
2977 pstrcat(buf1
, sizeof(buf1
), varstr
);
2978 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2982 pstrcat(buf
, buf_size
, " ");
2983 pstrcat(buf
, buf_size
, varstr
);
2988 /* verify type compatibility to store vtop in 'dt' type, and generate
2990 static void gen_assign_cast(CType
*dt
)
2992 CType
*st
, *type1
, *type2
;
2993 char buf1
[256], buf2
[256];
2994 int dbt
, sbt
, qualwarn
, lvl
;
2996 st
= &vtop
->type
; /* source type */
2997 dbt
= dt
->t
& VT_BTYPE
;
2998 sbt
= st
->t
& VT_BTYPE
;
2999 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3000 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3001 ; /* It is Ok if both are void */
3003 tcc_error("cannot cast from/to void");
3005 if (dt
->t
& VT_CONSTANT
)
3006 tcc_warning("assignment of read-only location");
3009 /* special cases for pointers */
3010 /* '0' can also be a pointer */
3011 if (is_null_pointer(vtop
))
3013 /* accept implicit pointer to integer cast with warning */
3014 if (is_integer_btype(sbt
)) {
3015 tcc_warning("assignment makes pointer from integer without a cast");
3018 type1
= pointed_type(dt
);
3020 type2
= pointed_type(st
);
3021 else if (sbt
== VT_FUNC
)
3022 type2
= st
; /* a function is implicitly a function pointer */
3025 if (is_compatible_types(type1
, type2
))
3027 for (qualwarn
= lvl
= 0;; ++lvl
) {
3028 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3029 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3031 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3032 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3033 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3035 type1
= pointed_type(type1
);
3036 type2
= pointed_type(type2
);
3038 if (!is_compatible_unqualified_types(type1
, type2
)) {
3039 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3040 /* void * can match anything */
3041 } else if (dbt
== sbt
3042 && is_integer_btype(sbt
& VT_BTYPE
)
3043 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3044 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3045 /* Like GCC don't warn by default for merely changes
3046 in pointer target signedness. Do warn for different
3047 base types, though, in particular for unsigned enums
3048 and signed int targets. */
3050 tcc_warning("assignment from incompatible pointer type");
3055 tcc_warning("assignment discards qualifiers from pointer target type");
3061 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3062 tcc_warning("assignment makes integer from pointer without a cast");
3063 } else if (sbt
== VT_STRUCT
) {
3064 goto case_VT_STRUCT
;
3066 /* XXX: more tests */
3070 if (!is_compatible_unqualified_types(dt
, st
)) {
3072 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3073 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3074 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
/* NOTE(review): scraped fragment — the leading numbers on each line are the
   original file's line numbers fused in by extraction, and they are
   non-contiguous: statements (and braces) are missing. Not compilable as-is. */
3081 /* store vtop in lvalue pushed on stack */
3082 ST_FUNC
void vstore(void)
3084 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
/* ft: full type of the destination lvalue (vtop[-1]);
   sbt/dbt: basic types of source (vtop) and destination. */
3086 ft
= vtop
[-1].type
.t
;
3087 sbt
= vtop
->type
.t
& VT_BTYPE
;
3088 dbt
= ft
& VT_BTYPE
;
3089 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3090 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3091 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3092 /* optimize char/short casts */
3093 delayed_cast
= VT_MUSTCAST
;
3094 vtop
->type
.t
= ft
& VT_TYPE
;
3095 /* XXX: factorize */
3096 if (ft
& VT_CONSTANT
)
3097 tcc_warning("assignment of read-only location");
3100 if (!(ft
& VT_BITFIELD
))
3101 gen_assign_cast(&vtop
[-1].type
);
/* Structure assignment: turned into a call to memcpy/memmove on the two
   addresses (size/alignment chosen from the source type). */
3104 if (sbt
== VT_STRUCT
) {
3105 /* if structure, only generate pointer */
3106 /* structure assignment : generate memcpy */
3107 /* XXX: optimize if small size */
3108 size
= type_size(&vtop
->type
, &align
);
3112 vtop
->type
.t
= VT_PTR
;
3115 /* address of memcpy() */
3118 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3119 else if(!(align
& 3))
3120 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3123 /* Use memmove, rather than memcpy, as dest and src may be same: */
3124 vpush_global_sym(&func_old_type
, TOK_memmove
);
3129 vtop
->type
.t
= VT_PTR
;
3135 /* leave source on stack */
3136 } else if (ft
& VT_BITFIELD
) {
3137 /* bitfield store handling */
3139 /* save lvalue as expression result (example: s.b = s.a = n;) */
3140 vdup(), vtop
[-1] = vtop
[-2];
3142 bit_pos
= BIT_POS(ft
);
3143 bit_size
= BIT_SIZE(ft
);
3144 /* remove bit field info to avoid loops */
3145 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3147 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3148 gen_cast(&vtop
[-1].type
);
3149 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
/* adjust_bf returning VT_STRUCT signals a "packed" bitfield that must be
   stored byte-wise via store_packed_bf. */
3152 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3153 if (r
== VT_STRUCT
) {
3154 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3155 store_packed_bf(bit_pos
, bit_size
);
/* Generic read-modify-write: mask the source, mask out the destination
   bits, OR them together. */
3157 unsigned long long mask
= (1ULL << bit_size
) - 1;
3158 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3160 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3163 vpushi((unsigned)mask
);
3170 /* duplicate destination */
3173 /* load destination, mask and or with source */
3174 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3175 vpushll(~(mask
<< bit_pos
));
3177 vpushi(~((unsigned)mask
<< bit_pos
));
3182 /* ... and discard */
3185 } else if (dbt
== VT_VOID
) {
3188 #ifdef CONFIG_TCC_BCHECK
3189 /* bound check case */
3190 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3199 #ifdef TCC_TARGET_X86_64
3200 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3202 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3207 r
= gv(rc
); /* generate value */
3208 /* if lvalue was saved on stack, must read it */
3209 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3211 t
= get_reg(RC_INT
);
3217 sv
.r
= VT_LOCAL
| VT_LVAL
;
3218 sv
.c
.i
= vtop
[-1].c
.i
;
3220 vtop
[-1].r
= t
| VT_LVAL
;
3222 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3224 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3225 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3227 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3228 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3230 vtop
[-1].type
.t
= load_type
;
3233 /* convert to int to increment easily */
3234 vtop
->type
.t
= addr_type
;
3240 vtop
[-1].type
.t
= load_type
;
3241 /* XXX: it works because r2 is spilled last ! */
3242 store(vtop
->r2
, vtop
- 1);
3248 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3249 vtop
->r
|= delayed_cast
;
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; some statements are missing. Not compilable as-is. */
3253 /* post defines POST/PRE add. c is the token ++ or -- */
3254 ST_FUNC
void inc(int post
, int c
)
3257 vdup(); /* save lvalue */
3259 gv_dup(); /* duplicate value */
/* c - TOK_MID yields +1 for ++ and -1 for -- (the increment amount). */
3264 vpushi(c
- TOK_MID
);
3266 vstore(); /* store value */
3268 vpop(); /* if post op, return saved value */
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; some statements are missing. Not compilable as-is. */
/* Concatenate a sequence of adjacent string-literal tokens into *astr and
   NUL-terminate it. 'msg' is presumably used for an error message when no
   string is present — TODO confirm (that code is not in this fragment). */
3271 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3273 /* read the string */
3277 while (tok
== TOK_STR
) {
3278 /* XXX: add \0 handling too ? */
3279 cstr_cat(astr
, tokc
.str
.data
, -1);
3282 cstr_ccat(astr
, '\0');
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; most of the body is missing. Not compilable as-is. */
3285 /* If I is >= 1 and a power of two, returns log2(i)+1.
3286 If I is 0 returns 0. */
3287 static int exact_log2p1(int i
)
/* Visible part: skip 8 bits at a time while i >= 256; the remaining
   per-bit steps are not in this fragment. */
3292 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; the switch skeleton and many case labels are missing.
   Not compilable as-is. */
3303 /* Parse __attribute__((...)) GNUC extension. */
3304 static void parse_attribute(AttributeDef
*ad
)
3310 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3315 while (tok
!= ')') {
3316 if (tok
< TOK_IDENT
)
3317 expect("attribute name");
/* __attribute__((section("name"))) */
3324 parse_mult_str(&astr
, "section name");
3325 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
/* __attribute__((alias("target"))) — target kept as a token for later */
3332 parse_mult_str(&astr
, "alias(\"target\")");
3333 ad
->alias_target
= /* save string as token, for later */
3334 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3338 case TOK_VISIBILITY1
:
3339 case TOK_VISIBILITY2
:
3341 parse_mult_str(&astr
,
3342 "visibility(\"default|hidden|internal|protected\")");
3343 if (!strcmp (astr
.data
, "default"))
3344 ad
->a
.visibility
= STV_DEFAULT
;
3345 else if (!strcmp (astr
.data
, "hidden"))
3346 ad
->a
.visibility
= STV_HIDDEN
;
3347 else if (!strcmp (astr
.data
, "internal"))
3348 ad
->a
.visibility
= STV_INTERNAL
;
3349 else if (!strcmp (astr
.data
, "protected"))
3350 ad
->a
.visibility
= STV_PROTECTED
;
3352 expect("visibility(\"default|hidden|internal|protected\")");
/* __attribute__((aligned(n))) — n must be a power of two; stored as
   log2(n)+1 via exact_log2p1 */
3361 if (n
<= 0 || (n
& (n
- 1)) != 0)
3362 tcc_error("alignment must be a positive power of two");
3367 ad
->a
.aligned
= exact_log2p1(n
);
3368 if (n
!= 1 << (ad
->a
.aligned
- 1))
3369 tcc_error("alignment of %d is larger than implemented", n
);
3381 /* currently, no need to handle it because tcc does not
3382 track unused objects */
3386 /* currently, no need to handle it because tcc does not
3387 track unused objects */
/* calling-convention attributes */
3392 ad
->f
.func_call
= FUNC_CDECL
;
3397 ad
->f
.func_call
= FUNC_STDCALL
;
3399 #ifdef TCC_TARGET_I386
3409 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3415 ad
->f
.func_call
= FUNC_FASTCALLW
;
/* __attribute__((mode(...))) — stored as basic type + 1 so 0 means
   "no mode attribute" */
3422 ad
->attr_mode
= VT_LLONG
+ 1;
3425 ad
->attr_mode
= VT_BYTE
+ 1;
3428 ad
->attr_mode
= VT_SHORT
+ 1;
3432 ad
->attr_mode
= VT_INT
+ 1;
3435 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3442 ad
->a
.dllexport
= 1;
3444 case TOK_NODECORATE
:
3445 ad
->a
.nodecorate
= 1;
3448 ad
->a
.dllimport
= 1;
/* unknown attribute: warn (if enabled) and skip a balanced
   parenthesized parameter list */
3451 if (tcc_state
->warn_unsupported
)
3452 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3453 /* skip parameters */
3455 int parenthesis
= 0;
3459 else if (tok
== ')')
3462 } while (parenthesis
&& tok
!= -1);
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; some statements are missing. Not compilable as-is. */
/* Search the member list of struct 'type' for field token 'v'; the visible
   part recurses into anonymous struct/union members (anonymous = field with
   struct type whose name is an anonymous symbol >= SYM_FIRST_ANOM). */
3475 static Sym
* find_field (CType
*type
, int v
)
3479 while ((s
= s
->next
) != NULL
) {
3480 if ((s
->v
& SYM_FIELD
) &&
3481 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3482 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3483 Sym
*ret
= find_field (&s
->type
, v
);
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; some statements are missing. Not compilable as-is. */
/* Walk the member list of 's', recursing into anonymous struct/union
   members; presumably adds 'offset' to each member's offset — TODO confirm
   (the non-recursive adjustment line is not in this fragment). */
3493 static void struct_add_offset (Sym
*s
, int offset
)
3495 while ((s
= s
->next
) != NULL
) {
3496 if ((s
->v
& SYM_FIELD
) &&
3497 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3498 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3499 struct_add_offset(s
->type
.ref
, offset
);
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; many statements are missing. Not compilable as-is. */
/* Compute member offsets, bit positions and total size/alignment for a
   struct/union type, in either PCC/GCC-compatible or MS-compatible
   (-ms-bitfields) layout mode; then verify bitfields can be accessed
   through their declared type and fix them up otherwise. */
3505 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3507 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3508 int packed
, a
, bt
, prevbt
, prev_bit_size
;
/* pcc != 0 selects GCC-compatible layout; MS layout otherwise */
3509 int pcc
= !tcc_state
->ms_bitfields
;
3510 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3517 prevbt
= VT_STRUCT
; /* make it never match */
/* first pass: place each member */
3522 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3523 if (f
->type
.t
& VT_BITFIELD
)
3524 bit_size
= BIT_SIZE(f
->type
.t
);
3527 size
= type_size(&f
->type
, &align
);
3528 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3531 if (pcc
&& bit_size
== 0) {
3532 /* in pcc mode, packing does not affect zero-width bitfields */
3535 /* in pcc mode, attribute packed overrides if set. */
3536 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3539 /* pragma pack overrides align if lesser and packs bitfields always */
3542 if (pragma_pack
< align
)
3543 align
= pragma_pack
;
3544 /* in pcc mode pragma pack also overrides individual align */
3545 if (pcc
&& pragma_pack
< a
)
3549 /* some individual align was specified */
3553 if (type
->ref
->type
.t
== VT_UNION
) {
3554 if (pcc
&& bit_size
>= 0)
3555 size
= (bit_size
+ 7) >> 3;
3560 } else if (bit_size
< 0) {
/* plain (non-bitfield) member: flush pending bits, then align */
3562 c
+= (bit_pos
+ 7) >> 3;
3563 c
= (c
+ align
- 1) & -align
;
3572 /* A bit-field. Layout is more complicated. There are two
3573 options: PCC (GCC) compatible and MS compatible */
3575 /* In PCC layout a bit-field is placed adjacent to the
3576 preceding bit-fields, except if:
3578 - an individual alignment was given
3579 - it would overflow its base type container and
3580 there is no packing */
3581 if (bit_size
== 0) {
3583 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3585 } else if (f
->a
.aligned
) {
3587 } else if (!packed
) {
3589 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3590 if (ofs
> size
/ align
)
3594 /* in pcc mode, long long bitfields have type int if they fit */
3595 if (size
== 8 && bit_size
<= 32)
3596 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3598 while (bit_pos
>= align
* 8)
3599 c
+= align
, bit_pos
-= align
* 8;
3602 /* In PCC layout named bit-fields influence the alignment
3603 of the containing struct using the base types alignment,
3604 except for packed fields (which here have correct align). */
3605 if (f
->v
& SYM_FIRST_ANOM
3606 // && bit_size // ??? gcc on ARM/rpi does that
3611 bt
= f
->type
.t
& VT_BTYPE
;
3612 if ((bit_pos
+ bit_size
> size
* 8)
3613 || (bit_size
> 0) == (bt
!= prevbt
)
3615 c
= (c
+ align
- 1) & -align
;
3618 /* In MS bitfield mode a bit-field run always uses
3619 at least as many bits as the underlying type.
3620 To start a new run it's also required that this
3621 or the last bit-field had non-zero width. */
3622 if (bit_size
|| prev_bit_size
)
3625 /* In MS layout the records alignment is normally
3626 influenced by the field, except for a zero-width
3627 field at the start of a run (but by further zero-width
3628 fields it is again). */
3629 if (bit_size
== 0 && prevbt
!= bt
)
3632 prev_bit_size
= bit_size
;
/* record the bit position in the member's type word */
3635 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3636 | (bit_pos
<< VT_STRUCT_SHIFT
);
3637 bit_pos
+= bit_size
;
3639 if (align
> maxalign
)
/* debug dump of the placement (guard/format lines missing here) */
3643 printf("set field %s offset %-2d size %-2d align %-2d",
3644 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3645 if (f
->type
.t
& VT_BITFIELD
) {
3646 printf(" pos %-2d bits %-2d",
3654 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3656 /* An anonymous struct/union. Adjust member offsets
3657 to reflect the real offset of our containing struct.
3658 Also set the offset of this anon member inside
3659 the outer struct to be zero. Via this it
3660 works when accessing the field offset directly
3661 (from base object), as well as when recursing
3662 members in initializer handling. */
3663 int v2
= f
->type
.ref
->v
;
3664 if (!(v2
& SYM_FIELD
) &&
3665 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3667 /* This happens only with MS extensions. The
3668 anon member has a named struct type, so it
3669 potentially is shared with other references.
3670 We need to unshare members so we can modify
3673 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3674 &f
->type
.ref
->type
, 0,
3676 pps
= &f
->type
.ref
->next
;
3677 while ((ass
= ass
->next
) != NULL
) {
3678 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3679 pps
= &((*pps
)->next
);
3683 struct_add_offset(f
->type
.ref
, offset
);
3693 c
+= (bit_pos
+ 7) >> 3;
3695 /* store size and alignment */
3696 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3700 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3701 /* can happen if individual align for some member was given. In
3702 this case MSVC ignores maxalign when aligning the size */
3707 c
= (c
+ a
- 1) & -a
;
3711 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3714 /* check whether we can access bitfields by their type */
3715 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3719 if (0 == (f
->type
.t
& VT_BITFIELD
))
3723 bit_size
= BIT_SIZE(f
->type
.t
);
3726 bit_pos
= BIT_POS(f
->type
.t
);
3727 size
= type_size(&f
->type
, &align
);
3728 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3731 /* try to access the field using a different type */
3732 c0
= -1, s
= align
= 1;
3734 px
= f
->c
* 8 + bit_pos
;
3735 cx
= (px
>> 3) & -align
;
3736 px
= px
- (cx
<< 3);
3739 s
= (px
+ bit_size
+ 7) >> 3;
3749 s
= type_size(&t
, &align
);
3753 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3754 /* update offset and bit position */
3757 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3758 | (bit_pos
<< VT_STRUCT_SHIFT
);
3762 printf("FIX field %s offset %-2d size %-2d align %-2d "
3763 "pos %-2d bits %-2d\n",
3764 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3765 cx
, s
, align
, px
, bit_size
);
3768 /* fall back to load/store single-byte wise */
3769 f
->auxtype
= VT_STRUCT
;
3771 printf("FIX field %s : load byte-wise\n",
3772 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; many statements are missing. Not compilable as-is. */
3778 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3779 static void struct_decl(CType
*type
, int u
)
3781 int v
, c
, size
, align
, flexible
;
3782 int bit_size
, bsize
, bt
;
3784 AttributeDef ad
, ad1
;
3787 memset(&ad
, 0, sizeof ad
);
3789 parse_attribute(&ad
);
3793 /* struct already defined ? return it */
3795 expect("struct/union/enum name");
3797 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3800 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3802 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3807 /* Record the original enum/struct/union token. */
/* enums start out as unsigned int; narrowed/widened after parsing values */
3808 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3810 /* we put an undefined size for struct/union */
3811 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3812 s
->r
= 0; /* default alignment is zero as gcc */
3814 type
->t
= s
->type
.t
;
3820 tcc_error("struct/union/enum already defined");
3821 /* cannot be empty */
3822 /* non empty enums are not allowed */
/* --- enum body: ll = current value, pl/nl = max positive/min negative --- */
3825 long long ll
= 0, pl
= 0, nl
= 0;
3828 /* enum symbols have static storage */
3829 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3833 expect("identifier");
3835 if (ss
&& !local_stack
)
3836 tcc_error("redefinition of enumerator '%s'",
3837 get_tok_str(v
, NULL
));
3841 ll
= expr_const64();
3843 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3845 *ps
= ss
, ps
= &ss
->next
;
3854 /* NOTE: we accept a trailing comma */
3859 /* set integral type of the enum */
3862 if (pl
!= (unsigned)pl
)
3863 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3865 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
3866 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3867 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
3869 /* set type for enum members */
3870 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
3872 if (ll
== (int)ll
) /* default is int if it fits */
3874 if (t
.t
& VT_UNSIGNED
) {
3875 ss
->type
.t
|= VT_UNSIGNED
;
3876 if (ll
== (unsigned)ll
)
3879 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
3880 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
/* --- struct/union body: parse member declarations until '}' --- */
3885 while (tok
!= '}') {
3886 if (!parse_btype(&btype
, &ad1
)) {
3892 tcc_error("flexible array member '%s' not at the end of struct",
3893 get_tok_str(v
, NULL
));
3899 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
3901 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3902 expect("identifier");
/* unnamed member: allowed only for anonymous struct/union (or with
   MS extensions for a named struct type) */
3904 int v
= btype
.ref
->v
;
3905 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3906 if (tcc_state
->ms_extensions
== 0)
3907 expect("identifier");
3911 if (type_size(&type1
, &align
) < 0) {
3912 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3915 tcc_error("field '%s' has incomplete type",
3916 get_tok_str(v
, NULL
));
3918 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3919 (type1
.t
& VT_STORAGE
))
3920 tcc_error("invalid type for '%s'",
3921 get_tok_str(v
, NULL
));
/* optional bit-field width after ':' */
3925 bit_size
= expr_const();
3926 /* XXX: handle v = 0 case for messages */
3928 tcc_error("negative width in bit-field '%s'",
3929 get_tok_str(v
, NULL
));
3930 if (v
&& bit_size
== 0)
3931 tcc_error("zero width for bit-field '%s'",
3932 get_tok_str(v
, NULL
));
3933 parse_attribute(&ad1
);
3935 size
= type_size(&type1
, &align
);
3936 if (bit_size
>= 0) {
3937 bt
= type1
.t
& VT_BTYPE
;
3943 tcc_error("bitfields must have scalar type");
3945 if (bit_size
> bsize
) {
3946 tcc_error("width of '%s' exceeds its type",
3947 get_tok_str(v
, NULL
));
3948 } else if (bit_size
== bsize
3949 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
3950 /* no need for bit fields */
3952 } else if (bit_size
== 64) {
3953 tcc_error("field width 64 not implemented");
/* encode the bit width into the type word */
3955 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
3957 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
3960 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3961 /* Remember we've seen a real field to check
3962 for placement of flexible array member. */
3965 /* If member is a struct or bit-field, enforce
3966 placing into the struct (as anonymous). */
3968 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
3973 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
3978 if (tok
== ';' || tok
== TOK_EOF
)
/* trailing attributes apply to the whole struct, then lay it out */
3985 parse_attribute(&ad
);
3986 struct_layout(type
, &ad
);
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; braces are missing. Not compilable as-is. */
/* Copy alignment / calling-convention / function-type attributes from
   symbol 's' into 'ad', but only where 'ad' does not already set them. */
3991 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
3993 if (s
->a
.aligned
&& 0 == ad
->a
.aligned
)
3994 ad
->a
.aligned
= s
->a
.aligned
;
3995 if (s
->f
.func_call
&& 0 == ad
->f
.func_call
)
3996 ad
->f
.func_call
= s
->f
.func_call
;
3997 if (s
->f
.func_type
&& 0 == ad
->f
.func_type
)
3998 ad
->f
.func_type
= s
->f
.func_type
;
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; braces are missing. Not compilable as-is. */
4003 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4004 are added to the element type, copied because it could be a typedef. */
4005 static void parse_btype_qualify(CType
*type
, int qualifiers
)
/* descend through array dimensions, cloning each level so a shared
   typedef ref is not mutated */
4007 while (type
->t
& VT_ARRAY
) {
4008 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4009 type
= &type
->ref
->type
;
4011 type
->t
|= qualifiers
;
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; the switch skeleton and many case labels are missing.
   Not compilable as-is. */
4014 /* return 0 if no type declaration. otherwise, return the basic type
4017 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4019 int t
, u
, bt
, st
, type_found
, typespec_found
, g
;
4023 memset(ad
, 0, sizeof(AttributeDef
));
4033 /* currently, we really ignore extension */
/* short/long combination rules: st tracks short/long, bt the base type */
4043 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4044 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4045 tmbt
: tcc_error("too many basic types");
4048 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4053 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
/* 'long double' and 'long long' resolution */
4066 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4067 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4068 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4069 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4076 #ifdef TCC_TARGET_ARM64
4078 /* GCC's __uint128_t appears in some Linux header files. Make it a
4079 synonym for long double to get the size and alignment right. */
4090 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4091 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
/* enum / struct / union specifiers */
4099 struct_decl(&type1
, VT_ENUM
);
4102 type
->ref
= type1
.ref
;
4105 struct_decl(&type1
, VT_STRUCT
);
4108 struct_decl(&type1
, VT_UNION
);
4111 /* type modifiers */
4116 parse_btype_qualify(type
, VT_CONSTANT
);
4124 parse_btype_qualify(type
, VT_VOLATILE
);
4131 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4132 tcc_error("signed and unsigned modifier");
4145 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4146 tcc_error("signed and unsigned modifier");
4147 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
/* storage classes: at most one of extern/static/typedef */
4163 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4164 tcc_error("multiple storage classes");
4175 /* GNUC attribute */
4176 case TOK_ATTRIBUTE1
:
4177 case TOK_ATTRIBUTE2
:
4178 parse_attribute(ad
);
4179 if (ad
->attr_mode
) {
/* attr_mode stores basic type + 1; see parse_attribute */
4180 u
= ad
->attr_mode
-1;
4181 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
/* typeof(expr) */
4189 parse_expr_type(&type1
);
4190 /* remove all storage modifiers except typedef */
4191 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4193 sym_to_attr(ad
, type1
.ref
);
/* typedef name lookup */
4199 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4201 t
&= ~(VT_BTYPE
|VT_LONG
);
4202 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4203 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4204 type
->ref
= s
->type
.ref
;
4206 parse_btype_qualify(type
, t
);
4208 /* get attributes from typedef */
/* defaults after the specifier loop */
4218 if (tcc_state
->char_is_unsigned
) {
4219 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4222 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4223 bt
= t
& (VT_BTYPE
|VT_LONG
);
4225 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4226 #ifdef TCC_TARGET_PE
4227 if (bt
== VT_LDOUBLE
)
4228 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; some statements are missing. Not compilable as-is. */
4234 /* convert a function parameter type (array to pointer and function to
4235 function pointer) */
4236 static inline void convert_parameter_type(CType
*pt
)
4238 /* remove const and volatile qualifiers (XXX: const could be used
4239 to indicate a const function parameter */
4240 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4241 /* array must be transformed to pointer according to ANSI C */
4243 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous. Not compilable as-is. */
/* Parse an asm string (one or more concatenated string literals) into
   *astr; thin wrapper over parse_mult_str. */
4248 ST_FUNC
void parse_asm_str(CString
*astr
)
4251 parse_mult_str(astr
, "string constant");
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; some statements are missing. Not compilable as-is. */
4254 /* Parse an asm label and return the token */
4255 static int asm_label_instr(void)
4261 parse_asm_str(&astr
);
4264 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
/* intern the label string (minus the trailing NUL) as a token */
4266 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; many statements are missing. Not compilable as-is. */
/* Parse the "post" part of a declarator: function parameter lists and
   array dimensions (including VLAs), updating 'type' in place. */
4271 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4273 int n
, l
, t1
, arg_size
, align
;
4274 Sym
**plast
, *s
, *first
;
4279 /* function type, or recursive declarator (return if so) */
4281 if (td
&& !(td
& TYPE_ABSTRACT
))
4285 else if (parse_btype(&pt
, &ad1
))
4296 /* read param name and compute offset */
4297 if (l
!= FUNC_OLD
) {
/* "(void)" means an empty new-style parameter list */
4298 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4300 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4301 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4302 tcc_error("parameter declared as void");
4306 expect("identifier");
4307 pt
.t
= VT_VOID
; /* invalid type */
4310 convert_parameter_type(&pt
);
4311 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4312 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4318 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4323 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4324 tcc_error("invalid type");
4327 /* if no parameters, then old type prototype */
4330 /* NOTE: const is ignored in returned type as it has a special
4331 meaning in gcc / C++ */
4332 type
->t
&= ~VT_CONSTANT
;
4333 /* some ancient pre-K&R C allows a function to return an array
4334 and the array brackets to be put after the arguments, such
4335 that "int c()[]" means something like "int[] c()" */
4338 skip(']'); /* only handle simple "[]" */
4341 /* we push a anonymous symbol which will contain the function prototype */
4342 ad
->f
.func_args
= arg_size
;
4343 ad
->f
.func_type
= l
;
4344 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4350 } else if (tok
== '[') {
4351 int saved_nocode_wanted
= nocode_wanted
;
4352 /* array definition */
4355 /* XXX The optional type-quals and static should only be accepted
4356 in parameter decls. The '*' as well, and then even only
4357 in prototypes (not function defs). */
4359 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
/* constant size for static/global arrays, runtime value otherwise */
4374 if (!local_stack
|| (storage
& VT_STATIC
))
4375 vpushi(expr_const());
4377 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4378 length must always be evaluated, even under nocode_wanted,
4379 so that its size slot is initialized (e.g. under sizeof
4384 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4387 tcc_error("invalid array size");
4389 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4390 tcc_error("size of variable length array should be an integer");
4395 /* parse next post type */
4396 post_type(type
, ad
, storage
, 0);
4397 if (type
->t
== VT_FUNC
)
4398 tcc_error("declaration of an array of functions");
4399 t1
|= type
->t
& VT_VLA
;
/* VLA: reserve a local slot holding the computed size */
4402 loc
-= type_size(&int_type
, &align
);
4406 vla_runtime_type_size(type
, &align
);
4408 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4414 nocode_wanted
= saved_nocode_wanted
;
4416 /* we push an anonymous symbol which will contain the array
4418 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4419 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; some statements are missing. Not compilable as-is. */
4425 /* Parse a type declarator (except basic type), and return the type
4426 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4427 expected. 'type' should contain the basic type. 'ad' is the
4428 attribute definition of the basic type. It can be modified by
4429 type_decl(). If this (possibly abstract) declarator is a pointer chain
4430 it returns the innermost pointed to type (equals *type, but is a different
4431 pointer), otherwise returns type itself, that's used for recursive calls. */
4432 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4435 int qualifiers
, storage
;
4437 /* recursive type, remove storage bits first, apply them later again */
4438 storage
= type
->t
& VT_STORAGE
;
4439 type
->t
&= ~VT_STORAGE
;
/* pointer chain: each '*' may carry its own qualifiers/attributes */
4442 while (tok
== '*') {
4450 qualifiers
|= VT_CONSTANT
;
4455 qualifiers
|= VT_VOLATILE
;
4461 /* XXX: clarify attribute handling */
4462 case TOK_ATTRIBUTE1
:
4463 case TOK_ATTRIBUTE2
:
4464 parse_attribute(ad
);
4468 type
->t
|= qualifiers
;
4470 /* innermost pointed to type is the one for the first derivation */
4471 ret
= pointed_type(type
);
4475 /* This is possibly a parameter type list for abstract declarators
4476 ('int ()'), use post_type for testing this. */
4477 if (!post_type(type
, ad
, 0, td
)) {
4478 /* It's not, so it's a nested declarator, and the post operations
4479 apply to the innermost pointed to type (if any). */
4480 /* XXX: this is not correct to modify 'ad' at this point, but
4481 the syntax is not clear */
4482 parse_attribute(ad
);
4483 post
= type_decl(type
, ad
, v
, td
);
4486 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4487 /* type identifier */
4491 if (!(td
& TYPE_ABSTRACT
))
4492 expect("identifier");
4495 post_type(post
, ad
, storage
, 0);
4496 parse_attribute(ad
);
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; some statements are missing. Not compilable as-is. */
4501 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4502 ST_FUNC
int lvalue_type(int t
)
/* byte/bool and short get their own lvalue size flags; unsignedness is
   recorded separately */
4507 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4509 else if (bt
== VT_SHORT
)
4513 if (t
& VT_UNSIGNED
)
4514 r
|= VT_LVAL_UNSIGNED
;
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; some statements are missing. Not compilable as-is. */
4518 /* indirection with full error checking and bound check */
4519 ST_FUNC
void indir(void)
4521 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
/* dereferencing a function is a no-op (functions decay to pointers) */
4522 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4526 if (vtop
->r
& VT_LVAL
)
4528 vtop
->type
= *pointed_type(&vtop
->type
);
4529 /* Arrays and functions are never lvalues */
4530 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4531 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4532 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4533 /* if bound checking, the referenced pointer must be checked */
4534 #ifdef CONFIG_TCC_BCHECK
4535 if (tcc_state
->do_bounds_check
)
4536 vtop
->r
|= VT_MUSTBOUND
;
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; some statements are missing. Not compilable as-is. */
4541 /* pass a parameter to a function and do type checking and casting */
4542 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4547 func_type
= func
->f
.func_type
;
/* old-style prototype or variadic tail: apply default promotions only */
4548 if (func_type
== FUNC_OLD
||
4549 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4550 /* default casting : only need to convert float to double */
4551 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4552 gen_cast_s(VT_DOUBLE
);
4553 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4554 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4555 type
.ref
= vtop
->type
.ref
;
4558 } else if (arg
== NULL
) {
4559 tcc_error("too many arguments to function");
/* prototyped parameter: cast argument to the declared type */
4562 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4563 gen_assign_cast(&type
);
/* NOTE(review): scraped fragment — only the signature survives here; the
   body was dropped by extraction. */
4567 /* parse an expression and return its type without any side effect. */
4568 static void expr_type(CType
*type
, void (*expr_fn
)(void))
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; some statements are missing. Not compilable as-is. */
4577 /* parse an expression of the form '(type)' or '(expr)' and return its
4579 static void parse_expr_type(CType
*type
)
/* try a type name first; otherwise fall back to a full expression */
4585 if (parse_btype(type
, &ad
)) {
4586 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4588 expr_type(type
, gexpr
);
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; some statements are missing. Not compilable as-is. */
/* Parse a (possibly abstract) type name into 'type'; errors if no basic
   type is present — presumably via expect(), not visible in this fragment. */
4593 static void parse_type(CType
*type
)
4598 if (!parse_btype(type
, &ad
)) {
4601 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
/* NOTE(review): scraped fragment — fused original line numbers are
   non-contiguous; some statements are missing. Not compilable as-is. */
/* Parse a builtin's argument list driven by the format string 'args':
   'e' = expression argument, 't' = type argument. The role of 'nc' is not
   visible in this fragment — TODO confirm (likely "no code" save/restore). */
4604 static void parse_builtin_params(int nc
, const char *args
)
4611 while ((c
= *args
++)) {
4615 case 'e': expr_eq(); continue;
4616 case 't': parse_type(&t
); vpush(&t
); continue;
4617 default: tcc_error("internal error"); break;
4625 ST_FUNC
void unary(void)
4627 int n
, t
, align
, size
, r
, sizeof_caller
;
4632 sizeof_caller
= in_sizeof
;
4635 /* XXX: GCC 2.95.3 does not generate a table although it should be
4643 #ifdef TCC_TARGET_PE
4644 t
= VT_SHORT
|VT_UNSIGNED
;
4652 vsetc(&type
, VT_CONST
, &tokc
);
4656 t
= VT_INT
| VT_UNSIGNED
;
4662 t
= VT_LLONG
| VT_UNSIGNED
;
4674 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4677 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4679 case TOK___FUNCTION__
:
4681 goto tok_identifier
;
4687 /* special function name identifier */
4688 len
= strlen(funcname
) + 1;
4689 /* generate char[len] type */
4694 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4695 if (!NODATA_WANTED
) {
4696 ptr
= section_ptr_add(data_section
, len
);
4697 memcpy(ptr
, funcname
, len
);
4703 #ifdef TCC_TARGET_PE
4704 t
= VT_SHORT
| VT_UNSIGNED
;
4710 /* string parsing */
4712 if (tcc_state
->char_is_unsigned
)
4713 t
= VT_BYTE
| VT_UNSIGNED
;
4715 if (tcc_state
->warn_write_strings
)
4720 memset(&ad
, 0, sizeof(AttributeDef
));
4721 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4726 if (parse_btype(&type
, &ad
)) {
4727 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4729 /* check ISOC99 compound literal */
4731 /* data is allocated locally by default */
4736 /* all except arrays are lvalues */
4737 if (!(type
.t
& VT_ARRAY
))
4738 r
|= lvalue_type(type
.t
);
4739 memset(&ad
, 0, sizeof(AttributeDef
));
4740 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4742 if (sizeof_caller
) {
4749 } else if (tok
== '{') {
4750 int saved_nocode_wanted
= nocode_wanted
;
4752 tcc_error("expected constant");
4753 /* save all registers */
4755 /* statement expression : we do not accept break/continue
4756 inside as GCC does. We do retain the nocode_wanted state,
4757 as statement expressions can't ever be entered from the
4758 outside, so any reactivation of code emission (from labels
4759 or loop heads) can be disabled again after the end of it. */
4760 block(NULL
, NULL
, 1);
4761 nocode_wanted
= saved_nocode_wanted
;
4776 /* functions names must be treated as function pointers,
4777 except for unary '&' and sizeof. Since we consider that
4778 functions are not lvalues, we only have to handle it
4779 there and in function calls. */
4780 /* arrays can also be used although they are not lvalues */
4781 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4782 !(vtop
->type
.t
& VT_ARRAY
))
4784 mk_pointer(&vtop
->type
);
4790 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4791 gen_cast_s(VT_BOOL
);
4792 vtop
->c
.i
= !vtop
->c
.i
;
4793 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4797 vseti(VT_JMP
, gvtst(1, 0));
4809 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4810 tcc_error("pointer not accepted for unary plus");
4811 /* In order to force cast, we add zero, except for floating point
4812 where we really need an noop (otherwise -0.0 will be transformed
4814 if (!is_float(vtop
->type
.t
)) {
4825 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
4826 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
4827 size
= type_size(&type
, &align
);
4828 if (s
&& s
->a
.aligned
)
4829 align
= 1 << (s
->a
.aligned
- 1);
4830 if (t
== TOK_SIZEOF
) {
4831 if (!(type
.t
& VT_VLA
)) {
4833 tcc_error("sizeof applied to an incomplete type");
4836 vla_runtime_type_size(&type
, &align
);
4841 vtop
->type
.t
|= VT_UNSIGNED
;
4844 case TOK_builtin_expect
:
4845 /* __builtin_expect is a no-op for now */
4846 parse_builtin_params(0, "ee");
4849 case TOK_builtin_types_compatible_p
:
4850 parse_builtin_params(0, "tt");
4851 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4852 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4853 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4857 case TOK_builtin_choose_expr
:
4884 case TOK_builtin_constant_p
:
4885 parse_builtin_params(1, "e");
4886 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4890 case TOK_builtin_frame_address
:
4891 case TOK_builtin_return_address
:
4897 if (tok
!= TOK_CINT
) {
4898 tcc_error("%s only takes positive integers",
4899 tok1
== TOK_builtin_return_address
?
4900 "__builtin_return_address" :
4901 "__builtin_frame_address");
4903 level
= (uint32_t)tokc
.i
;
4908 vset(&type
, VT_LOCAL
, 0); /* local frame */
4910 mk_pointer(&vtop
->type
);
4911 indir(); /* -> parent frame */
4913 if (tok1
== TOK_builtin_return_address
) {
4914 // assume return address is just above frame pointer on stack
4917 mk_pointer(&vtop
->type
);
4922 #ifdef TCC_TARGET_X86_64
4923 #ifdef TCC_TARGET_PE
4924 case TOK_builtin_va_start
:
4925 parse_builtin_params(0, "ee");
4926 r
= vtop
->r
& VT_VALMASK
;
4930 tcc_error("__builtin_va_start expects a local variable");
4932 vtop
->type
= char_pointer_type
;
4937 case TOK_builtin_va_arg_types
:
4938 parse_builtin_params(0, "t");
4939 vpushi(classify_x86_64_va_arg(&vtop
->type
));
4946 #ifdef TCC_TARGET_ARM64
4947 case TOK___va_start
: {
4948 parse_builtin_params(0, "ee");
4952 vtop
->type
.t
= VT_VOID
;
4955 case TOK___va_arg
: {
4956 parse_builtin_params(0, "et");
4964 case TOK___arm64_clear_cache
: {
4965 parse_builtin_params(0, "ee");
4968 vtop
->type
.t
= VT_VOID
;
4972 /* pre operations */
4983 t
= vtop
->type
.t
& VT_BTYPE
;
4985 /* In IEEE negate(x) isn't subtract(0,x), but rather
4989 vtop
->c
.f
= -1.0 * 0.0;
4990 else if (t
== VT_DOUBLE
)
4991 vtop
->c
.d
= -1.0 * 0.0;
4993 vtop
->c
.ld
= -1.0 * 0.0;
5001 goto tok_identifier
;
5003 /* allow to take the address of a label */
5004 if (tok
< TOK_UIDENT
)
5005 expect("label identifier");
5006 s
= label_find(tok
);
5008 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5010 if (s
->r
== LABEL_DECLARED
)
5011 s
->r
= LABEL_FORWARD
;
5014 s
->type
.t
= VT_VOID
;
5015 mk_pointer(&s
->type
);
5016 s
->type
.t
|= VT_STATIC
;
5018 vpushsym(&s
->type
, s
);
5024 CType controlling_type
;
5025 int has_default
= 0;
5028 TokenString
*str
= NULL
;
5032 expr_type(&controlling_type
, expr_eq
);
5033 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5034 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5035 mk_pointer(&controlling_type
);
5039 if (tok
== TOK_DEFAULT
) {
5041 tcc_error("too many 'default'");
5047 AttributeDef ad_tmp
;
5050 parse_btype(&cur_type
, &ad_tmp
);
5051 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5052 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5054 tcc_error("type match twice");
5064 skip_or_save_block(&str
);
5066 skip_or_save_block(NULL
);
5073 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5074 tcc_error("type '%s' does not match any association", buf
);
5076 begin_macro(str
, 1);
5085 // special qnan , snan and infinity values
5090 vtop
->type
.t
= VT_FLOAT
;
5095 goto special_math_val
;
5098 goto special_math_val
;
5105 expect("identifier");
5107 if (!s
|| IS_ASM_SYM(s
)) {
5108 const char *name
= get_tok_str(t
, NULL
);
5110 tcc_error("'%s' undeclared", name
);
5111 /* for simple function calls, we tolerate undeclared
5112 external reference to int() function */
5113 if (tcc_state
->warn_implicit_function_declaration
5114 #ifdef TCC_TARGET_PE
5115 /* people must be warned about using undeclared WINAPI functions
5116 (which usually start with uppercase letter) */
5117 || (name
[0] >= 'A' && name
[0] <= 'Z')
5120 tcc_warning("implicit declaration of function '%s'", name
);
5121 s
= external_global_sym(t
, &func_old_type
, 0);
5125 /* A symbol that has a register is a local register variable,
5126 which starts out as VT_LOCAL value. */
5127 if ((r
& VT_VALMASK
) < VT_CONST
)
5128 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5130 vset(&s
->type
, r
, s
->c
);
5131 /* Point to s as backpointer (even without r&VT_SYM).
5132 Will be used by at least the x86 inline asm parser for
5138 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5139 vtop
->c
.i
= s
->enum_val
;
5144 /* post operations */
5146 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5149 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5152 if (tok
== TOK_ARROW
)
5154 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5157 /* expect pointer on structure */
5158 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5159 expect("struct or union");
5160 if (tok
== TOK_CDOUBLE
)
5161 expect("field name");
5163 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5164 expect("field name");
5165 s
= find_field(&vtop
->type
, tok
);
5167 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5168 /* add field offset to pointer */
5169 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5172 /* change type to field type, and set to lvalue */
5173 vtop
->type
= s
->type
;
5174 vtop
->type
.t
|= qualifiers
;
5175 /* an array is never an lvalue */
5176 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5177 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5178 #ifdef CONFIG_TCC_BCHECK
5179 /* if bound checking, the referenced pointer must be checked */
5180 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5181 vtop
->r
|= VT_MUSTBOUND
;
5185 } else if (tok
== '[') {
5191 } else if (tok
== '(') {
5194 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5197 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5198 /* pointer test (no array accepted) */
5199 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5200 vtop
->type
= *pointed_type(&vtop
->type
);
5201 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5205 expect("function pointer");
5208 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5210 /* get return type */
5213 sa
= s
->next
; /* first parameter */
5214 nb_args
= regsize
= 0;
5216 /* compute first implicit argument if a structure is returned */
5217 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5218 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5219 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5220 &ret_align
, ®size
);
5222 /* get some space for the returned structure */
5223 size
= type_size(&s
->type
, &align
);
5224 #ifdef TCC_TARGET_ARM64
5225 /* On arm64, a small struct is return in registers.
5226 It is much easier to write it to memory if we know
5227 that we are allowed to write some extra bytes, so
5228 round the allocated space up to a power of 2: */
5230 while (size
& (size
- 1))
5231 size
= (size
| (size
- 1)) + 1;
5233 loc
= (loc
- size
) & -align
;
5235 ret
.r
= VT_LOCAL
| VT_LVAL
;
5236 /* pass it as 'int' to avoid structure arg passing
5238 vseti(VT_LOCAL
, loc
);
5248 /* return in register */
5249 if (is_float(ret
.type
.t
)) {
5250 ret
.r
= reg_fret(ret
.type
.t
);
5251 #ifdef TCC_TARGET_X86_64
5252 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5256 #ifndef TCC_TARGET_ARM64
5257 #ifdef TCC_TARGET_X86_64
5258 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5260 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5271 gfunc_param_typed(s
, sa
);
5281 tcc_error("too few arguments to function");
5283 gfunc_call(nb_args
);
5286 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5287 vsetc(&ret
.type
, r
, &ret
.c
);
5288 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5291 /* handle packed struct return */
5292 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5295 size
= type_size(&s
->type
, &align
);
5296 /* We're writing whole regs often, make sure there's enough
5297 space. Assume register size is power of 2. */
5298 if (regsize
> align
)
5300 loc
= (loc
- size
) & -align
;
5304 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5308 if (--ret_nregs
== 0)
5312 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5320 ST_FUNC
void expr_prod(void)
5325 while (tok
== '*' || tok
== '/' || tok
== '%') {
5333 ST_FUNC
void expr_sum(void)
5338 while (tok
== '+' || tok
== '-') {
5346 static void expr_shift(void)
5351 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5359 static void expr_cmp(void)
5364 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5365 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5373 static void expr_cmpeq(void)
5378 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5386 static void expr_and(void)
5389 while (tok
== '&') {
5396 static void expr_xor(void)
5399 while (tok
== '^') {
5406 static void expr_or(void)
5409 while (tok
== '|') {
5416 static void expr_land(void)
5419 if (tok
== TOK_LAND
) {
5422 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5423 gen_cast_s(VT_BOOL
);
5428 while (tok
== TOK_LAND
) {
5444 if (tok
!= TOK_LAND
) {
5457 static void expr_lor(void)
5460 if (tok
== TOK_LOR
) {
5463 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5464 gen_cast_s(VT_BOOL
);
5469 while (tok
== TOK_LOR
) {
5485 if (tok
!= TOK_LOR
) {
5498 /* Assuming vtop is a value used in a conditional context
5499 (i.e. compared with zero) return 0 if it's false, 1 if
5500 true and -1 if it can't be statically determined. */
5501 static int condition_3way(void)
5504 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5505 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5507 gen_cast_s(VT_BOOL
);
5514 static void expr_cond(void)
5516 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5518 CType type
, type1
, type2
;
5523 c
= condition_3way();
5524 g
= (tok
== ':' && gnu_ext
);
5526 /* needed to avoid having different registers saved in
5528 if (is_float(vtop
->type
.t
)) {
5530 #ifdef TCC_TARGET_X86_64
5531 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5556 sv
= *vtop
; /* save value to handle it later */
5557 vtop
--; /* no vpop so that FP stack is not flushed */
5575 bt1
= t1
& VT_BTYPE
;
5577 bt2
= t2
& VT_BTYPE
;
5580 /* cast operands to correct type according to ISOC rules */
5581 if (is_float(bt1
) || is_float(bt2
)) {
5582 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5583 type
.t
= VT_LDOUBLE
;
5585 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5590 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5591 /* cast to biggest op */
5592 type
.t
= VT_LLONG
| VT_LONG
;
5593 if (bt1
== VT_LLONG
)
5595 if (bt2
== VT_LLONG
)
5597 /* convert to unsigned if it does not fit in a long long */
5598 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5599 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5600 type
.t
|= VT_UNSIGNED
;
5601 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5602 /* If one is a null ptr constant the result type
5604 if (is_null_pointer (vtop
))
5606 else if (is_null_pointer (&sv
))
5608 /* XXX: test pointer compatibility, C99 has more elaborate
5612 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
5613 /* XXX: test function pointer compatibility */
5614 type
= bt1
== VT_FUNC
? type1
: type2
;
5615 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5616 /* XXX: test structure compatibility */
5617 type
= bt1
== VT_STRUCT
? type1
: type2
;
5618 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5619 /* NOTE: as an extension, we accept void on only one side */
5622 /* integer operations */
5623 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5624 /* convert to unsigned if it does not fit in an integer */
5625 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5626 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5627 type
.t
|= VT_UNSIGNED
;
5629 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5630 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5631 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5634 /* now we convert second operand */
5638 mk_pointer(&vtop
->type
);
5640 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5645 if (is_float(type
.t
)) {
5647 #ifdef TCC_TARGET_X86_64
5648 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5652 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5653 /* for long longs, we use fixed registers to avoid having
5654 to handle a complicated move */
5665 /* this is horrible, but we must also convert first
5671 mk_pointer(&vtop
->type
);
5673 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5679 move_reg(r2
, r1
, type
.t
);
5689 static void expr_eq(void)
5695 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5696 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5697 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5712 ST_FUNC
void gexpr(void)
5723 /* parse a constant expression and return value in vtop. */
5724 static void expr_const1(void)
5733 /* parse an integer constant and return its value. */
5734 static inline int64_t expr_const64(void)
5738 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5739 expect("constant expression");
5745 /* parse an integer constant and return its value.
5746 Complain if it doesn't fit 32bit (signed or unsigned). */
5747 ST_FUNC
int expr_const(void)
5750 int64_t wc
= expr_const64();
5752 if (c
!= wc
&& (unsigned)c
!= wc
)
5753 tcc_error("constant exceeds 32 bit");
5757 /* return the label token if current token is a label, otherwise
5759 static int is_label(void)
5763 /* fast test first */
5764 if (tok
< TOK_UIDENT
)
5766 /* no need to save tokc because tok is an identifier */
5772 unget_tok(last_tok
);
5777 #ifndef TCC_TARGET_ARM64
5778 static void gfunc_return(CType
*func_type
)
5780 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5781 CType type
, ret_type
;
5782 int ret_align
, ret_nregs
, regsize
;
5783 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5784 &ret_align
, ®size
);
5785 if (0 == ret_nregs
) {
5786 /* if returning structure, must copy it to implicit
5787 first pointer arg location */
5790 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5793 /* copy structure value to pointer */
5796 /* returning structure packed into registers */
5797 int r
, size
, addr
, align
;
5798 size
= type_size(func_type
,&align
);
5799 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5800 (vtop
->c
.i
& (ret_align
-1)))
5801 && (align
& (ret_align
-1))) {
5802 loc
= (loc
- size
) & -ret_align
;
5805 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5809 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5811 vtop
->type
= ret_type
;
5812 if (is_float(ret_type
.t
))
5813 r
= rc_fret(ret_type
.t
);
5824 if (--ret_nregs
== 0)
5826 /* We assume that when a structure is returned in multiple
5827 registers, their classes are consecutive values of the
5830 vtop
->c
.i
+= regsize
;
5834 } else if (is_float(func_type
->t
)) {
5835 gv(rc_fret(func_type
->t
));
5839 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5843 static int case_cmp(const void *pa
, const void *pb
)
5845 int64_t a
= (*(struct case_t
**) pa
)->v1
;
5846 int64_t b
= (*(struct case_t
**) pb
)->v1
;
5847 return a
< b
? -1 : a
> b
;
5850 static void gcase(struct case_t
**base
, int len
, int *bsym
)
5854 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
5872 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
5874 gcase(base
, len
/2, bsym
);
5875 if (cur_switch
->def_sym
)
5876 gjmp_addr(cur_switch
->def_sym
);
5878 *bsym
= gjmp(*bsym
);
5882 base
+= e
; len
-= e
;
5892 if (p
->v1
== p
->v2
) {
5894 gtst_addr(0, p
->sym
);
5904 gtst_addr(0, p
->sym
);
5910 static void block(int *bsym
, int *csym
, int is_expr
)
5912 int a
, b
, c
, d
, cond
;
5915 /* generate line number info */
5916 if (tcc_state
->do_debug
)
5917 tcc_debug_line(tcc_state
);
5920 /* default return value is (void) */
5922 vtop
->type
.t
= VT_VOID
;
5925 if (tok
== TOK_IF
) {
5927 int saved_nocode_wanted
= nocode_wanted
;
5932 cond
= condition_3way();
5938 nocode_wanted
|= 0x20000000;
5939 block(bsym
, csym
, 0);
5941 nocode_wanted
= saved_nocode_wanted
;
5943 if (c
== TOK_ELSE
) {
5948 nocode_wanted
|= 0x20000000;
5949 block(bsym
, csym
, 0);
5950 gsym(d
); /* patch else jmp */
5952 nocode_wanted
= saved_nocode_wanted
;
5955 } else if (tok
== TOK_WHILE
) {
5956 int saved_nocode_wanted
;
5957 nocode_wanted
&= ~0x20000000;
5967 saved_nocode_wanted
= nocode_wanted
;
5969 nocode_wanted
= saved_nocode_wanted
;
5974 } else if (tok
== '{') {
5976 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
5979 /* record local declaration stack position */
5981 llabel
= local_label_stack
;
5984 /* handle local labels declarations */
5985 if (tok
== TOK_LABEL
) {
5988 if (tok
< TOK_UIDENT
)
5989 expect("label identifier");
5990 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6000 while (tok
!= '}') {
6001 if ((a
= is_label()))
6008 block(bsym
, csym
, is_expr
);
6011 /* pop locally defined labels */
6012 label_pop(&local_label_stack
, llabel
, is_expr
);
6013 /* pop locally defined symbols */
6015 /* In the is_expr case (a statement expression is finished here),
6016 vtop might refer to symbols on the local_stack. Either via the
6017 type or via vtop->sym. We can't pop those nor any that in turn
6018 might be referred to. To make it easier we don't roll back
6019 any symbols in that case; some upper level call to block() will
6020 do that. We do have to remove such symbols from the lookup
6021 tables, though. sym_pop will do that. */
6022 sym_pop(&local_stack
, s
, is_expr
);
6024 /* Pop VLA frames and restore stack pointer if required */
6025 if (vlas_in_scope
> saved_vlas_in_scope
) {
6026 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
6029 vlas_in_scope
= saved_vlas_in_scope
;
6032 } else if (tok
== TOK_RETURN
) {
6036 gen_assign_cast(&func_vt
);
6037 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6040 gfunc_return(&func_vt
);
6043 /* jump unless last stmt in top-level block */
6044 if (tok
!= '}' || local_scope
!= 1)
6046 nocode_wanted
|= 0x20000000;
6047 } else if (tok
== TOK_BREAK
) {
6050 tcc_error("cannot break");
6051 *bsym
= gjmp(*bsym
);
6054 nocode_wanted
|= 0x20000000;
6055 } else if (tok
== TOK_CONTINUE
) {
6058 tcc_error("cannot continue");
6059 vla_sp_restore_root();
6060 *csym
= gjmp(*csym
);
6063 } else if (tok
== TOK_FOR
) {
6065 int saved_nocode_wanted
;
6066 nocode_wanted
&= ~0x20000000;
6072 /* c99 for-loop init decl? */
6073 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6074 /* no, regular for-loop init expr */
6100 saved_nocode_wanted
= nocode_wanted
;
6102 nocode_wanted
= saved_nocode_wanted
;
6107 sym_pop(&local_stack
, s
, 0);
6110 if (tok
== TOK_DO
) {
6111 int saved_nocode_wanted
;
6112 nocode_wanted
&= ~0x20000000;
6118 saved_nocode_wanted
= nocode_wanted
;
6124 nocode_wanted
= saved_nocode_wanted
;
6128 nocode_wanted
= saved_nocode_wanted
;
6133 if (tok
== TOK_SWITCH
) {
6134 struct switch_t
*saved
, sw
;
6135 int saved_nocode_wanted
= nocode_wanted
;
6141 switchval
= *vtop
--;
6143 b
= gjmp(0); /* jump to first case */
6144 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6148 nocode_wanted
= saved_nocode_wanted
;
6149 a
= gjmp(a
); /* add implicit break */
6152 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6153 for (b
= 1; b
< sw
.n
; b
++)
6154 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6155 tcc_error("duplicate case value");
6156 /* Our switch table sorting is signed, so the compared
6157 value needs to be as well when it's 64bit. */
6158 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6159 switchval
.type
.t
&= ~VT_UNSIGNED
;
6161 gcase(sw
.p
, sw
.n
, &a
);
6164 gjmp_addr(sw
.def_sym
);
6165 dynarray_reset(&sw
.p
, &sw
.n
);
6170 if (tok
== TOK_CASE
) {
6171 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6174 nocode_wanted
&= ~0x20000000;
6176 cr
->v1
= cr
->v2
= expr_const64();
6177 if (gnu_ext
&& tok
== TOK_DOTS
) {
6179 cr
->v2
= expr_const64();
6180 if (cr
->v2
< cr
->v1
)
6181 tcc_warning("empty case range");
6184 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6187 goto block_after_label
;
6189 if (tok
== TOK_DEFAULT
) {
6194 if (cur_switch
->def_sym
)
6195 tcc_error("too many 'default'");
6196 cur_switch
->def_sym
= ind
;
6198 goto block_after_label
;
6200 if (tok
== TOK_GOTO
) {
6202 if (tok
== '*' && gnu_ext
) {
6206 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6209 } else if (tok
>= TOK_UIDENT
) {
6210 s
= label_find(tok
);
6211 /* put forward definition if needed */
6213 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6215 if (s
->r
== LABEL_DECLARED
)
6216 s
->r
= LABEL_FORWARD
;
6218 vla_sp_restore_root();
6219 if (s
->r
& LABEL_FORWARD
)
6220 s
->jnext
= gjmp(s
->jnext
);
6222 gjmp_addr(s
->jnext
);
6225 expect("label identifier");
6228 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
6237 if (s
->r
== LABEL_DEFINED
)
6238 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6240 s
->r
= LABEL_DEFINED
;
6242 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
6246 /* we accept this, but it is a mistake */
6248 nocode_wanted
&= ~0x20000000;
6250 tcc_warning("deprecated use of label at end of compound statement");
6254 block(bsym
, csym
, is_expr
);
6257 /* expression case */
6272 /* This skips over a stream of tokens containing balanced {} and ()
6273 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6274 with a '{'). If STR then allocates and stores the skipped tokens
6275 in *STR. This doesn't check if () and {} are nested correctly,
6276 i.e. "({)}" is accepted. */
6277 static void skip_or_save_block(TokenString
**str
)
6279 int braces
= tok
== '{';
6282 *str
= tok_str_alloc();
6284 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6286 if (tok
== TOK_EOF
) {
6287 if (str
|| level
> 0)
6288 tcc_error("unexpected end of file");
6293 tok_str_add_tok(*str
);
6296 if (t
== '{' || t
== '(') {
6298 } else if (t
== '}' || t
== ')') {
6300 if (level
== 0 && braces
&& t
== '}')
6305 tok_str_add(*str
, -1);
6306 tok_str_add(*str
, 0);
6310 #define EXPR_CONST 1
6313 static void parse_init_elem(int expr_type
)
6315 int saved_global_expr
;
6318 /* compound literals must be allocated globally in this case */
6319 saved_global_expr
= global_expr
;
6322 global_expr
= saved_global_expr
;
6323 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6324 (compound literals). */
6325 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6326 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6327 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6328 #ifdef TCC_TARGET_PE
6329 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6332 tcc_error("initializer element is not constant");
6340 /* put zeros for variable based init */
6341 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6344 /* nothing to do because globals are already set to zero */
6346 vpush_global_sym(&func_old_type
, TOK_memset
);
6348 #ifdef TCC_TARGET_ARM
6359 /* t is the array or struct type. c is the array or struct
6360 address. cur_field is the pointer to the current
6361 field, for arrays the 'c' member contains the current start
6362 index. 'size_only' is true if only size info is needed (only used
6363 in arrays). al contains the already initialized length of the
6364 current container (starting at c). This returns the new length of that. */
6365 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6366 Sym
**cur_field
, int size_only
, int al
)
6369 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6370 unsigned long corig
= c
;
6374 if (gnu_ext
&& (l
= is_label()) != 0)
6376 /* NOTE: we only support ranges for last designator */
6377 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6379 if (!(type
->t
& VT_ARRAY
))
6380 expect("array type");
6382 index
= index_last
= expr_const();
6383 if (tok
== TOK_DOTS
&& gnu_ext
) {
6385 index_last
= expr_const();
6389 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6391 tcc_error("invalid index");
6393 (*cur_field
)->c
= index_last
;
6394 type
= pointed_type(type
);
6395 elem_size
= type_size(type
, &align
);
6396 c
+= index
* elem_size
;
6397 nb_elems
= index_last
- index
+ 1;
6403 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6404 expect("struct/union type");
6405 f
= find_field(type
, l
);
6418 } else if (!gnu_ext
) {
6422 if (type
->t
& VT_ARRAY
) {
6423 index
= (*cur_field
)->c
;
6424 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6425 tcc_error("index too large");
6426 type
= pointed_type(type
);
6427 c
+= index
* type_size(type
, &align
);
6430 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6431 *cur_field
= f
= f
->next
;
6433 tcc_error("too many field init");
6438 /* must put zero in holes (note that doing it that way
6439 ensures that it even works with designators) */
6440 if (!size_only
&& c
- corig
> al
)
6441 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6442 decl_initializer(type
, sec
, c
, 0, size_only
);
6444 /* XXX: make it more general */
6445 if (!size_only
&& nb_elems
> 1) {
6446 unsigned long c_end
;
6451 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6452 for (i
= 1; i
< nb_elems
; i
++) {
6453 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6458 } else if (!NODATA_WANTED
) {
6459 c_end
= c
+ nb_elems
* elem_size
;
6460 if (c_end
> sec
->data_allocated
)
6461 section_realloc(sec
, c_end
);
6462 src
= sec
->data
+ c
;
6464 for(i
= 1; i
< nb_elems
; i
++) {
6466 memcpy(dst
, src
, elem_size
);
6470 c
+= nb_elems
* type_size(type
, &align
);
6476 /* store a value or an expression directly in global data or in local array */
6477 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6484 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6488 /* XXX: not portable */
6489 /* XXX: generate error if incorrect relocation */
6490 gen_assign_cast(&dtype
);
6491 bt
= type
->t
& VT_BTYPE
;
6493 if ((vtop
->r
& VT_SYM
)
6496 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6497 || (type
->t
& VT_BITFIELD
))
6498 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6500 tcc_error("initializer element is not computable at load time");
6502 if (NODATA_WANTED
) {
6507 size
= type_size(type
, &align
);
6508 section_reserve(sec
, c
+ size
);
6509 ptr
= sec
->data
+ c
;
6511 /* XXX: make code faster ? */
6512 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6513 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6514 /* XXX This rejects compound literals like
6515 '(void *){ptr}'. The problem is that '&sym' is
6516 represented the same way, which would be ruled out
6517 by the SYM_FIRST_ANOM check above, but also '"string"'
6518 in 'char *p = "string"' is represented the same
6519 with the type being VT_PTR and the symbol being an
6520 anonymous one. That is, there's no difference in vtop
6521 between '(void *){x}' and '&(void *){x}'. Ignore
6522 pointer typed entities here. Hopefully no real code
6523 will every use compound literals with scalar type. */
6524 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6525 /* These come from compound literals, memcpy stuff over. */
6529 esym
= elfsym(vtop
->sym
);
6530 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6531 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6533 /* We need to copy over all memory contents, and that
6534 includes relocations. Use the fact that relocs are
6535 created it order, so look from the end of relocs
6536 until we hit one before the copied region. */
6537 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6538 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6539 while (num_relocs
--) {
6541 if (rel
->r_offset
>= esym
->st_value
+ size
)
6543 if (rel
->r_offset
< esym
->st_value
)
6545 /* Note: if the same fields are initialized multiple
6546 times (possible with designators) then we possibly
6547 add multiple relocations for the same offset here.
6548 That would lead to wrong code, the last reloc needs
6549 to win. We clean this up later after the whole
6550 initializer is parsed. */
6551 put_elf_reloca(symtab_section
, sec
,
6552 c
+ rel
->r_offset
- esym
->st_value
,
6553 ELFW(R_TYPE
)(rel
->r_info
),
6554 ELFW(R_SYM
)(rel
->r_info
),
6564 if (type
->t
& VT_BITFIELD
) {
6565 int bit_pos
, bit_size
, bits
, n
;
6566 unsigned char *p
, v
, m
;
6567 bit_pos
= BIT_POS(vtop
->type
.t
);
6568 bit_size
= BIT_SIZE(vtop
->type
.t
);
6569 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6570 bit_pos
&= 7, bits
= 0;
6575 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6576 m
= ((1 << n
) - 1) << bit_pos
;
6577 *p
= (*p
& ~m
) | (v
& m
);
6578 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6582 /* XXX: when cross-compiling we assume that each type has the
6583 same representation on host and target, which is likely to
6584 be wrong in the case of long double */
6586 vtop
->c
.i
= vtop
->c
.i
!= 0;
6588 *(char *)ptr
|= vtop
->c
.i
;
6591 *(short *)ptr
|= vtop
->c
.i
;
6594 *(float*)ptr
= vtop
->c
.f
;
6597 *(double *)ptr
= vtop
->c
.d
;
6600 #if defined TCC_IS_NATIVE_387
6601 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6602 memcpy(ptr
, &vtop
->c
.ld
, 10);
6604 else if (sizeof (long double) == sizeof (double))
6605 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
6607 else if (vtop
->c
.ld
== 0.0)
6611 if (sizeof(long double) == LDOUBLE_SIZE
)
6612 *(long double*)ptr
= vtop
->c
.ld
;
6613 else if (sizeof(double) == LDOUBLE_SIZE
)
6614 *(double *)ptr
= (double)vtop
->c
.ld
;
6616 tcc_error("can't cross compile long double constants");
6620 *(long long *)ptr
|= vtop
->c
.i
;
6627 addr_t val
= vtop
->c
.i
;
6629 if (vtop
->r
& VT_SYM
)
6630 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6632 *(addr_t
*)ptr
|= val
;
6634 if (vtop
->r
& VT_SYM
)
6635 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6636 *(addr_t
*)ptr
|= val
;
6642 int val
= vtop
->c
.i
;
6644 if (vtop
->r
& VT_SYM
)
6645 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6649 if (vtop
->r
& VT_SYM
)
6650 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6659 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6666 /* 't' contains the type and storage info. 'c' is the offset of the
6667 object in section 'sec'. If 'sec' is NULL, it means stack based
6668 allocation. 'first' is true if array '{' must be read (multi
6669 dimension implicit array init handling). 'size_only' is true if
6670 size only evaluation is wanted (only for arrays). */
6671 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6672 int first
, int size_only
)
6674 int len
, n
, no_oblock
, nb
, i
;
6681 /* If we currently are at an '}' or ',' we have read an initializer
6682 element in one of our callers, and not yet consumed it. */
6683 have_elem
= tok
== '}' || tok
== ',';
6684 if (!have_elem
&& tok
!= '{' &&
6685 /* In case of strings we have special handling for arrays, so
6686 don't consume them as initializer value (which would commit them
6687 to some anonymous symbol). */
6688 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6690 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6695 !(type
->t
& VT_ARRAY
) &&
6696 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6697 The source type might have VT_CONSTANT set, which is
6698 of course assignable to non-const elements. */
6699 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6700 init_putv(type
, sec
, c
);
6701 } else if (type
->t
& VT_ARRAY
) {
6704 t1
= pointed_type(type
);
6705 size1
= type_size(t1
, &align1
);
6708 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6711 tcc_error("character array initializer must be a literal,"
6712 " optionally enclosed in braces");
6717 /* only parse strings here if correct type (otherwise: handle
6718 them as ((w)char *) expressions */
6719 if ((tok
== TOK_LSTR
&&
6720 #ifdef TCC_TARGET_PE
6721 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6723 (t1
->t
& VT_BTYPE
) == VT_INT
6725 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6727 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6730 /* compute maximum number of chars wanted */
6732 cstr_len
= tokc
.str
.size
;
6734 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6737 if (n
>= 0 && nb
> (n
- len
))
6741 tcc_warning("initializer-string for array is too long");
6742 /* in order to go faster for common case (char
6743 string in global variable, we handle it
6745 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6747 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
6751 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6753 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6755 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
6762 /* only add trailing zero if enough storage (no
6763 warning in this case since it is standard) */
6764 if (n
< 0 || len
< n
) {
6767 init_putv(t1
, sec
, c
+ (len
* size1
));
6778 while (tok
!= '}' || have_elem
) {
6779 len
= decl_designator(type
, sec
, c
, &f
, size_only
, len
);
6781 if (type
->t
& VT_ARRAY
) {
6783 /* special test for multi dimensional arrays (may not
6784 be strictly correct if designators are used at the
6786 if (no_oblock
&& len
>= n
*size1
)
6789 if (s
->type
.t
== VT_UNION
)
6793 if (no_oblock
&& f
== NULL
)
6802 /* put zeros at the end */
6803 if (!size_only
&& len
< n
*size1
)
6804 init_putz(sec
, c
+ len
, n
*size1
- len
);
6807 /* patch type size if needed, which happens only for array types */
6809 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
6810 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6813 if (first
|| tok
== '{') {
6821 } else if (tok
== '{') {
6823 decl_initializer(type
, sec
, c
, first
, size_only
);
6825 } else if (size_only
) {
6826 /* If we supported only ISO C we wouldn't have to accept calling
6827 this on anything than an array size_only==1 (and even then
6828 only on the outermost level, so no recursion would be needed),
6829 because initializing a flex array member isn't supported.
6830 But GNU C supports it, so we need to recurse even into
6831 subfields of structs and arrays when size_only is set. */
6832 /* just skip expression */
6833 skip_or_save_block(NULL
);
6836 /* This should happen only when we haven't parsed
6837 the init element above for fear of committing a
6838 string constant to memory too early. */
6839 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
6840 expect("string constant");
6841 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6843 init_putv(type
, sec
, c
);
6847 /* parse an initializer for type 't' if 'has_init' is non zero, and
6848 allocate space in local or global data space ('r' is either
6849 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6850 variable 'v' of scope 'scope' is declared before initializers
6851 are parsed. If 'v' is zero, then a reference to the new object
6852 is put in the value stack. If 'has_init' is 2, a special parsing
6853 is done to handle string constants. */
6854 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
6855 int has_init
, int v
, int scope
)
6857 int size
, align
, addr
;
6858 TokenString
*init_str
= NULL
;
6861 Sym
*flexible_array
;
6863 int saved_nocode_wanted
= nocode_wanted
;
6864 #ifdef CONFIG_TCC_BCHECK
6868 /* Always allocate static or global variables */
6869 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
6870 nocode_wanted
|= 0x80000000;
6872 #ifdef CONFIG_TCC_BCHECK
6873 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
6876 flexible_array
= NULL
;
6877 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6878 Sym
*field
= type
->ref
->next
;
6881 field
= field
->next
;
6882 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
6883 flexible_array
= field
;
6887 size
= type_size(type
, &align
);
6888 /* If unknown size, we must evaluate it before
6889 evaluating initializers because
6890 initializers can generate global data too
6891 (e.g. string pointers or ISOC99 compound
6892 literals). It also simplifies local
6893 initializers handling */
6894 if (size
< 0 || (flexible_array
&& has_init
)) {
6896 tcc_error("unknown type size");
6897 /* get all init string */
6898 if (has_init
== 2) {
6899 init_str
= tok_str_alloc();
6900 /* only get strings */
6901 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6902 tok_str_add_tok(init_str
);
6905 tok_str_add(init_str
, -1);
6906 tok_str_add(init_str
, 0);
6908 skip_or_save_block(&init_str
);
6913 begin_macro(init_str
, 1);
6915 decl_initializer(type
, NULL
, 0, 1, 1);
6916 /* prepare second initializer parsing */
6917 macro_ptr
= init_str
->str
;
6920 /* if still unknown size, error */
6921 size
= type_size(type
, &align
);
6923 tcc_error("unknown type size");
6925 /* If there's a flex member and it was used in the initializer
6927 if (flexible_array
&&
6928 flexible_array
->type
.ref
->c
> 0)
6929 size
+= flexible_array
->type
.ref
->c
6930 * pointed_size(&flexible_array
->type
);
6931 /* take into account specified alignment if bigger */
6932 if (ad
->a
.aligned
) {
6933 int speca
= 1 << (ad
->a
.aligned
- 1);
6936 } else if (ad
->a
.packed
) {
6940 if (!v
&& NODATA_WANTED
)
6941 size
= 0, align
= 1;
6943 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
6945 #ifdef CONFIG_TCC_BCHECK
6946 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
6950 loc
= (loc
- size
) & -align
;
6952 #ifdef CONFIG_TCC_BCHECK
6953 /* handles bounds */
6954 /* XXX: currently, since we do only one pass, we cannot track
6955 '&' operators, so we add only arrays */
6956 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
6958 /* add padding between regions */
6960 /* then add local bound info */
6961 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
6962 bounds_ptr
[0] = addr
;
6963 bounds_ptr
[1] = size
;
6967 /* local variable */
6968 #ifdef CONFIG_TCC_ASM
6969 if (ad
->asm_label
) {
6970 int reg
= asm_parse_regvar(ad
->asm_label
);
6972 r
= (r
& ~VT_VALMASK
) | reg
;
6975 sym
= sym_push(v
, type
, r
, addr
);
6978 /* push local reference */
6979 vset(type
, r
, addr
);
6982 if (v
&& scope
== VT_CONST
) {
6983 /* see if the symbol was already defined */
6986 patch_storage(sym
, ad
, type
);
6987 /* we accept several definitions of the same global variable. */
6988 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
6993 /* allocate symbol in corresponding section */
6998 else if (tcc_state
->nocommon
)
7003 addr
= section_add(sec
, size
, align
);
7004 #ifdef CONFIG_TCC_BCHECK
7005 /* add padding if bound check */
7007 section_add(sec
, 1, 1);
7010 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7011 sec
= common_section
;
7016 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7017 patch_storage(sym
, ad
, NULL
);
7019 /* Local statics have a scope until now (for
7020 warnings), remove it here. */
7022 /* update symbol definition */
7023 put_extern_sym(sym
, sec
, addr
, size
);
7025 /* push global reference */
7026 sym
= get_sym_ref(type
, sec
, addr
, size
);
7027 vpushsym(type
, sym
);
7031 #ifdef CONFIG_TCC_BCHECK
7032 /* handles bounds now because the symbol must be defined
7033 before for the relocation */
7037 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7038 /* then add global bound info */
7039 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7040 bounds_ptr
[0] = 0; /* relocated */
7041 bounds_ptr
[1] = size
;
7046 if (type
->t
& VT_VLA
) {
7052 /* save current stack pointer */
7053 if (vlas_in_scope
== 0) {
7054 if (vla_sp_root_loc
== -1)
7055 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
7056 gen_vla_sp_save(vla_sp_root_loc
);
7059 vla_runtime_type_size(type
, &a
);
7060 gen_vla_alloc(type
, a
);
7061 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7062 /* on _WIN64, because of the function args scratch area, the
7063 result of alloca differs from RSP and is returned in RAX. */
7064 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7066 gen_vla_sp_save(addr
);
7070 } else if (has_init
) {
7071 size_t oldreloc_offset
= 0;
7072 if (sec
&& sec
->reloc
)
7073 oldreloc_offset
= sec
->reloc
->data_offset
;
7074 decl_initializer(type
, sec
, addr
, 1, 0);
7075 if (sec
&& sec
->reloc
)
7076 squeeze_multi_relocs(sec
, oldreloc_offset
);
7077 /* patch flexible array member size back to -1, */
7078 /* for possible subsequent similar declarations */
7080 flexible_array
->type
.ref
->c
= -1;
7084 /* restore parse state if needed */
7090 nocode_wanted
= saved_nocode_wanted
;
7093 /* parse a function defined by symbol 'sym' and generate its code in
7094 'cur_text_section' */
7095 static void gen_function(Sym
*sym
)
7098 ind
= cur_text_section
->data_offset
;
7099 if (sym
->a
.aligned
) {
7100 size_t newoff
= section_add(cur_text_section
, 0,
7101 1 << (sym
->a
.aligned
- 1));
7102 gen_fill_nops(newoff
- ind
);
7104 /* NOTE: we patch the symbol size later */
7105 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7106 funcname
= get_tok_str(sym
->v
, NULL
);
7108 /* Initialize VLA state */
7110 vla_sp_root_loc
= -1;
7111 /* put debug symbol */
7112 tcc_debug_funcstart(tcc_state
, sym
);
7113 /* push a dummy symbol to enable local sym storage */
7114 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7115 local_scope
= 1; /* for function parameters */
7116 gfunc_prolog(&sym
->type
);
7119 block(NULL
, NULL
, 0);
7123 cur_text_section
->data_offset
= ind
;
7124 label_pop(&global_label_stack
, NULL
, 0);
7125 /* reset local stack */
7127 sym_pop(&local_stack
, NULL
, 0);
7128 /* end of function */
7129 /* patch symbol size */
7130 elfsym(sym
)->st_size
= ind
- func_ind
;
7131 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7132 /* It's better to crash than to generate wrong code */
7133 cur_text_section
= NULL
;
7134 funcname
= ""; /* for safety */
7135 func_vt
.t
= VT_VOID
; /* for safety */
7136 func_var
= 0; /* for safety */
7137 ind
= 0; /* for safety */
7138 nocode_wanted
= 0x80000000;
7142 static void gen_inline_functions(TCCState
*s
)
7145 int inline_generated
, i
, ln
;
7146 struct InlineFunc
*fn
;
7148 ln
= file
->line_num
;
7149 /* iterate while inline function are referenced */
7151 inline_generated
= 0;
7152 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7153 fn
= s
->inline_fns
[i
];
7155 if (sym
&& sym
->c
) {
7156 /* the function was used: generate its code and
7157 convert it to a normal function */
7160 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7161 sym
->type
.t
&= ~VT_INLINE
;
7163 begin_macro(fn
->func_str
, 1);
7165 cur_text_section
= text_section
;
7169 inline_generated
= 1;
7172 } while (inline_generated
);
7173 file
->line_num
= ln
;
7176 ST_FUNC
void free_inline_functions(TCCState
*s
)
7179 /* free tokens of unused inline functions */
7180 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7181 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7183 tok_str_free(fn
->func_str
);
7185 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7188 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7189 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7190 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7198 if (!parse_btype(&btype
, &ad
)) {
7199 if (is_for_loop_init
)
7201 /* skip redundant ';' if not in old parameter decl scope */
7202 if (tok
== ';' && l
!= VT_CMP
) {
7208 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7209 /* global asm block */
7213 if (tok
>= TOK_UIDENT
) {
7214 /* special test for old K&R protos without explicit int
7215 type. Only accepted when defining global data */
7219 expect("declaration");
7224 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7225 int v
= btype
.ref
->v
;
7226 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7227 tcc_warning("unnamed struct/union that defines no instances");
7231 if (IS_ENUM(btype
.t
)) {
7236 while (1) { /* iterate thru each declaration */
7238 /* If the base type itself was an array type of unspecified
7239 size (like in 'typedef int arr[]; arr x = {1};') then
7240 we will overwrite the unknown size by the real one for
7241 this decl. We need to unshare the ref symbol holding
7243 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7244 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7246 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
7250 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7251 printf("type = '%s'\n", buf
);
7254 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7255 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
7256 tcc_error("function without file scope cannot be static");
7258 /* if old style function prototype, we accept a
7261 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7262 decl0(VT_CMP
, 0, sym
);
7265 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7266 ad
.asm_label
= asm_label_instr();
7267 /* parse one last attribute list, after asm label */
7268 parse_attribute(&ad
);
7273 #ifdef TCC_TARGET_PE
7274 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7275 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7276 tcc_error("cannot have dll linkage with static or typedef");
7277 if (ad
.a
.dllimport
) {
7278 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7281 type
.t
|= VT_EXTERN
;
7287 tcc_error("cannot use local functions");
7288 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7289 expect("function definition");
7291 /* reject abstract declarators in function definition
7292 make old style params without decl have int type */
7294 while ((sym
= sym
->next
) != NULL
) {
7295 if (!(sym
->v
& ~SYM_FIELD
))
7296 expect("identifier");
7297 if (sym
->type
.t
== VT_VOID
)
7298 sym
->type
= int_type
;
7301 /* XXX: cannot do better now: convert extern line to static inline */
7302 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7303 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7305 /* put function symbol */
7306 sym
= external_global_sym(v
, &type
, 0);
7307 type
.t
&= ~VT_EXTERN
;
7308 patch_storage(sym
, &ad
, &type
);
7310 /* static inline functions are just recorded as a kind
7311 of macro. Their code will be emitted at the end of
7312 the compilation unit only if they are used */
7313 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7314 (VT_INLINE
| VT_STATIC
)) {
7315 struct InlineFunc
*fn
;
7316 const char *filename
;
7318 filename
= file
? file
->filename
: "";
7319 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7320 strcpy(fn
->filename
, filename
);
7322 skip_or_save_block(&fn
->func_str
);
7323 dynarray_add(&tcc_state
->inline_fns
,
7324 &tcc_state
->nb_inline_fns
, fn
);
7326 /* compute text section */
7327 cur_text_section
= ad
.section
;
7328 if (!cur_text_section
)
7329 cur_text_section
= text_section
;
7335 /* find parameter in function parameter list */
7336 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7337 if ((sym
->v
& ~SYM_FIELD
) == v
)
7339 tcc_error("declaration for parameter '%s' but no such parameter",
7340 get_tok_str(v
, NULL
));
7342 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7343 tcc_error("storage class specified for '%s'",
7344 get_tok_str(v
, NULL
));
7345 if (sym
->type
.t
!= VT_VOID
)
7346 tcc_error("redefinition of parameter '%s'",
7347 get_tok_str(v
, NULL
));
7348 convert_parameter_type(&type
);
7350 } else if (type
.t
& VT_TYPEDEF
) {
7351 /* save typedefed type */
7352 /* XXX: test storage specifiers ? */
7354 if (sym
&& sym
->sym_scope
== local_scope
) {
7355 if (!is_compatible_types(&sym
->type
, &type
)
7356 || !(sym
->type
.t
& VT_TYPEDEF
))
7357 tcc_error("incompatible redefinition of '%s'",
7358 get_tok_str(v
, NULL
));
7361 sym
= sym_push(v
, &type
, 0, 0);
7367 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7368 /* external function definition */
7369 /* specific case for func_call attribute */
7371 } else if (!(type
.t
& VT_ARRAY
)) {
7372 /* not lvalue if array */
7373 r
|= lvalue_type(type
.t
);
7375 has_init
= (tok
== '=');
7376 if (has_init
&& (type
.t
& VT_VLA
))
7377 tcc_error("variable length array cannot be initialized");
7378 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
7379 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7380 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7381 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7382 /* external variable or function */
7383 /* NOTE: as GCC, uninitialized global static
7384 arrays of null size are considered as
7386 type
.t
|= VT_EXTERN
;
7387 sym
= external_sym(v
, &type
, r
, &ad
);
7388 if (ad
.alias_target
) {
7391 alias_target
= sym_find(ad
.alias_target
);
7392 esym
= elfsym(alias_target
);
7394 tcc_error("unsupported forward __alias__ attribute");
7395 /* Local statics have a scope until now (for
7396 warnings), remove it here. */
7398 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7401 if (type
.t
& VT_STATIC
)
7407 else if (l
== VT_CONST
)
7408 /* uninitialized global variables may be overridden */
7409 type
.t
|= VT_EXTERN
;
7410 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7414 if (is_for_loop_init
)
7427 static void decl(int l
)
7432 /* ------------------------------------------------------------------------- */