2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
29 anon_sym: anonymous symbol index
31 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Sym
*sym_free_first
;
34 ST_DATA
void **sym_pools
;
35 ST_DATA
int nb_sym_pools
;
37 ST_DATA Sym
*global_stack
;
38 ST_DATA Sym
*local_stack
;
39 ST_DATA Sym
*define_stack
;
40 ST_DATA Sym
*global_label_stack
;
41 ST_DATA Sym
*local_label_stack
;
42 static int local_scope
;
44 static int section_sym
;
46 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
47 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
52 ST_DATA
int const_wanted
; /* true if constant wanted */
53 ST_DATA
int nocode_wanted
; /* no code generation wanted */
54 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
55 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
56 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing) */
57 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
58 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
60 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
61 ST_DATA
const char *funcname
;
64 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
66 ST_DATA
struct switch_t
{
70 } **p
; int n
; /* list of case ranges */
71 int def_sym
; /* default symbol */
72 } *cur_switch
; /* current switch */
74 /* ------------------------------------------------------------------------- */
76 static void gen_cast(CType
*type
);
77 static void gen_cast_s(int t
);
78 static inline CType
*pointed_type(CType
*type
);
79 static int is_compatible_types(CType
*type1
, CType
*type2
);
80 static int parse_btype(CType
*type
, AttributeDef
*ad
);
81 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
82 static void parse_expr_type(CType
*type
);
83 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
84 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
85 static void block(int *bsym
, int *csym
, int is_expr
);
86 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
87 static void decl(int l
);
88 static int decl0(int l
, int is_for_loop_init
, Sym
*);
89 static void expr_eq(void);
90 static void vla_runtime_type_size(CType
*type
, int *a
);
91 static void vla_sp_restore(void);
92 static void vla_sp_restore_root(void);
93 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
94 static inline int64_t expr_const64(void);
95 static void vpush64(int ty
, unsigned long long v
);
96 static void vpush(CType
*type
);
97 static int gvtst(int inv
, int t
);
98 static void gen_inline_functions(TCCState
*s
);
99 static void skip_or_save_block(TokenString
**str
);
100 static void gv_dup(void);
102 ST_INLN
int is_float(int t
)
106 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
109 /* we use our own 'finite' function to avoid potential problems with
110 non standard math libs */
111 /* XXX: endianness dependent */
112 ST_FUNC
int ieee_finite(double d
)
115 memcpy(p
, &d
, sizeof(double));
116 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
119 /* compiling intel long double natively */
120 #if (defined __i386__ || defined __x86_64__) \
121 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
122 # define TCC_IS_NATIVE_387
125 ST_FUNC
void test_lvalue(void)
127 if (!(vtop
->r
& VT_LVAL
))
131 ST_FUNC
void check_vstack(void)
134 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
137 /* ------------------------------------------------------------------------- */
138 /* vstack debugging aid */
141 void pv (const char *lbl
, int a
, int b
)
144 for (i
= a
; i
< a
+ b
; ++i
) {
145 SValue
*p
= &vtop
[-i
];
146 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
147 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
152 /* ------------------------------------------------------------------------- */
153 /* start of translation unit info */
154 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
159 /* file info: full path + filename */
160 section_sym
= put_elf_sym(symtab_section
, 0, 0,
161 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
162 text_section
->sh_num
, NULL
);
163 getcwd(buf
, sizeof(buf
));
165 normalize_slashes(buf
);
167 pstrcat(buf
, sizeof(buf
), "/");
168 put_stabs_r(buf
, N_SO
, 0, 0,
169 text_section
->data_offset
, text_section
, section_sym
);
170 put_stabs_r(file
->filename
, N_SO
, 0, 0,
171 text_section
->data_offset
, text_section
, section_sym
);
176 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
177 symbols can be safely used */
178 put_elf_sym(symtab_section
, 0, 0,
179 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
180 SHN_ABS
, file
->filename
);
183 /* put end of translation unit info */
184 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
188 put_stabs_r(NULL
, N_SO
, 0, 0,
189 text_section
->data_offset
, text_section
, section_sym
);
193 /* generate line number info */
194 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
198 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
199 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
201 last_line_num
= file
->line_num
;
205 /* put function symbol */
206 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
214 /* XXX: we put here a dummy type */
215 snprintf(buf
, sizeof(buf
), "%s:%c1",
216 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
217 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
218 cur_text_section
, sym
->c
);
219 /* //gr gdb wants a line at the function */
220 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
226 /* put function size */
227 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
231 put_stabn(N_FUN
, 0, 0, size
);
234 /* ------------------------------------------------------------------------- */
235 ST_FUNC
int tccgen_compile(TCCState
*s1
)
237 cur_text_section
= NULL
;
239 anon_sym
= SYM_FIRST_ANOM
;
242 nocode_wanted
= 0x80000000;
244 /* define some often used types */
246 char_pointer_type
.t
= VT_BYTE
;
247 mk_pointer(&char_pointer_type
);
249 size_type
.t
= VT_INT
| VT_UNSIGNED
;
250 ptrdiff_type
.t
= VT_INT
;
252 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
253 ptrdiff_type
.t
= VT_LLONG
;
255 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
256 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
258 func_old_type
.t
= VT_FUNC
;
259 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
260 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
261 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
265 #ifdef TCC_TARGET_ARM
270 printf("%s: **** new file\n", file
->filename
);
273 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
276 gen_inline_functions(s1
);
278 /* end of translation unit info */
283 /* ------------------------------------------------------------------------- */
284 ST_FUNC ElfSym
*elfsym(Sym
*s
)
288 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
291 /* apply storage attributes to Elf symbol */
292 ST_FUNC
void update_storage(Sym
*sym
)
295 int sym_bind
, old_sym_bind
;
301 if (sym
->a
.visibility
)
302 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
305 if (sym
->type
.t
& VT_STATIC
)
306 sym_bind
= STB_LOCAL
;
307 else if (sym
->a
.weak
)
310 sym_bind
= STB_GLOBAL
;
311 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
312 if (sym_bind
!= old_sym_bind
) {
313 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
317 if (sym
->a
.dllimport
)
318 esym
->st_other
|= ST_PE_IMPORT
;
319 if (sym
->a
.dllexport
)
320 esym
->st_other
|= ST_PE_EXPORT
;
324 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
325 get_tok_str(sym
->v
, NULL
),
326 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
334 /* ------------------------------------------------------------------------- */
335 /* update sym->c so that it points to an external symbol in section
336 'section' with value 'value' */
338 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
339 addr_t value
, unsigned long size
,
340 int can_add_underscore
)
342 int sym_type
, sym_bind
, info
, other
, t
;
346 #ifdef CONFIG_TCC_BCHECK
351 name
= get_tok_str(sym
->v
, NULL
);
352 #ifdef CONFIG_TCC_BCHECK
353 if (tcc_state
->do_bounds_check
) {
354 /* XXX: avoid doing that for statics ? */
355 /* if bound checking is activated, we change some function
356 names by adding the "__bound" prefix */
359 /* XXX: we rely only on malloc hooks */
372 strcpy(buf
, "__bound_");
380 if ((t
& VT_BTYPE
) == VT_FUNC
) {
382 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
383 sym_type
= STT_NOTYPE
;
385 sym_type
= STT_OBJECT
;
388 sym_bind
= STB_LOCAL
;
390 sym_bind
= STB_GLOBAL
;
393 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
394 Sym
*ref
= sym
->type
.ref
;
395 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
396 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
398 other
|= ST_PE_STDCALL
;
399 can_add_underscore
= 0;
403 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
405 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
409 name
= get_tok_str(sym
->asm_label
, NULL
);
410 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
411 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
414 esym
->st_value
= value
;
415 esym
->st_size
= size
;
416 esym
->st_shndx
= sh_num
;
421 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
422 addr_t value
, unsigned long size
)
424 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
425 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
428 /* add a new relocation entry to symbol 'sym' in section 's' */
429 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
434 if (nocode_wanted
&& s
== cur_text_section
)
439 put_extern_sym(sym
, NULL
, 0, 0);
443 /* now we can add ELF relocation info */
444 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
448 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
450 greloca(s
, sym
, offset
, type
, 0);
454 /* ------------------------------------------------------------------------- */
455 /* symbol allocator */
456 static Sym
*__sym_malloc(void)
458 Sym
*sym_pool
, *sym
, *last_sym
;
461 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
462 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
464 last_sym
= sym_free_first
;
466 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
467 sym
->next
= last_sym
;
471 sym_free_first
= last_sym
;
475 static inline Sym
*sym_malloc(void)
479 sym
= sym_free_first
;
481 sym
= __sym_malloc();
482 sym_free_first
= sym
->next
;
485 sym
= tcc_malloc(sizeof(Sym
));
490 ST_INLN
void sym_free(Sym
*sym
)
493 sym
->next
= sym_free_first
;
494 sym_free_first
= sym
;
500 /* push, without hashing */
501 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
506 memset(s
, 0, sizeof *s
);
516 /* find a symbol and return its associated structure. 's' is the top
517 of the symbol stack */
518 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
530 /* structure lookup */
531 ST_INLN Sym
*struct_find(int v
)
534 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
536 return table_ident
[v
]->sym_struct
;
539 /* find an identifier */
540 ST_INLN Sym
*sym_find(int v
)
543 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
545 return table_ident
[v
]->sym_identifier
;
548 /* push a given symbol on the symbol stack */
549 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
558 s
= sym_push2(ps
, v
, type
->t
, c
);
559 s
->type
.ref
= type
->ref
;
561 /* don't record fields or anonymous symbols */
563 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
564 /* record symbol in token array */
565 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
567 ps
= &ts
->sym_struct
;
569 ps
= &ts
->sym_identifier
;
572 s
->sym_scope
= local_scope
;
573 if (s
->prev_tok
&& s
->prev_tok
->sym_scope
== s
->sym_scope
)
574 tcc_error("redeclaration of '%s'",
575 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
580 /* push a global identifier */
581 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
584 s
= sym_push2(&global_stack
, v
, t
, c
);
585 /* don't record anonymous symbol */
586 if (v
< SYM_FIRST_ANOM
) {
587 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
588 /* modify the top most local identifier, so that
589 sym_identifier will point to 's' when popped */
590 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
591 ps
= &(*ps
)->prev_tok
;
598 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
599 pop them yet from the list, but do remove them from the token array. */
600 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
610 /* remove symbol in token array */
612 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
613 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
615 ps
= &ts
->sym_struct
;
617 ps
= &ts
->sym_identifier
;
628 /* ------------------------------------------------------------------------- */
630 static void vsetc(CType
*type
, int r
, CValue
*vc
)
634 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
635 tcc_error("memory full (vstack)");
636 /* cannot let cpu flags if other instruction are generated. Also
637 avoid leaving VT_JMP anywhere except on the top of the stack
638 because it would complicate the code generator.
640 Don't do this when nocode_wanted. vtop might come from
641 !nocode_wanted regions (see 88_codeopt.c) and transforming
642 it to a register without actually generating code is wrong
643 as their value might still be used for real. All values
644 we push under nocode_wanted will eventually be popped
645 again, so that the VT_CMP/VT_JMP value will be in vtop
646 when code is unsuppressed again.
648 Same logic below in vswap(); */
649 if (vtop
>= vstack
&& !nocode_wanted
) {
650 v
= vtop
->r
& VT_VALMASK
;
651 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
663 ST_FUNC
void vswap(void)
666 /* cannot vswap cpu flags. See comment at vsetc() above */
667 if (vtop
>= vstack
&& !nocode_wanted
) {
668 int v
= vtop
->r
& VT_VALMASK
;
669 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
677 /* pop stack value */
678 ST_FUNC
void vpop(void)
681 v
= vtop
->r
& VT_VALMASK
;
682 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
683 /* for x86, we need to pop the FP stack */
685 o(0xd8dd); /* fstp %st(0) */
688 if (v
== VT_JMP
|| v
== VT_JMPI
) {
689 /* need to put correct jump if && or || without test */
695 /* push constant of type "type" with useless value */
696 ST_FUNC
void vpush(CType
*type
)
698 vset(type
, VT_CONST
, 0);
701 /* push integer constant */
702 ST_FUNC
void vpushi(int v
)
706 vsetc(&int_type
, VT_CONST
, &cval
);
709 /* push a pointer sized constant */
710 static void vpushs(addr_t v
)
714 vsetc(&size_type
, VT_CONST
, &cval
);
717 /* push arbitrary 64bit constant */
718 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
725 vsetc(&ctype
, VT_CONST
, &cval
);
728 /* push long long constant */
729 static inline void vpushll(long long v
)
731 vpush64(VT_LLONG
, v
);
734 ST_FUNC
void vset(CType
*type
, int r
, int v
)
739 vsetc(type
, r
, &cval
);
742 static void vseti(int r
, int v
)
750 ST_FUNC
void vpushv(SValue
*v
)
752 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
753 tcc_error("memory full (vstack)");
758 static void vdup(void)
763 /* rotate n first stack elements to the bottom
764 I1 ... In -> I2 ... In I1 [top is right]
766 ST_FUNC
void vrotb(int n
)
777 /* rotate the n elements before entry e towards the top
778 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
780 ST_FUNC
void vrote(SValue
*e
, int n
)
786 for(i
= 0;i
< n
- 1; i
++)
791 /* rotate n first stack elements to the top
792 I1 ... In -> In I1 ... I(n-1) [top is right]
794 ST_FUNC
void vrott(int n
)
799 /* push a symbol value of TYPE */
800 static inline void vpushsym(CType
*type
, Sym
*sym
)
804 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
808 /* Return a static symbol pointing to a section */
809 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
815 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
816 sym
->type
.ref
= type
->ref
;
817 sym
->r
= VT_CONST
| VT_SYM
;
818 put_extern_sym(sym
, sec
, offset
, size
);
822 /* push a reference to a section offset by adding a dummy symbol */
823 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
825 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
828 /* define a new external reference to a symbol 'v' of type 'u' */
829 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
835 /* push forward reference */
836 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
837 s
->type
.ref
= type
->ref
;
838 s
->r
= r
| VT_CONST
| VT_SYM
;
839 } else if (IS_ASM_SYM(s
)) {
840 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
841 s
->type
.ref
= type
->ref
;
847 /* Merge some type attributes. */
848 static void patch_type(Sym
*sym
, CType
*type
)
850 if (!(type
->t
& VT_EXTERN
)) {
851 if (!(sym
->type
.t
& VT_EXTERN
))
852 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
853 sym
->type
.t
&= ~VT_EXTERN
;
856 if (IS_ASM_SYM(sym
)) {
857 /* stay static if both are static */
858 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
859 sym
->type
.ref
= type
->ref
;
862 if (!is_compatible_types(&sym
->type
, type
)) {
863 tcc_error("incompatible types for redefinition of '%s'",
864 get_tok_str(sym
->v
, NULL
));
866 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
867 int static_proto
= sym
->type
.t
& VT_STATIC
;
868 /* warn if static follows non-static function declaration */
869 if ((type
->t
& VT_STATIC
) && !static_proto
&& !(type
->t
& VT_INLINE
))
870 tcc_warning("static storage ignored for redefinition of '%s'",
871 get_tok_str(sym
->v
, NULL
));
873 if (0 == (type
->t
& VT_EXTERN
)) {
874 /* put complete type, use static from prototype */
875 sym
->type
.t
= (type
->t
& ~VT_STATIC
) | static_proto
;
876 if (type
->t
& VT_INLINE
)
877 sym
->type
.t
= type
->t
;
878 sym
->type
.ref
= type
->ref
;
882 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
883 /* set array size if it was omitted in extern declaration */
884 if (sym
->type
.ref
->c
< 0)
885 sym
->type
.ref
->c
= type
->ref
->c
;
886 else if (sym
->type
.ref
->c
!= type
->ref
->c
)
887 tcc_error("conflicting type for '%s'", get_tok_str(sym
->v
, NULL
));
889 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
890 tcc_warning("storage mismatch for redefinition of '%s'",
891 get_tok_str(sym
->v
, NULL
));
896 /* Merge some storage attributes. */
897 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
900 patch_type(sym
, type
);
903 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
904 tcc_error("incompatible dll linkage for redefinition of '%s'",
905 get_tok_str(sym
->v
, NULL
));
906 sym
->a
.dllexport
|= ad
->a
.dllexport
;
908 sym
->a
.weak
|= ad
->a
.weak
;
909 if (ad
->a
.visibility
) {
910 int vis
= sym
->a
.visibility
;
911 int vis2
= ad
->a
.visibility
;
912 if (vis
== STV_DEFAULT
)
914 else if (vis2
!= STV_DEFAULT
)
915 vis
= (vis
< vis2
) ? vis
: vis2
;
916 sym
->a
.visibility
= vis
;
919 sym
->a
.aligned
= ad
->a
.aligned
;
921 sym
->asm_label
= ad
->asm_label
;
925 /* define a new external reference to a symbol 'v' */
926 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
931 /* push forward reference */
932 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
933 s
->type
.t
|= VT_EXTERN
;
937 if (s
->type
.ref
== func_old_type
.ref
) {
938 s
->type
.ref
= type
->ref
;
939 s
->r
= r
| VT_CONST
| VT_SYM
;
940 s
->type
.t
|= VT_EXTERN
;
942 patch_storage(s
, ad
, type
);
947 /* push a reference to global symbol v */
948 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
950 vpushsym(type
, external_global_sym(v
, type
, 0));
953 /* save registers up to (vtop - n) stack entry */
954 ST_FUNC
void save_regs(int n
)
957 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
961 /* save r to the memory stack, and mark it as being free */
962 ST_FUNC
void save_reg(int r
)
964 save_reg_upstack(r
, 0);
967 /* save r to the memory stack, and mark it as being free,
968 if seen up to (vtop - n) stack entry */
969 ST_FUNC
void save_reg_upstack(int r
, int n
)
971 int l
, saved
, size
, align
;
975 if ((r
&= VT_VALMASK
) >= VT_CONST
)
980 /* modify all stack values */
983 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
984 if ((p
->r
& VT_VALMASK
) == r
||
985 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
986 /* must save value on stack if not already done */
988 /* NOTE: must reload 'r' because r might be equal to r2 */
989 r
= p
->r
& VT_VALMASK
;
990 /* store register in the stack */
992 if ((p
->r
& VT_LVAL
) ||
993 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
995 type
= &char_pointer_type
;
999 size
= type_size(type
, &align
);
1000 loc
= (loc
- size
) & -align
;
1001 sv
.type
.t
= type
->t
;
1002 sv
.r
= VT_LOCAL
| VT_LVAL
;
1005 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1006 /* x86 specific: need to pop fp register ST0 if saved */
1007 if (r
== TREG_ST0
) {
1008 o(0xd8dd); /* fstp %st(0) */
1012 /* special long long case */
1013 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
1021 /* mark that stack entry as being saved on the stack */
1022 if (p
->r
& VT_LVAL
) {
1023 /* also clear the bounded flag because the
1024 relocation address of the function was stored in
1026 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1028 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1036 #ifdef TCC_TARGET_ARM
1037 /* find a register of class 'rc2' with at most one reference on stack.
1038 * If none, call get_reg(rc) */
1039 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1044 for(r
=0;r
<NB_REGS
;r
++) {
1045 if (reg_classes
[r
] & rc2
) {
1048 for(p
= vstack
; p
<= vtop
; p
++) {
1049 if ((p
->r
& VT_VALMASK
) == r
||
1050 (p
->r2
& VT_VALMASK
) == r
)
1061 /* find a free register of class 'rc'. If none, save one register */
1062 ST_FUNC
int get_reg(int rc
)
1067 /* find a free register */
1068 for(r
=0;r
<NB_REGS
;r
++) {
1069 if (reg_classes
[r
] & rc
) {
1072 for(p
=vstack
;p
<=vtop
;p
++) {
1073 if ((p
->r
& VT_VALMASK
) == r
||
1074 (p
->r2
& VT_VALMASK
) == r
)
1082 /* no register left : free the first one on the stack (VERY
1083 IMPORTANT to start from the bottom to ensure that we don't
1084 spill registers used in gen_opi()) */
1085 for(p
=vstack
;p
<=vtop
;p
++) {
1086 /* look at second register (if long long) */
1087 r
= p
->r2
& VT_VALMASK
;
1088 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1090 r
= p
->r
& VT_VALMASK
;
1091 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1097 /* Should never come here */
1101 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1103 static void move_reg(int r
, int s
, int t
)
1117 /* get address of vtop (vtop MUST BE an lvalue) */
1118 ST_FUNC
void gaddrof(void)
1120 vtop
->r
&= ~VT_LVAL
;
1121 /* tricky: if saved lvalue, then we can go back to lvalue */
1122 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1123 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1128 #ifdef CONFIG_TCC_BCHECK
1129 /* generate lvalue bound code */
1130 static void gbound(void)
1135 vtop
->r
&= ~VT_MUSTBOUND
;
1136 /* if lvalue, then use checking code before dereferencing */
1137 if (vtop
->r
& VT_LVAL
) {
1138 /* if not VT_BOUNDED value, then make one */
1139 if (!(vtop
->r
& VT_BOUNDED
)) {
1140 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1141 /* must save type because we must set it to int to get pointer */
1143 vtop
->type
.t
= VT_PTR
;
1146 gen_bounded_ptr_add();
1147 vtop
->r
|= lval_type
;
1150 /* then check for dereferencing */
1151 gen_bounded_ptr_deref();
1156 static void incr_bf_adr(int o
)
1158 vtop
->type
= char_pointer_type
;
1162 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1163 | (VT_BYTE
|VT_UNSIGNED
);
1164 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1165 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1168 /* single-byte load mode for packed or otherwise unaligned bitfields */
1169 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1172 save_reg_upstack(vtop
->r
, 1);
1173 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1174 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1183 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1185 vpushi((1 << n
) - 1), gen_op('&');
1188 vpushi(bits
), gen_op(TOK_SHL
);
1191 bits
+= n
, bit_size
-= n
, o
= 1;
1194 if (!(type
->t
& VT_UNSIGNED
)) {
1195 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1196 vpushi(n
), gen_op(TOK_SHL
);
1197 vpushi(n
), gen_op(TOK_SAR
);
1201 /* single-byte store mode for packed or otherwise unaligned bitfields */
1202 static void store_packed_bf(int bit_pos
, int bit_size
)
1204 int bits
, n
, o
, m
, c
;
1206 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1208 save_reg_upstack(vtop
->r
, 1);
1209 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1211 incr_bf_adr(o
); // X B
1213 c
? vdup() : gv_dup(); // B V X
1216 vpushi(bits
), gen_op(TOK_SHR
);
1218 vpushi(bit_pos
), gen_op(TOK_SHL
);
1223 m
= ((1 << n
) - 1) << bit_pos
;
1224 vpushi(m
), gen_op('&'); // X B V1
1225 vpushv(vtop
-1); // X B V1 B
1226 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1227 gen_op('&'); // X B V1 B1
1228 gen_op('|'); // X B V2
1230 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1231 vstore(), vpop(); // X B
1232 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1237 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1240 if (0 == sv
->type
.ref
)
1242 t
= sv
->type
.ref
->auxtype
;
1243 if (t
!= -1 && t
!= VT_STRUCT
) {
1244 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1245 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1250 /* store vtop a register belonging to class 'rc'. lvalues are
1251 converted to values. Cannot be used if cannot be converted to
1252 register value (such as structures). */
1253 ST_FUNC
int gv(int rc
)
1255 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1257 /* NOTE: get_reg can modify vstack[] */
1258 if (vtop
->type
.t
& VT_BITFIELD
) {
1261 bit_pos
= BIT_POS(vtop
->type
.t
);
1262 bit_size
= BIT_SIZE(vtop
->type
.t
);
1263 /* remove bit field info to avoid loops */
1264 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1267 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1268 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1269 type
.t
|= VT_UNSIGNED
;
1271 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1273 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1278 if (r
== VT_STRUCT
) {
1279 load_packed_bf(&type
, bit_pos
, bit_size
);
1281 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1282 /* cast to int to propagate signedness in following ops */
1284 /* generate shifts */
1285 vpushi(bits
- (bit_pos
+ bit_size
));
1287 vpushi(bits
- bit_size
);
1288 /* NOTE: transformed to SHR if unsigned */
1293 if (is_float(vtop
->type
.t
) &&
1294 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1295 unsigned long offset
;
1296 /* CPUs usually cannot use float constants, so we store them
1297 generically in data segment */
1298 size
= type_size(&vtop
->type
, &align
);
1300 size
= 0, align
= 1;
1301 offset
= section_add(data_section
, size
, align
);
1302 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1304 init_putv(&vtop
->type
, data_section
, offset
);
1307 #ifdef CONFIG_TCC_BCHECK
1308 if (vtop
->r
& VT_MUSTBOUND
)
1312 r
= vtop
->r
& VT_VALMASK
;
1313 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1314 #ifndef TCC_TARGET_ARM64
1317 #ifdef TCC_TARGET_X86_64
1318 else if (rc
== RC_FRET
)
1322 /* need to reload if:
1324 - lvalue (need to dereference pointer)
1325 - already a register, but not in the right class */
1327 || (vtop
->r
& VT_LVAL
)
1328 || !(reg_classes
[r
] & rc
)
1330 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1331 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1333 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1339 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1340 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1342 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1343 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1344 unsigned long long ll
;
1346 int r2
, original_type
;
1347 original_type
= vtop
->type
.t
;
1348 /* two register type load : expand to two words
1351 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1354 vtop
->c
.i
= ll
; /* first word */
1356 vtop
->r
= r
; /* save register value */
1357 vpushi(ll
>> 32); /* second word */
1360 if (vtop
->r
& VT_LVAL
) {
1361 /* We do not want to modify the long long
1362 pointer here, so the safest (and less
1363 efficient) is to save all the other registers
1364 in the stack. XXX: totally inefficient. */
1368 /* lvalue_save: save only if used further down the stack */
1369 save_reg_upstack(vtop
->r
, 1);
1371 /* load from memory */
1372 vtop
->type
.t
= load_type
;
1375 vtop
[-1].r
= r
; /* save register value */
1376 /* increment pointer to get second word */
1377 vtop
->type
.t
= addr_type
;
1382 vtop
->type
.t
= load_type
;
1384 /* move registers */
1387 vtop
[-1].r
= r
; /* save register value */
1388 vtop
->r
= vtop
[-1].r2
;
1390 /* Allocate second register. Here we rely on the fact that
1391 get_reg() tries first to free r2 of an SValue. */
1395 /* write second register */
1397 vtop
->type
.t
= original_type
;
1398 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1400 /* lvalue of scalar type : need to use lvalue type
1401 because of possible cast */
1404 /* compute memory access type */
1405 if (vtop
->r
& VT_LVAL_BYTE
)
1407 else if (vtop
->r
& VT_LVAL_SHORT
)
1409 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1413 /* restore wanted type */
1416 /* one register type load */
1421 #ifdef TCC_TARGET_C67
1422 /* uses register pairs for doubles */
1423 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1430 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1431 ST_FUNC
void gv2(int rc1
, int rc2
)
1435 /* generate more generic register first. But VT_JMP or VT_CMP
1436 values must be generated first in all cases to avoid possible
1438 v
= vtop
[0].r
& VT_VALMASK
;
1439 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1444 /* test if reload is needed for first register */
1445 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1455 /* test if reload is needed for first register */
1456 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1462 #ifndef TCC_TARGET_ARM64
1463 /* wrapper around RC_FRET to return a register by type */
1464 static int rc_fret(int t
)
1466 #ifdef TCC_TARGET_X86_64
1467 if (t
== VT_LDOUBLE
) {
1475 /* wrapper around REG_FRET to return a register by type */
1476 static int reg_fret(int t
)
1478 #ifdef TCC_TARGET_X86_64
1479 if (t
== VT_LDOUBLE
) {
1487 /* expand 64bit on stack in two ints */
1488 static void lexpand(void)
1491 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1492 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1493 if (v
== VT_CONST
) {
1496 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1502 vtop
[0].r
= vtop
[-1].r2
;
1503 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1505 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1509 #ifdef TCC_TARGET_ARM
1510 /* expand long long on stack */
1511 ST_FUNC
void lexpand_nr(void)
1515 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1517 vtop
->r2
= VT_CONST
;
1518 vtop
->type
.t
= VT_INT
| u
;
1519 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1520 if (v
== VT_CONST
) {
1521 vtop
[-1].c
.i
= vtop
->c
.i
;
1522 vtop
->c
.i
= vtop
->c
.i
>> 32;
1524 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1526 vtop
->r
= vtop
[-1].r
;
1527 } else if (v
> VT_CONST
) {
1531 vtop
->r
= vtop
[-1].r2
;
1532 vtop
[-1].r2
= VT_CONST
;
1533 vtop
[-1].type
.t
= VT_INT
| u
;
1538 /* build a long long from two ints */
1539 static void lbuild(int t
)
1541 gv2(RC_INT
, RC_INT
);
1542 vtop
[-1].r2
= vtop
[0].r
;
1543 vtop
[-1].type
.t
= t
;
1548 /* convert stack entry to register and duplicate its value in another
1550 static void gv_dup(void)
1557 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1558 if (t
& VT_BITFIELD
) {
1568 /* stack: H L L1 H1 */
1578 /* duplicate value */
1583 #ifdef TCC_TARGET_X86_64
1584 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1594 load(r1
, &sv
); /* move r to r1 */
1596 /* duplicates value */
1602 /* Generate value test
1604 * Generate a test for any value (jump, comparison and integers) */
1605 ST_FUNC
int gvtst(int inv
, int t
)
1607 int v
= vtop
->r
& VT_VALMASK
;
1608 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1612 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1613 /* constant jmp optimization */
1614 if ((vtop
->c
.i
!= 0) != inv
)
1619 return gtst(inv
, t
);
1623 /* generate CPU independent (unsigned) long long operations */
1624 static void gen_opl(int op
)
1626 int t
, a
, b
, op1
, c
, i
;
1628 unsigned short reg_iret
= REG_IRET
;
1629 unsigned short reg_lret
= REG_LRET
;
1635 func
= TOK___divdi3
;
1638 func
= TOK___udivdi3
;
1641 func
= TOK___moddi3
;
1644 func
= TOK___umoddi3
;
1651 /* call generic long long function */
1652 vpush_global_sym(&func_old_type
, func
);
1657 vtop
->r2
= reg_lret
;
1665 //pv("gen_opl A",0,2);
1671 /* stack: L1 H1 L2 H2 */
1676 vtop
[-2] = vtop
[-3];
1679 /* stack: H1 H2 L1 L2 */
1680 //pv("gen_opl B",0,4);
1686 /* stack: H1 H2 L1 L2 ML MH */
1689 /* stack: ML MH H1 H2 L1 L2 */
1693 /* stack: ML MH H1 L2 H2 L1 */
1698 /* stack: ML MH M1 M2 */
1701 } else if (op
== '+' || op
== '-') {
1702 /* XXX: add non carry method too (for MIPS or alpha) */
1708 /* stack: H1 H2 (L1 op L2) */
1711 gen_op(op1
+ 1); /* TOK_xxxC2 */
1714 /* stack: H1 H2 (L1 op L2) */
1717 /* stack: (L1 op L2) H1 H2 */
1719 /* stack: (L1 op L2) (H1 op H2) */
1727 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1728 t
= vtop
[-1].type
.t
;
1732 /* stack: L H shift */
1734 /* constant: simpler */
1735 /* NOTE: all comments are for SHL. the other cases are
1736 done by swapping words */
1747 if (op
!= TOK_SAR
) {
1780 /* XXX: should provide a faster fallback on x86 ? */
1783 func
= TOK___ashrdi3
;
1786 func
= TOK___lshrdi3
;
1789 func
= TOK___ashldi3
;
1795 /* compare operations */
1801 /* stack: L1 H1 L2 H2 */
1803 vtop
[-1] = vtop
[-2];
1805 /* stack: L1 L2 H1 H2 */
1808 /* when values are equal, we need to compare low words. since
1809 the jump is inverted, we invert the test too. */
1812 else if (op1
== TOK_GT
)
1814 else if (op1
== TOK_ULT
)
1816 else if (op1
== TOK_UGT
)
1826 /* generate non equal test */
1832 /* compare low. Always unsigned */
1836 else if (op1
== TOK_LE
)
1838 else if (op1
== TOK_GT
)
1840 else if (op1
== TOK_GE
)
/* Signed 64-bit division emulated on unsigned operands: take the
   magnitude of each operand (negate when the sign bit is set — unsigned
   negation wraps, so this is well-defined), divide, then negate the
   quotient when the operand signs differ. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ma = (a >> 63) ? (uint64_t)0 - a : a;
    uint64_t mb = (b >> 63) ? (uint64_t)0 - b : b;
    uint64_t q = ma / mb;
    return ((a ^ b) >> 63) ? (uint64_t)0 - q : q;
}
/* Signed 64-bit "less than" computed on the unsigned representations:
   XORing each operand with the sign bit maps the signed ordering onto
   the unsigned ordering, so a plain unsigned compare gives the signed
   result. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign = (uint64_t)1 << 63;
    return (a ^ sign) < (b ^ sign);
}
1862 /* handle integer constant optimizations and various machine
1864 static void gen_opic(int op
)
1866 SValue
*v1
= vtop
- 1;
1868 int t1
= v1
->type
.t
& VT_BTYPE
;
1869 int t2
= v2
->type
.t
& VT_BTYPE
;
1870 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1871 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1872 uint64_t l1
= c1
? v1
->c
.i
: 0;
1873 uint64_t l2
= c2
? v2
->c
.i
: 0;
1874 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1876 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1877 l1
= ((uint32_t)l1
|
1878 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1879 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1880 l2
= ((uint32_t)l2
|
1881 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1885 case '+': l1
+= l2
; break;
1886 case '-': l1
-= l2
; break;
1887 case '&': l1
&= l2
; break;
1888 case '^': l1
^= l2
; break;
1889 case '|': l1
|= l2
; break;
1890 case '*': l1
*= l2
; break;
1897 /* if division by zero, generate explicit division */
1900 tcc_error("division by zero in constant");
1904 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1905 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1906 case TOK_UDIV
: l1
= l1
/ l2
; break;
1907 case TOK_UMOD
: l1
= l1
% l2
; break;
1910 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1911 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1913 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1916 case TOK_ULT
: l1
= l1
< l2
; break;
1917 case TOK_UGE
: l1
= l1
>= l2
; break;
1918 case TOK_EQ
: l1
= l1
== l2
; break;
1919 case TOK_NE
: l1
= l1
!= l2
; break;
1920 case TOK_ULE
: l1
= l1
<= l2
; break;
1921 case TOK_UGT
: l1
= l1
> l2
; break;
1922 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1923 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1924 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1925 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1927 case TOK_LAND
: l1
= l1
&& l2
; break;
1928 case TOK_LOR
: l1
= l1
|| l2
; break;
1932 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1933 l1
= ((uint32_t)l1
|
1934 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1938 /* if commutative ops, put c2 as constant */
1939 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1940 op
== '|' || op
== '*')) {
1942 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1943 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1945 if (!const_wanted
&&
1947 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1948 (l1
== -1 && op
== TOK_SAR
))) {
1949 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1951 } else if (!const_wanted
&&
1952 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1954 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
1955 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1956 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1961 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1964 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1965 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1968 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
1969 /* filter out NOP operations like x*1, x-0, x&-1... */
1971 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1972 /* try to use shifts instead of muls or divs */
1973 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1982 else if (op
== TOK_PDIV
)
1988 } else if (c2
&& (op
== '+' || op
== '-') &&
1989 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1990 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1991 /* symbol + constant case */
1995 /* The backends can't always deal with addends to symbols
1996 larger than +-1<<31. Don't construct such. */
2003 /* call low level op generator */
2004 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2005 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2013 /* generate a floating point operation with constant propagation */
2014 static void gen_opif(int op
)
2018 #if defined _MSC_VER && defined _AMD64_
2019 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2026 /* currently, we cannot do computations with forward symbols */
2027 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2028 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2030 if (v1
->type
.t
== VT_FLOAT
) {
2033 } else if (v1
->type
.t
== VT_DOUBLE
) {
2041 /* NOTE: we only do constant propagation if finite number (not
2042 NaN or infinity) (ANSI spec) */
2043 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2047 case '+': f1
+= f2
; break;
2048 case '-': f1
-= f2
; break;
2049 case '*': f1
*= f2
; break;
2052 /* If not in initializer we need to potentially generate
2053 FP exceptions at runtime, otherwise we want to fold. */
2059 /* XXX: also handles tests ? */
2063 /* XXX: overflow test ? */
2064 if (v1
->type
.t
== VT_FLOAT
) {
2066 } else if (v1
->type
.t
== VT_DOUBLE
) {
2078 static int pointed_size(CType
*type
)
2081 return type_size(pointed_type(type
), &align
);
2084 static void vla_runtime_pointed_size(CType
*type
)
2087 vla_runtime_type_size(pointed_type(type
), &align
);
2090 static inline int is_null_pointer(SValue
*p
)
2092 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2094 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2095 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2096 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2097 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
2100 static inline int is_integer_btype(int bt
)
2102 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2103 bt
== VT_INT
|| bt
== VT_LLONG
);
2106 /* check types for comparison or subtraction of pointers */
2107 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2109 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2112 /* null pointers are accepted for all comparisons as gcc */
2113 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2117 bt1
= type1
->t
& VT_BTYPE
;
2118 bt2
= type2
->t
& VT_BTYPE
;
2119 /* accept comparison between pointer and integer with a warning */
2120 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2121 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2122 tcc_warning("comparison between pointer and integer");
2126 /* both must be pointers or implicit function pointers */
2127 if (bt1
== VT_PTR
) {
2128 type1
= pointed_type(type1
);
2129 } else if (bt1
!= VT_FUNC
)
2130 goto invalid_operands
;
2132 if (bt2
== VT_PTR
) {
2133 type2
= pointed_type(type2
);
2134 } else if (bt2
!= VT_FUNC
) {
2136 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2138 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2139 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2143 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2144 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2145 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2146 /* gcc-like error if '-' is used */
2148 goto invalid_operands
;
2150 tcc_warning("comparison of distinct pointer types lacks a cast");
2154 /* generic gen_op: handles types problems */
2155 ST_FUNC
void gen_op(int op
)
2157 int u
, t1
, t2
, bt1
, bt2
, t
;
2161 t1
= vtop
[-1].type
.t
;
2162 t2
= vtop
[0].type
.t
;
2163 bt1
= t1
& VT_BTYPE
;
2164 bt2
= t2
& VT_BTYPE
;
2166 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2167 tcc_error("operation on a struct");
2168 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2169 if (bt2
== VT_FUNC
) {
2170 mk_pointer(&vtop
->type
);
2173 if (bt1
== VT_FUNC
) {
2175 mk_pointer(&vtop
->type
);
2180 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2181 /* at least one operand is a pointer */
2182 /* relational op: must be both pointers */
2183 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2184 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2185 /* pointers are handled are unsigned */
2187 t
= VT_LLONG
| VT_UNSIGNED
;
2189 t
= VT_INT
| VT_UNSIGNED
;
2193 /* if both pointers, then it must be the '-' op */
2194 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2196 tcc_error("cannot use pointers here");
2197 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2198 /* XXX: check that types are compatible */
2199 if (vtop
[-1].type
.t
& VT_VLA
) {
2200 vla_runtime_pointed_size(&vtop
[-1].type
);
2202 vpushi(pointed_size(&vtop
[-1].type
));
2206 vtop
->type
.t
= ptrdiff_type
.t
;
2210 /* exactly one pointer : must be '+' or '-'. */
2211 if (op
!= '-' && op
!= '+')
2212 tcc_error("cannot use pointers here");
2213 /* Put pointer as first operand */
2214 if (bt2
== VT_PTR
) {
2216 t
= t1
, t1
= t2
, t2
= t
;
2219 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2220 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2223 type1
= vtop
[-1].type
;
2224 type1
.t
&= ~VT_ARRAY
;
2225 if (vtop
[-1].type
.t
& VT_VLA
)
2226 vla_runtime_pointed_size(&vtop
[-1].type
);
2228 u
= pointed_size(&vtop
[-1].type
);
2230 tcc_error("unknown array element size");
2234 /* XXX: cast to int ? (long long case) */
2240 /* #ifdef CONFIG_TCC_BCHECK
2241 The main reason to removing this code:
2248 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2249 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2251 When this code is on. then the output looks like
2253 v+(i-j) = 0xbff84000
2255 /* if evaluating constant expression, no code should be
2256 generated, so no bound check */
2257 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2258 /* if bounded pointers, we generate a special code to
2265 gen_bounded_ptr_add();
2271 /* put again type if gen_opic() swaped operands */
2274 } else if (is_float(bt1
) || is_float(bt2
)) {
2275 /* compute bigger type and do implicit casts */
2276 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2278 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2283 /* floats can only be used for a few operations */
2284 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2285 (op
< TOK_ULT
|| op
> TOK_GT
))
2286 tcc_error("invalid operands for binary operation");
2288 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2289 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2290 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2292 t
|= (VT_LONG
& t1
);
2294 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2295 /* cast to biggest op */
2296 t
= VT_LLONG
| VT_LONG
;
2297 if (bt1
== VT_LLONG
)
2299 if (bt2
== VT_LLONG
)
2301 /* convert to unsigned if it does not fit in a long long */
2302 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2303 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2307 /* integer operations */
2308 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2309 /* convert to unsigned if it does not fit in an integer */
2310 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2311 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2314 /* XXX: currently, some unsigned operations are explicit, so
2315 we modify them here */
2316 if (t
& VT_UNSIGNED
) {
2323 else if (op
== TOK_LT
)
2325 else if (op
== TOK_GT
)
2327 else if (op
== TOK_LE
)
2329 else if (op
== TOK_GE
)
2337 /* special case for shifts and long long: we keep the shift as
2339 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2346 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2347 /* relational op: the result is an int */
2348 vtop
->type
.t
= VT_INT
;
2353 // Make sure that we have converted to an rvalue:
2354 if (vtop
->r
& VT_LVAL
)
2355 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2358 #ifndef TCC_TARGET_ARM
2359 /* generic itof for unsigned long long case */
2360 static void gen_cvt_itof1(int t
)
2362 #ifdef TCC_TARGET_ARM64
2365 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2366 (VT_LLONG
| VT_UNSIGNED
)) {
2369 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2370 #if LDOUBLE_SIZE != 8
2371 else if (t
== VT_LDOUBLE
)
2372 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2375 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2379 vtop
->r
= reg_fret(t
);
2387 /* generic ftoi for unsigned long long case */
2388 static void gen_cvt_ftoi1(int t
)
2390 #ifdef TCC_TARGET_ARM64
2395 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2396 /* not handled natively */
2397 st
= vtop
->type
.t
& VT_BTYPE
;
2399 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2400 #if LDOUBLE_SIZE != 8
2401 else if (st
== VT_LDOUBLE
)
2402 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2405 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2410 vtop
->r2
= REG_LRET
;
2417 /* force char or short cast */
2418 static void force_charshort_cast(int t
)
2422 /* cannot cast static initializers */
2423 if (STATIC_DATA_WANTED
)
2427 /* XXX: add optimization if lvalue : just change type and offset */
2432 if (t
& VT_UNSIGNED
) {
2433 vpushi((1 << bits
) - 1);
2436 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2442 /* result must be signed or the SAR is converted to an SHL
2443 This was not the case when "t" was a signed short
2444 and the last value on the stack was an unsigned int */
2445 vtop
->type
.t
&= ~VT_UNSIGNED
;
2451 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2452 static void gen_cast_s(int t
)
2460 static void gen_cast(CType
*type
)
2462 int sbt
, dbt
, sf
, df
, c
, p
;
2464 /* special delayed cast for char/short */
2465 /* XXX: in some cases (multiple cascaded casts), it may still
2467 if (vtop
->r
& VT_MUSTCAST
) {
2468 vtop
->r
&= ~VT_MUSTCAST
;
2469 force_charshort_cast(vtop
->type
.t
);
2472 /* bitfields first get cast to ints */
2473 if (vtop
->type
.t
& VT_BITFIELD
) {
2477 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2478 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2483 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2484 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2485 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2486 c
&= dbt
!= VT_LDOUBLE
;
2489 /* constant case: we can do it now */
2490 /* XXX: in ISOC, cannot do it if error in convert */
2491 if (sbt
== VT_FLOAT
)
2492 vtop
->c
.ld
= vtop
->c
.f
;
2493 else if (sbt
== VT_DOUBLE
)
2494 vtop
->c
.ld
= vtop
->c
.d
;
2497 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2498 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2499 vtop
->c
.ld
= vtop
->c
.i
;
2501 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2503 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2504 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2506 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2509 if (dbt
== VT_FLOAT
)
2510 vtop
->c
.f
= (float)vtop
->c
.ld
;
2511 else if (dbt
== VT_DOUBLE
)
2512 vtop
->c
.d
= (double)vtop
->c
.ld
;
2513 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2514 vtop
->c
.i
= vtop
->c
.ld
;
2515 } else if (sf
&& dbt
== VT_BOOL
) {
2516 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2519 vtop
->c
.i
= vtop
->c
.ld
;
2520 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2522 else if (sbt
& VT_UNSIGNED
)
2523 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2525 else if (sbt
== VT_PTR
)
2528 else if (sbt
!= VT_LLONG
)
2529 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2530 -(vtop
->c
.i
& 0x80000000));
2532 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2534 else if (dbt
== VT_BOOL
)
2535 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2537 else if (dbt
== VT_PTR
)
2540 else if (dbt
!= VT_LLONG
) {
2541 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2542 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2545 if (!(dbt
& VT_UNSIGNED
))
2546 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2549 } else if (p
&& dbt
== VT_BOOL
) {
2553 /* non constant case: generate code */
2555 /* convert from fp to fp */
2558 /* convert int to fp */
2561 /* convert fp to int */
2562 if (dbt
== VT_BOOL
) {
2566 /* we handle char/short/etc... with generic code */
2567 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2568 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2572 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2573 /* additional cast for char/short... */
2579 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2580 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2581 /* scalar to long long */
2582 /* machine independent conversion */
2584 /* generate high word */
2585 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2589 if (sbt
== VT_PTR
) {
2590 /* cast from pointer to int before we apply
2591 shift operation, which pointers don't support*/
2598 /* patch second register */
2599 vtop
[-1].r2
= vtop
->r
;
2603 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2604 (dbt
& VT_BTYPE
) == VT_PTR
||
2605 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2606 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2607 (sbt
& VT_BTYPE
) != VT_PTR
&&
2608 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2609 /* need to convert from 32bit to 64bit */
2611 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2612 #if defined(TCC_TARGET_ARM64)
2614 #elif defined(TCC_TARGET_X86_64)
2616 /* x86_64 specific: movslq */
2618 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2625 } else if (dbt
== VT_BOOL
) {
2626 /* scalar to bool */
2629 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2630 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2631 if (sbt
== VT_PTR
) {
2632 vtop
->type
.t
= VT_INT
;
2633 tcc_warning("nonportable conversion from pointer to char/short");
2635 force_charshort_cast(dbt
);
2637 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2639 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2640 /* from long long: just take low order word */
2644 /* if lvalue and single word type, nothing to do because
2645 the lvalue already contains the real type size (see
2646 VT_LVAL_xxx constants) */
2650 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2651 /* if we are casting between pointer types,
2652 we must update the VT_LVAL_xxx size */
2653 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2654 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2659 /* return type size as known at compile time. Put alignment at 'a' */
2660 ST_FUNC
int type_size(CType
*type
, int *a
)
2665 bt
= type
->t
& VT_BTYPE
;
2666 if (bt
== VT_STRUCT
) {
2671 } else if (bt
== VT_PTR
) {
2672 if (type
->t
& VT_ARRAY
) {
2676 ts
= type_size(&s
->type
, a
);
2678 if (ts
< 0 && s
->c
< 0)
2686 } else if (IS_ENUM(type
->t
) && type
->ref
->c
== -1) {
2687 return -1; /* incomplete enum */
2688 } else if (bt
== VT_LDOUBLE
) {
2690 return LDOUBLE_SIZE
;
2691 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2692 #ifdef TCC_TARGET_I386
2693 #ifdef TCC_TARGET_PE
2698 #elif defined(TCC_TARGET_ARM)
2708 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2711 } else if (bt
== VT_SHORT
) {
2714 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2718 /* char, void, function, _Bool */
2724 /* push type size as known at runtime time on top of value stack. Put
2726 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2728 if (type
->t
& VT_VLA
) {
2729 type_size(&type
->ref
->type
, a
);
2730 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2732 vpushi(type_size(type
, a
));
2736 static void vla_sp_restore(void) {
2737 if (vlas_in_scope
) {
2738 gen_vla_sp_restore(vla_sp_loc
);
2742 static void vla_sp_restore_root(void) {
2743 if (vlas_in_scope
) {
2744 gen_vla_sp_restore(vla_sp_root_loc
);
2748 /* return the pointed type of t */
2749 static inline CType
*pointed_type(CType
*type
)
2751 return &type
->ref
->type
;
2754 /* modify type so that its it is a pointer to type. */
2755 ST_FUNC
void mk_pointer(CType
*type
)
2758 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2759 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2763 /* compare function types. OLD functions match any new functions */
2764 static int is_compatible_func(CType
*type1
, CType
*type2
)
2770 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2772 /* check func_call */
2773 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2775 /* XXX: not complete */
2776 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2778 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2780 while (s1
!= NULL
) {
2783 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2793 /* return true if type1 and type2 are the same. If unqualified is
2794 true, qualifiers on the types are ignored.
2796 - enums are not checked as gcc __builtin_types_compatible_p ()
2798 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2802 t1
= type1
->t
& VT_TYPE
;
2803 t2
= type2
->t
& VT_TYPE
;
2805 /* strip qualifiers before comparing */
2806 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2807 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2810 /* Default Vs explicit signedness only matters for char */
2811 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2815 /* XXX: bitfields ? */
2818 /* test more complicated cases */
2819 bt1
= t1
& VT_BTYPE
;
2820 if (bt1
== VT_PTR
) {
2821 type1
= pointed_type(type1
);
2822 type2
= pointed_type(type2
);
2823 return is_compatible_types(type1
, type2
);
2824 } else if (bt1
== VT_STRUCT
) {
2825 return (type1
->ref
== type2
->ref
);
2826 } else if (bt1
== VT_FUNC
) {
2827 return is_compatible_func(type1
, type2
);
2833 /* return true if type1 and type2 are exactly the same (including
2836 static int is_compatible_types(CType
*type1
, CType
*type2
)
2838 return compare_types(type1
,type2
,0);
2841 /* return true if type1 and type2 are the same (ignoring qualifiers).
2843 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2845 return compare_types(type1
,type2
,1);
2848 /* print a type. If 'varstr' is not NULL, then the variable is also
2849 printed in the type */
2851 /* XXX: add array and function pointers */
2852 static void type_to_str(char *buf
, int buf_size
,
2853 CType
*type
, const char *varstr
)
2865 pstrcat(buf
, buf_size
, "extern ");
2867 pstrcat(buf
, buf_size
, "static ");
2869 pstrcat(buf
, buf_size
, "typedef ");
2871 pstrcat(buf
, buf_size
, "inline ");
2872 if (t
& VT_VOLATILE
)
2873 pstrcat(buf
, buf_size
, "volatile ");
2874 if (t
& VT_CONSTANT
)
2875 pstrcat(buf
, buf_size
, "const ");
2877 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2878 || ((t
& VT_UNSIGNED
)
2879 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2882 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2884 buf_size
-= strlen(buf
);
2919 tstr
= "long double";
2921 pstrcat(buf
, buf_size
, tstr
);
2928 pstrcat(buf
, buf_size
, tstr
);
2929 v
= type
->ref
->v
& ~SYM_STRUCT
;
2930 if (v
>= SYM_FIRST_ANOM
)
2931 pstrcat(buf
, buf_size
, "<anonymous>");
2933 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2937 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2938 pstrcat(buf
, buf_size
, "(");
2940 while (sa
!= NULL
) {
2941 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2942 pstrcat(buf
, buf_size
, buf1
);
2945 pstrcat(buf
, buf_size
, ", ");
2947 pstrcat(buf
, buf_size
, ")");
2952 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2953 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2956 pstrcpy(buf1
, sizeof(buf1
), "*");
2957 if (t
& VT_CONSTANT
)
2958 pstrcat(buf1
, buf_size
, "const ");
2959 if (t
& VT_VOLATILE
)
2960 pstrcat(buf1
, buf_size
, "volatile ");
2962 pstrcat(buf1
, sizeof(buf1
), varstr
);
2963 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2967 pstrcat(buf
, buf_size
, " ");
2968 pstrcat(buf
, buf_size
, varstr
);
2973 /* verify type compatibility to store vtop in 'dt' type, and generate
2975 static void gen_assign_cast(CType
*dt
)
2977 CType
*st
, *type1
, *type2
;
2978 char buf1
[256], buf2
[256];
2981 st
= &vtop
->type
; /* source type */
2982 dbt
= dt
->t
& VT_BTYPE
;
2983 sbt
= st
->t
& VT_BTYPE
;
2984 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
2985 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
2987 It is Ok if both are void
2993 gcc accepts this program
2996 tcc_error("cannot cast from/to void");
2998 if (dt
->t
& VT_CONSTANT
)
2999 tcc_warning("assignment of read-only location");
3002 /* special cases for pointers */
3003 /* '0' can also be a pointer */
3004 if (is_null_pointer(vtop
))
3006 /* accept implicit pointer to integer cast with warning */
3007 if (is_integer_btype(sbt
)) {
3008 tcc_warning("assignment makes pointer from integer without a cast");
3011 type1
= pointed_type(dt
);
3012 /* a function is implicitly a function pointer */
3013 if (sbt
== VT_FUNC
) {
3014 if ((type1
->t
& VT_BTYPE
) != VT_VOID
&&
3015 !is_compatible_types(pointed_type(dt
), st
))
3016 tcc_warning("assignment from incompatible pointer type");
3021 type2
= pointed_type(st
);
3022 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
3023 (type2
->t
& VT_BTYPE
) == VT_VOID
) {
3024 /* void * can match anything */
3026 //printf("types %08x %08x\n", type1->t, type2->t);
3027 /* exact type match, except for qualifiers */
3028 if (!is_compatible_unqualified_types(type1
, type2
)) {
3029 /* Like GCC don't warn by default for merely changes
3030 in pointer target signedness. Do warn for different
3031 base types, though, in particular for unsigned enums
3032 and signed int targets. */
3033 if ((type1
->t
& (VT_BTYPE
|VT_LONG
)) != (type2
->t
& (VT_BTYPE
|VT_LONG
))
3034 || IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)
3036 tcc_warning("assignment from incompatible pointer type");
3039 /* check const and volatile */
3040 if ((!(type1
->t
& VT_CONSTANT
) && (type2
->t
& VT_CONSTANT
)) ||
3041 (!(type1
->t
& VT_VOLATILE
) && (type2
->t
& VT_VOLATILE
)))
3042 tcc_warning("assignment discards qualifiers from pointer target type");
3048 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3049 tcc_warning("assignment makes integer from pointer without a cast");
3050 } else if (sbt
== VT_STRUCT
) {
3051 goto case_VT_STRUCT
;
3053 /* XXX: more tests */
3057 if (!is_compatible_unqualified_types(dt
, st
)) {
3059 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3060 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3061 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3069 /* store vtop in lvalue pushed on stack */
3070 ST_FUNC
void vstore(void)
3072 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3074 ft
= vtop
[-1].type
.t
;
3075 sbt
= vtop
->type
.t
& VT_BTYPE
;
3076 dbt
= ft
& VT_BTYPE
;
3077 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3078 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3079 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3080 /* optimize char/short casts */
3081 delayed_cast
= VT_MUSTCAST
;
3082 vtop
->type
.t
= ft
& VT_TYPE
;
3083 /* XXX: factorize */
3084 if (ft
& VT_CONSTANT
)
3085 tcc_warning("assignment of read-only location");
3088 if (!(ft
& VT_BITFIELD
))
3089 gen_assign_cast(&vtop
[-1].type
);
3092 if (sbt
== VT_STRUCT
) {
3093 /* if structure, only generate pointer */
3094 /* structure assignment : generate memcpy */
3095 /* XXX: optimize if small size */
3096 size
= type_size(&vtop
->type
, &align
);
3100 vtop
->type
.t
= VT_PTR
;
3103 /* address of memcpy() */
3106 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3107 else if(!(align
& 3))
3108 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3111 /* Use memmove, rather than memcpy, as dest and src may be same: */
3112 vpush_global_sym(&func_old_type
, TOK_memmove
);
3117 vtop
->type
.t
= VT_PTR
;
3123 /* leave source on stack */
3124 } else if (ft
& VT_BITFIELD
) {
3125 /* bitfield store handling */
3127 /* save lvalue as expression result (example: s.b = s.a = n;) */
3128 vdup(), vtop
[-1] = vtop
[-2];
3130 bit_pos
= BIT_POS(ft
);
3131 bit_size
= BIT_SIZE(ft
);
3132 /* remove bit field info to avoid loops */
3133 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3135 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3136 gen_cast(&vtop
[-1].type
);
3137 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3140 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3141 if (r
== VT_STRUCT
) {
3142 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3143 store_packed_bf(bit_pos
, bit_size
);
3145 unsigned long long mask
= (1ULL << bit_size
) - 1;
3146 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3148 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3151 vpushi((unsigned)mask
);
3158 /* duplicate destination */
3161 /* load destination, mask and or with source */
3162 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3163 vpushll(~(mask
<< bit_pos
));
3165 vpushi(~((unsigned)mask
<< bit_pos
));
3170 /* ... and discard */
3173 } else if (dbt
== VT_VOID
) {
3176 #ifdef CONFIG_TCC_BCHECK
3177 /* bound check case */
3178 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3187 #ifdef TCC_TARGET_X86_64
3188 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3190 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3195 r
= gv(rc
); /* generate value */
3196 /* if lvalue was saved on stack, must read it */
3197 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3199 t
= get_reg(RC_INT
);
3205 sv
.r
= VT_LOCAL
| VT_LVAL
;
3206 sv
.c
.i
= vtop
[-1].c
.i
;
3208 vtop
[-1].r
= t
| VT_LVAL
;
3210 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3212 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3213 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3215 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3216 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3218 vtop
[-1].type
.t
= load_type
;
3221 /* convert to int to increment easily */
3222 vtop
->type
.t
= addr_type
;
3228 vtop
[-1].type
.t
= load_type
;
3229 /* XXX: it works because r2 is spilled last ! */
3230 store(vtop
->r2
, vtop
- 1);
3236 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3237 vtop
->r
|= delayed_cast
;
3241 /* post defines POST/PRE add. c is the token ++ or -- */
3242 ST_FUNC
void inc(int post
, int c
)
3245 vdup(); /* save lvalue */
3247 gv_dup(); /* duplicate value */
3252 vpushi(c
- TOK_MID
);
3254 vstore(); /* store value */
3256 vpop(); /* if post op, return saved value */
3259 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3261 /* read the string */
3265 while (tok
== TOK_STR
) {
3266 /* XXX: add \0 handling too ? */
3267 cstr_cat(astr
, tokc
.str
.data
, -1);
3270 cstr_ccat(astr
, '\0');
3273 /* If I is >= 1 and a power of two, returns log2(i)+1.
3274 If I is 0 returns 0. */
3275 static int exact_log2p1(int i
)
3280 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3291 /* Parse __attribute__((...)) GNUC extension. */
3292 static void parse_attribute(AttributeDef
*ad
)
3298 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3303 while (tok
!= ')') {
3304 if (tok
< TOK_IDENT
)
3305 expect("attribute name");
3312 parse_mult_str(&astr
, "section name");
3313 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3320 parse_mult_str(&astr
, "alias(\"target\")");
3321 ad
->alias_target
= /* save string as token, for later */
3322 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3326 case TOK_VISIBILITY1
:
3327 case TOK_VISIBILITY2
:
3329 parse_mult_str(&astr
,
3330 "visibility(\"default|hidden|internal|protected\")");
3331 if (!strcmp (astr
.data
, "default"))
3332 ad
->a
.visibility
= STV_DEFAULT
;
3333 else if (!strcmp (astr
.data
, "hidden"))
3334 ad
->a
.visibility
= STV_HIDDEN
;
3335 else if (!strcmp (astr
.data
, "internal"))
3336 ad
->a
.visibility
= STV_INTERNAL
;
3337 else if (!strcmp (astr
.data
, "protected"))
3338 ad
->a
.visibility
= STV_PROTECTED
;
3340 expect("visibility(\"default|hidden|internal|protected\")");
3349 if (n
<= 0 || (n
& (n
- 1)) != 0)
3350 tcc_error("alignment must be a positive power of two");
3355 ad
->a
.aligned
= exact_log2p1(n
);
3356 if (n
!= 1 << (ad
->a
.aligned
- 1))
3357 tcc_error("alignment of %d is larger than implemented", n
);
3369 /* currently, no need to handle it because tcc does not
3370 track unused objects */
3374 /* currently, no need to handle it because tcc does not
3375 track unused objects */
3380 ad
->f
.func_call
= FUNC_CDECL
;
3385 ad
->f
.func_call
= FUNC_STDCALL
;
3387 #ifdef TCC_TARGET_I386
3397 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3403 ad
->f
.func_call
= FUNC_FASTCALLW
;
3410 ad
->attr_mode
= VT_LLONG
+ 1;
3413 ad
->attr_mode
= VT_BYTE
+ 1;
3416 ad
->attr_mode
= VT_SHORT
+ 1;
3420 ad
->attr_mode
= VT_INT
+ 1;
3423 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3430 ad
->a
.dllexport
= 1;
3433 ad
->a
.dllimport
= 1;
3436 if (tcc_state
->warn_unsupported
)
3437 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3438 /* skip parameters */
3440 int parenthesis
= 0;
3444 else if (tok
== ')')
3447 } while (parenthesis
&& tok
!= -1);
3460 static Sym
* find_field (CType
*type
, int v
)
3464 while ((s
= s
->next
) != NULL
) {
3465 if ((s
->v
& SYM_FIELD
) &&
3466 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3467 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3468 Sym
*ret
= find_field (&s
->type
, v
);
3478 static void struct_add_offset (Sym
*s
, int offset
)
3480 while ((s
= s
->next
) != NULL
) {
3481 if ((s
->v
& SYM_FIELD
) &&
3482 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3483 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3484 struct_add_offset(s
->type
.ref
, offset
);
3490 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3492 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3493 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3494 int pcc
= !tcc_state
->ms_bitfields
;
3495 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3502 prevbt
= VT_STRUCT
; /* make it never match */
3507 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3508 if (f
->type
.t
& VT_BITFIELD
)
3509 bit_size
= BIT_SIZE(f
->type
.t
);
3512 size
= type_size(&f
->type
, &align
);
3513 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3516 if (pcc
&& bit_size
== 0) {
3517 /* in pcc mode, packing does not affect zero-width bitfields */
3520 /* in pcc mode, attribute packed overrides if set. */
3521 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3524 /* pragma pack overrides align if lesser and packs bitfields always */
3527 if (pragma_pack
< align
)
3528 align
= pragma_pack
;
3529 /* in pcc mode pragma pack also overrides individual align */
3530 if (pcc
&& pragma_pack
< a
)
3534 /* some individual align was specified */
3538 if (type
->ref
->type
.t
== VT_UNION
) {
3539 if (pcc
&& bit_size
>= 0)
3540 size
= (bit_size
+ 7) >> 3;
3545 } else if (bit_size
< 0) {
3547 c
+= (bit_pos
+ 7) >> 3;
3548 c
= (c
+ align
- 1) & -align
;
3557 /* A bit-field. Layout is more complicated. There are two
3558 options: PCC (GCC) compatible and MS compatible */
3560 /* In PCC layout a bit-field is placed adjacent to the
3561 preceding bit-fields, except if:
3563 - an individual alignment was given
3564 - it would overflow its base type container and
3565 there is no packing */
3566 if (bit_size
== 0) {
3568 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3570 } else if (f
->a
.aligned
) {
3572 } else if (!packed
) {
3574 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3575 if (ofs
> size
/ align
)
3579 /* in pcc mode, long long bitfields have type int if they fit */
3580 if (size
== 8 && bit_size
<= 32)
3581 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3583 while (bit_pos
>= align
* 8)
3584 c
+= align
, bit_pos
-= align
* 8;
3587 /* In PCC layout named bit-fields influence the alignment
3588 of the containing struct using the base types alignment,
3589 except for packed fields (which here have correct align). */
3590 if (f
->v
& SYM_FIRST_ANOM
3591 // && bit_size // ??? gcc on ARM/rpi does that
3596 bt
= f
->type
.t
& VT_BTYPE
;
3597 if ((bit_pos
+ bit_size
> size
* 8)
3598 || (bit_size
> 0) == (bt
!= prevbt
)
3600 c
= (c
+ align
- 1) & -align
;
3603 /* In MS bitfield mode a bit-field run always uses
3604 at least as many bits as the underlying type.
3605 To start a new run it's also required that this
3606 or the last bit-field had non-zero width. */
3607 if (bit_size
|| prev_bit_size
)
3610 /* In MS layout the records alignment is normally
3611 influenced by the field, except for a zero-width
3612 field at the start of a run (but by further zero-width
3613 fields it is again). */
3614 if (bit_size
== 0 && prevbt
!= bt
)
3617 prev_bit_size
= bit_size
;
3620 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3621 | (bit_pos
<< VT_STRUCT_SHIFT
);
3622 bit_pos
+= bit_size
;
3624 if (align
> maxalign
)
3628 printf("set field %s offset %-2d size %-2d align %-2d",
3629 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3630 if (f
->type
.t
& VT_BITFIELD
) {
3631 printf(" pos %-2d bits %-2d",
3639 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3641 /* An anonymous struct/union. Adjust member offsets
3642 to reflect the real offset of our containing struct.
3643 Also set the offset of this anon member inside
3644 the outer struct to be zero. Via this it
3645 works when accessing the field offset directly
3646 (from base object), as well as when recursing
3647 members in initializer handling. */
3648 int v2
= f
->type
.ref
->v
;
3649 if (!(v2
& SYM_FIELD
) &&
3650 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3652 /* This happens only with MS extensions. The
3653 anon member has a named struct type, so it
3654 potentially is shared with other references.
3655 We need to unshare members so we can modify
3658 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3659 &f
->type
.ref
->type
, 0,
3661 pps
= &f
->type
.ref
->next
;
3662 while ((ass
= ass
->next
) != NULL
) {
3663 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3664 pps
= &((*pps
)->next
);
3668 struct_add_offset(f
->type
.ref
, offset
);
3678 c
+= (bit_pos
+ 7) >> 3;
3680 /* store size and alignment */
3681 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3685 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3686 /* can happen if individual align for some member was given. In
3687 this case MSVC ignores maxalign when aligning the size */
3692 c
= (c
+ a
- 1) & -a
;
3696 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3699 /* check whether we can access bitfields by their type */
3700 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3704 if (0 == (f
->type
.t
& VT_BITFIELD
))
3708 bit_size
= BIT_SIZE(f
->type
.t
);
3711 bit_pos
= BIT_POS(f
->type
.t
);
3712 size
= type_size(&f
->type
, &align
);
3713 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3716 /* try to access the field using a different type */
3717 c0
= -1, s
= align
= 1;
3719 px
= f
->c
* 8 + bit_pos
;
3720 cx
= (px
>> 3) & -align
;
3721 px
= px
- (cx
<< 3);
3724 s
= (px
+ bit_size
+ 7) >> 3;
3734 s
= type_size(&t
, &align
);
3738 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3739 /* update offset and bit position */
3742 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3743 | (bit_pos
<< VT_STRUCT_SHIFT
);
3747 printf("FIX field %s offset %-2d size %-2d align %-2d "
3748 "pos %-2d bits %-2d\n",
3749 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3750 cx
, s
, align
, px
, bit_size
);
3753 /* fall back to load/store single-byte wise */
3754 f
->auxtype
= VT_STRUCT
;
3756 printf("FIX field %s : load byte-wise\n",
3757 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3763 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3764 static void struct_decl(CType
*type
, int u
)
3766 int v
, c
, size
, align
, flexible
;
3767 int bit_size
, bsize
, bt
;
3769 AttributeDef ad
, ad1
;
3772 memset(&ad
, 0, sizeof ad
);
3774 parse_attribute(&ad
);
3778 /* struct already defined ? return it */
3780 expect("struct/union/enum name");
3782 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3785 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3787 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3792 /* Record the original enum/struct/union token. */
3793 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3795 /* we put an undefined size for struct/union */
3796 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3797 s
->r
= 0; /* default alignment is zero as gcc */
3799 type
->t
= s
->type
.t
;
3805 tcc_error("struct/union/enum already defined");
3806 /* cannot be empty */
3807 /* non empty enums are not allowed */
3810 long long ll
= 0, pl
= 0, nl
= 0;
3813 /* enum symbols have static storage */
3814 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3818 expect("identifier");
3820 if (ss
&& !local_stack
)
3821 tcc_error("redefinition of enumerator '%s'",
3822 get_tok_str(v
, NULL
));
3826 ll
= expr_const64();
3828 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3830 *ps
= ss
, ps
= &ss
->next
;
3839 /* NOTE: we accept a trailing comma */
3844 /* set integral type of the enum */
3847 if (pl
!= (unsigned)pl
)
3848 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3850 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
3851 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3852 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
3854 /* set type for enum members */
3855 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
3857 if (ll
== (int)ll
) /* default is int if it fits */
3859 if (t
.t
& VT_UNSIGNED
) {
3860 ss
->type
.t
|= VT_UNSIGNED
;
3861 if (ll
== (unsigned)ll
)
3864 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
3865 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3870 while (tok
!= '}') {
3871 if (!parse_btype(&btype
, &ad1
)) {
3877 tcc_error("flexible array member '%s' not at the end of struct",
3878 get_tok_str(v
, NULL
));
3884 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
3886 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3887 expect("identifier");
3889 int v
= btype
.ref
->v
;
3890 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3891 if (tcc_state
->ms_extensions
== 0)
3892 expect("identifier");
3896 if (type_size(&type1
, &align
) < 0) {
3897 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3900 tcc_error("field '%s' has incomplete type",
3901 get_tok_str(v
, NULL
));
3903 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3904 (type1
.t
& VT_STORAGE
))
3905 tcc_error("invalid type for '%s'",
3906 get_tok_str(v
, NULL
));
3910 bit_size
= expr_const();
3911 /* XXX: handle v = 0 case for messages */
3913 tcc_error("negative width in bit-field '%s'",
3914 get_tok_str(v
, NULL
));
3915 if (v
&& bit_size
== 0)
3916 tcc_error("zero width for bit-field '%s'",
3917 get_tok_str(v
, NULL
));
3918 parse_attribute(&ad1
);
3920 size
= type_size(&type1
, &align
);
3921 if (bit_size
>= 0) {
3922 bt
= type1
.t
& VT_BTYPE
;
3928 tcc_error("bitfields must have scalar type");
3930 if (bit_size
> bsize
) {
3931 tcc_error("width of '%s' exceeds its type",
3932 get_tok_str(v
, NULL
));
3933 } else if (bit_size
== bsize
3934 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
3935 /* no need for bit fields */
3937 } else if (bit_size
== 64) {
3938 tcc_error("field width 64 not implemented");
3940 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
3942 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
3945 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3946 /* Remember we've seen a real field to check
3947 for placement of flexible array member. */
3950 /* If member is a struct or bit-field, enforce
3951 placing into the struct (as anonymous). */
3953 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
3958 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
3963 if (tok
== ';' || tok
== TOK_EOF
)
3970 parse_attribute(&ad
);
3971 struct_layout(type
, &ad
);
3976 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
3978 if (s
->a
.aligned
&& 0 == ad
->a
.aligned
)
3979 ad
->a
.aligned
= s
->a
.aligned
;
3980 if (s
->f
.func_call
&& 0 == ad
->f
.func_call
)
3981 ad
->f
.func_call
= s
->f
.func_call
;
3982 if (s
->f
.func_type
&& 0 == ad
->f
.func_type
)
3983 ad
->f
.func_type
= s
->f
.func_type
;
3988 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3989 are added to the element type, copied because it could be a typedef. */
3990 static void parse_btype_qualify(CType
*type
, int qualifiers
)
3992 while (type
->t
& VT_ARRAY
) {
3993 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
3994 type
= &type
->ref
->type
;
3996 type
->t
|= qualifiers
;
3999 /* return 0 if no type declaration. otherwise, return the basic type
4002 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4004 int t
, u
, bt
, st
, type_found
, typespec_found
, g
;
4008 memset(ad
, 0, sizeof(AttributeDef
));
4018 /* currently, we really ignore extension */
4028 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4029 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4030 tmbt
: tcc_error("too many basic types");
4033 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4038 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4051 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4052 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4053 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4054 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4061 #ifdef TCC_TARGET_ARM64
4063 /* GCC's __uint128_t appears in some Linux header files. Make it a
4064 synonym for long double to get the size and alignment right. */
4075 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4076 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4084 struct_decl(&type1
, VT_ENUM
);
4087 type
->ref
= type1
.ref
;
4090 struct_decl(&type1
, VT_STRUCT
);
4093 struct_decl(&type1
, VT_UNION
);
4096 /* type modifiers */
4101 parse_btype_qualify(type
, VT_CONSTANT
);
4109 parse_btype_qualify(type
, VT_VOLATILE
);
4116 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4117 tcc_error("signed and unsigned modifier");
4130 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4131 tcc_error("signed and unsigned modifier");
4132 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4148 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4149 tcc_error("multiple storage classes");
4160 /* GNUC attribute */
4161 case TOK_ATTRIBUTE1
:
4162 case TOK_ATTRIBUTE2
:
4163 parse_attribute(ad
);
4164 if (ad
->attr_mode
) {
4165 u
= ad
->attr_mode
-1;
4166 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4174 parse_expr_type(&type1
);
4175 /* remove all storage modifiers except typedef */
4176 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4178 sym_to_attr(ad
, type1
.ref
);
4184 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4186 t
&= ~(VT_BTYPE
|VT_LONG
);
4187 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4188 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4189 type
->ref
= s
->type
.ref
;
4191 parse_btype_qualify(type
, t
);
4193 /* get attributes from typedef */
4203 if (tcc_state
->char_is_unsigned
) {
4204 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4207 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4208 bt
= t
& (VT_BTYPE
|VT_LONG
);
4210 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4211 #ifdef TCC_TARGET_PE
4212 if (bt
== VT_LDOUBLE
)
4213 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4219 /* convert a function parameter type (array to pointer and function to
4220 function pointer) */
4221 static inline void convert_parameter_type(CType
*pt
)
4223 /* remove const and volatile qualifiers (XXX: const could be used
4224 to indicate a const function parameter */
4225 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4226 /* array must be transformed to pointer according to ANSI C */
4228 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4233 ST_FUNC
void parse_asm_str(CString
*astr
)
4236 parse_mult_str(astr
, "string constant");
4239 /* Parse an asm label and return the token */
4240 static int asm_label_instr(void)
4246 parse_asm_str(&astr
);
4249 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4251 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4256 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4258 int n
, l
, t1
, arg_size
, align
;
4259 Sym
**plast
, *s
, *first
;
4264 /* function type, or recursive declarator (return if so) */
4266 if (td
&& !(td
& TYPE_ABSTRACT
))
4270 else if (parse_btype(&pt
, &ad1
))
4281 /* read param name and compute offset */
4282 if (l
!= FUNC_OLD
) {
4283 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4285 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4286 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4287 tcc_error("parameter declared as void");
4288 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4292 expect("identifier");
4293 pt
.t
= VT_VOID
; /* invalid type */
4296 convert_parameter_type(&pt
);
4297 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4303 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4308 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4309 tcc_error("invalid type");
4312 /* if no parameters, then old type prototype */
4315 /* NOTE: const is ignored in returned type as it has a special
4316 meaning in gcc / C++ */
4317 type
->t
&= ~VT_CONSTANT
;
4318 /* some ancient pre-K&R C allows a function to return an array
4319 and the array brackets to be put after the arguments, such
4320 that "int c()[]" means something like "int[] c()" */
4323 skip(']'); /* only handle simple "[]" */
4326 /* we push a anonymous symbol which will contain the function prototype */
4327 ad
->f
.func_args
= arg_size
;
4328 ad
->f
.func_type
= l
;
4329 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4335 } else if (tok
== '[') {
4336 int saved_nocode_wanted
= nocode_wanted
;
4337 /* array definition */
4339 if (tok
== TOK_RESTRICT1
)
4344 if (!local_stack
|| (storage
& VT_STATIC
))
4345 vpushi(expr_const());
4347 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4348 length must always be evaluated, even under nocode_wanted,
4349 so that its size slot is initialized (e.g. under sizeof
4354 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4357 tcc_error("invalid array size");
4359 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4360 tcc_error("size of variable length array should be an integer");
4365 /* parse next post type */
4366 post_type(type
, ad
, storage
, 0);
4367 if (type
->t
== VT_FUNC
)
4368 tcc_error("declaration of an array of functions");
4369 t1
|= type
->t
& VT_VLA
;
4372 loc
-= type_size(&int_type
, &align
);
4376 vla_runtime_type_size(type
, &align
);
4378 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4384 nocode_wanted
= saved_nocode_wanted
;
4386 /* we push an anonymous symbol which will contain the array
4388 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4389 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4395 /* Parse a type declarator (except basic type), and return the type
4396 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4397 expected. 'type' should contain the basic type. 'ad' is the
4398 attribute definition of the basic type. It can be modified by
4399 type_decl(). If this (possibly abstract) declarator is a pointer chain
4400 it returns the innermost pointed to type (equals *type, but is a different
4401 pointer), otherwise returns type itself, that's used for recursive calls. */
4402 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4405 int qualifiers
, storage
;
4407 /* recursive type, remove storage bits first, apply them later again */
4408 storage
= type
->t
& VT_STORAGE
;
4409 type
->t
&= ~VT_STORAGE
;
4412 while (tok
== '*') {
4420 qualifiers
|= VT_CONSTANT
;
4425 qualifiers
|= VT_VOLATILE
;
4431 /* XXX: clarify attribute handling */
4432 case TOK_ATTRIBUTE1
:
4433 case TOK_ATTRIBUTE2
:
4434 parse_attribute(ad
);
4438 type
->t
|= qualifiers
;
4440 /* innermost pointed to type is the one for the first derivation */
4441 ret
= pointed_type(type
);
4445 /* This is possibly a parameter type list for abstract declarators
4446 ('int ()'), use post_type for testing this. */
4447 if (!post_type(type
, ad
, 0, td
)) {
4448 /* It's not, so it's a nested declarator, and the post operations
4449 apply to the innermost pointed to type (if any). */
4450 /* XXX: this is not correct to modify 'ad' at this point, but
4451 the syntax is not clear */
4452 parse_attribute(ad
);
4453 post
= type_decl(type
, ad
, v
, td
);
4456 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4457 /* type identifier */
4461 if (!(td
& TYPE_ABSTRACT
))
4462 expect("identifier");
4465 post_type(post
, ad
, storage
, 0);
4466 parse_attribute(ad
);
4471 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4472 ST_FUNC
int lvalue_type(int t
)
4477 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4479 else if (bt
== VT_SHORT
)
4483 if (t
& VT_UNSIGNED
)
4484 r
|= VT_LVAL_UNSIGNED
;
4488 /* indirection with full error checking and bound check */
4489 ST_FUNC
void indir(void)
4491 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4492 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4496 if (vtop
->r
& VT_LVAL
)
4498 vtop
->type
= *pointed_type(&vtop
->type
);
4499 /* Arrays and functions are never lvalues */
4500 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4501 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4502 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4503 /* if bound checking, the referenced pointer must be checked */
4504 #ifdef CONFIG_TCC_BCHECK
4505 if (tcc_state
->do_bounds_check
)
4506 vtop
->r
|= VT_MUSTBOUND
;
4511 /* pass a parameter to a function and do type checking and casting */
4512 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4517 func_type
= func
->f
.func_type
;
4518 if (func_type
== FUNC_OLD
||
4519 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4520 /* default casting : only need to convert float to double */
4521 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4522 gen_cast_s(VT_DOUBLE
);
4523 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4524 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4525 type
.ref
= vtop
->type
.ref
;
4528 } else if (arg
== NULL
) {
4529 tcc_error("too many arguments to function");
4532 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4533 gen_assign_cast(&type
);
4537 /* parse an expression and return its type without any side effect. */
4538 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4547 /* parse an expression of the form '(type)' or '(expr)' and return its
4549 static void parse_expr_type(CType
*type
)
4555 if (parse_btype(type
, &ad
)) {
4556 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4558 expr_type(type
, gexpr
);
4563 static void parse_type(CType
*type
)
4568 if (!parse_btype(type
, &ad
)) {
4571 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4574 static void parse_builtin_params(int nc
, const char *args
)
4581 while ((c
= *args
++)) {
4585 case 'e': expr_eq(); continue;
4586 case 't': parse_type(&t
); vpush(&t
); continue;
4587 default: tcc_error("internal error"); break;
4595 ST_FUNC
void unary(void)
4597 int n
, t
, align
, size
, r
, sizeof_caller
;
4602 sizeof_caller
= in_sizeof
;
4605 /* XXX: GCC 2.95.3 does not generate a table although it should be
4613 #ifdef TCC_TARGET_PE
4614 t
= VT_SHORT
|VT_UNSIGNED
;
4622 vsetc(&type
, VT_CONST
, &tokc
);
4626 t
= VT_INT
| VT_UNSIGNED
;
4632 t
= VT_LLONG
| VT_UNSIGNED
;
4644 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4647 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4649 case TOK___FUNCTION__
:
4651 goto tok_identifier
;
4657 /* special function name identifier */
4658 len
= strlen(funcname
) + 1;
4659 /* generate char[len] type */
4664 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4665 if (!NODATA_WANTED
) {
4666 ptr
= section_ptr_add(data_section
, len
);
4667 memcpy(ptr
, funcname
, len
);
4673 #ifdef TCC_TARGET_PE
4674 t
= VT_SHORT
| VT_UNSIGNED
;
4680 /* string parsing */
4682 if (tcc_state
->char_is_unsigned
)
4683 t
= VT_BYTE
| VT_UNSIGNED
;
4685 if (tcc_state
->warn_write_strings
)
4690 memset(&ad
, 0, sizeof(AttributeDef
));
4691 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4696 if (parse_btype(&type
, &ad
)) {
4697 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4699 /* check ISOC99 compound literal */
4701 /* data is allocated locally by default */
4706 /* all except arrays are lvalues */
4707 if (!(type
.t
& VT_ARRAY
))
4708 r
|= lvalue_type(type
.t
);
4709 memset(&ad
, 0, sizeof(AttributeDef
));
4710 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4712 if (sizeof_caller
) {
4719 } else if (tok
== '{') {
4720 int saved_nocode_wanted
= nocode_wanted
;
4722 tcc_error("expected constant");
4723 /* save all registers */
4725 /* statement expression : we do not accept break/continue
4726 inside as GCC does. We do retain the nocode_wanted state,
4727 as statement expressions can't ever be entered from the
4728 outside, so any reactivation of code emission (from labels
4729 or loop heads) can be disabled again after the end of it. */
4730 block(NULL
, NULL
, 1);
4731 nocode_wanted
= saved_nocode_wanted
;
4746 /* functions names must be treated as function pointers,
4747 except for unary '&' and sizeof. Since we consider that
4748 functions are not lvalues, we only have to handle it
4749 there and in function calls. */
4750 /* arrays can also be used although they are not lvalues */
4751 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4752 !(vtop
->type
.t
& VT_ARRAY
))
4754 mk_pointer(&vtop
->type
);
4760 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4761 gen_cast_s(VT_BOOL
);
4762 vtop
->c
.i
= !vtop
->c
.i
;
4763 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4767 vseti(VT_JMP
, gvtst(1, 0));
4779 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4780 tcc_error("pointer not accepted for unary plus");
4781 /* In order to force cast, we add zero, except for floating point
4782 where we really need an noop (otherwise -0.0 will be transformed
4784 if (!is_float(vtop
->type
.t
)) {
4795 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
4796 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
4797 size
= type_size(&type
, &align
);
4798 if (s
&& s
->a
.aligned
)
4799 align
= 1 << (s
->a
.aligned
- 1);
4800 if (t
== TOK_SIZEOF
) {
4801 if (!(type
.t
& VT_VLA
)) {
4803 tcc_error("sizeof applied to an incomplete type");
4806 vla_runtime_type_size(&type
, &align
);
4811 vtop
->type
.t
|= VT_UNSIGNED
;
4814 case TOK_builtin_expect
:
4815 /* __builtin_expect is a no-op for now */
4816 parse_builtin_params(0, "ee");
4819 case TOK_builtin_types_compatible_p
:
4820 parse_builtin_params(0, "tt");
4821 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4822 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4823 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4827 case TOK_builtin_choose_expr
:
4854 case TOK_builtin_constant_p
:
4855 parse_builtin_params(1, "e");
4856 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4860 case TOK_builtin_frame_address
:
4861 case TOK_builtin_return_address
:
4867 if (tok
!= TOK_CINT
) {
4868 tcc_error("%s only takes positive integers",
4869 tok1
== TOK_builtin_return_address
?
4870 "__builtin_return_address" :
4871 "__builtin_frame_address");
4873 level
= (uint32_t)tokc
.i
;
4878 vset(&type
, VT_LOCAL
, 0); /* local frame */
4880 mk_pointer(&vtop
->type
);
4881 indir(); /* -> parent frame */
4883 if (tok1
== TOK_builtin_return_address
) {
4884 // assume return address is just above frame pointer on stack
4887 mk_pointer(&vtop
->type
);
4892 #ifdef TCC_TARGET_X86_64
4893 #ifdef TCC_TARGET_PE
4894 case TOK_builtin_va_start
:
4895 parse_builtin_params(0, "ee");
4896 r
= vtop
->r
& VT_VALMASK
;
4900 tcc_error("__builtin_va_start expects a local variable");
4902 vtop
->type
= char_pointer_type
;
4907 case TOK_builtin_va_arg_types
:
4908 parse_builtin_params(0, "t");
4909 vpushi(classify_x86_64_va_arg(&vtop
->type
));
4916 #ifdef TCC_TARGET_ARM64
4917 case TOK___va_start
: {
4918 parse_builtin_params(0, "ee");
4922 vtop
->type
.t
= VT_VOID
;
4925 case TOK___va_arg
: {
4926 parse_builtin_params(0, "et");
4934 case TOK___arm64_clear_cache
: {
4935 parse_builtin_params(0, "ee");
4938 vtop
->type
.t
= VT_VOID
;
4942 /* pre operations */
4953 t
= vtop
->type
.t
& VT_BTYPE
;
4955 /* In IEEE negate(x) isn't subtract(0,x), but rather
4959 vtop
->c
.f
= -1.0 * 0.0;
4960 else if (t
== VT_DOUBLE
)
4961 vtop
->c
.d
= -1.0 * 0.0;
4963 vtop
->c
.ld
= -1.0 * 0.0;
4971 goto tok_identifier
;
4973 /* allow to take the address of a label */
4974 if (tok
< TOK_UIDENT
)
4975 expect("label identifier");
4976 s
= label_find(tok
);
4978 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
4980 if (s
->r
== LABEL_DECLARED
)
4981 s
->r
= LABEL_FORWARD
;
4984 s
->type
.t
= VT_VOID
;
4985 mk_pointer(&s
->type
);
4986 s
->type
.t
|= VT_STATIC
;
4988 vpushsym(&s
->type
, s
);
4994 CType controlling_type
;
4995 int has_default
= 0;
4998 TokenString
*str
= NULL
;
5002 expr_type(&controlling_type
, expr_eq
);
5003 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5007 if (tok
== TOK_DEFAULT
) {
5009 tcc_error("too many 'default'");
5015 AttributeDef ad_tmp
;
5018 parse_btype(&cur_type
, &ad_tmp
);
5019 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5020 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5022 tcc_error("type match twice");
5032 skip_or_save_block(&str
);
5034 skip_or_save_block(NULL
);
5041 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5042 tcc_error("type '%s' does not match any association", buf
);
5044 begin_macro(str
, 1);
5053 // special qnan , snan and infinity values
5058 vtop
->type
.t
= VT_FLOAT
;
5063 goto special_math_val
;
5066 goto special_math_val
;
5073 expect("identifier");
5075 if (!s
|| IS_ASM_SYM(s
)) {
5076 const char *name
= get_tok_str(t
, NULL
);
5078 tcc_error("'%s' undeclared", name
);
5079 /* for simple function calls, we tolerate undeclared
5080 external reference to int() function */
5081 if (tcc_state
->warn_implicit_function_declaration
5082 #ifdef TCC_TARGET_PE
5083 /* people must be warned about using undeclared WINAPI functions
5084 (which usually start with uppercase letter) */
5085 || (name
[0] >= 'A' && name
[0] <= 'Z')
5088 tcc_warning("implicit declaration of function '%s'", name
);
5089 s
= external_global_sym(t
, &func_old_type
, 0);
5093 /* A symbol that has a register is a local register variable,
5094 which starts out as VT_LOCAL value. */
5095 if ((r
& VT_VALMASK
) < VT_CONST
)
5096 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5098 vset(&s
->type
, r
, s
->c
);
5099 /* Point to s as backpointer (even without r&VT_SYM).
5100 Will be used by at least the x86 inline asm parser for
5106 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5107 vtop
->c
.i
= s
->enum_val
;
5112 /* post operations */
5114 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5117 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5120 if (tok
== TOK_ARROW
)
5122 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5125 /* expect pointer on structure */
5126 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5127 expect("struct or union");
5128 if (tok
== TOK_CDOUBLE
)
5129 expect("field name");
5131 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5132 expect("field name");
5133 s
= find_field(&vtop
->type
, tok
);
5135 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5136 /* add field offset to pointer */
5137 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5140 /* change type to field type, and set to lvalue */
5141 vtop
->type
= s
->type
;
5142 vtop
->type
.t
|= qualifiers
;
5143 /* an array is never an lvalue */
5144 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5145 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5146 #ifdef CONFIG_TCC_BCHECK
5147 /* if bound checking, the referenced pointer must be checked */
5148 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5149 vtop
->r
|= VT_MUSTBOUND
;
5153 } else if (tok
== '[') {
5159 } else if (tok
== '(') {
5162 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5165 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5166 /* pointer test (no array accepted) */
5167 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5168 vtop
->type
= *pointed_type(&vtop
->type
);
5169 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5173 expect("function pointer");
5176 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5178 /* get return type */
5181 sa
= s
->next
; /* first parameter */
5182 nb_args
= regsize
= 0;
5184 /* compute first implicit argument if a structure is returned */
5185 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5186 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5187 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5188 &ret_align
, ®size
);
5190 /* get some space for the returned structure */
5191 size
= type_size(&s
->type
, &align
);
5192 #ifdef TCC_TARGET_ARM64
5193 /* On arm64, a small struct is return in registers.
5194 It is much easier to write it to memory if we know
5195 that we are allowed to write some extra bytes, so
5196 round the allocated space up to a power of 2: */
5198 while (size
& (size
- 1))
5199 size
= (size
| (size
- 1)) + 1;
5201 loc
= (loc
- size
) & -align
;
5203 ret
.r
= VT_LOCAL
| VT_LVAL
;
5204 /* pass it as 'int' to avoid structure arg passing
5206 vseti(VT_LOCAL
, loc
);
5216 /* return in register */
5217 if (is_float(ret
.type
.t
)) {
5218 ret
.r
= reg_fret(ret
.type
.t
);
5219 #ifdef TCC_TARGET_X86_64
5220 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5224 #ifndef TCC_TARGET_ARM64
5225 #ifdef TCC_TARGET_X86_64
5226 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5228 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5239 gfunc_param_typed(s
, sa
);
5249 tcc_error("too few arguments to function");
5251 gfunc_call(nb_args
);
5254 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5255 vsetc(&ret
.type
, r
, &ret
.c
);
5256 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5259 /* handle packed struct return */
5260 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5263 size
= type_size(&s
->type
, &align
);
5264 /* We're writing whole regs often, make sure there's enough
5265 space. Assume register size is power of 2. */
5266 if (regsize
> align
)
5268 loc
= (loc
- size
) & -align
;
5272 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5276 if (--ret_nregs
== 0)
5280 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5288 ST_FUNC
void expr_prod(void)
5293 while (tok
== '*' || tok
== '/' || tok
== '%') {
5301 ST_FUNC
void expr_sum(void)
5306 while (tok
== '+' || tok
== '-') {
5314 static void expr_shift(void)
5319 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5327 static void expr_cmp(void)
5332 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5333 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5341 static void expr_cmpeq(void)
5346 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5354 static void expr_and(void)
5357 while (tok
== '&') {
5364 static void expr_xor(void)
5367 while (tok
== '^') {
5374 static void expr_or(void)
5377 while (tok
== '|') {
5384 static void expr_land(void)
5387 if (tok
== TOK_LAND
) {
5390 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5391 gen_cast_s(VT_BOOL
);
5396 while (tok
== TOK_LAND
) {
5412 if (tok
!= TOK_LAND
) {
5425 static void expr_lor(void)
5428 if (tok
== TOK_LOR
) {
5431 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5432 gen_cast_s(VT_BOOL
);
5437 while (tok
== TOK_LOR
) {
5453 if (tok
!= TOK_LOR
) {
5466 /* Assuming vtop is a value used in a conditional context
5467 (i.e. compared with zero) return 0 if it's false, 1 if
5468 true and -1 if it can't be statically determined. */
5469 static int condition_3way(void)
5472 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5473 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5475 gen_cast_s(VT_BOOL
);
5482 static void expr_cond(void)
5484 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5486 CType type
, type1
, type2
;
5491 c
= condition_3way();
5492 g
= (tok
== ':' && gnu_ext
);
5494 /* needed to avoid having different registers saved in
5496 if (is_float(vtop
->type
.t
)) {
5498 #ifdef TCC_TARGET_X86_64
5499 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5524 sv
= *vtop
; /* save value to handle it later */
5525 vtop
--; /* no vpop so that FP stack is not flushed */
5543 bt1
= t1
& VT_BTYPE
;
5545 bt2
= t2
& VT_BTYPE
;
5548 /* cast operands to correct type according to ISOC rules */
5549 if (is_float(bt1
) || is_float(bt2
)) {
5550 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5551 type
.t
= VT_LDOUBLE
;
5553 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5558 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5559 /* cast to biggest op */
5560 type
.t
= VT_LLONG
| VT_LONG
;
5561 if (bt1
== VT_LLONG
)
5563 if (bt2
== VT_LLONG
)
5565 /* convert to unsigned if it does not fit in a long long */
5566 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5567 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5568 type
.t
|= VT_UNSIGNED
;
5569 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5570 /* If one is a null ptr constant the result type
5572 if (is_null_pointer (vtop
))
5574 else if (is_null_pointer (&sv
))
5576 /* XXX: test pointer compatibility, C99 has more elaborate
5580 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
5581 /* XXX: test function pointer compatibility */
5582 type
= bt1
== VT_FUNC
? type1
: type2
;
5583 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5584 /* XXX: test structure compatibility */
5585 type
= bt1
== VT_STRUCT
? type1
: type2
;
5586 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5587 /* NOTE: as an extension, we accept void on only one side */
5590 /* integer operations */
5591 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5592 /* convert to unsigned if it does not fit in an integer */
5593 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5594 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5595 type
.t
|= VT_UNSIGNED
;
5597 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5598 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5599 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5602 /* now we convert second operand */
5606 mk_pointer(&vtop
->type
);
5608 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5613 if (is_float(type
.t
)) {
5615 #ifdef TCC_TARGET_X86_64
5616 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5620 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5621 /* for long longs, we use fixed registers to avoid having
5622 to handle a complicated move */
5633 /* this is horrible, but we must also convert first
5639 mk_pointer(&vtop
->type
);
5641 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5647 move_reg(r2
, r1
, type
.t
);
5657 static void expr_eq(void)
5663 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5664 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5665 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5680 ST_FUNC
void gexpr(void)
5691 /* parse a constant expression and return value in vtop. */
5692 static void expr_const1(void)
5701 /* parse an integer constant and return its value. */
5702 static inline int64_t expr_const64(void)
5706 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5707 expect("constant expression");
5713 /* parse an integer constant and return its value.
5714 Complain if it doesn't fit 32bit (signed or unsigned). */
5715 ST_FUNC
int expr_const(void)
5718 int64_t wc
= expr_const64();
5720 if (c
!= wc
&& (unsigned)c
!= wc
)
5721 tcc_error("constant exceeds 32 bit");
5725 /* return the label token if current token is a label, otherwise
5727 static int is_label(void)
5731 /* fast test first */
5732 if (tok
< TOK_UIDENT
)
5734 /* no need to save tokc because tok is an identifier */
5740 unget_tok(last_tok
);
5745 #ifndef TCC_TARGET_ARM64
5746 static void gfunc_return(CType
*func_type
)
5748 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5749 CType type
, ret_type
;
5750 int ret_align
, ret_nregs
, regsize
;
5751 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5752 &ret_align
, ®size
);
5753 if (0 == ret_nregs
) {
5754 /* if returning structure, must copy it to implicit
5755 first pointer arg location */
5758 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5761 /* copy structure value to pointer */
5764 /* returning structure packed into registers */
5765 int r
, size
, addr
, align
;
5766 size
= type_size(func_type
,&align
);
5767 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5768 (vtop
->c
.i
& (ret_align
-1)))
5769 && (align
& (ret_align
-1))) {
5770 loc
= (loc
- size
) & -ret_align
;
5773 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5777 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5779 vtop
->type
= ret_type
;
5780 if (is_float(ret_type
.t
))
5781 r
= rc_fret(ret_type
.t
);
5792 if (--ret_nregs
== 0)
5794 /* We assume that when a structure is returned in multiple
5795 registers, their classes are consecutive values of the
5798 vtop
->c
.i
+= regsize
;
5802 } else if (is_float(func_type
->t
)) {
5803 gv(rc_fret(func_type
->t
));
5807 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5811 static int case_cmp(const void *pa
, const void *pb
)
5813 int64_t a
= (*(struct case_t
**) pa
)->v1
;
5814 int64_t b
= (*(struct case_t
**) pb
)->v1
;
5815 return a
< b
? -1 : a
> b
;
5818 static void gcase(struct case_t
**base
, int len
, int *bsym
)
5822 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
5840 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
5842 gcase(base
, len
/2, bsym
);
5843 if (cur_switch
->def_sym
)
5844 gjmp_addr(cur_switch
->def_sym
);
5846 *bsym
= gjmp(*bsym
);
5850 base
+= e
; len
-= e
;
5860 if (p
->v1
== p
->v2
) {
5862 gtst_addr(0, p
->sym
);
5872 gtst_addr(0, p
->sym
);
5878 static void block(int *bsym
, int *csym
, int is_expr
)
5880 int a
, b
, c
, d
, cond
;
5883 /* generate line number info */
5884 if (tcc_state
->do_debug
)
5885 tcc_debug_line(tcc_state
);
5888 /* default return value is (void) */
5890 vtop
->type
.t
= VT_VOID
;
5893 if (tok
== TOK_IF
) {
5895 int saved_nocode_wanted
= nocode_wanted
;
5900 cond
= condition_3way();
5906 nocode_wanted
|= 0x20000000;
5907 block(bsym
, csym
, 0);
5909 nocode_wanted
= saved_nocode_wanted
;
5911 if (c
== TOK_ELSE
) {
5916 nocode_wanted
|= 0x20000000;
5917 block(bsym
, csym
, 0);
5918 gsym(d
); /* patch else jmp */
5920 nocode_wanted
= saved_nocode_wanted
;
5923 } else if (tok
== TOK_WHILE
) {
5924 int saved_nocode_wanted
;
5925 nocode_wanted
&= ~0x20000000;
5935 saved_nocode_wanted
= nocode_wanted
;
5937 nocode_wanted
= saved_nocode_wanted
;
5942 } else if (tok
== '{') {
5944 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
5947 /* record local declaration stack position */
5949 llabel
= local_label_stack
;
5952 /* handle local labels declarations */
5953 if (tok
== TOK_LABEL
) {
5956 if (tok
< TOK_UIDENT
)
5957 expect("label identifier");
5958 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
5968 while (tok
!= '}') {
5969 if ((a
= is_label()))
5976 block(bsym
, csym
, is_expr
);
5979 /* pop locally defined labels */
5980 label_pop(&local_label_stack
, llabel
, is_expr
);
5981 /* pop locally defined symbols */
5983 /* In the is_expr case (a statement expression is finished here),
5984 vtop might refer to symbols on the local_stack. Either via the
5985 type or via vtop->sym. We can't pop those nor any that in turn
5986 might be referred to. To make it easier we don't roll back
5987 any symbols in that case; some upper level call to block() will
5988 do that. We do have to remove such symbols from the lookup
5989 tables, though. sym_pop will do that. */
5990 sym_pop(&local_stack
, s
, is_expr
);
5992 /* Pop VLA frames and restore stack pointer if required */
5993 if (vlas_in_scope
> saved_vlas_in_scope
) {
5994 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
5997 vlas_in_scope
= saved_vlas_in_scope
;
6000 } else if (tok
== TOK_RETURN
) {
6004 gen_assign_cast(&func_vt
);
6005 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6008 gfunc_return(&func_vt
);
6011 /* jump unless last stmt in top-level block */
6012 if (tok
!= '}' || local_scope
!= 1)
6014 nocode_wanted
|= 0x20000000;
6015 } else if (tok
== TOK_BREAK
) {
6018 tcc_error("cannot break");
6019 *bsym
= gjmp(*bsym
);
6022 nocode_wanted
|= 0x20000000;
6023 } else if (tok
== TOK_CONTINUE
) {
6026 tcc_error("cannot continue");
6027 vla_sp_restore_root();
6028 *csym
= gjmp(*csym
);
6031 } else if (tok
== TOK_FOR
) {
6033 int saved_nocode_wanted
;
6034 nocode_wanted
&= ~0x20000000;
6040 /* c99 for-loop init decl? */
6041 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6042 /* no, regular for-loop init expr */
6068 saved_nocode_wanted
= nocode_wanted
;
6070 nocode_wanted
= saved_nocode_wanted
;
6075 sym_pop(&local_stack
, s
, 0);
6078 if (tok
== TOK_DO
) {
6079 int saved_nocode_wanted
;
6080 nocode_wanted
&= ~0x20000000;
6086 saved_nocode_wanted
= nocode_wanted
;
6094 nocode_wanted
= saved_nocode_wanted
;
6099 if (tok
== TOK_SWITCH
) {
6100 struct switch_t
*saved
, sw
;
6101 int saved_nocode_wanted
= nocode_wanted
;
6107 switchval
= *vtop
--;
6109 b
= gjmp(0); /* jump to first case */
6110 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6114 nocode_wanted
= saved_nocode_wanted
;
6115 a
= gjmp(a
); /* add implicit break */
6118 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6119 for (b
= 1; b
< sw
.n
; b
++)
6120 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6121 tcc_error("duplicate case value");
6122 /* Our switch table sorting is signed, so the compared
6123 value needs to be as well when it's 64bit. */
6124 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6125 switchval
.type
.t
&= ~VT_UNSIGNED
;
6127 gcase(sw
.p
, sw
.n
, &a
);
6130 gjmp_addr(sw
.def_sym
);
6131 dynarray_reset(&sw
.p
, &sw
.n
);
6136 if (tok
== TOK_CASE
) {
6137 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6140 nocode_wanted
&= ~0x20000000;
6142 cr
->v1
= cr
->v2
= expr_const64();
6143 if (gnu_ext
&& tok
== TOK_DOTS
) {
6145 cr
->v2
= expr_const64();
6146 if (cr
->v2
< cr
->v1
)
6147 tcc_warning("empty case range");
6150 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6153 goto block_after_label
;
6155 if (tok
== TOK_DEFAULT
) {
6160 if (cur_switch
->def_sym
)
6161 tcc_error("too many 'default'");
6162 cur_switch
->def_sym
= ind
;
6164 goto block_after_label
;
6166 if (tok
== TOK_GOTO
) {
6168 if (tok
== '*' && gnu_ext
) {
6172 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6175 } else if (tok
>= TOK_UIDENT
) {
6176 s
= label_find(tok
);
6177 /* put forward definition if needed */
6179 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6181 if (s
->r
== LABEL_DECLARED
)
6182 s
->r
= LABEL_FORWARD
;
6184 vla_sp_restore_root();
6185 if (s
->r
& LABEL_FORWARD
)
6186 s
->jnext
= gjmp(s
->jnext
);
6188 gjmp_addr(s
->jnext
);
6191 expect("label identifier");
6194 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
6203 if (s
->r
== LABEL_DEFINED
)
6204 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6206 s
->r
= LABEL_DEFINED
;
6208 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
6212 /* we accept this, but it is a mistake */
6214 nocode_wanted
&= ~0x20000000;
6216 tcc_warning("deprecated use of label at end of compound statement");
6220 block(bsym
, csym
, is_expr
);
6223 /* expression case */
6238 /* This skips over a stream of tokens containing balanced {} and ()
6239 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6240 with a '{'). If STR then allocates and stores the skipped tokens
6241 in *STR. This doesn't check if () and {} are nested correctly,
6242 i.e. "({)}" is accepted. */
6243 static void skip_or_save_block(TokenString
**str
)
6245 int braces
= tok
== '{';
6248 *str
= tok_str_alloc();
6250 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6252 if (tok
== TOK_EOF
) {
6253 if (str
|| level
> 0)
6254 tcc_error("unexpected end of file");
6259 tok_str_add_tok(*str
);
6262 if (t
== '{' || t
== '(') {
6264 } else if (t
== '}' || t
== ')') {
6266 if (level
== 0 && braces
&& t
== '}')
6271 tok_str_add(*str
, -1);
6272 tok_str_add(*str
, 0);
6276 #define EXPR_CONST 1
6279 static void parse_init_elem(int expr_type
)
6281 int saved_global_expr
;
6284 /* compound literals must be allocated globally in this case */
6285 saved_global_expr
= global_expr
;
6288 global_expr
= saved_global_expr
;
6289 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6290 (compound literals). */
6291 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6292 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6293 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6294 #ifdef TCC_TARGET_PE
6295 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6298 tcc_error("initializer element is not constant");
6306 /* put zeros for variable based init */
6307 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6310 /* nothing to do because globals are already set to zero */
6312 vpush_global_sym(&func_old_type
, TOK_memset
);
6314 #ifdef TCC_TARGET_ARM
6325 /* t is the array or struct type. c is the array or struct
6326 address. cur_field is the pointer to the current
6327 field, for arrays the 'c' member contains the current start
6328 index. 'size_only' is true if only size info is needed (only used
6329 in arrays). al contains the already initialized length of the
6330 current container (starting at c). This returns the new length of that. */
6331 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6332 Sym
**cur_field
, int size_only
, int al
)
6335 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6336 unsigned long corig
= c
;
6340 if (gnu_ext
&& (l
= is_label()) != 0)
6342 /* NOTE: we only support ranges for last designator */
6343 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6345 if (!(type
->t
& VT_ARRAY
))
6346 expect("array type");
6348 index
= index_last
= expr_const();
6349 if (tok
== TOK_DOTS
&& gnu_ext
) {
6351 index_last
= expr_const();
6355 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6357 tcc_error("invalid index");
6359 (*cur_field
)->c
= index_last
;
6360 type
= pointed_type(type
);
6361 elem_size
= type_size(type
, &align
);
6362 c
+= index
* elem_size
;
6363 nb_elems
= index_last
- index
+ 1;
6369 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6370 expect("struct/union type");
6371 f
= find_field(type
, l
);
6384 } else if (!gnu_ext
) {
6388 if (type
->t
& VT_ARRAY
) {
6389 index
= (*cur_field
)->c
;
6390 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6391 tcc_error("index too large");
6392 type
= pointed_type(type
);
6393 c
+= index
* type_size(type
, &align
);
6396 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6397 *cur_field
= f
= f
->next
;
6399 tcc_error("too many field init");
6404 /* must put zero in holes (note that doing it that way
6405 ensures that it even works with designators) */
6406 if (!size_only
&& c
- corig
> al
)
6407 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6408 decl_initializer(type
, sec
, c
, 0, size_only
);
6410 /* XXX: make it more general */
6411 if (!size_only
&& nb_elems
> 1) {
6412 unsigned long c_end
;
6417 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6418 for (i
= 1; i
< nb_elems
; i
++) {
6419 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6424 } else if (!NODATA_WANTED
) {
6425 c_end
= c
+ nb_elems
* elem_size
;
6426 if (c_end
> sec
->data_allocated
)
6427 section_realloc(sec
, c_end
);
6428 src
= sec
->data
+ c
;
6430 for(i
= 1; i
< nb_elems
; i
++) {
6432 memcpy(dst
, src
, elem_size
);
6436 c
+= nb_elems
* type_size(type
, &align
);
6442 /* store a value or an expression directly in global data or in local array */
6443 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6450 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6454 /* XXX: not portable */
6455 /* XXX: generate error if incorrect relocation */
6456 gen_assign_cast(&dtype
);
6457 bt
= type
->t
& VT_BTYPE
;
6459 if ((vtop
->r
& VT_SYM
)
6462 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6463 || (type
->t
& VT_BITFIELD
))
6464 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6466 tcc_error("initializer element is not computable at load time");
6468 if (NODATA_WANTED
) {
6473 size
= type_size(type
, &align
);
6474 section_reserve(sec
, c
+ size
);
6475 ptr
= sec
->data
+ c
;
6477 /* XXX: make code faster ? */
6478 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6479 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6480 /* XXX This rejects compound literals like
6481 '(void *){ptr}'. The problem is that '&sym' is
6482 represented the same way, which would be ruled out
6483 by the SYM_FIRST_ANOM check above, but also '"string"'
6484 in 'char *p = "string"' is represented the same
6485 with the type being VT_PTR and the symbol being an
6486 anonymous one. That is, there's no difference in vtop
6487 between '(void *){x}' and '&(void *){x}'. Ignore
6488 pointer typed entities here. Hopefully no real code
6489 will every use compound literals with scalar type. */
6490 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6491 /* These come from compound literals, memcpy stuff over. */
6495 esym
= elfsym(vtop
->sym
);
6496 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6497 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6499 /* We need to copy over all memory contents, and that
6500 includes relocations. Use the fact that relocs are
6501 created it order, so look from the end of relocs
6502 until we hit one before the copied region. */
6503 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6504 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6505 while (num_relocs
--) {
6507 if (rel
->r_offset
>= esym
->st_value
+ size
)
6509 if (rel
->r_offset
< esym
->st_value
)
6511 /* Note: if the same fields are initialized multiple
6512 times (possible with designators) then we possibly
6513 add multiple relocations for the same offset here.
6514 That would lead to wrong code, the last reloc needs
6515 to win. We clean this up later after the whole
6516 initializer is parsed. */
6517 put_elf_reloca(symtab_section
, sec
,
6518 c
+ rel
->r_offset
- esym
->st_value
,
6519 ELFW(R_TYPE
)(rel
->r_info
),
6520 ELFW(R_SYM
)(rel
->r_info
),
6530 if (type
->t
& VT_BITFIELD
) {
6531 int bit_pos
, bit_size
, bits
, n
;
6532 unsigned char *p
, v
, m
;
6533 bit_pos
= BIT_POS(vtop
->type
.t
);
6534 bit_size
= BIT_SIZE(vtop
->type
.t
);
6535 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6536 bit_pos
&= 7, bits
= 0;
6541 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6542 m
= ((1 << n
) - 1) << bit_pos
;
6543 *p
= (*p
& ~m
) | (v
& m
);
6544 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6548 /* XXX: when cross-compiling we assume that each type has the
6549 same representation on host and target, which is likely to
6550 be wrong in the case of long double */
6552 vtop
->c
.i
= vtop
->c
.i
!= 0;
6554 *(char *)ptr
|= vtop
->c
.i
;
6557 *(short *)ptr
|= vtop
->c
.i
;
6560 *(float*)ptr
= vtop
->c
.f
;
6563 *(double *)ptr
= vtop
->c
.d
;
6566 #if defined TCC_IS_NATIVE_387
6567 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6568 memcpy(ptr
, &vtop
->c
.ld
, 10);
6570 else if (sizeof (long double) == sizeof (double))
6571 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
6573 else if (vtop
->c
.ld
== 0.0)
6577 if (sizeof(long double) == LDOUBLE_SIZE
)
6578 *(long double*)ptr
= vtop
->c
.ld
;
6579 else if (sizeof(double) == LDOUBLE_SIZE
)
6580 *(double *)ptr
= (double)vtop
->c
.ld
;
6582 tcc_error("can't cross compile long double constants");
6586 *(long long *)ptr
|= vtop
->c
.i
;
6593 addr_t val
= vtop
->c
.i
;
6595 if (vtop
->r
& VT_SYM
)
6596 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6598 *(addr_t
*)ptr
|= val
;
6600 if (vtop
->r
& VT_SYM
)
6601 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6602 *(addr_t
*)ptr
|= val
;
6608 int val
= vtop
->c
.i
;
6610 if (vtop
->r
& VT_SYM
)
6611 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6615 if (vtop
->r
& VT_SYM
)
6616 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6625 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6632 /* 't' contains the type and storage info. 'c' is the offset of the
6633 object in section 'sec'. If 'sec' is NULL, it means stack based
6634 allocation. 'first' is true if array '{' must be read (multi
6635 dimension implicit array init handling). 'size_only' is true if
6636 size only evaluation is wanted (only for arrays). */
6637 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6638 int first
, int size_only
)
6640 int len
, n
, no_oblock
, nb
, i
;
6647 /* If we currently are at an '}' or ',' we have read an initializer
6648 element in one of our callers, and not yet consumed it. */
6649 have_elem
= tok
== '}' || tok
== ',';
6650 if (!have_elem
&& tok
!= '{' &&
6651 /* In case of strings we have special handling for arrays, so
6652 don't consume them as initializer value (which would commit them
6653 to some anonymous symbol). */
6654 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6656 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6661 !(type
->t
& VT_ARRAY
) &&
6662 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6663 The source type might have VT_CONSTANT set, which is
6664 of course assignable to non-const elements. */
6665 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6666 init_putv(type
, sec
, c
);
6667 } else if (type
->t
& VT_ARRAY
) {
6670 t1
= pointed_type(type
);
6671 size1
= type_size(t1
, &align1
);
6674 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6677 tcc_error("character array initializer must be a literal,"
6678 " optionally enclosed in braces");
6683 /* only parse strings here if correct type (otherwise: handle
6684 them as ((w)char *) expressions */
6685 if ((tok
== TOK_LSTR
&&
6686 #ifdef TCC_TARGET_PE
6687 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6689 (t1
->t
& VT_BTYPE
) == VT_INT
6691 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6693 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6696 /* compute maximum number of chars wanted */
6698 cstr_len
= tokc
.str
.size
;
6700 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6703 if (n
>= 0 && nb
> (n
- len
))
6707 tcc_warning("initializer-string for array is too long");
6708 /* in order to go faster for common case (char
6709 string in global variable, we handle it
6711 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6713 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
6717 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6719 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6721 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
6728 /* only add trailing zero if enough storage (no
6729 warning in this case since it is standard) */
6730 if (n
< 0 || len
< n
) {
6733 init_putv(t1
, sec
, c
+ (len
* size1
));
6744 while (tok
!= '}' || have_elem
) {
6745 len
= decl_designator(type
, sec
, c
, &f
, size_only
, len
);
6747 if (type
->t
& VT_ARRAY
) {
6749 /* special test for multi dimensional arrays (may not
6750 be strictly correct if designators are used at the
6752 if (no_oblock
&& len
>= n
*size1
)
6755 if (s
->type
.t
== VT_UNION
)
6759 if (no_oblock
&& f
== NULL
)
6768 /* put zeros at the end */
6769 if (!size_only
&& len
< n
*size1
)
6770 init_putz(sec
, c
+ len
, n
*size1
- len
);
6773 /* patch type size if needed, which happens only for array types */
6775 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
6776 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6779 if (first
|| tok
== '{') {
6787 } else if (tok
== '{') {
6789 decl_initializer(type
, sec
, c
, first
, size_only
);
6791 } else if (size_only
) {
6792 /* If we supported only ISO C we wouldn't have to accept calling
6793 this on anything than an array size_only==1 (and even then
6794 only on the outermost level, so no recursion would be needed),
6795 because initializing a flex array member isn't supported.
6796 But GNU C supports it, so we need to recurse even into
6797 subfields of structs and arrays when size_only is set. */
6798 /* just skip expression */
6799 skip_or_save_block(NULL
);
6802 /* This should happen only when we haven't parsed
6803 the init element above for fear of committing a
6804 string constant to memory too early. */
6805 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
6806 expect("string constant");
6807 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6809 init_putv(type
, sec
, c
);
6813 /* parse an initializer for type 't' if 'has_init' is non zero, and
6814 allocate space in local or global data space ('r' is either
6815 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6816 variable 'v' of scope 'scope' is declared before initializers
6817 are parsed. If 'v' is zero, then a reference to the new object
6818 is put in the value stack. If 'has_init' is 2, a special parsing
6819 is done to handle string constants. */
6820 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
6821 int has_init
, int v
, int scope
)
6823 int size
, align
, addr
;
6824 TokenString
*init_str
= NULL
;
6827 Sym
*flexible_array
;
6829 int saved_nocode_wanted
= nocode_wanted
;
6830 #ifdef CONFIG_TCC_BCHECK
6831 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
6834 if (type
->t
& VT_STATIC
)
6835 nocode_wanted
|= NODATA_WANTED
? 0x40000000 : 0x80000000;
6837 flexible_array
= NULL
;
6838 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6839 Sym
*field
= type
->ref
->next
;
6842 field
= field
->next
;
6843 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
6844 flexible_array
= field
;
6848 size
= type_size(type
, &align
);
6849 /* If unknown size, we must evaluate it before
6850 evaluating initializers because
6851 initializers can generate global data too
6852 (e.g. string pointers or ISOC99 compound
6853 literals). It also simplifies local
6854 initializers handling */
6855 if (size
< 0 || (flexible_array
&& has_init
)) {
6857 tcc_error("unknown type size");
6858 /* get all init string */
6859 if (has_init
== 2) {
6860 init_str
= tok_str_alloc();
6861 /* only get strings */
6862 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6863 tok_str_add_tok(init_str
);
6866 tok_str_add(init_str
, -1);
6867 tok_str_add(init_str
, 0);
6869 skip_or_save_block(&init_str
);
6874 begin_macro(init_str
, 1);
6876 decl_initializer(type
, NULL
, 0, 1, 1);
6877 /* prepare second initializer parsing */
6878 macro_ptr
= init_str
->str
;
6881 /* if still unknown size, error */
6882 size
= type_size(type
, &align
);
6884 tcc_error("unknown type size");
6886 /* If there's a flex member and it was used in the initializer
6888 if (flexible_array
&&
6889 flexible_array
->type
.ref
->c
> 0)
6890 size
+= flexible_array
->type
.ref
->c
6891 * pointed_size(&flexible_array
->type
);
6892 /* take into account specified alignment if bigger */
6893 if (ad
->a
.aligned
) {
6894 int speca
= 1 << (ad
->a
.aligned
- 1);
6897 } else if (ad
->a
.packed
) {
6902 size
= 0, align
= 1;
6904 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
6906 #ifdef CONFIG_TCC_BCHECK
6907 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
6911 loc
= (loc
- size
) & -align
;
6913 #ifdef CONFIG_TCC_BCHECK
6914 /* handles bounds */
6915 /* XXX: currently, since we do only one pass, we cannot track
6916 '&' operators, so we add only arrays */
6917 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
6919 /* add padding between regions */
6921 /* then add local bound info */
6922 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
6923 bounds_ptr
[0] = addr
;
6924 bounds_ptr
[1] = size
;
6928 /* local variable */
6929 #ifdef CONFIG_TCC_ASM
6930 if (ad
->asm_label
) {
6931 int reg
= asm_parse_regvar(ad
->asm_label
);
6933 r
= (r
& ~VT_VALMASK
) | reg
;
6936 sym
= sym_push(v
, type
, r
, addr
);
6939 /* push local reference */
6940 vset(type
, r
, addr
);
6943 if (v
&& scope
== VT_CONST
) {
6944 /* see if the symbol was already defined */
6947 patch_storage(sym
, ad
, type
);
6948 /* we accept several definitions of the same global variable. */
6949 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
6954 /* allocate symbol in corresponding section */
6959 else if (tcc_state
->nocommon
)
6964 addr
= section_add(sec
, size
, align
);
6965 #ifdef CONFIG_TCC_BCHECK
6966 /* add padding if bound check */
6968 section_add(sec
, 1, 1);
6971 addr
= align
; /* SHN_COMMON is special, symbol value is align */
6972 sec
= common_section
;
6977 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
6978 patch_storage(sym
, ad
, NULL
);
6980 /* Local statics have a scope until now (for
6981 warnings), remove it here. */
6983 /* update symbol definition */
6984 put_extern_sym(sym
, sec
, addr
, size
);
6986 /* push global reference */
6987 sym
= get_sym_ref(type
, sec
, addr
, size
);
6988 vpushsym(type
, sym
);
6992 #ifdef CONFIG_TCC_BCHECK
6993 /* handles bounds now because the symbol must be defined
6994 before for the relocation */
6998 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
6999 /* then add global bound info */
7000 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7001 bounds_ptr
[0] = 0; /* relocated */
7002 bounds_ptr
[1] = size
;
7007 if (type
->t
& VT_VLA
) {
7013 /* save current stack pointer */
7014 if (vlas_in_scope
== 0) {
7015 if (vla_sp_root_loc
== -1)
7016 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
7017 gen_vla_sp_save(vla_sp_root_loc
);
7020 vla_runtime_type_size(type
, &a
);
7021 gen_vla_alloc(type
, a
);
7022 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7023 /* on _WIN64, because of the function args scratch area, the
7024 result of alloca differs from RSP and is returned in RAX. */
7025 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7027 gen_vla_sp_save(addr
);
7031 } else if (has_init
) {
7032 size_t oldreloc_offset
= 0;
7033 if (sec
&& sec
->reloc
)
7034 oldreloc_offset
= sec
->reloc
->data_offset
;
7035 decl_initializer(type
, sec
, addr
, 1, 0);
7036 if (sec
&& sec
->reloc
)
7037 squeeze_multi_relocs(sec
, oldreloc_offset
);
7038 /* patch flexible array member size back to -1, */
7039 /* for possible subsequent similar declarations */
7041 flexible_array
->type
.ref
->c
= -1;
7045 /* restore parse state if needed */
7051 nocode_wanted
= saved_nocode_wanted
;
7054 /* parse a function defined by symbol 'sym' and generate its code in
7055 'cur_text_section' */
7056 static void gen_function(Sym
*sym
)
7059 ind
= cur_text_section
->data_offset
;
7060 /* NOTE: we patch the symbol size later */
7061 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7062 funcname
= get_tok_str(sym
->v
, NULL
);
7064 /* Initialize VLA state */
7066 vla_sp_root_loc
= -1;
7067 /* put debug symbol */
7068 tcc_debug_funcstart(tcc_state
, sym
);
7069 /* push a dummy symbol to enable local sym storage */
7070 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7071 local_scope
= 1; /* for function parameters */
7072 gfunc_prolog(&sym
->type
);
7075 block(NULL
, NULL
, 0);
7079 cur_text_section
->data_offset
= ind
;
7080 label_pop(&global_label_stack
, NULL
, 0);
7081 /* reset local stack */
7083 sym_pop(&local_stack
, NULL
, 0);
7084 /* end of function */
7085 /* patch symbol size */
7086 elfsym(sym
)->st_size
= ind
- func_ind
;
7087 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7088 /* It's better to crash than to generate wrong code */
7089 cur_text_section
= NULL
;
7090 funcname
= ""; /* for safety */
7091 func_vt
.t
= VT_VOID
; /* for safety */
7092 func_var
= 0; /* for safety */
7093 ind
= 0; /* for safety */
7094 nocode_wanted
= 0x80000000;
7098 static void gen_inline_functions(TCCState
*s
)
7101 int inline_generated
, i
, ln
;
7102 struct InlineFunc
*fn
;
7104 ln
= file
->line_num
;
7105 /* iterate while inline function are referenced */
7107 inline_generated
= 0;
7108 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7109 fn
= s
->inline_fns
[i
];
7111 if (sym
&& sym
->c
) {
7112 /* the function was used: generate its code and
7113 convert it to a normal function */
7116 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7117 sym
->type
.t
&= ~VT_INLINE
;
7119 begin_macro(fn
->func_str
, 1);
7121 cur_text_section
= text_section
;
7125 inline_generated
= 1;
7128 } while (inline_generated
);
7129 file
->line_num
= ln
;
7132 ST_FUNC
void free_inline_functions(TCCState
*s
)
7135 /* free tokens of unused inline functions */
7136 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7137 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7139 tok_str_free(fn
->func_str
);
7141 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7144 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7145 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7146 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7154 if (!parse_btype(&btype
, &ad
)) {
7155 if (is_for_loop_init
)
7157 /* skip redundant ';' if not in old parameter decl scope */
7158 if (tok
== ';' && l
!= VT_CMP
) {
7164 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7165 /* global asm block */
7169 if (tok
>= TOK_UIDENT
) {
7170 /* special test for old K&R protos without explicit int
7171 type. Only accepted when defining global data */
7175 expect("declaration");
7180 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7181 int v
= btype
.ref
->v
;
7182 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7183 tcc_warning("unnamed struct/union that defines no instances");
7187 if (IS_ENUM(btype
.t
)) {
7192 while (1) { /* iterate thru each declaration */
7194 /* If the base type itself was an array type of unspecified
7195 size (like in 'typedef int arr[]; arr x = {1};') then
7196 we will overwrite the unknown size by the real one for
7197 this decl. We need to unshare the ref symbol holding
7199 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7200 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7202 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
7206 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7207 printf("type = '%s'\n", buf
);
7210 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7211 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
7212 tcc_error("function without file scope cannot be static");
7214 /* if old style function prototype, we accept a
7217 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7218 decl0(VT_CMP
, 0, sym
);
7221 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7222 ad
.asm_label
= asm_label_instr();
7223 /* parse one last attribute list, after asm label */
7224 parse_attribute(&ad
);
7229 #ifdef TCC_TARGET_PE
7230 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7231 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7232 tcc_error("cannot have dll linkage with static or typedef");
7233 if (ad
.a
.dllimport
) {
7234 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7237 type
.t
|= VT_EXTERN
;
7243 tcc_error("cannot use local functions");
7244 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7245 expect("function definition");
7247 /* reject abstract declarators in function definition
7248 make old style params without decl have int type */
7250 while ((sym
= sym
->next
) != NULL
) {
7251 if (!(sym
->v
& ~SYM_FIELD
))
7252 expect("identifier");
7253 if (sym
->type
.t
== VT_VOID
)
7254 sym
->type
= int_type
;
7257 /* XXX: cannot do better now: convert extern line to static inline */
7258 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7259 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7261 /* put function symbol */
7262 sym
= external_global_sym(v
, &type
, 0);
7263 type
.t
&= ~VT_EXTERN
;
7264 patch_storage(sym
, &ad
, &type
);
7266 /* static inline functions are just recorded as a kind
7267 of macro. Their code will be emitted at the end of
7268 the compilation unit only if they are used */
7269 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7270 (VT_INLINE
| VT_STATIC
)) {
7271 struct InlineFunc
*fn
;
7272 const char *filename
;
7274 filename
= file
? file
->filename
: "";
7275 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7276 strcpy(fn
->filename
, filename
);
7278 skip_or_save_block(&fn
->func_str
);
7279 dynarray_add(&tcc_state
->inline_fns
,
7280 &tcc_state
->nb_inline_fns
, fn
);
7282 /* compute text section */
7283 cur_text_section
= ad
.section
;
7284 if (!cur_text_section
)
7285 cur_text_section
= text_section
;
7291 /* find parameter in function parameter list */
7292 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7293 if ((sym
->v
& ~SYM_FIELD
) == v
)
7295 tcc_error("declaration for parameter '%s' but no such parameter",
7296 get_tok_str(v
, NULL
));
7298 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7299 tcc_error("storage class specified for '%s'",
7300 get_tok_str(v
, NULL
));
7301 if (sym
->type
.t
!= VT_VOID
)
7302 tcc_error("redefinition of parameter '%s'",
7303 get_tok_str(v
, NULL
));
7304 convert_parameter_type(&type
);
7306 } else if (type
.t
& VT_TYPEDEF
) {
7307 /* save typedefed type */
7308 /* XXX: test storage specifiers ? */
7310 if (sym
&& sym
->sym_scope
== local_scope
) {
7311 if (!is_compatible_types(&sym
->type
, &type
)
7312 || !(sym
->type
.t
& VT_TYPEDEF
))
7313 tcc_error("incompatible redefinition of '%s'",
7314 get_tok_str(v
, NULL
));
7317 sym
= sym_push(v
, &type
, 0, 0);
7323 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7324 /* external function definition */
7325 /* specific case for func_call attribute */
7327 } else if (!(type
.t
& VT_ARRAY
)) {
7328 /* not lvalue if array */
7329 r
|= lvalue_type(type
.t
);
7331 has_init
= (tok
== '=');
7332 if (has_init
&& (type
.t
& VT_VLA
))
7333 tcc_error("variable length array cannot be initialized");
7334 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
7335 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7336 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7337 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7338 /* external variable or function */
7339 /* NOTE: as GCC, uninitialized global static
7340 arrays of null size are considered as
7342 type
.t
|= VT_EXTERN
;
7343 sym
= external_sym(v
, &type
, r
, &ad
);
7344 if (ad
.alias_target
) {
7347 alias_target
= sym_find(ad
.alias_target
);
7348 esym
= elfsym(alias_target
);
7350 tcc_error("unsupported forward __alias__ attribute");
7351 /* Local statics have a scope until now (for
7352 warnings), remove it here. */
7354 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7357 if (type
.t
& VT_STATIC
)
7363 else if (l
== VT_CONST
)
7364 /* uninitialized global variables may be overridden */
7365 type
.t
|= VT_EXTERN
;
7366 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7370 if (is_for_loop_init
)
7383 static void decl(int l
)
7388 /* ------------------------------------------------------------------------- */