/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   anon_sym: anonymous symbol index
*/
ST_DATA int rsym, anon_sym, ind, loc;

ST_DATA Sym *sym_free_first;
ST_DATA void **sym_pools;
ST_DATA int nb_sym_pools;

ST_DATA Sym *global_stack;
ST_DATA Sym *local_stack;
ST_DATA Sym *define_stack;
ST_DATA Sym *global_label_stack;
ST_DATA Sym *local_label_stack;
static int local_scope;

static int section_sym;

ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
ST_DATA int vla_sp_loc; /* pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */

ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;

ST_DATA int const_wanted; /* true if constant wanted */
ST_DATA int nocode_wanted; /* no code generation wanted */
#define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
#define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
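/* Note: 'nocode_wanted' doubles as a state word.  At file scope
   tccgen_compile() sets it to 0x80000000 (the sign bit), so code output is
   suppressed while NODATA_WANTED stays false and static initializers are
   still emitted; inside suppressed statement regions it holds a small
   positive count, and then both code and static data output are skipped. */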
ST_DATA int global_expr;  /* true if compound literals must be allocated globally (used during initializers parsing) */
ST_DATA CType func_vt; /* current function return type (used by return instruction) */
ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
ST_DATA const char *funcname;

ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;

ST_DATA struct switch_t {
    struct case_t {
        int64_t v1, v2;
        int sym;
    } **p; int n; /* list of case ranges */
    int def_sym; /* default symbol */
} *cur_switch; /* current switch */
/* ------------------------------------------------------------------------- */

static void gen_cast(CType *type);
static void gen_cast_s(int t);
static inline CType *pointed_type(CType *type);
static int is_compatible_types(CType *type1, CType *type2);
static int parse_btype(CType *type, AttributeDef *ad);
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
static void parse_expr_type(CType *type);
static void init_putv(CType *type, Section *sec, unsigned long c);
static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
static void block(int *bsym, int *csym, int is_expr);
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
static void decl(int l);
static int decl0(int l, int is_for_loop_init, Sym *);
static void expr_eq(void);
static void vla_runtime_type_size(CType *type, int *a);
static void vla_sp_restore(void);
static void vla_sp_restore_root(void);
static int is_compatible_unqualified_types(CType *type1, CType *type2);
static inline int64_t expr_const64(void);
static void vpush64(int ty, unsigned long long v);
static void vpush(CType *type);
static int gvtst(int inv, int t);
static void gen_inline_functions(TCCState *s);
static void skip_or_save_block(TokenString **str);
static void gv_dup(void);
ST_INLN int is_float(int t)
{
    int bt;
    bt = t & VT_BTYPE;
    return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
}
/* we use our own 'finite' function to avoid potential problems with
   non standard math libs */
/* XXX: endianness dependent */
ST_FUNC int ieee_finite(double d)
{
    int p[4];
    memcpy(p, &d, sizeof(double));
    return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
}
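/* How the test above works (little-endian layout, as the XXX notes): p[1]
   holds the sign, the 11 exponent bits and the top mantissa bits of the
   double.  OR-ing with 0x800fffff forces every non-exponent bit to 1, so
   adding 1 carries out of bit 31 exactly when the exponent field is all
   ones (infinity or NaN); bit 31 of the result is therefore 1 only for
   finite values. */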
/* compiling intel long double natively */
#if (defined __i386__ || defined __x86_64__) \
    && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
# define TCC_IS_NATIVE_387
#endif

ST_FUNC void test_lvalue(void)
{
    if (!(vtop->r & VT_LVAL))
        expect("lvalue");
}

ST_FUNC void check_vstack(void)
{
    if (pvtop != vtop)
        tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
}
/* ------------------------------------------------------------------------- */
/* vstack debugging aid */

void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
            lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
/* ------------------------------------------------------------------------- */
/* start of translation unit info */
ST_FUNC void tcc_debug_start(TCCState *s1)
{
    if (s1->do_debug) {
        char buf[512];

        /* file info: full path + filename */
        section_sym = put_elf_sym(symtab_section, 0, 0,
                                  ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
                                  text_section->sh_num, NULL);
        getcwd(buf, sizeof(buf));
#ifdef _WIN32
        normalize_slashes(buf);
#endif
        pstrcat(buf, sizeof(buf), "/");
        put_stabs_r(buf, N_SO, 0, 0,
                    text_section->data_offset, text_section, section_sym);
        put_stabs_r(file->filename, N_SO, 0, 0,
                    text_section->data_offset, text_section, section_sym);
    }

    /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
       symbols can be safely used */
    put_elf_sym(symtab_section, 0, 0,
                ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
                SHN_ABS, file->filename);
}
/* put end of translation unit info */
ST_FUNC void tcc_debug_end(TCCState *s1)
{
    if (!s1->do_debug)
        return;
    put_stabs_r(NULL, N_SO, 0, 0,
        text_section->data_offset, text_section, section_sym);
}
/* generate line number info */
ST_FUNC void tcc_debug_line(TCCState *s1)
{
    if (!s1->do_debug)
        return;
    if ((last_line_num != file->line_num || last_ind != ind)) {
        put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
        last_ind = ind;
        last_line_num = file->line_num;
    }
}
/* put function symbol */
ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
{
    char buf[512];

    if (!s1->do_debug)
        return;

    /* XXX: we put here a dummy type */
    snprintf(buf, sizeof(buf), "%s:%c1",
             funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
    put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
                cur_text_section, sym->c);
    /* //gr gdb wants a line at the function */
    put_stabn(N_SLINE, 0, file->line_num, 0);
}
/* put function size */
ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
{
    if (!s1->do_debug)
        return;
    put_stabn(N_FUN, 0, 0, size);
}
/* ------------------------------------------------------------------------- */
ST_FUNC int tccgen_compile(TCCState *s1)
{
    cur_text_section = NULL;
    anon_sym = SYM_FIRST_ANOM;
    nocode_wanted = 0x80000000;

    /* define some often used types */
    int_type.t = VT_INT;
    char_pointer_type.t = VT_BYTE;
    mk_pointer(&char_pointer_type);
#if PTR_SIZE == 4
    size_type.t = VT_INT | VT_UNSIGNED;
    ptrdiff_type.t = VT_INT;
#elif LONG_SIZE == 4
    size_type.t = VT_LLONG | VT_UNSIGNED;
    ptrdiff_type.t = VT_LLONG;
#else
    size_type.t = VT_LONG | VT_LLONG | VT_UNSIGNED;
    ptrdiff_type.t = VT_LONG | VT_LLONG;
#endif
    func_old_type.t = VT_FUNC;
    func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
    func_old_type.ref->f.func_call = FUNC_CDECL;
    func_old_type.ref->f.func_type = FUNC_OLD;

    tcc_debug_start(s1);

#ifdef TCC_TARGET_ARM
    arm_init(s1);
#endif

#ifdef INC_DEBUG
    printf("%s: **** new file\n", file->filename);
#endif

    parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
    next();
    decl(VT_CONST);
    gen_inline_functions(s1);
    check_vstack();
    /* end of translation unit info */
    tcc_debug_end(s1);
    return 0;
}
/* ------------------------------------------------------------------------- */
ST_FUNC ElfSym *elfsym(Sym *s)
{
    if (!s || !s->c)
        return NULL;
    return &((ElfSym *)symtab_section->data)[s->c];
}
/* apply storage attributes to Elf symbol */
ST_FUNC void update_storage(Sym *sym)
{
    ElfSym *esym;
    int sym_bind, old_sym_bind;

    esym = elfsym(sym);
    if (!esym)
        return;

    if (sym->a.visibility)
        esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
            | sym->a.visibility;

    if (sym->type.t & VT_STATIC)
        sym_bind = STB_LOCAL;
    else if (sym->a.weak)
        sym_bind = STB_WEAK;
    else
        sym_bind = STB_GLOBAL;
    old_sym_bind = ELFW(ST_BIND)(esym->st_info);
    if (sym_bind != old_sym_bind) {
        esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
    }

#ifdef TCC_TARGET_PE
    if (sym->a.dllimport)
        esym->st_other |= ST_PE_IMPORT;
    if (sym->a.dllexport)
        esym->st_other |= ST_PE_EXPORT;
#endif

#if 0
    printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
        get_tok_str(sym->v, NULL),
        sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
        sym->a.visibility,
        sym->a.dllexport,
        sym->a.dllimport
        );
#endif
}
/* ------------------------------------------------------------------------- */
/* update sym->c so that it points to an external symbol in section
   'section' with value 'value' */

ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
                             addr_t value, unsigned long size,
                             int can_add_underscore)
{
    int sym_type, sym_bind, info, other, t;
    ElfSym *esym;
    const char *name;
    char buf1[256];
#ifdef CONFIG_TCC_BCHECK
    char buf[32];
#endif

    if (!sym->c) {
        name = get_tok_str(sym->v, NULL);
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check) {
            /* XXX: avoid doing that for statics ? */
            /* if bound checking is activated, we change some function
               names by adding the "__bound" prefix */
            switch(sym->v) {
            /* XXX: we rely only on malloc hooks */
            case TOK_memcpy:
            case TOK_memmove:
            case TOK_memset:
            case TOK_strlen:
            case TOK_strcpy:
            case TOK_alloca:
                strcpy(buf, "__bound_");
                strcat(buf, name);
                name = buf;
                break;
            }
        }
#endif
        t = sym->type.t;
        if ((t & VT_BTYPE) == VT_FUNC) {
            sym_type = STT_FUNC;
        } else if ((t & VT_BTYPE) == VT_VOID) {
            sym_type = STT_NOTYPE;
        } else {
            sym_type = STT_OBJECT;
        }
        if (t & VT_STATIC)
            sym_bind = STB_LOCAL;
        else
            sym_bind = STB_GLOBAL;
        other = 0;
#ifdef TCC_TARGET_PE
        if (sym_type == STT_FUNC && sym->type.ref) {
            Sym *ref = sym->type.ref;
            if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
                sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
                name = buf1;
                other |= ST_PE_STDCALL;
                can_add_underscore = 0;
            }
        }
#endif
        if (tcc_state->leading_underscore && can_add_underscore) {
            buf1[0] = '_';
            pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
            name = buf1;
        }
        if (sym->asm_label)
            name = get_tok_str(sym->asm_label, NULL);
        info = ELFW(ST_INFO)(sym_bind, sym_type);
        sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
    } else {
        esym = elfsym(sym);
        esym->st_value = value;
        esym->st_size = size;
        esym->st_shndx = sh_num;
    }
    update_storage(sym);
}
ST_FUNC void put_extern_sym(Sym *sym, Section *section,
                            addr_t value, unsigned long size)
{
    int sh_num = section ? section->sh_num : SHN_UNDEF;
    put_extern_sym2(sym, sh_num, value, size, 1);
}
/* add a new relocation entry to symbol 'sym' in section 's' */
ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
                     addr_t addend)
{
    int c = 0;

    if (nocode_wanted && s == cur_text_section)
        return;

    if (sym) {
        if (0 == sym->c)
            put_extern_sym(sym, NULL, 0, 0);
        c = sym->c;
    }

    /* now we can add ELF relocation info */
    put_elf_reloca(symtab_section, s, offset, type, c, addend);
}
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
/* ------------------------------------------------------------------------- */
/* symbol allocator */
static Sym *__sym_malloc(void)
{
    Sym *sym_pool, *sym, *last_sym;
    int i;

    sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
    dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);

    last_sym = sym_free_first;
    sym = sym_pool;
    for(i = 0; i < SYM_POOL_NB; i++) {
        sym->next = last_sym;
        last_sym = sym;
        sym++;
    }
    sym_free_first = last_sym;
    return last_sym;
}
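/* Note on the allocator above: symbols are carved out of SYM_POOL_NB-sized
   pools; each pool is remembered in 'sym_pools' so it can be freed later,
   and its entries are threaded onto the 'sym_free_first' free list via the
   'next' field.  sym_malloc()/sym_free() below simply pop from and push
   onto that list. */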
static inline Sym *sym_malloc(void)
{
    Sym *sym;
#ifndef SYM_DEBUG
    sym = sym_free_first;
    if (!sym)
        sym = __sym_malloc();
    sym_free_first = sym->next;
    return sym;
#else
    sym = tcc_malloc(sizeof(Sym));
    return sym;
#endif
}

ST_INLN void sym_free(Sym *sym)
{
#ifndef SYM_DEBUG
    sym->next = sym_free_first;
    sym_free_first = sym;
#else
    tcc_free(sym);
#endif
}
/* push, without hashing */
ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
{
    Sym *s;

    s = sym_malloc();
    memset(s, 0, sizeof *s);
    s->v = v;
    s->type.t = t;
    s->c = c;
    /* add in stack */
    s->prev = *ps;
    *ps = s;
    return s;
}
/* find a symbol and return its associated structure. 's' is the top
   of the symbol stack */
ST_FUNC Sym *sym_find2(Sym *s, int v)
{
    while (s) {
        if (s->v == v)
            return s;
        else if (s->v == -1)
            return NULL;
        s = s->prev;
    }
    return NULL;
}

/* structure lookup */
ST_INLN Sym *struct_find(int v)
{
    v -= TOK_IDENT;
    if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
        return NULL;
    return table_ident[v]->sym_struct;
}

/* find an identifier */
ST_INLN Sym *sym_find(int v)
{
    v -= TOK_IDENT;
    if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
        return NULL;
    return table_ident[v]->sym_identifier;
}
/* push a given symbol on the symbol stack */
ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
{
    Sym *s, **ps;
    TokenSym *ts;

    if (local_stack)
        ps = &local_stack;
    else
        ps = &global_stack;
    s = sym_push2(ps, v, type->t, c);
    s->type.ref = type->ref;
    s->r = r;
    /* don't record fields or anonymous symbols */
    if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
        /* record symbol in token array */
        ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
        if (v & SYM_STRUCT)
            ps = &ts->sym_struct;
        else
            ps = &ts->sym_identifier;
        s->prev_tok = *ps;
        *ps = s;
        s->sym_scope = local_scope;
        if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
            tcc_error("redeclaration of '%s'",
                get_tok_str(v & ~SYM_STRUCT, NULL));
    }
    return s;
}
/* push a global identifier */
ST_FUNC Sym *global_identifier_push(int v, int t, int c)
{
    Sym *s, **ps;
    s = sym_push2(&global_stack, v, t, c);
    /* don't record anonymous symbol */
    if (v < SYM_FIRST_ANOM) {
        ps = &table_ident[v - TOK_IDENT]->sym_identifier;
        /* modify the top most local identifier, so that
           sym_identifier will point to 's' when popped */
        while (*ps != NULL && (*ps)->sym_scope)
            ps = &(*ps)->prev_tok;
        s->prev_tok = *ps;
        *ps = s;
    }
    return s;
}
/* pop symbols until top reaches 'b'.  If KEEP is non-zero don't really
   pop them yet from the list, but do remove them from the token array.  */
ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
{
    Sym *s, *ss, **ps;
    TokenSym *ts;
    int v;

    s = *ptop;
    while (s != b) {
        ss = s->prev;
        v = s->v;
        /* remove symbol in token array */
        if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
            ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
            if (v & SYM_STRUCT)
                ps = &ts->sym_struct;
            else
                ps = &ts->sym_identifier;
            *ps = s->prev_tok;
        }
        if (!keep)
            sym_free(s);
        s = ss;
    }
    if (!keep)
        *ptop = b;
}
/* ------------------------------------------------------------------------- */

static void vsetc(CType *type, int r, CValue *vc)
{
    int v;

    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator.

       Don't do this when nocode_wanted.  vtop might come from
       !nocode_wanted regions (see 88_codeopt.c) and transforming
       it to a register without actually generating code is wrong
       as their value might still be used for real.  All values
       we push under nocode_wanted will eventually be popped
       again, so that the VT_CMP/VT_JMP value will be in vtop
       when code is unsuppressed again.

       Same logic below in vswap(); */
    if (vtop >= vstack && !nocode_wanted) {
        v = vtop->r & VT_VALMASK;
        if (v == VT_CMP || (v & ~1) == VT_JMP)
            gv(RC_INT);
    }

    vtop++;
    vtop->type = *type;
    vtop->r = r;
    vtop->r2 = VT_CONST;
    vtop->c = *vc;
    vtop->sym = NULL;
}
ST_FUNC void vswap(void)
{
    SValue tmp;

    /* cannot vswap cpu flags. See comment at vsetc() above */
    if (vtop >= vstack && !nocode_wanted) {
        int v = vtop->r & VT_VALMASK;
        if (v == VT_CMP || (v & ~1) == VT_JMP)
            gv(RC_INT);
    }
    tmp = vtop[0];
    vtop[0] = vtop[-1];
    vtop[-1] = tmp;
}
/* pop stack value */
ST_FUNC void vpop(void)
{
    int v;
    v = vtop->r & VT_VALMASK;
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
    /* for x86, we need to pop the FP stack */
    if (v == TREG_ST0) {
        o(0xd8dd); /* fstp %st(0) */
    } else
#endif
    if (v == VT_JMP || v == VT_JMPI) {
        /* need to put correct jump if && or || without test */
        gsym(vtop->c.i);
    }
    vtop--;
}
/* push constant of type "type" with useless value */
ST_FUNC void vpush(CType *type)
{
    vset(type, VT_CONST, 0);
}

/* push integer constant */
ST_FUNC void vpushi(int v)
{
    CValue cval;
    cval.i = v;
    vsetc(&int_type, VT_CONST, &cval);
}

/* push a pointer sized constant */
static void vpushs(addr_t v)
{
    CValue cval;
    cval.i = v;
    vsetc(&size_type, VT_CONST, &cval);
}

/* push arbitrary 64bit constant */
ST_FUNC void vpush64(int ty, unsigned long long v)
{
    CValue cval;
    CType ctype;
    ctype.t = ty;
    ctype.ref = NULL;
    cval.i = v;
    vsetc(&ctype, VT_CONST, &cval);
}

/* push long long constant */
static inline void vpushll(long long v)
{
    vpush64(VT_LLONG, v);
}

ST_FUNC void vset(CType *type, int r, int v)
{
    CValue cval;
    cval.i = v;
    vsetc(type, r, &cval);
}
static void vseti(int r, int v)
{
    CType type;
    type.t = VT_INT;
    type.ref = NULL;
    vset(&type, r, v);
}

ST_FUNC void vpushv(SValue *v)
{
    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    vtop++;
    *vtop = *v;
}

static void vdup(void)
{
    vpushv(vtop);
}
/* rotate n first stack elements to the bottom
   I1 ... In -> I2 ... In I1 [top is right]
*/
ST_FUNC void vrotb(int n)
{
    int i;
    SValue tmp;

    tmp = vtop[-n + 1];
    for(i = -n + 1; i != 0; i++)
        vtop[i] = vtop[i + 1];
    vtop[0] = tmp;
}

/* rotate the n elements before entry e towards the top
   I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
*/
ST_FUNC void vrote(SValue *e, int n)
{
    int i;
    SValue tmp;

    tmp = *e;
    for(i = 0;i < n - 1; i++)
        e[-i] = e[-i - 1];
    e[-n + 1] = tmp;
}

/* rotate n first stack elements to the top
   I1 ... In -> In I1 ... I(n-1)  [top is right]
*/
ST_FUNC void vrott(int n)
{
    vrote(vtop, n);
}
/* push a symbol value of TYPE */
static inline void vpushsym(CType *type, Sym *sym)
{
    CValue cval;
    cval.i = 0;
    vsetc(type, VT_CONST | VT_SYM, &cval);
    vtop->sym = sym;
}
/* Return a static symbol pointing to a section */
ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    int v;
    Sym *sym;

    v = anon_sym++;
    sym = global_identifier_push(v, type->t | VT_STATIC, 0);
    sym->type.ref = type->ref;
    sym->r = VT_CONST | VT_SYM;
    put_extern_sym(sym, sec, offset, size);
    return sym;
}
/* push a reference to a section offset by adding a dummy symbol */
static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    vpushsym(type, get_sym_ref(type, sec, offset, size));
}
/* define a new external reference to a symbol 'v' of type 'u' */
ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t | VT_EXTERN, 0);
        s->type.ref = type->ref;
        s->r = r | VT_CONST | VT_SYM;
    } else if (IS_ASM_SYM(s)) {
        s->type.t = type->t | (s->type.t & VT_EXTERN);
        s->type.ref = type->ref;
        update_storage(s);
    }
    return s;
}
/* Merge some type attributes. */
static void patch_type(Sym *sym, CType *type)
{
    if (!(type->t & VT_EXTERN)) {
        if (!(sym->type.t & VT_EXTERN))
            tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
        sym->type.t &= ~VT_EXTERN;
    }

    if (IS_ASM_SYM(sym)) {
        /* stay static if both are static */
        sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
        sym->type.ref = type->ref;
    }

    if (!is_compatible_types(&sym->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));

    } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
        int static_proto = sym->type.t & VT_STATIC;
        /* warn if static follows non-static function declaration */
        if ((type->t & VT_STATIC) && !static_proto && !(type->t & VT_INLINE))
            tcc_warning("static storage ignored for redefinition of '%s'",
                get_tok_str(sym->v, NULL));

        if (0 == (type->t & VT_EXTERN)) {
            /* put complete type, use static from prototype */
            sym->type.t = (type->t & ~VT_STATIC) | static_proto;
            if (type->t & VT_INLINE)
                sym->type.t = type->t;
            sym->type.ref = type->ref;
        }

    } else {
        if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
            /* set array size if it was omitted in extern declaration */
            if (sym->type.ref->c < 0)
                sym->type.ref->c = type->ref->c;
            else if (sym->type.ref->c != type->ref->c)
                tcc_error("conflicting type for '%s'", get_tok_str(sym->v, NULL));
        }
        if ((type->t ^ sym->type.t) & VT_STATIC)
            tcc_warning("storage mismatch for redefinition of '%s'",
                get_tok_str(sym->v, NULL));
    }
}
/* Merge some storage attributes. */
static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
{
    if (type)
        patch_type(sym, type);

#ifdef TCC_TARGET_PE
    if (sym->a.dllimport != ad->a.dllimport)
        tcc_error("incompatible dll linkage for redefinition of '%s'",
            get_tok_str(sym->v, NULL));
    sym->a.dllexport |= ad->a.dllexport;
#endif
    sym->a.weak |= ad->a.weak;
    if (ad->a.visibility) {
        int vis = sym->a.visibility;
        int vis2 = ad->a.visibility;
        if (vis == STV_DEFAULT)
            vis = vis2;
        else if (vis2 != STV_DEFAULT)
            vis = (vis < vis2) ? vis : vis2;
        sym->a.visibility = vis;
    }
    if (ad->a.aligned)
        sym->a.aligned = ad->a.aligned;
    if (ad->asm_label)
        sym->asm_label = ad->asm_label;
    update_storage(sym);
}
/* define a new external reference to a symbol 'v' */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
        s->type.t |= VT_EXTERN;
        s->a = ad->a;
        s->sym_scope = 0;
    } else {
        if (s->type.ref == func_old_type.ref) {
            s->type.ref = type->ref;
            s->r = r | VT_CONST | VT_SYM;
            s->type.t |= VT_EXTERN;
        }
        patch_storage(s, ad, type);
    }
    return s;
}
/* push a reference to global symbol v */
ST_FUNC void vpush_global_sym(CType *type, int v)
{
    vpushsym(type, external_global_sym(v, type, 0));
}
/* save registers up to (vtop - n) stack entry */
ST_FUNC void save_regs(int n)
{
    SValue *p, *p1;
    for(p = vstack, p1 = vtop - n; p <= p1; p++)
        save_reg(p->r);
}

/* save r to the memory stack, and mark it as being free */
ST_FUNC void save_reg(int r)
{
    save_reg_upstack(r, 0);
}
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, saved, size, align;
    SValue *p, *p1, sv;
    CType *type;

    if ((r &= VT_VALMASK) >= VT_CONST)
        return;
    if (nocode_wanted)
        return;

    /* modify all stack values */
    saved = 0;
    l = 0;
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r ||
            ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
            /* must save value on stack if not already done */
            if (!saved) {
                /* NOTE: must reload 'r' because r might be equal to r2 */
                r = p->r & VT_VALMASK;
                /* store register in the stack */
                type = &p->type;
                if ((p->r & VT_LVAL) ||
                    (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
                    type = &char_pointer_type;
                size = type_size(type, &align);
                loc = (loc - size) & -align;
                sv.type.t = type->t;
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = loc;
                store(r, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
                /* special long long case */
                if ((type->t & VT_BTYPE) == VT_LLONG) {
                    sv.c.i += 4;
                    store(p->r2, &sv);
                }
                l = loc;
                saved = 1;
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = lvalue_type(p->type.t) | VT_LOCAL;
            }
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc2) {
            int n = 0;
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            if (nocode_wanted)
                return r;
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2 & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never come here */
    return -1;
}
/* move register 's' (of type 't') to 'r', and flush previous value of r
   to memory if needed */
static void move_reg(int r, int s, int t)
{
    SValue sv;

    if (r != s) {
        save_reg(r);
        sv.type.t = t;
        sv.type.ref = NULL;
        sv.r = s;
        sv.c.i = 0;
        load(r, &sv);
    }
}
/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate lvalue bound code */
static void gbound(void)
{
    int lval_type;
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            vpushi(0);
            gen_bounded_ptr_add();
            vtop->r |= lval_type;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
#endif
static void incr_bf_adr(int o)
{
    vtop->type = char_pointer_type;
    gaddrof();
    vpushi(o);
    gen_op('+');
    vtop->type.t = (vtop->type.t & ~(VT_BTYPE|VT_DEFSIGN))
        | (VT_BYTE|VT_UNSIGNED);
    vtop->r = (vtop->r & ~VT_LVAL_TYPE)
        | (VT_LVAL_BYTE|VT_LVAL_UNSIGNED|VT_LVAL);
}
/* single-byte load mode for packed or otherwise unaligned bitfields */
static void load_packed_bf(CType *type, int bit_pos, int bit_size)
{
    int n, o, bits;

    save_reg_upstack(vtop->r, 1);
    vpush64(type->t & VT_BTYPE, 0); // B X
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        vswap(); // X B
        incr_bf_adr(o);
        vdup(); // X B B
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
        if (n < 8)
            vpushi((1 << n) - 1), gen_op('&');
        gen_cast(type);
        if (bits)
            vpushi(bits), gen_op(TOK_SHL);
        vrotb(3); // B Y X
        gen_op('|'); // B X
        bits += n, bit_size -= n, o = 1;
    } while (bit_size);
    vswap(), vpop();
    if (!(type->t & VT_UNSIGNED)) {
        n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
        vpushi(n), gen_op(TOK_SHL);
        vpushi(n), gen_op(TOK_SAR);
    }
}
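/* The two helpers above and below access a packed bitfield one byte at a
   time: each byte is loaded (or stored) through a char pointer built by
   incr_bf_adr(), shifted into place and OR-ed (resp. masked and merged),
   and for signed fields the final SHL/SAR pair sign-extends the result.
   This avoids any access wider than one byte on possibly unaligned
   storage. */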
/* single-byte store mode for packed or otherwise unaligned bitfields */
static void store_packed_bf(int bit_pos, int bit_size)
{
    int bits, n, o, m, c;

    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    vswap(); // X B
    save_reg_upstack(vtop->r, 1);
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        incr_bf_adr(o); // X B
        vswap(); // B X
        c ? vdup() : gv_dup(); // B V X
        vrott(3); // X B V
        if (bits)
            vpushi(bits), gen_op(TOK_SHR);
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHL);
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (n < 8) {
            m = ((1 << n) - 1) << bit_pos;
            vpushi(m), gen_op('&'); // X B V1
            vpushv(vtop-1); // X B V1 B
            vpushi(m & 0x80 ? ~m & 0x7f : ~m);
            gen_op('&'); // X B V1 B1
            gen_op('|'); // X B V2
        }
        vdup(), vtop[-1] = vtop[-2]; // X B B V2
        vstore(), vpop(); // X B
        bits += n, bit_size -= n, bit_pos = 0, o = 1;
    } while (bit_size);
    vpop(), vpop();
}
static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
{
    int t;
    if (0 == sv->type.ref)
        return 0;
    t = sv->type.ref->auxtype;
    if (t != -1 && t != VT_STRUCT) {
        sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
        sv->r = (sv->r & ~VT_LVAL_TYPE) | lvalue_type(sv->type.t);
    }
    return t;
}
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures). */
ST_FUNC int gv(int rc)
{
    int r, bit_pos, bit_size, size, align, rc2;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;

        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;

        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;

        r = adjust_bf(vtop, bit_pos, bit_size);

        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;

        if (r == VT_STRUCT) {
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        }
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            unsigned long offset;
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(data_section, size, align);
            vpush_ref(&vtop->type, data_section, offset, size);
            vswap();
            init_putv(&vtop->type, data_section, offset);
            vtop->r |= VT_LVAL;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        r = vtop->r & VT_VALMASK;
        rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
#ifndef TCC_TARGET_ARM64
        if (rc == RC_IRET)
            rc2 = RC_LRET;
#ifdef TCC_TARGET_X86_64
        else if (rc == RC_FRET)
            rc2 = RC_QRET;
#endif
#endif
        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        if (r >= VT_CONST
            || (vtop->r & VT_LVAL)
            || !(reg_classes[r] & rc)
#if PTR_SIZE == 8
            || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
            || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
#else
            || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
#endif
            )
        {
            r = get_reg(rc);
#if PTR_SIZE == 8
            if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
                int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
#else
            if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
                int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
                unsigned long long ll;
#endif
                int r2, original_type;
                original_type = vtop->type.t;
                /* two register type load : expand to two words
                   temporarily */
#if PTR_SIZE == 4
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else
#endif
                if (vtop->r & VT_LVAL) {
                    /* We do not want to modifier the long long
                       pointer here, so the safest (and less
                       efficient) is to save all the other registers
                       in the stack. XXX: totally inefficient. */
                    /* lvalue_save: save only if used further down the stack */
                    save_reg_upstack(vtop->r, 1);
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    vtop->type.t = addr_type;
                    gaddrof();
                    vpushi(load_size);
                    gen_op('+');
                    vtop->r |= VT_LVAL;
                    vtop->type.t = load_type;
                } else {
                    /* move registers */
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
                vtop->type.t = original_type;
            } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
                int t1, t;
                /* lvalue of scalar type : need to use lvalue type
                   because of possible cast */
                t = vtop->type.t;
                t1 = t;
                /* compute memory access type */
                if (vtop->r & VT_LVAL_BYTE)
                    t = VT_BYTE;
                else if (vtop->r & VT_LVAL_SHORT)
                    t = VT_SHORT;
                if (vtop->r & VT_LVAL_UNSIGNED)
                    t |= VT_UNSIGNED;
                vtop->type.t = t;
                load(r, vtop);
                /* restore wanted type */
                vtop->type.t = t1;
            } else {
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
ST_FUNC void gv2(int rc1, int rc2)
{
    int v;

    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    v = vtop[0].r & VT_VALMASK;
    if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
#ifndef TCC_TARGET_ARM64
/* wrapper around RC_FRET to return a register by type */
static int rc_fret(int t)
{
#ifdef TCC_TARGET_X86_64
    if (t == VT_LDOUBLE) {
        return RC_ST0;
    }
#endif
    return RC_FRET;
}
#endif

/* wrapper around REG_FRET to return a register by type */
static int reg_fret(int t)
{
#ifdef TCC_TARGET_X86_64
    if (t == VT_LDOUBLE) {
        return TREG_ST0;
    }
#endif
    return REG_FRET;
}
/* expand 64bit on stack in two ints */
static void lexpand(void)
{
    int u, v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        vdup();
        vtop[0].c.i += 4;
    } else {
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
#ifdef TCC_TARGET_ARM
/* expand long long on stack */
ST_FUNC void lexpand_nr(void)
{
    int u, v;

    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    vdup();
    vtop->r2 = VT_CONST;
    vtop->type.t = VT_INT | u;
    v = vtop[-1].r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        vtop[-1].c.i = vtop->c.i;
        vtop->c.i = vtop->c.i >> 32;
        vtop->r = VT_CONST;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        vtop->c.i += 4;
        vtop->r = vtop[-1].r;
    } else if (v > VT_CONST) {
        vtop--;
        lexpand();
    } else
        vtop->r = vtop[-1].r2;
    vtop[-1].r2 = VT_CONST;
    vtop[-1].type.t = VT_INT | u;
}
#endif
/* build a long long from two ints */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
/* convert stack entry to register and duplicate its value in another
   register */
static void gv_dup(void)
{
    int rc, t, r, r1;
    SValue sv;

    t = vtop->type.t;
    if ((t & VT_BTYPE) == VT_LLONG) {
        if (t & VT_BITFIELD) {
            gv(RC_INT);
            t = vtop->type.t;
        }
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
    } else {
        /* duplicate value */
        rc = RC_INT;
        sv.type.t = VT_INT;
        if (is_float(t)) {
            rc = RC_FLOAT;
#ifdef TCC_TARGET_X86_64
            if ((t & VT_BTYPE) == VT_LDOUBLE) {
                rc = RC_ST0;
            }
#endif
            sv.type.t = t;
        }
        r = gv(rc);
        r1 = get_reg(rc);
        sv.r = r;
        sv.c.i = 0;
        load(r1, &sv); /* move r to r1 */
        vdup();
        /* duplicates value */
        if (r != r1)
            vtop->r = r1;
    }
}
/* Generate value test
 *
 * Generate a test for any value (jump, comparison and integers) */
ST_FUNC int gvtst(int inv, int t)
{
    int v = vtop->r & VT_VALMASK;
    if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
        vpushi(0);
        gen_op(TOK_NE);
    }
    if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
        /* constant jmp optimization */
        if ((vtop->c.i != 0) != inv)
            t = gjmp(t);
        vtop--;
        return t;
    }
    return gtst(inv, t);
}
/* generate CPU independent (unsigned) long long operations */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    SValue tmp;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_LRET;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_global_sym(&func_old_type, func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H H L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        /* compare high words */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(TOK_NE);
                vtop->r = VT_CMP;
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
        a = gvtst(1, a);
        gsym(b);
        vseti(VT_JMPI, a);
        break;
    }
}
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
    return (a ^ b) >> 63 ? -x : x;
}

static int gen_opic_lt(uint64_t a, uint64_t b)
{
    return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
}
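/* Both helpers above fold signed 64-bit operations using only unsigned
   arithmetic, so constant folding behaves identically on every host:
   gen_opic_sdiv() divides the magnitudes and restores the sign from a^b,
   and gen_opic_lt() flips the sign bit of both operands so that an
   unsigned compare orders them as signed values (e.g. -1 becomes
   0x7fffffffffffffff while 0 becomes 0x8000000000000000). */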
/* handle integer constant optimizations and various machine
   independent opt */
static void gen_opic(int op)
{
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31;

    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));

    if (c1 && c2) {
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (const_wanted)
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            }
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
            /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
            /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        }
        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                  (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
        v1->c.i = l1;
        vtop--;
    } else {
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*')) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (!const_wanted &&
            c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vtop--;
        } else if (!const_wanted &&
                   c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                            op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                }
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            }
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
                    || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31.  Don't construct such.  */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
        }
    }
}
/* generate a floating point operation with constant propagation */
static void gen_opif(int op)
{
    int c1, c2;
    SValue *v1, *v2;
#if defined _MSC_VER && defined _AMD64_
    /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
    volatile
#endif
    long double f1, f2;

    v1 = vtop - 1;
    v2 = vtop;
    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    if (c1 && c2) {
        if (v1->type.t == VT_FLOAT) {
            f1 = v1->c.f;
            f2 = v2->c.f;
        } else if (v1->type.t == VT_DOUBLE) {
            f1 = v1->c.d;
            f2 = v2->c.d;
        } else {
            f1 = v1->c.ld;
            f2 = v2->c.ld;
        }

        /* NOTE: we only do constant propagation if finite number (not
           NaN or infinity) (ANSI spec) */
        if (!ieee_finite(f1) || !ieee_finite(f2))
            goto general_case;

        switch(op) {
        case '+': f1 += f2; break;
        case '-': f1 -= f2; break;
        case '*': f1 *= f2; break;
        case '/':
            if (f2 == 0.0) {
                if (const_wanted)
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            f1 /= f2;
            break;
            /* XXX: also handles tests ? */
        default:
            goto general_case;
        }
        /* XXX: overflow test ? */
        if (v1->type.t == VT_FLOAT) {
            v1->c.f = f1;
        } else if (v1->type.t == VT_DOUBLE) {
            v1->c.d = f1;
        } else {
            v1->c.ld = f1;
        }
        vtop--;
    } else {
    general_case:
        gen_opf(op);
    }
}
static int pointed_size(CType *type)
{
    int align;
    return type_size(pointed_type(type), &align);
}

static void vla_runtime_pointed_size(CType *type)
{
    int align;
    vla_runtime_type_size(pointed_type(type), &align);
}
static inline int is_null_pointer(SValue *p)
{
    if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
        return 0;
    return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
        ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
        ((p->type.t & VT_BTYPE) == VT_PTR &&
         (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0));
}

static inline int is_integer_btype(int bt)
{
    return (bt == VT_BYTE || bt == VT_SHORT ||
            bt == VT_INT || bt == VT_LLONG);
}
/* check types for comparison or subtraction of pointers */
static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
{
    CType *type1, *type2, tmp_type1, tmp_type2;
    int bt1, bt2;

    /* null pointers are accepted for all comparisons as gcc */
    if (is_null_pointer(p1) || is_null_pointer(p2))
        return;
    type1 = &p1->type;
    type2 = &p2->type;
    bt1 = type1->t & VT_BTYPE;
    bt2 = type2->t & VT_BTYPE;
    /* accept comparison between pointer and integer with a warning */
    if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
        if (op != TOK_LOR && op != TOK_LAND)
            tcc_warning("comparison between pointer and integer");
        return;
    }

    /* both must be pointers or implicit function pointers */
    if (bt1 == VT_PTR) {
        type1 = pointed_type(type1);
    } else if (bt1 != VT_FUNC)
        goto invalid_operands;

    if (bt2 == VT_PTR) {
        type2 = pointed_type(type2);
    } else if (bt2 != VT_FUNC) {
    invalid_operands:
        tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
    }
    if ((type1->t & VT_BTYPE) == VT_VOID ||
        (type2->t & VT_BTYPE) == VT_VOID)
        return;
    tmp_type1 = *type1;
    tmp_type2 = *type2;
    tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
    tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
    if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
        /* gcc-like error if '-' is used */
        if (op == '-')
            goto invalid_operands;
        else
            tcc_warning("comparison of distinct pointer types lacks a cast");
    }
}
/* generic gen_op: handles types problems */
ST_FUNC void gen_op(int op)
{
    int u, t1, t2, bt1, bt2, t;
    CType type1;

redo:
    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        tcc_error("operation on a struct");
    } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        if (bt2 == VT_FUNC) {
            mk_pointer(&vtop->type);
            gaddrof();
        }
        if (bt1 == VT_FUNC) {
            vswap();
            mk_pointer(&vtop->type);
            gaddrof();
            vswap();
        }
        goto redo;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        if (op >= TOK_ULT && op <= TOK_LOR) {
            check_comparison_pointer_types(vtop - 1, vtop, op);
            /* pointers are handled as unsigned */
#if PTR_SIZE == 8
            t = VT_LLONG | VT_UNSIGNED;
#else
            t = VT_INT | VT_UNSIGNED;
#endif
            goto std_op;
        }
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
            if (op != '-')
                tcc_error("cannot use pointers here");
            check_comparison_pointer_types(vtop - 1, vtop, op);
            /* XXX: check that types are compatible */
            if (vtop[-1].type.t & VT_VLA) {
                vla_runtime_pointed_size(&vtop[-1].type);
            } else {
                vpushi(pointed_size(&vtop[-1].type));
            }
            vrott(3);
            gen_opic(op);
            vtop->type.t = ptrdiff_type.t;
            vswap();
            gen_op(TOK_PDIV);
        } else {
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                tcc_error("cannot use pointers here");
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                vswap();
                t = t1, t1 = t2, t2 = t;
            }
#if PTR_SIZE == 4
            if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
                /* XXX: truncate here because gen_opl can't handle ptr + long long */
                gen_cast_s(VT_INT);
#endif
            type1 = vtop[-1].type;
            type1.t &= ~VT_ARRAY;
            if (vtop[-1].type.t & VT_VLA)
                vla_runtime_pointed_size(&vtop[-1].type);
            else {
                u = pointed_size(&vtop[-1].type);
                if (u < 0)
                    tcc_error("unknown array element size");
#if PTR_SIZE == 8
                vpushll(u);
#else
                /* XXX: cast to int ? (long long case) */
                vpushi(u);
#endif
            }
            gen_op('*');
#if 0
/* #ifdef CONFIG_TCC_BCHECK
    The main reason to removing this code:
        #include <stdio.h>
        int main ()
        {
            int v[10];
            int i = 10;
            int j = 9;
            fprintf(stderr, "v+i-j  = %p\n", v+i-j);
            fprintf(stderr, "v+(i-j)  = %p\n", v+(i-j));
        }
    When this code is on, the output looks like
        v+i-j = 0xfffffffe
        v+(i-j) = 0xbff84000
    */
            /* if evaluating constant expression, no code should be
               generated, so no bound check */
            if (tcc_state->do_bounds_check && !const_wanted) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                if (op == '-') {
                    vpushi(0);
                    vswap();
                    gen_op('-');
                }
                gen_bounded_ptr_add();
            } else
#endif
            {
                gen_opic(op);
            }
            /* put again type if gen_opic() swaped operands */
            vtop->type = type1;
        }
    } else if (is_float(bt1) || is_float(bt2)) {
        /* compute bigger type and do implicit casts */
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
            t = VT_DOUBLE;
        } else {
            t = VT_FLOAT;
        }
        /* floats can only be used for a few operations */
        if (op != '+' && op != '-' && op != '*' && op != '/' &&
            (op < TOK_ULT || op > TOK_GT))
            tcc_error("invalid operands for binary operation");
        goto std_op;
    } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
        t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
            t |= VT_UNSIGNED;
        t |= (VT_LONG & t1);
        goto std_op;
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        t = VT_LLONG | VT_LONG;
        if (bt1 == VT_LLONG)
            t &= t1;
        if (bt2 == VT_LLONG)
            t &= t2;
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            t |= VT_UNSIGNED;
        goto std_op;
    } else {
        /* integer operations */
        t = VT_INT | (VT_LONG & (t1 | t2));
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            t |= VT_UNSIGNED;
    std_op:
        /* XXX: currently, some unsigned operations are explicit, so
           we modify them here */
        if (t & VT_UNSIGNED) {
            if (op == TOK_SAR)
                op = TOK_SHR;
            else if (op == '/')
                op = TOK_UDIV;
            else if (op == '%')
                op = TOK_UMOD;
            else if (op == TOK_LT)
                op = TOK_ULT;
            else if (op == TOK_GT)
                op = TOK_UGT;
            else if (op == TOK_LE)
                op = TOK_ULE;
            else if (op == TOK_GE)
                op = TOK_UGE;
        }
        vswap();
        type1.t = t;
        type1.ref = NULL;
        gen_cast(&type1);
        vswap();
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
            type1.t = VT_INT;
        gen_cast(&type1);
        if (is_float(t))
            gen_opif(op);
        else
            gen_opic(op);
        if (op >= TOK_ULT && op <= TOK_GT) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
        } else {
            vtop->type.t = t;
        }
    }
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
}
#ifndef TCC_TARGET_ARM
/* generic itof for unsigned long long case */
static void gen_cvt_itof1(int t)
{
#ifdef TCC_TARGET_ARM64
    gen_cvt_itof(t);
#else
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {

        if (t == VT_FLOAT)
            vpush_global_sym(&func_old_type, TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_global_sym(&func_old_type, TOK___floatundixf);
#endif
        else
            vpush_global_sym(&func_old_type, TOK___floatundidf);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        vtop->r = reg_fret(t);
    } else {
        gen_cvt_itof(t);
    }
#endif
}
#endif
/* generic ftoi for unsigned long long case */
static void gen_cvt_ftoi1(int t)
{
#ifdef TCC_TARGET_ARM64
    gen_cvt_ftoi(t);
#else
    int st;

    if (t == (VT_LLONG | VT_UNSIGNED)) {
        /* not handled natively */
        st = vtop->type.t & VT_BTYPE;
        if (st == VT_FLOAT)
            vpush_global_sym(&func_old_type, TOK___fixunssfdi);
#if LDOUBLE_SIZE != 8
        else if (st == VT_LDOUBLE)
            vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
#endif
        else
            vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
        vrott(2);
        gfunc_call(1);
        vpushi(0);
        vtop->r = REG_IRET;
        vtop->r2 = REG_LRET;
    } else {
        gen_cvt_ftoi(t);
    }
#endif
}
/* force char or short cast */
static void force_charshort_cast(int t)
{
    int bits, dbt;

    /* cannot cast static initializers */
    if (STATIC_DATA_WANTED)
        return;

    dbt = t & VT_BTYPE;
    /* XXX: add optimization if lvalue : just change type and offset */
    if (dbt == VT_BYTE)
        bits = 8;
    else
        bits = 16;
    if (t & VT_UNSIGNED) {
        vpushi((1 << bits) - 1);
        gen_op('&');
    } else {
        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            bits = 64 - bits;
        else
            bits = 32 - bits;
        vpushi(bits);
        gen_op(TOK_SHL);
        /* result must be signed or the SAR is converted to an SHL
           This was not the case when "t" was a signed short
           and the last value on the stack was an unsigned int */
        vtop->type.t &= ~VT_UNSIGNED;
        vpushi(bits);
        gen_op(TOK_SAR);
    }
}
/* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
static void gen_cast_s(int t)
{
    CType type;
    type.t = t;
    type.ref = NULL;
    gen_cast(&type);
}
static void gen_cast(CType *type)
{
    int sbt, dbt, sf, df, c, p;

    /* special delayed cast for char/short */
    /* XXX: in some cases (multiple cascaded casts), it may still
       be incorrect */
    if (vtop->r & VT_MUSTCAST) {
        vtop->r &= ~VT_MUSTCAST;
        force_charshort_cast(vtop->type.t);
    }

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD) {
        gv(RC_INT);
    }

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);

    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
        p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
        c &= dbt != VT_LDOUBLE;
#endif
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;

            if (df) {
                if ((sbt & VT_BTYPE) == VT_LLONG) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                        vtop->c.ld = vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-vtop->c.i;
                } else if (!sf) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                        vtop->c.ld = (uint32_t)vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
                }

                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
                vtop->c.i = vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                if (sf)
                    vtop->c.i = vtop->c.ld;
                else if (sbt == (VT_LLONG|VT_UNSIGNED))
                    ;
                else if (sbt & VT_UNSIGNED)
                    vtop->c.i = (uint32_t)vtop->c.i;
#if PTR_SIZE == 8
                else if (sbt == VT_PTR)
                    ;
#endif
                else if (sbt != VT_LLONG)
                    vtop->c.i = ((uint32_t)vtop->c.i |
                                 -(vtop->c.i & 0x80000000));

                if (dbt == (VT_LLONG|VT_UNSIGNED))
                    ;
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.i != 0);
#if PTR_SIZE == 8
                else if (dbt == VT_PTR)
                    ;
#endif
                else if (dbt != VT_LLONG) {
                    uint32_t m = ((dbt & VT_BTYPE) == VT_BYTE ? 0xff :
                                  (dbt & VT_BTYPE) == VT_SHORT ? 0xffff :
                                  0xffffffff);
                    vtop->c.i &= m;
                    if (!(dbt & VT_UNSIGNED))
                        vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
                }
            }
        } else if (p && dbt == VT_BOOL) {
            vtop->r = VT_CONST;
            vtop->c.i = 1;
        } else {
            /* non constant case: generate code */
            if (sf && df) {
                /* convert from fp to fp */
                gen_cvt_ftof(dbt);
            } else if (df) {
                /* convert int to fp */
                gen_cvt_itof1(dbt);
            } else if (sf) {
                /* convert fp to int */
                if (dbt == VT_BOOL) {
                    vpushi(0);
                    gen_op(TOK_NE);
                } else {
                    /* we handle char/short/etc... with generic code */
                    if (dbt != (VT_INT | VT_UNSIGNED) &&
                        dbt != (VT_LLONG | VT_UNSIGNED) &&
                        dbt != VT_LLONG)
                        dbt = VT_INT;
                    gen_cvt_ftoi1(dbt);
                    if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
                        /* additional cast for char/short... */
                        vtop->type.t = dbt;
                        gen_cast(type);
                    }
                }
#if PTR_SIZE == 4
            } else if ((dbt & VT_BTYPE) == VT_LLONG) {
                if ((sbt & VT_BTYPE) != VT_LLONG) {
                    /* scalar to long long */
                    /* machine independent conversion */
                    gv(RC_INT);
                    /* generate high word */
                    if (sbt == (VT_INT | VT_UNSIGNED)) {
                        vpushi(0);
                        gv(RC_INT);
                    } else {
                        if (sbt == VT_PTR) {
                            /* cast from pointer to int before we apply
                               shift operation, which pointers don't support */
                            gen_cast_s(VT_INT);
                        }
                        gv_dup();
                        vpushi(31);
                        gen_op(TOK_SAR);
                    }
                    /* patch second register */
                    vtop[-1].r2 = vtop->r;
                    vpop();
                }
#else
            } else if ((dbt & VT_BTYPE) == VT_LLONG ||
                       (dbt & VT_BTYPE) == VT_PTR ||
                       (dbt & VT_BTYPE) == VT_FUNC) {
                if ((sbt & VT_BTYPE) != VT_LLONG &&
                    (sbt & VT_BTYPE) != VT_PTR &&
                    (sbt & VT_BTYPE) != VT_FUNC) {
                    /* need to convert from 32bit to 64bit */
                    gv(RC_INT);
                    if (sbt != (VT_INT | VT_UNSIGNED)) {
#if defined(TCC_TARGET_ARM64)
                        gen_cvt_sxtw();
#elif defined(TCC_TARGET_X86_64)
                        int r = gv(RC_INT);
                        /* x86_64 specific: movslq */
                        o(0x6348);
                        o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
#endif
                    }
                }
#endif
            } else if (dbt == VT_BOOL) {
                /* scalar to bool */
                vpushi(0);
                gen_op(TOK_NE);
            } else if ((dbt & VT_BTYPE) == VT_BYTE ||
                       (dbt & VT_BTYPE) == VT_SHORT) {
                if (sbt == VT_PTR) {
                    vtop->type.t = VT_INT;
                    tcc_warning("nonportable conversion from pointer to char/short");
                }
                force_charshort_cast(dbt);
            } else if ((dbt & VT_BTYPE) == VT_INT) {
                /* scalar to int */
                if ((sbt & VT_BTYPE) == VT_LLONG) {
#if PTR_SIZE == 4
                    /* from long long: just take low order word */
                    lexpand();
                    vpop();
#endif
                }
                /* if lvalue and single word type, nothing to do because
                   the lvalue already contains the real type size (see
                   VT_LVAL_xxx constants) */
            }
        }
    } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
        /* if we are casting between pointer types,
           we must update the VT_LVAL_xxx size */
        vtop->r = (vtop->r & ~VT_LVAL_TYPE)
                  | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
    }
    vtop->type = *type;
}
/* return type size as known at compile time. Put alignment at 'a' */
ST_FUNC int type_size(CType *type, int *a)
{
    Sym *s;
    int bt;

    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union */
        s = type->ref;
        *a = s->r;
        return s->c;
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;

            s = type->ref;
            ts = type_size(&s->type, a);

            if (ts < 0 && s->c < 0)
                ts = -ts;

            return ts * s->c;
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
        }
    } else if (IS_ENUM(type->t) && type->ref->c == -1) {
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
        *a = 8;
#else
        *a = 4;
#endif
#elif defined(TCC_TARGET_ARM)
#ifdef TCC_ARM_EABI
        *a = 8;
#else
        *a = 4;
#endif
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
    }
}
/* push type size as known at runtime on top of value stack. Put
   alignment at 'a' */
ST_FUNC void vla_runtime_type_size(CType *type, int *a)
{
    if (type->t & VT_VLA) {
        type_size(&type->ref->type, a);
        vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
    } else {
        vpushi(type_size(type, a));
    }
}
static void vla_sp_restore(void) {
    if (vlas_in_scope) {
        gen_vla_sp_restore(vla_sp_loc);
    }
}

static void vla_sp_restore_root(void) {
    if (vlas_in_scope) {
        gen_vla_sp_restore(vla_sp_root_loc);
    }
}
/* return the pointed type of t */
static inline CType *pointed_type(CType *type)
{
    return &type->ref->type;
}

/* modify type so that it is a pointer to type. */
ST_FUNC void mk_pointer(CType *type)
{
    Sym *s;
    s = sym_push(SYM_FIELD, type, 0, -1);
    type->t = VT_PTR | (type->t & VT_STORAGE);
    type->ref = s;
}
/* compare function types. OLD functions match any new functions */
static int is_compatible_func(CType *type1, CType *type2)
{
    Sym *s1, *s2;

    s1 = type1->ref;
    s2 = type2->ref;
    if (!is_compatible_types(&s1->type, &s2->type))
        return 0;
    /* check func_call */
    if (s1->f.func_call != s2->f.func_call)
        return 0;
    /* XXX: not complete */
    if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD)
        return 1;
    if (s1->f.func_type != s2->f.func_type)
        return 0;
    while (s1 != NULL) {
        if (s2 == NULL)
            return 0;
        if (!is_compatible_unqualified_types(&s1->type, &s2->type))
            return 0;
        s1 = s1->next;
        s2 = s2->next;
    }
    if (s2)
        return 0;
    return 1;
}
/* return true if type1 and type2 are the same.  If unqualified is
   true, qualifiers on the types are ignored.

   - enums are not checked as gcc __builtin_types_compatible_p ()
 */
static int compare_types(CType *type1, CType *type2, int unqualified)
{
    int bt1, t1, t2;

    t1 = type1->t & VT_TYPE;
    t2 = type2->t & VT_TYPE;
    if (unqualified) {
        /* strip qualifiers before comparing */
        t1 &= ~(VT_CONSTANT | VT_VOLATILE);
        t2 &= ~(VT_CONSTANT | VT_VOLATILE);
    }

    /* Default vs explicit signedness only matters for char */
    if ((t1 & VT_BTYPE) != VT_BYTE) {
        t1 &= ~VT_DEFSIGN;
        t2 &= ~VT_DEFSIGN;
    }
    /* XXX: bitfields ? */
    if (t1 != t2)
        return 0;
    /* test more complicated cases */
    bt1 = t1 & VT_BTYPE;
    if (bt1 == VT_PTR) {
        type1 = pointed_type(type1);
        type2 = pointed_type(type2);
        return is_compatible_types(type1, type2);
    } else if (bt1 == VT_STRUCT) {
        return (type1->ref == type2->ref);
    } else if (bt1 == VT_FUNC) {
        return is_compatible_func(type1, type2);
    } else {
        return 1;
    }
}
2832 /* return true if type1 and type2 are exactly the same (including
2835 static int is_compatible_types(CType
*type1
, CType
*type2
)
2837 return compare_types(type1
,type2
,0);
2840 /* return true if type1 and type2 are the same (ignoring qualifiers).
2842 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2844 return compare_types(type1
,type2
,1);
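/* Illustrative note (hypothetical declarations, not from the original
   source): per the contracts above, a pair such as
       const int a;
       int b;
   counts as the same type through is_compatible_unqualified_types()
   (qualifiers ignored), while is_compatible_types() still requires the
   qualifiers to match exactly. */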
/* print a type. If 'varstr' is not NULL, then the variable is also
   printed in the type */
/* XXX: add array and function pointers */
static void type_to_str(char *buf, int buf_size,
                        CType *type, const char *varstr)
        pstrcat(buf, buf_size, "extern ");
        pstrcat(buf, buf_size, "static ");
        pstrcat(buf, buf_size, "typedef ");
        pstrcat(buf, buf_size, "inline ");
    if (t & VT_VOLATILE)
        pstrcat(buf, buf_size, "volatile ");
    if (t & VT_CONSTANT)
        pstrcat(buf, buf_size, "const ");
    if (((t & VT_DEFSIGN) && bt == VT_BYTE)
        || ((t & VT_UNSIGNED)
            && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
        pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
    buf_size -= strlen(buf);
        tstr = "long double";
        pstrcat(buf, buf_size, tstr);
        pstrcat(buf, buf_size, tstr);
        v = type->ref->v & ~SYM_STRUCT;
        if (v >= SYM_FIRST_ANOM)
            pstrcat(buf, buf_size, "<anonymous>");
            pstrcat(buf, buf_size, get_tok_str(v, NULL));
        type_to_str(buf, buf_size, &s->type, varstr);
        pstrcat(buf, buf_size, "(");
        while (sa != NULL) {
            type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
            pstrcat(buf, buf_size, buf1);
                pstrcat(buf, buf_size, ", ");
        pstrcat(buf, buf_size, ")");
        snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
        type_to_str(buf, buf_size, &s->type, buf1);
        pstrcpy(buf1, sizeof(buf1), "*");
        if (t & VT_CONSTANT)
            pstrcat(buf1, buf_size, "const ");
        if (t & VT_VOLATILE)
            pstrcat(buf1, buf_size, "volatile ");
            pstrcat(buf1, sizeof(buf1), varstr);
        type_to_str(buf, buf_size, &s->type, buf1);
        pstrcat(buf, buf_size, " ");
        pstrcat(buf, buf_size, varstr);
/* verify type compatibility to store vtop in 'dt' type, and generate
   casts if needed. */
static void gen_assign_cast(CType *dt)
    CType *st, *type1, *type2;
    char buf1[256], buf2[256];

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (sbt == VT_VOID || dbt == VT_VOID) {
        if (sbt == VT_VOID && dbt == VT_VOID)
            /* It is Ok if both are void;
               gcc accepts this program */
            tcc_error("cannot cast from/to void");
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
        type1 = pointed_type(dt);
        /* a function is implicitly a function pointer */
        if (sbt == VT_FUNC) {
            if ((type1->t & VT_BTYPE) != VT_VOID &&
                !is_compatible_types(pointed_type(dt), st))
                tcc_warning("assignment from incompatible pointer type");
        type2 = pointed_type(st);
        if ((type1->t & VT_BTYPE) == VT_VOID ||
            (type2->t & VT_BTYPE) == VT_VOID) {
            /* void * can match anything */
            //printf("types %08x %08x\n", type1->t, type2->t);
            /* exact type match, except for qualifiers */
            if (!is_compatible_unqualified_types(type1, type2)) {
                /* Like GCC don't warn by default for mere changes
                   in pointer target signedness. Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets. */
                if ((type1->t & (VT_BTYPE|VT_LONG)) != (type2->t & (VT_BTYPE|VT_LONG))
                    || IS_ENUM(type1->t) || IS_ENUM(type2->t)
                    tcc_warning("assignment from incompatible pointer type");
        /* check const and volatile */
        if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
            (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
            tcc_warning("assignment discards qualifiers from pointer target type");
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        /* XXX: more tests */
        if (!is_compatible_unqualified_types(dt, st)) {
            type_to_str(buf1, sizeof(buf1), st, NULL);
            type_to_str(buf2, sizeof(buf2), dt, NULL);
            tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
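/* Illustrative sketch (hypothetical inputs, not part of the compiler): the
   diagnostics above fire for assignments such as

       int *p = 42;                     triggers "assignment makes pointer from integer without a cast"
       int i; float *f = &i;            triggers "assignment from incompatible pointer type"
       const char *cc = "x"; char *c = cc;
                                        triggers "assignment discards qualifiers from pointer target type"

   while assignments to or from void * are accepted silently. */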
/* store vtop in lvalue pushed on stack */
ST_FUNC void vstore(void)
    int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
         (sbt == VT_INT && dbt == VT_SHORT))
        && !(vtop->type.t & VT_BITFIELD)) {
        /* optimize char/short casts */
        delayed_cast = VT_MUSTCAST;
        vtop->type.t = ft & VT_TYPE;
        /* XXX: factorize */
        if (ft & VT_CONSTANT)
            tcc_warning("assignment of read-only location");
        if (!(ft & VT_BITFIELD))
            gen_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        /* XXX: optimize if small size */
        size = type_size(&vtop->type, &align);
        vtop->type.t = VT_PTR;
        /* address of memcpy() */
            vpush_global_sym(&func_old_type, TOK_memcpy8);
        else if (!(align & 3))
            vpush_global_sym(&func_old_type, TOK_memcpy4);
            /* Use memmove, rather than memcpy, as dest and src may be same: */
            vpush_global_sym(&func_old_type, TOK_memmove);
        vtop->type.t = VT_PTR;
        /* leave source on stack */
    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */
        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];
        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
        if ((ft & VT_BTYPE) == VT_BOOL) {
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (r == VT_STRUCT) {
            gen_cast_s((ft & VT_BTYPE) == VT_LLONG ? VT_LLONG : VT_INT);
            store_packed_bf(bit_pos, bit_size);
            unsigned long long mask = (1ULL << bit_size) - 1;
            if ((ft & VT_BTYPE) != VT_BOOL) {
                if ((vtop[-1].type.t & VT_BTYPE) == VT_LLONG)
                    vpushi((unsigned)mask);
            /* duplicate destination */
            /* load destination, mask and or with source */
            if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
                vpushll(~(mask << bit_pos));
                vpushi(~((unsigned)mask << bit_pos));
            /* ... and discard */
    } else if (dbt == VT_VOID) {
#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
#ifdef TCC_TARGET_X86_64
        if ((ft & VT_BTYPE) == VT_LDOUBLE) {
        } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
        r = gv(rc); /* generate value */
        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            t = get_reg(RC_INT);
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            vtop[-1].r = t | VT_LVAL;
        /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
        if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
            int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
        if ((ft & VT_BTYPE) == VT_LLONG) {
            int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
            vtop[-1].type.t = load_type;
            /* convert to int to increment easily */
            vtop->type.t = addr_type;
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
        vtop->r |= delayed_cast;
/* post defines POST/PRE add. c is the token ++ or -- */
ST_FUNC void inc(int post, int c)
    vdup(); /* save lvalue */
    gv_dup(); /* duplicate value */
    vpushi(c - TOK_MID);
    vstore(); /* store value */
    vpop(); /* if post op, return saved value */

ST_FUNC void parse_mult_str (CString *astr, const char *msg)
    /* read the string */
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(astr, tokc.str.data, -1);
    cstr_ccat(astr, '\0');

/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0. */
static int exact_log2p1(int i)
    for (ret = 1; i >= 1 << 8; ret += 8)
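/* Example values following directly from the contract above:
   exact_log2p1(0) == 0, exact_log2p1(1) == 1, exact_log2p1(2) == 2,
   exact_log2p1(8) == 4, exact_log2p1(256) == 9.
   parse_attribute() below uses it to store aligned(n) compactly as
   log2(n) + 1 in ad->a.aligned. */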
/* Parse __attribute__((...)) GNUC extension. */
static void parse_attribute(AttributeDef *ad)
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
            parse_mult_str(&astr, "section name");
            ad->section = find_section(tcc_state, (char *)astr.data);
            parse_mult_str(&astr, "alias(\"target\")");
            ad->alias_target = /* save string as token, for later */
                tok_alloc((char*)astr.data, astr.size-1)->tok;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            parse_mult_str(&astr,
                           "visibility(\"default|hidden|internal|protected\")");
            if (!strcmp (astr.data, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr.data, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr.data, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr.data, "protected"))
                ad->a.visibility = STV_PROTECTED;
                expect("visibility(\"default|hidden|internal|protected\")");
            if (n <= 0 || (n & (n - 1)) != 0)
                tcc_error("alignment must be a positive power of two");
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            /* currently, no need to handle it because tcc does not
               track unused objects */
            /* currently, no need to handle it because tcc does not
               track unused objects */
            ad->f.func_call = FUNC_CDECL;
            ad->f.func_call = FUNC_STDCALL;
#ifdef TCC_TARGET_I386
            ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            ad->f.func_call = FUNC_FASTCALLW;
                ad->attr_mode = VT_LLONG + 1;
                ad->attr_mode = VT_BYTE + 1;
                ad->attr_mode = VT_SHORT + 1;
                ad->attr_mode = VT_INT + 1;
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
            ad->a.dllexport = 1;
            ad->a.dllimport = 1;
            if (tcc_state->warn_unsupported)
                tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
                int parenthesis = 0;
                    else if (tok == ')')
                } while (parenthesis && tok != -1);
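/* Illustrative use (hypothetical declarations, not from this file): the
   handlers above accept declarations such as

       int buf[32] __attribute__((aligned(16), section(".mybuf")));
       void fn(void) __attribute__((stdcall, visibility("hidden")));

   where aligned() must be a positive power of two, section()/alias() take
   a string, and unknown attributes are skipped, with a warning when
   warn_unsupported is set. */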
static Sym * find_field (CType *type, int v)
    while ((s = s->next) != NULL) {
        if ((s->v & SYM_FIELD) &&
            (s->type.t & VT_BTYPE) == VT_STRUCT &&
            (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
            Sym *ret = find_field (&s->type, v);

static void struct_add_offset (Sym *s, int offset)
    while ((s = s->next) != NULL) {
        if ((s->v & SYM_FIELD) &&
            (s->type.t & VT_BTYPE) == VT_STRUCT &&
            (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
            struct_add_offset(s->type.ref, offset);
static void struct_layout(CType *type, AttributeDef *ad)
    int size, align, maxalign, offset, c, bit_pos, bit_size;
    int packed, a, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    int pragma_pack = *tcc_state->pack_stack_ptr;

    prevbt = VT_STRUCT; /* make it never match */
    for (f = type->ref->next; f; f = f->next) {
        if (f->type.t & VT_BITFIELD)
            bit_size = BIT_SIZE(f->type.t);
        size = type_size(&f->type, &align);
        a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
        if (pcc && bit_size == 0) {
            /* in pcc mode, packing does not affect zero-width bitfields */
            /* in pcc mode, attribute packed overrides if set. */
            if (pcc && (f->a.packed || ad->a.packed))
            /* pragma pack overrides align if lesser and packs bitfields always */
                if (pragma_pack < align)
                    align = pragma_pack;
                /* in pcc mode pragma pack also overrides individual align */
                if (pcc && pragma_pack < a)
        /* some individual align was specified */
        if (type->ref->type.t == VT_UNION) {
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
        } else if (bit_size < 0) {
            c += (bit_pos + 7) >> 3;
            c = (c + align - 1) & -align;
            /* A bit-field. Layout is more complicated. There are two
               options: PCC (GCC) compatible and MS compatible */
            /* In PCC layout a bit-field is placed adjacent to the
               preceding bit-fields, except if:
               - an individual alignment was given
               - it would overflow its base type container and
                 there is no packing */
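            /* Rough illustration of the PCC rule above (hypothetical struct,
               assuming a 4-byte int): in

                   struct s { int a : 20; int b : 15; int c : 5; };

               'a' and 'b' do not both fit in the first int-sized unit, so
               'b' starts a new unit unless the field or struct is packed;
               'c' then shares that second unit with 'b'. */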
            if (bit_size == 0) {
                c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
            } else if (f->a.aligned) {
            } else if (!packed) {
                int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
                if (ofs > size / align)
            /* in pcc mode, long long bitfields have type int if they fit */
            if (size == 8 && bit_size <= 32)
                f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
            while (bit_pos >= align * 8)
                c += align, bit_pos -= align * 8;
            /* In PCC layout named bit-fields influence the alignment
               of the containing struct using the base type's alignment,
               except for packed fields (which here have correct align). */
            if (f->v & SYM_FIRST_ANOM
                // && bit_size // ??? gcc on ARM/rpi does that
            bt = f->type.t & VT_BTYPE;
            if ((bit_pos + bit_size > size * 8)
                || (bit_size > 0) == (bt != prevbt)
                c = (c + align - 1) & -align;
                /* In MS bitfield mode a bit-field run always uses
                   at least as many bits as the underlying type.
                   To start a new run it's also required that this
                   or the last bit-field had non-zero width. */
                if (bit_size || prev_bit_size)
            /* In MS layout the record's alignment is normally
               influenced by the field, except for a zero-width
               field at the start of a run (but by further zero-width
               fields it is again). */
            if (bit_size == 0 && prevbt != bt)
            prev_bit_size = bit_size;
        f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                    | (bit_pos << VT_STRUCT_SHIFT);
        bit_pos += bit_size;
        if (align > maxalign)
        printf("set field %s offset %-2d size %-2d align %-2d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos %-2d bits %-2d",
        if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
            /* An anonymous struct/union. Adjust member offsets
               to reflect the real offset of our containing struct.
               Also set the offset of this anon member inside
               the outer struct to be zero. Via this it
               works when accessing the field offset directly
               (from base object), as well as when recursing
               members in initializer handling. */
            int v2 = f->type.ref->v;
            if (!(v2 & SYM_FIELD) &&
                (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                /* This happens only with MS extensions. The
                   anon member has a named struct type, so it
                   potentially is shared with other references.
                   We need to unshare members so we can modify
                   them. */
                f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
                                       &f->type.ref->type, 0,
                pps = &f->type.ref->next;
                while ((ass = ass->next) != NULL) {
                    *pps = sym_push(ass->v, &ass->type, 0, ass->c);
                    pps = &((*pps)->next);
            struct_add_offset(f->type.ref, offset);

    c += (bit_pos + 7) >> 3;
    /* store size and alignment */
    a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
    if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
        /* can happen if individual align for some member was given. In
           this case MSVC ignores maxalign when aligning the size */
    c = (c + a - 1) & -a;
    printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);

    /* check whether we can access bitfields by their type */
    for (f = type->ref->next; f; f = f->next) {
        if (0 == (f->type.t & VT_BITFIELD))
        bit_size = BIT_SIZE(f->type.t);
        bit_pos = BIT_POS(f->type.t);
        size = type_size(&f->type, &align);
        if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
        /* try to access the field using a different type */
        c0 = -1, s = align = 1;
            px = f->c * 8 + bit_pos;
            cx = (px >> 3) & -align;
            px = px - (cx << 3);
            s = (px + bit_size + 7) >> 3;
            s = type_size(&t, &align);
            if (px + bit_size <= s * 8 && cx + s <= c) {
                /* update offset and bit position */
                f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                            | (bit_pos << VT_STRUCT_SHIFT);
                printf("FIX field %s offset %-2d size %-2d align %-2d "
                       "pos %-2d bits %-2d\n",
                       get_tok_str(f->v & ~SYM_FIELD, NULL),
                       cx, s, align, px, bit_size);
            /* fall back to load/store single-byte wise */
            f->auxtype = VT_STRUCT;
            printf("FIX field %s : load byte-wise\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL));
/* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
static void struct_decl(CType *type, int u)
    int v, c, size, align, flexible;
    int bit_size, bsize, bt;
    AttributeDef ad, ad1;

    memset(&ad, 0, sizeof ad);
    parse_attribute(&ad);
    /* struct already defined ? return it */
        expect("struct/union/enum name");
    if (s && (s->sym_scope == local_scope || tok != '{')) {
        if (u == VT_ENUM && IS_ENUM(s->type.t))
        tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
    /* Record the original enum/struct/union token. */
    type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
    type->t = s->type.t;
            tcc_error("struct/union/enum already defined");
        /* cannot be empty */
        /* non empty enums are not allowed */
            long long ll = 0, pl = 0, nl = 0;
            /* enum symbols have static storage */
            t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
                    expect("identifier");
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                    ll = expr_const64();
                ss = sym_push(v, &t, VT_CONST, 0);
                *ps = ss, ps = &ss->next;
                /* NOTE: we accept a trailing comma */
            /* set integral type of the enum */
                if (pl != (unsigned)pl)
                    t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            } else if (pl != (int)pl || nl != (int)nl)
                t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            s->type.t = type->t = t.t | VT_ENUM;
            /* set type for enum members */
            for (ss = s->next; ss; ss = ss->next) {
                if (ll == (int)ll) /* default is int if it fits */
                if (t.t & VT_UNSIGNED) {
                    ss->type.t |= VT_UNSIGNED;
                    if (ll == (unsigned)ll)
                ss->type.t = (ss->type.t & ~VT_BTYPE)
                             | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
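            /* Illustration (hypothetical enums): with the rules above, an
               enum whose values all fit in int keeps an int base type,
               e.g. enum e1 { A = 1, B = -1 };, while something like
               enum e2 { C = 0x100000000 }; needs a 64-bit base type and its
               members whose values do not fit in int get that wider type
               as well. */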
            while (tok != '}') {
                if (!parse_btype(&btype, &ad1)) {
                    tcc_error("flexible array member '%s' not at the end of struct",
                              get_tok_str(v, NULL));
                    type_decl(&type1, &ad1, &v, TYPE_DIRECT);
                        if ((type1.t & VT_BTYPE) != VT_STRUCT)
                            expect("identifier");
                            int v = btype.ref->v;
                            if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                if (tcc_state->ms_extensions == 0)
                                    expect("identifier");
                    if (type_size(&type1, &align) < 0) {
                        if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
                            tcc_error("field '%s' has incomplete type",
                                      get_tok_str(v, NULL));
                    if ((type1.t & VT_BTYPE) == VT_FUNC ||
                        (type1.t & VT_STORAGE))
                        tcc_error("invalid type for '%s'",
                                  get_tok_str(v, NULL));
                    bit_size = expr_const();
                    /* XXX: handle v = 0 case for messages */
                        tcc_error("negative width in bit-field '%s'",
                                  get_tok_str(v, NULL));
                    if (v && bit_size == 0)
                        tcc_error("zero width for bit-field '%s'",
                                  get_tok_str(v, NULL));
                    parse_attribute(&ad1);
                size = type_size(&type1, &align);
                if (bit_size >= 0) {
                    bt = type1.t & VT_BTYPE;
                        tcc_error("bitfields must have scalar type");
                    if (bit_size > bsize) {
                        tcc_error("width of '%s' exceeds its type",
                                  get_tok_str(v, NULL));
                    } else if (bit_size == bsize
                               && !ad.a.packed && !ad1.a.packed) {
                        /* no need for bit fields */
                    } else if (bit_size == 64) {
                        tcc_error("field width 64 not implemented");
                        type1.t = (type1.t & ~VT_STRUCT_MASK)
                                  | (bit_size << (VT_STRUCT_SHIFT + 6));
                if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                    /* Remember we've seen a real field to check
                       for placement of flexible array member. */
                /* If member is a struct or bit-field, enforce
                   placing into the struct (as anonymous). */
                    ((type1.t & VT_BTYPE) == VT_STRUCT ||
                    ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
                if (tok == ';' || tok == TOK_EOF)
            parse_attribute(&ad);
            struct_layout(type, &ad);
static void sym_to_attr(AttributeDef *ad, Sym *s)
    if (s->a.aligned && 0 == ad->a.aligned)
        ad->a.aligned = s->a.aligned;
    if (s->f.func_call && 0 == ad->f.func_call)
        ad->f.func_call = s->f.func_call;
    if (s->f.func_type && 0 == ad->f.func_type)
        ad->f.func_type = s->f.func_type;

/* Add type qualifiers to a type. If the type is an array then the qualifiers
   are added to the element type, copied because it could be a typedef. */
static void parse_btype_qualify(CType *type, int qualifiers)
    while (type->t & VT_ARRAY) {
        type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
        type = &type->ref->type;
    type->t |= qualifiers;
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it. */
static int parse_btype(CType *type, AttributeDef *ad)
    int t, u, bt, st, type_found, typespec_found, g;

    memset(ad, 0, sizeof(AttributeDef));
            /* currently, we really ignore extension */
            if (u == VT_SHORT || u == VT_LONG) {
                if (st != -1 || (bt != -1 && bt != VT_INT))
        tmbt:       tcc_error("too many basic types");
                if (bt != -1 || (st != -1 && u != VT_INT))
            t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            if ((t & VT_BTYPE) == VT_DOUBLE) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
#ifdef TCC_TARGET_ARM64
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            struct_decl(&type1, VT_ENUM);
            type->ref = type1.ref;
            struct_decl(&type1, VT_STRUCT);
            struct_decl(&type1, VT_UNION);
        /* type modifiers */
            parse_btype_qualify(type, VT_CONSTANT);
            parse_btype_qualify(type, VT_VOLATILE);
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
                tcc_error("multiple storage classes");
        /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->attr_mode) {
                u = ad->attr_mode - 1;
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            sym_to_attr(ad, type1.ref);
            if (!s || !(s->type.t & VT_TYPEDEF))
            t &= ~(VT_BTYPE|VT_LONG);
            u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
            type->t = (s->type.t & ~VT_TYPEDEF) | u;
            type->ref = s->type.ref;
                parse_btype_qualify(type, t);
            /* get attributes from typedef */
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
    /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
    bt = t & (VT_BTYPE|VT_LONG);
        t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
#ifdef TCC_TARGET_PE
    if (bt == VT_LDOUBLE)
        t = (t & ~(VT_BTYPE|VT_LONG)) | VT_DOUBLE;
/* convert a function parameter type (array to pointer and function to
   function pointer) */
static inline void convert_parameter_type(CType *pt)
    /* remove const and volatile qualifiers (XXX: const could be used
       to indicate a const function parameter) */
    pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
    /* array must be transformed to pointer according to ANSI C */
    if ((pt->t & VT_BTYPE) == VT_FUNC) {
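/* Example of the adjustment above (hypothetical prototype): a declaration
   like void f(int a[10], int g(void)); is rewritten so that 'a' has type
   int * and 'g' becomes a function pointer, as ANSI C requires for
   parameters. */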
ST_FUNC void parse_asm_str(CString *astr)
    parse_mult_str(astr, "string constant");

/* Parse an asm label and return the token */
static int asm_label_instr(void)
    parse_asm_str(&astr);
    printf("asm_alias: \"%s\"\n", (char *)astr.data);
    v = tok_alloc(astr.data, astr.size - 1)->tok;
static int post_type(CType *type, AttributeDef *ad, int storage, int td)
    int n, l, t1, arg_size, align;
    Sym **plast, *s, *first;

        /* function type, or recursive declarator (return if so) */
        if (td && !(td & TYPE_ABSTRACT))
        else if (parse_btype(&pt, &ad1))
                /* read param name and compute offset */
                if (l != FUNC_OLD) {
                    if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
                    type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
                    if ((pt.t & VT_BTYPE) == VT_VOID)
                        tcc_error("parameter declared as void");
                    arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
                        expect("identifier");
                    pt.t = VT_VOID; /* invalid type */
                convert_parameter_type(&pt);
                s = sym_push(n | SYM_FIELD, &pt, 0, 0);
                if (l == FUNC_NEW && tok == TOK_DOTS) {
                if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
                    tcc_error("invalid type");
        /* if no parameters, then old type prototype */
        /* NOTE: const is ignored in returned type as it has a special
           meaning in gcc / C++ */
        type->t &= ~VT_CONSTANT;
        /* some ancient pre-K&R C allows a function to return an array
           and the array brackets to be put after the arguments, such
           that "int c()[]" means something like "int[] c()" */
            skip(']'); /* only handle simple "[]" */
        /* we push an anonymous symbol which will contain the function prototype */
        ad->f.func_args = arg_size;
        ad->f.func_type = l;
        s = sym_push(SYM_FIELD, type, 0, 0);
    } else if (tok == '[') {
        int saved_nocode_wanted = nocode_wanted;
        /* array definition */
        if (tok == TOK_RESTRICT1)
            if (!local_stack || (storage & VT_STATIC))
                vpushi(expr_const());
                /* VLAs (which can only happen with local_stack && !VT_STATIC)
                   length must always be evaluated, even under nocode_wanted,
                   so that its size slot is initialized (e.g. under sizeof). */
            if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
                    tcc_error("invalid array size");
                if (!is_integer_btype(vtop->type.t & VT_BTYPE))
                    tcc_error("size of variable length array should be an integer");
        /* parse next post type */
        post_type(type, ad, storage, 0);
        if (type->t == VT_FUNC)
            tcc_error("declaration of an array of functions");
        t1 |= type->t & VT_VLA;
            loc -= type_size(&int_type, &align);
            vla_runtime_type_size(type, &align);
            vset(&int_type, VT_LOCAL|VT_LVAL, n);
        nocode_wanted = saved_nocode_wanted;
        /* we push an anonymous symbol which will contain the array
           element type */
        s = sym_push(SYM_FIELD, type, 0, n);
        type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
;
4394 /* Parse a type declarator (except basic type), and return the type
4395 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4396 expected. 'type' should contain the basic type. 'ad' is the
4397 attribute definition of the basic type. It can be modified by
4398 type_decl(). If this (possibly abstract) declarator is a pointer chain
4399 it returns the innermost pointed to type (equals *type, but is a different
4400 pointer), otherwise returns type itself, that's used for recursive calls. */
4401 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4404 int qualifiers
, storage
;
4406 /* recursive type, remove storage bits first, apply them later again */
4407 storage
= type
->t
& VT_STORAGE
;
4408 type
->t
&= ~VT_STORAGE
;
4411 while (tok
== '*') {
4419 qualifiers
|= VT_CONSTANT
;
4424 qualifiers
|= VT_VOLATILE
;
4430 /* XXX: clarify attribute handling */
4431 case TOK_ATTRIBUTE1
:
4432 case TOK_ATTRIBUTE2
:
4433 parse_attribute(ad
);
4437 type
->t
|= qualifiers
;
4439 /* innermost pointed to type is the one for the first derivation */
4440 ret
= pointed_type(type
);
4444 /* This is possibly a parameter type list for abstract declarators
4445 ('int ()'), use post_type for testing this. */
4446 if (!post_type(type
, ad
, 0, td
)) {
4447 /* It's not, so it's a nested declarator, and the post operations
4448 apply to the innermost pointed to type (if any). */
4449 /* XXX: this is not correct to modify 'ad' at this point, but
4450 the syntax is not clear */
4451 parse_attribute(ad
);
4452 post
= type_decl(type
, ad
, v
, td
);
4455 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4456 /* type identifier */
4460 if (!(td
& TYPE_ABSTRACT
))
4461 expect("identifier");
4464 post_type(post
, ad
, storage
, 0);
4465 parse_attribute(ad
);
4470 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4471 ST_FUNC
int lvalue_type(int t
)
4476 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4478 else if (bt
== VT_SHORT
)
4482 if (t
& VT_UNSIGNED
)
4483 r
|= VT_LVAL_UNSIGNED
;
4487 /* indirection with full error checking and bound check */
4488 ST_FUNC
void indir(void)
4490 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4491 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4495 if (vtop
->r
& VT_LVAL
)
4497 vtop
->type
= *pointed_type(&vtop
->type
);
4498 /* Arrays and functions are never lvalues */
4499 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4500 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4501 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4502 /* if bound checking, the referenced pointer must be checked */
4503 #ifdef CONFIG_TCC_BCHECK
4504 if (tcc_state
->do_bounds_check
)
4505 vtop
->r
|= VT_MUSTBOUND
;
4510 /* pass a parameter to a function and do type checking and casting */
4511 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4516 func_type
= func
->f
.func_type
;
4517 if (func_type
== FUNC_OLD
||
4518 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4519 /* default casting : only need to convert float to double */
4520 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4521 gen_cast_s(VT_DOUBLE
);
4522 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4523 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4524 type
.ref
= vtop
->type
.ref
;
4527 } else if (arg
== NULL
) {
4528 tcc_error("too many arguments to function");
4531 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4532 gen_assign_cast(&type
);
4536 /* parse an expression and return its type without any side effect. */
4537 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4546 /* parse an expression of the form '(type)' or '(expr)' and return its
4548 static void parse_expr_type(CType
*type
)
4554 if (parse_btype(type
, &ad
)) {
4555 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4557 expr_type(type
, gexpr
);
4562 static void parse_type(CType
*type
)
4567 if (!parse_btype(type
, &ad
)) {
4570 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4573 static void parse_builtin_params(int nc
, const char *args
)
4580 while ((c
= *args
++)) {
4584 case 'e': expr_eq(); continue;
4585 case 't': parse_type(&t
); vpush(&t
); continue;
4586 default: tcc_error("internal error"); break;
4594 ST_FUNC
void unary(void)
4596 int n
, t
, align
, size
, r
, sizeof_caller
;
4601 sizeof_caller
= in_sizeof
;
4604 /* XXX: GCC 2.95.3 does not generate a table although it should be
4612 #ifdef TCC_TARGET_PE
4613 t
= VT_SHORT
|VT_UNSIGNED
;
4621 vsetc(&type
, VT_CONST
, &tokc
);
4625 t
= VT_INT
| VT_UNSIGNED
;
4631 t
= VT_LLONG
| VT_UNSIGNED
;
4643 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4646 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4648 case TOK___FUNCTION__
:
4650 goto tok_identifier
;
4656 /* special function name identifier */
4657 len
= strlen(funcname
) + 1;
4658 /* generate char[len] type */
4663 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4664 if (!NODATA_WANTED
) {
4665 ptr
= section_ptr_add(data_section
, len
);
4666 memcpy(ptr
, funcname
, len
);
4672 #ifdef TCC_TARGET_PE
4673 t
= VT_SHORT
| VT_UNSIGNED
;
4679 /* string parsing */
4681 if (tcc_state
->char_is_unsigned
)
4682 t
= VT_BYTE
| VT_UNSIGNED
;
4684 if (tcc_state
->warn_write_strings
)
4689 memset(&ad
, 0, sizeof(AttributeDef
));
4690 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4695 if (parse_btype(&type
, &ad
)) {
4696 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4698 /* check ISOC99 compound literal */
4700 /* data is allocated locally by default */
4705 /* all except arrays are lvalues */
4706 if (!(type
.t
& VT_ARRAY
))
4707 r
|= lvalue_type(type
.t
);
4708 memset(&ad
, 0, sizeof(AttributeDef
));
4709 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4711 if (sizeof_caller
) {
4718 } else if (tok
== '{') {
4719 int saved_nocode_wanted
= nocode_wanted
;
4721 tcc_error("expected constant");
4722 /* save all registers */
4724 /* statement expression : we do not accept break/continue
4725 inside as GCC does. We do retain the nocode_wanted state,
4726 as statement expressions can't ever be entered from the
4727 outside, so any reactivation of code emission (from labels
4728 or loop heads) can be disabled again after the end of it. */
4729 block(NULL
, NULL
, 1);
4730 nocode_wanted
= saved_nocode_wanted
;
4745 /* functions names must be treated as function pointers,
4746 except for unary '&' and sizeof. Since we consider that
4747 functions are not lvalues, we only have to handle it
4748 there and in function calls. */
4749 /* arrays can also be used although they are not lvalues */
4750 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4751 !(vtop
->type
.t
& VT_ARRAY
))
4753 mk_pointer(&vtop
->type
);
4759 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4760 gen_cast_s(VT_BOOL
);
4761 vtop
->c
.i
= !vtop
->c
.i
;
4762 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4766 vseti(VT_JMP
, gvtst(1, 0));
4778 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4779 tcc_error("pointer not accepted for unary plus");
4780 /* In order to force cast, we add zero, except for floating point
4781 where we really need an noop (otherwise -0.0 will be transformed
4783 if (!is_float(vtop
->type
.t
)) {
4794 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
4795 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
4796 size
= type_size(&type
, &align
);
4797 if (s
&& s
->a
.aligned
)
4798 align
= 1 << (s
->a
.aligned
- 1);
4799 if (t
== TOK_SIZEOF
) {
4800 if (!(type
.t
& VT_VLA
)) {
4802 tcc_error("sizeof applied to an incomplete type");
4805 vla_runtime_type_size(&type
, &align
);
4810 vtop
->type
.t
|= VT_UNSIGNED
;
4813 case TOK_builtin_expect
:
4814 /* __builtin_expect is a no-op for now */
4815 parse_builtin_params(0, "ee");
4818 case TOK_builtin_types_compatible_p
:
4819 parse_builtin_params(0, "tt");
4820 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4821 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4822 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4826 case TOK_builtin_choose_expr
:
4853 case TOK_builtin_constant_p
:
4854 parse_builtin_params(1, "e");
4855 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4859 case TOK_builtin_frame_address
:
4860 case TOK_builtin_return_address
:
4866 if (tok
!= TOK_CINT
) {
4867 tcc_error("%s only takes positive integers",
4868 tok1
== TOK_builtin_return_address
?
4869 "__builtin_return_address" :
4870 "__builtin_frame_address");
4872 level
= (uint32_t)tokc
.i
;
4877 vset(&type
, VT_LOCAL
, 0); /* local frame */
4879 mk_pointer(&vtop
->type
);
4880 indir(); /* -> parent frame */
4882 if (tok1
== TOK_builtin_return_address
) {
4883 // assume return address is just above frame pointer on stack
4886 mk_pointer(&vtop
->type
);
4891 #ifdef TCC_TARGET_X86_64
4892 #ifdef TCC_TARGET_PE
4893 case TOK_builtin_va_start
:
4894 parse_builtin_params(0, "ee");
4895 r
= vtop
->r
& VT_VALMASK
;
4899 tcc_error("__builtin_va_start expects a local variable");
4901 vtop
->type
= char_pointer_type
;
4906 case TOK_builtin_va_arg_types
:
4907 parse_builtin_params(0, "t");
4908 vpushi(classify_x86_64_va_arg(&vtop
->type
));
4915 #ifdef TCC_TARGET_ARM64
4916 case TOK___va_start
: {
4917 parse_builtin_params(0, "ee");
4921 vtop
->type
.t
= VT_VOID
;
4924 case TOK___va_arg
: {
4925 parse_builtin_params(0, "et");
4933 case TOK___arm64_clear_cache
: {
4934 parse_builtin_params(0, "ee");
4937 vtop
->type
.t
= VT_VOID
;
    /* pre operations */
        t = vtop->type.t & VT_BTYPE;
        /* In IEEE negate(x) isn't subtract(0,x), but rather
           subtract(-0, x). */
            vtop->c.f = -1.0 * 0.0;
        else if (t == VT_DOUBLE)
            vtop->c.d = -1.0 * 0.0;
            vtop->c.ld = -1.0 * 0.0;
        goto tok_identifier;
        /* allow to take the address of a label */
        if (tok < TOK_UIDENT)
            expect("label identifier");
        s = label_find(tok);
            s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;
            s->type.t = VT_VOID;
            mk_pointer(&s->type);
            s->type.t |= VT_STATIC;
        vpushsym(&s->type, s);
        CType controlling_type;
        int has_default = 0;
        TokenString *str = NULL;

        expr_type(&controlling_type, expr_eq);
        controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
            if (tok == TOK_DEFAULT) {
                    tcc_error("too many 'default'");
                AttributeDef ad_tmp;
                parse_btype(&cur_type, &ad_tmp);
                type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
                if (compare_types(&controlling_type, &cur_type, 0)) {
                        tcc_error("type match twice");
                    skip_or_save_block(&str);
                    skip_or_save_block(NULL);
            type_to_str(buf, sizeof buf, &controlling_type, NULL);
            tcc_error("type '%s' does not match any association", buf);
        begin_macro(str, 1);
        // special qnan , snan and infinity values
        vtop->type.t = VT_FLOAT;
        goto special_math_val;
        goto special_math_val;
            expect("identifier");
        if (!s || IS_ASM_SYM(s)) {
            const char *name = get_tok_str(t, NULL);
                tcc_error("'%s' undeclared", name);
            /* for simple function calls, we tolerate undeclared
               external reference to int() function */
            if (tcc_state->warn_implicit_function_declaration
#ifdef TCC_TARGET_PE
                /* people must be warned about using undeclared WINAPI functions
                   (which usually start with uppercase letter) */
                || (name[0] >= 'A' && name[0] <= 'Z')
                tcc_warning("implicit declaration of function '%s'", name);
            s = external_global_sym(t, &func_old_type, 0);
        /* A symbol that has a register is a local register variable,
           which starts out as VT_LOCAL value. */
        if ((r & VT_VALMASK) < VT_CONST)
            r = (r & ~VT_VALMASK) | VT_LOCAL;
        vset(&s->type, r, s->c);
        /* Point to s as backpointer (even without r&VT_SYM).
           Will be used by at least the x86 inline asm parser. */
        } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
            vtop->c.i = s->enum_val;

    /* post operations */
        if (tok == TOK_INC || tok == TOK_DEC) {
        } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
            if (tok == TOK_ARROW)
            qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
            /* expect pointer on structure */
            if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
                expect("struct or union");
            if (tok == TOK_CDOUBLE)
                expect("field name");
            if (tok == TOK_CINT || tok == TOK_CUINT)
                expect("field name");
            s = find_field(&vtop->type, tok);
                tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
            /* add field offset to pointer */
            vtop->type = char_pointer_type; /* change type to 'char *' */
            /* change type to field type, and set to lvalue */
            vtop->type = s->type;
            vtop->type.t |= qualifiers;
            /* an array is never an lvalue */
            if (!(vtop->type.t & VT_ARRAY)) {
                vtop->r |= lvalue_type(vtop->type.t);
#ifdef CONFIG_TCC_BCHECK
                /* if bound checking, the referenced pointer must be checked */
                if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
                    vtop->r |= VT_MUSTBOUND;
        } else if (tok == '[') {
        } else if (tok == '(') {
            int nb_args, ret_nregs, ret_align, regsize, variadic;

            if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
                /* pointer test (no array accepted) */
                if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
                    vtop->type = *pointed_type(&vtop->type);
                    if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
                    expect("function pointer");
            vtop->r &= ~VT_LVAL; /* no lvalue */
            /* get return type */
            sa = s->next; /* first parameter */
            nb_args = regsize = 0;
            /* compute first implicit argument if a structure is returned */
            if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
                variadic = (s->f.func_type == FUNC_ELLIPSIS);
                ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
                                       &ret_align, &regsize);
                /* get some space for the returned structure */
                size = type_size(&s->type, &align);
#ifdef TCC_TARGET_ARM64
                /* On arm64, a small struct is returned in registers.
                   It is much easier to write it to memory if we know
                   that we are allowed to write some extra bytes, so
                   round the allocated space up to a power of 2: */
                while (size & (size - 1))
                    size = (size | (size - 1)) + 1;
                loc = (loc - size) & -align;
                ret.r = VT_LOCAL | VT_LVAL;
                /* pass it as 'int' to avoid structure arg passing */
                vseti(VT_LOCAL, loc);
                /* return in register */
                if (is_float(ret.type.t)) {
                    ret.r = reg_fret(ret.type.t);
#ifdef TCC_TARGET_X86_64
                    if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
#ifndef TCC_TARGET_ARM64
#ifdef TCC_TARGET_X86_64
                    if ((ret.type.t & VT_BTYPE) == VT_QLONG)
                    if ((ret.type.t & VT_BTYPE) == VT_LLONG)
                gfunc_param_typed(s, sa);
                tcc_error("too few arguments to function");
            gfunc_call(nb_args);
            for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
                vsetc(&ret.type, r, &ret.c);
                vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
            /* handle packed struct return */
            if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
                size = type_size(&s->type, &align);
                /* We're writing whole regs often, make sure there's enough
                   space. Assume register size is power of 2. */
                if (regsize > align)
                loc = (loc - size) & -align;
                    vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
                    if (--ret_nregs == 0)
                vset(&s->type, VT_LOCAL | VT_LVAL, addr);
ST_FUNC void expr_prod(void)
    while (tok == '*' || tok == '/' || tok == '%') {

ST_FUNC void expr_sum(void)
    while (tok == '+' || tok == '-') {

static void expr_shift(void)
    while (tok == TOK_SHL || tok == TOK_SAR) {

static void expr_cmp(void)
    while ((tok >= TOK_ULE && tok <= TOK_GT) ||
           tok == TOK_ULT || tok == TOK_UGE) {

static void expr_cmpeq(void)
    while (tok == TOK_EQ || tok == TOK_NE) {

static void expr_and(void)
    while (tok == '&') {

static void expr_xor(void)
    while (tok == '^') {

static void expr_or(void)
    while (tok == '|') {

static void expr_land(void)
    if (tok == TOK_LAND) {
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            gen_cast_s(VT_BOOL);
        while (tok == TOK_LAND) {
            if (tok != TOK_LAND) {

static void expr_lor(void)
    if (tok == TOK_LOR) {
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            gen_cast_s(VT_BOOL);
        while (tok == TOK_LOR) {
            if (tok != TOK_LOR) {
/* Assuming vtop is a value used in a conditional context
   (i.e. compared with zero) return 0 if it's false, 1 if
   true and -1 if it can't be statically determined. */
static int condition_3way(void)
    if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
        (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
        gen_cast_s(VT_BOOL);

static void expr_cond(void)
    int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
    CType type, type1, type2;

    c = condition_3way();
    g = (tok == ':' && gnu_ext);
        /* needed to avoid having different registers saved in
           each branch */
        if (is_float(vtop->type.t)) {
#ifdef TCC_TARGET_X86_64
            if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
        sv = *vtop; /* save value to handle it later */
        vtop--; /* no vpop so that FP stack is not flushed */
        bt1 = t1 & VT_BTYPE;
        bt2 = t2 & VT_BTYPE;
        /* cast operands to correct type according to ISOC rules */
        if (is_float(bt1) || is_float(bt2)) {
            if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
                type.t = VT_LDOUBLE;
            } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
        } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
            /* cast to biggest op */
            type.t = VT_LLONG | VT_LONG;
            if (bt1 == VT_LLONG)
            if (bt2 == VT_LLONG)
            /* convert to unsigned if it does not fit in a long long */
            if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
                (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
                type.t |= VT_UNSIGNED;
        } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
            /* If one is a null ptr constant the result type is the other. */
            if (is_null_pointer (vtop))
            else if (is_null_pointer (&sv))
            /* XXX: test pointer compatibility, C99 has more elaborate
               rules here */
        } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
            /* XXX: test function pointer compatibility */
            type = bt1 == VT_FUNC ? type1 : type2;
        } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
            /* XXX: test structure compatibility */
            type = bt1 == VT_STRUCT ? type1 : type2;
        } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
            /* NOTE: as an extension, we accept void on only one side */
            /* integer operations */
            type.t = VT_INT | (VT_LONG & (t1 | t2));
            /* convert to unsigned if it does not fit in an integer */
            if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
                (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
                type.t |= VT_UNSIGNED;
        /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
           that `(expr ? a : b).mem` does not error with "lvalue expected" */
        islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
        /* now we convert second operand */
            mk_pointer(&vtop->type);
        } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
        if (is_float(type.t)) {
#ifdef TCC_TARGET_X86_64
            if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
        } else if ((type.t & VT_BTYPE) == VT_LLONG) {
            /* for long longs, we use fixed registers to avoid having
               to handle a complicated move */
            /* this is horrible, but we must also convert first
               operand */
            mk_pointer(&vtop->type);
        } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
        move_reg(r2, r1, type.t);
static void expr_eq(void)
        (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
        tok == TOK_A_XOR || tok == TOK_A_OR ||
        tok == TOK_A_SHL || tok == TOK_A_SAR) {

ST_FUNC void gexpr(void)

/* parse a constant expression and return value in vtop. */
static void expr_const1(void)

/* parse an integer constant and return its value. */
static inline int64_t expr_const64(void)
    if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
        expect("constant expression");

/* parse an integer constant and return its value.
   Complain if it doesn't fit 32bit (signed or unsigned). */
ST_FUNC int expr_const(void)
    int64_t wc = expr_const64();
    if (c != wc && (unsigned)c != wc)
        tcc_error("constant exceeds 32 bit");

/* return the label token if current token is a label, otherwise
   return zero. */
static int is_label(void)
    /* fast test first */
    if (tok < TOK_UIDENT)
    /* no need to save tokc because tok is an identifier */
    unget_tok(last_tok);
#ifndef TCC_TARGET_ARM64
static void gfunc_return(CType *func_type)
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            /* copy structure value to pointer */
            /* returning structure packed into registers */
            int r, size, addr, align;
            size = type_size(func_type, &align);
            if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
                 (vtop->c.i & (ret_align - 1)))
                && (align & (ret_align - 1))) {
                loc = (loc - size) & -ret_align;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
            vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            vtop->type = ret_type;
            if (is_float(ret_type.t))
                r = rc_fret(ret_type.t);
                if (--ret_nregs == 0)
                /* We assume that when a structure is returned in multiple
                   registers, their classes are consecutive values. */
                vtop->c.i += regsize;
    } else if (is_float(func_type->t)) {
        gv(rc_fret(func_type->t));
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
static int case_cmp(const void *pa, const void *pb)
    int64_t a = (*(struct case_t **) pa)->v1;
    int64_t b = (*(struct case_t **) pb)->v1;
    return a < b ? -1 : a > b;

static void gcase(struct case_t **base, int len, int *bsym)
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
        gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        gcase(base, len/2, bsym);
        if (cur_switch->def_sym)
            gjmp_addr(cur_switch->def_sym);
            *bsym = gjmp(*bsym);
        base += e; len -= e;
        if (p->v1 == p->v2) {
            gtst_addr(0, p->sym);
            gtst_addr(0, p->sym);
static void block(int *bsym, int *csym, int is_expr)
    int a, b, c, d, cond;

    /* generate line number info */
    if (tcc_state->do_debug)
        tcc_debug_line(tcc_state);
        /* default return value is (void) */
        vtop->type.t = VT_VOID;
    if (tok == TOK_IF) {
        int saved_nocode_wanted = nocode_wanted;
        cond = condition_3way();
            nocode_wanted |= 0x20000000;
        block(bsym, csym, 0);
            nocode_wanted = saved_nocode_wanted;
        if (c == TOK_ELSE) {
                nocode_wanted |= 0x20000000;
            block(bsym, csym, 0);
            gsym(d); /* patch else jmp */
                nocode_wanted = saved_nocode_wanted;
    } else if (tok == TOK_WHILE) {
        int saved_nocode_wanted;
        nocode_wanted &= ~0x20000000;
        saved_nocode_wanted = nocode_wanted;
        nocode_wanted = saved_nocode_wanted;
    } else if (tok == '{') {
        int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;

        /* record local declaration stack position */
        llabel = local_label_stack;
        /* handle local labels declarations */
        if (tok == TOK_LABEL) {
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
        while (tok != '}') {
            if ((a = is_label()))
                block(bsym, csym, is_expr);
        /* pop locally defined labels */
        label_pop(&local_label_stack, llabel, is_expr);
        /* pop locally defined symbols */
        /* In the is_expr case (a statement expression is finished here),
           vtop might refer to symbols on the local_stack. Either via the
           type or via vtop->sym. We can't pop those nor any that in turn
           might be referred to. To make it easier we don't roll back
           any symbols in that case; some upper level call to block() will
           do that. We do have to remove such symbols from the lookup
           tables, though. sym_pop will do that. */
        sym_pop(&local_stack, s, is_expr);
        /* Pop VLA frames and restore stack pointer if required */
        if (vlas_in_scope > saved_vlas_in_scope) {
            vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
        vlas_in_scope = saved_vlas_in_scope;
    } else if (tok == TOK_RETURN) {
            gen_assign_cast(&func_vt);
            if ((func_vt.t & VT_BTYPE) == VT_VOID)
                gfunc_return(&func_vt);
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
        nocode_wanted |= 0x20000000;
    } else if (tok == TOK_BREAK) {
            tcc_error("cannot break");
        *bsym = gjmp(*bsym);
        nocode_wanted |= 0x20000000;
    } else if (tok == TOK_CONTINUE) {
            tcc_error("cannot continue");
        vla_sp_restore_root();
        *csym = gjmp(*csym);
    } else if (tok == TOK_FOR) {
        int saved_nocode_wanted;
        nocode_wanted &= ~0x20000000;
6039 /* c99 for-loop init decl? */
6040 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6041 /* no, regular for-loop init expr */
6067 saved_nocode_wanted
= nocode_wanted
;
6069 nocode_wanted
= saved_nocode_wanted
;
6074 sym_pop(&local_stack
, s
, 0);
6077 if (tok
== TOK_DO
) {
6078 int saved_nocode_wanted
;
6079 nocode_wanted
&= ~0x20000000;
6085 saved_nocode_wanted
= nocode_wanted
;
6093 nocode_wanted
= saved_nocode_wanted
;
6098 if (tok
== TOK_SWITCH
) {
6099 struct switch_t
*saved
, sw
;
6100 int saved_nocode_wanted
= nocode_wanted
;
6106 switchval
= *vtop
--;
6108 b
= gjmp(0); /* jump to first case */
6109 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6113 nocode_wanted
= saved_nocode_wanted
;
6114 a
= gjmp(a
); /* add implicit break */
6117 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6118 for (b
= 1; b
< sw
.n
; b
++)
6119 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6120 tcc_error("duplicate case value");
6121 /* Our switch table sorting is signed, so the compared
6122 value needs to be as well when it's 64bit. */
6123 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6124 switchval
.type
.t
&= ~VT_UNSIGNED
;
6126 gcase(sw
.p
, sw
.n
, &a
);
6129 gjmp_addr(sw
.def_sym
);
6130 dynarray_reset(&sw
.p
, &sw
.n
);
6135 if (tok
== TOK_CASE
) {
6136 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6139 nocode_wanted
&= ~0x20000000;
6141 cr
->v1
= cr
->v2
= expr_const64();
6142 if (gnu_ext
&& tok
== TOK_DOTS
) {
6144 cr
->v2
= expr_const64();
6145 if (cr
->v2
< cr
->v1
)
6146 tcc_warning("empty case range");
6149 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6152 goto block_after_label
;
    } else
    if (tok == TOK_DEFAULT) {
        /* ... */
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        cur_switch->def_sym = ind;
        /* ... */
        goto block_after_label;
    } else
    if (tok == TOK_GOTO) {
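        /* Two forms are handled here: a plain "goto label;" and, with
           gnu_ext, the computed form of GNU labels-as-values, e.g.
           (illustrative):
               void *tgt = &&out; ... goto *tgt; ... out: ;
           where the operand of "goto *" must have pointer type. */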
        if (tok == '*' && gnu_ext) {
            /* ... */
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
                /* ... */
        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
            if (!s)
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            else if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;
            /* ... */
            vla_sp_restore_root();
            if (s->r & LABEL_FORWARD)
                s->jnext = gjmp(s->jnext);
            else
                gjmp_addr(s->jnext);
            /* ... */
        } else {
            expect("label identifier");
        }
        /* ... */
    } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
        /* ... */
    } else {
        /* ... */
            if (s->r == LABEL_DEFINED)
                tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
            /* ... */
            s->r = LABEL_DEFINED;
            /* ... */
            s = label_push(&global_label_stack, b, LABEL_DEFINED);
            /* ... */
            /* we accept this, but it is a mistake */
        block_after_label:
            nocode_wanted &= ~0x20000000;
            /* ... */
            tcc_warning("deprecated use of label at end of compound statement");
            /* ... */
            block(bsym, csym, is_expr);
            /* ... */
        /* expression case */
        /* ... */
    }
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
   with a '{'). If STR then allocates and stores the skipped tokens
   in *STR. This doesn't check if () and {} are nested correctly,
   i.e. "({)}" is accepted. */
static void skip_or_save_block(TokenString **str)
{
    int braces = tok == '{';
    int level = 0;
    if (str)
        *str = tok_str_alloc();

    while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
        int t;
        if (tok == TOK_EOF) {
            if (str || level > 0)
                tcc_error("unexpected end of file");
            else
                break;
        }
        if (str)
            tok_str_add_tok(*str);
        t = tok;
        next();
        if (t == '{' || t == '(') {
            level++;
        } else if (t == '}' || t == ')') {
            level--;
            if (level == 0 && braces && t == '}')
                break;
        }
    }
    if (str) {
        tok_str_add(*str, -1);
        tok_str_add(*str, 0);
    }
}
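/* skip_or_save_block() is used below both to skip an initializer during the
   size-only pass (str == NULL) and to record the token stream of an
   initializer or of a "static inline" function body for later replay via
   begin_macro(). */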
#define EXPR_CONST 1
/* ... */

static void parse_init_elem(int expr_type)
{
    int saved_global_expr;
    /* ... */
    /* compound literals must be allocated globally in this case */
    saved_global_expr = global_expr;
    /* ... */
    global_expr = saved_global_expr;
    /* NOTE: symbols are accepted, as well as lvalue for anon symbols
       (compound literals). */
    if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
         && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
             || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
        || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
#endif
        )
        tcc_error("initializer element is not constant");
    /* ... */
}
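/* In the constant case the value on vtop must reduce to a constant or to the
   address of a symbol (including anonymous compound literals), e.g.
   (illustrative):
       int a;
       int *p = &a;     // accepted: symbol address
       int b = 2 + 3;   // accepted: constant
       int c = a;       // rejected: "initializer element is not constant"
*/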
/* put zeros for variable based init */
static void init_putz(Section *sec, unsigned long c, int size)
{
    if (sec) {
        /* nothing to do because globals are already set to zero */
    } else {
        vpush_global_sym(&func_old_type, TOK_memset);
        /* ... */
#ifdef TCC_TARGET_ARM
        /* ... */
#else
        /* ... */
#endif
        /* ... */
    }
}

/* t is the array or struct type. c is the array or struct
   address. cur_field is the pointer to the current
   field, for arrays the 'c' member contains the current start
   index. 'size_only' is true if only size info is needed (only used
   in arrays). al contains the already initialized length of the
   current container (starting at c). This returns the new length of that. */
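/* Designators handled here are e.g. (illustrative):
       struct S { int a, b; } s = { .b = 1 };
       int t[10] = { [2] = 5, [4 ... 6] = 7 };   // the range form needs gnu_ext
   and, with gnu_ext only, the old "field: value" spelling. */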
static int decl_designator(CType *type, Section *sec, unsigned long c,
                           Sym **cur_field, int size_only, int al)
{
    /* ... */
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;
    /* ... */
    if (gnu_ext && (l = is_label()) != 0)
        /* ... */
    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        if (tok == '[') {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            /* ... */
            index = index_last = expr_const();
            if (tok == TOK_DOTS && gnu_ext) {
                /* ... */
                index_last = expr_const();
            }
            /* ... */
            if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
                /* ... */)
                tcc_error("invalid index");
            /* ... */
            (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
        } else {
            /* ... */
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            f = find_field(type, l);
            /* ... */
        }
    }
    /* ... */
    } else if (!gnu_ext) {
        /* ... */
    }
    /* ... */
    if (type->t & VT_ARRAY) {
        index = (*cur_field)->c;
        if (type->ref->c >= 0 && index >= type->ref->c)
            tcc_error("index too large");
        type = pointed_type(type);
        c += index * type_size(type, &align);
    } else {
        /* ... */
        while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
            *cur_field = f = f->next;
        if (!f)
            tcc_error("too many field init");
        /* ... */
    }
    /* ... */
    /* must put zero in holes (note that doing it that way
       ensures that it even works with designators) */
    if (!size_only && c - corig > al)
        init_putz(sec, corig + al, c - corig - al);
    decl_initializer(type, sec, c, 0, size_only);
    /* ... */
    /* XXX: make it more general */
    if (!size_only && nb_elems > 1) {
        unsigned long c_end;
        /* ... */
        if (!sec) {
            /* ... */
            vset(type, VT_LOCAL|VT_LVAL, c);
            for (i = 1; i < nb_elems; i++) {
                vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
                /* ... */
            }
            /* ... */
        } else if (!NODATA_WANTED) {
            c_end = c + nb_elems * elem_size;
            if (c_end > sec->data_allocated)
                section_realloc(sec, c_end);
            src = sec->data + c;
            /* ... */
            for (i = 1; i < nb_elems; i++) {
                /* ... */
                memcpy(dst, src, elem_size);
            }
        }
    }
    c += nb_elems * type_size(type, &align);
    /* ... */
}
/* store a value or an expression directly in global data or in local array */
static void init_putv(CType *type, Section *sec, unsigned long c)
{
    /* ... */
    dtype = *type;
    dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
    /* ... */
    if (sec) {
        /* XXX: not portable */
        /* XXX: generate error if incorrect relocation */
        gen_assign_cast(&dtype);
        bt = type->t & VT_BTYPE;
        /* ... */
        if ((vtop->r & VT_SYM)
            /* ... */
            && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
                || (type->t & VT_BITFIELD))
            && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
            /* ... */)
            tcc_error("initializer element is not computable at load time");

        if (NODATA_WANTED) {
            /* ... */
        }

        size = type_size(type, &align);
        section_reserve(sec, c + size);
        ptr = sec->data + c;
        /* ... */
        /* XXX: make code faster ? */
        if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
            vtop->sym->v >= SYM_FIRST_ANOM &&
            /* XXX This rejects compound literals like
               '(void *){ptr}'. The problem is that '&sym' is
               represented the same way, which would be ruled out
               by the SYM_FIRST_ANOM check above, but also '"string"'
               in 'char *p = "string"' is represented the same
               with the type being VT_PTR and the symbol being an
               anonymous one. That is, there's no difference in vtop
               between '(void *){x}' and '&(void *){x}'. Ignore
               pointer typed entities here. Hopefully no real code
               will ever use compound literals with scalar type. */
            (vtop->type.t & VT_BTYPE) != VT_PTR) {
            /* These come from compound literals, memcpy stuff over. */
            /* ... */
            esym = elfsym(vtop->sym);
            ssec = tcc_state->sections[esym->st_shndx];
            memmove (ptr, ssec->data + esym->st_value, size);
            if (ssec->reloc) {
                /* We need to copy over all memory contents, and that
                   includes relocations. Use the fact that relocs are
                   created in order, so look from the end of relocs
                   until we hit one before the copied region. */
                int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
                rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
                while (num_relocs--) {
                    rel--;
                    if (rel->r_offset >= esym->st_value + size)
                        continue;
                    if (rel->r_offset < esym->st_value)
                        break;
                    /* Note: if the same fields are initialized multiple
                       times (possible with designators) then we possibly
                       add multiple relocations for the same offset here.
                       That would lead to wrong code, the last reloc needs
                       to win. We clean this up later after the whole
                       initializer is parsed. */
                    put_elf_reloca(symtab_section, sec,
                                   c + rel->r_offset - esym->st_value,
                                   ELFW(R_TYPE)(rel->r_info),
                                   ELFW(R_SYM)(rel->r_info),
                                   /* ... */);
                }
            }
        } else {
            if (type->t & VT_BITFIELD) {
                int bit_pos, bit_size, bits, n;
                unsigned char *p, v, m;
                bit_pos = BIT_POS(vtop->type.t);
                bit_size = BIT_SIZE(vtop->type.t);
                p = (unsigned char*)ptr + (bit_pos >> 3);
                bit_pos &= 7, bits = 0;
                while (bit_size) {
                    /* ... */
                    v = vtop->c.i >> bits << bit_pos;
                    m = ((1 << n) - 1) << bit_pos;
                    *p = (*p & ~m) | (v & m);
                    bits += n, bit_size -= n, bit_pos = 0, ++p;
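                    /* Worked example for the store above (illustrative):
                       with bit_pos = 3, bit_size = 2 and n = 2, storing the
                       value 5 gives v = 5 << 3 = 0x28 and
                       m = ((1 << 2) - 1) << 3 = 0x18, so only bits 3..4 of
                       *p are replaced (0x28 & 0x18 = 0x08, i.e. 5 truncated
                       to 2 bits) and the surrounding bits are preserved. */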
                }
            } else {
                /* XXX: when cross-compiling we assume that each type has the
                   same representation on host and target, which is likely to
                   be wrong in the case of long double */
                switch (bt) {
                case VT_BOOL:
                    vtop->c.i = vtop->c.i != 0;
                    /* fall through */
                case VT_BYTE:
                    *(char *)ptr |= vtop->c.i;
                    break;
                case VT_SHORT:
                    *(short *)ptr |= vtop->c.i;
                    break;
                case VT_FLOAT:
                    *(float*)ptr = vtop->c.f;
                    break;
                case VT_DOUBLE:
                    *(double *)ptr = vtop->c.d;
                    break;
                case VT_LDOUBLE:
#if defined TCC_IS_NATIVE_387
                    if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
                        memcpy(ptr, &vtop->c.ld, 10);
                    else if (sizeof (long double) == sizeof (double))
                        __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
                    else if (vtop->c.ld == 0.0)
                        /* ... */
                    else
#endif
                    if (sizeof(long double) == LDOUBLE_SIZE)
                        *(long double*)ptr = vtop->c.ld;
                    else if (sizeof(double) == LDOUBLE_SIZE)
                        *(double *)ptr = (double)vtop->c.ld;
                    else
                        tcc_error("can't cross compile long double constants");
                    break;
                case VT_LLONG:
                    *(long long *)ptr |= vtop->c.i;
                    break;
                case VT_PTR:
                    {
                        addr_t val = vtop->c.i;
#if PTR_SIZE == 8
                        if (vtop->r & VT_SYM)
                            greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                        else
                            *(addr_t *)ptr |= val;
#else
                        if (vtop->r & VT_SYM)
                            greloc(sec, vtop->sym, c, R_DATA_PTR);
                        *(addr_t *)ptr |= val;
#endif
                        break;
                    }
                default:
                    {
                        int val = vtop->c.i;
#if PTR_SIZE == 8
                        if (vtop->r & VT_SYM)
                            greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                        else
                            /* ... */
#else
                        if (vtop->r & VT_SYM)
                            greloc(sec, vtop->sym, c, R_DATA_PTR);
                        /* ... */
#endif
                        break;
                    }
                }
            }
        }
        /* ... */
    } else {
        vset(&dtype, VT_LOCAL|VT_LVAL, c);
        /* ... */
    }
}
/* 't' contains the type and storage info. 'c' is the offset of the
   object in section 'sec'. If 'sec' is NULL, it means stack based
   allocation. 'first' is true if array '{' must be read (multi
   dimension implicit array init handling). 'size_only' is true if
   size only evaluation is wanted (only for arrays). */
static void decl_initializer(CType *type, Section *sec, unsigned long c,
                             int first, int size_only)
{
    int len, n, no_oblock, nb, i;
    /* ... */

    /* If we currently are at an '}' or ',' we have read an initializer
       element in one of our callers, and not yet consumed it. */
    have_elem = tok == '}' || tok == ',';
    if (!have_elem && tok != '{' &&
        /* In case of strings we have special handling for arrays, so
           don't consume them as initializer value (which would commit them
           to some anonymous symbol). */
        tok != TOK_LSTR && tok != TOK_STR &&
        !size_only) {
        parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
        have_elem = 1;
    }

    if (have_elem &&
        !(type->t & VT_ARRAY) &&
        /* Use i_c_parameter_t, to strip toplevel qualifiers.
           The source type might have VT_CONSTANT set, which is
           of course assignable to non-const elements. */
        is_compatible_unqualified_types(type, &vtop->type)) {
        init_putv(type, sec, c);
    } else if (type->t & VT_ARRAY) {
        /* ... */
        t1 = pointed_type(type);
        size1 = type_size(t1, &align1);
        /* ... */
        if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
            tok == '{') {
            if (tok != '{')
                tcc_error("character array initializer must be a literal,"
                          " optionally enclosed in braces");
            /* ... */
        }
        /* only parse strings here if correct type (otherwise: handle
           them as ((w)char *) expressions) */
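        /* The string fast path below covers e.g. (illustrative):
               char s[] = "ab";     // for a global, copied into the section
               char t[2] = "ab";    // fits exactly, no trailing 0 is added
           whereas  char *p = "ab";  is not an array and is handled as an
           ordinary pointer expression instead. */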
        if ((tok == TOK_LSTR &&
#ifdef TCC_TARGET_PE
             (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
#else
             (t1->t & VT_BTYPE) == VT_INT
#endif
            ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
            /* ... */
            while (tok == TOK_STR || tok == TOK_LSTR) {
                /* ... */
                /* compute maximum number of chars wanted */
                if (tok == TOK_STR)
                    cstr_len = tokc.str.size;
                else
                    cstr_len = tokc.str.size / sizeof(nwchar_t);
                /* ... */
                if (n >= 0 && nb > (n - len))
                    nb = n - len;
                /* ... */
                    tcc_warning("initializer-string for array is too long");
                /* in order to go faster for common case (char
                   string in global variable), we handle it
                   specifically */
                if (sec && tok == TOK_STR && size1 == 1) {
                    /* ... */
                    memcpy(sec->data + c + len, tokc.str.data, nb);
                } else {
                    for (i = 0; i < nb; i++) {
                        if (tok == TOK_STR)
                            ch = ((unsigned char *)tokc.str.data)[i];
                        else
                            ch = ((nwchar_t *)tokc.str.data)[i];
                        /* ... */
                        init_putv(t1, sec, c + (len + i) * size1);
                    }
                }
                /* ... */
            }
            /* ... */
            /* only add trailing zero if enough storage (no
               warning in this case since it is standard) */
            if (n < 0 || len < n) {
                /* ... */
                init_putv(t1, sec, c + (len * size1));
            }
            /* ... */
        } else {
            /* ... */
            while (tok != '}' || have_elem) {
                len = decl_designator(type, sec, c, &f, size_only, len);
                /* ... */
                if (type->t & VT_ARRAY) {
                    /* ... */
                    /* special test for multi dimensional arrays (may not
                       be strictly correct if designators are used at the
                       same time) */
                    if (no_oblock && len >= n*size1)
                        break;
                } else {
                    if (s->type.t == VT_UNION)
                        f = NULL;
                    /* ... */
                    if (no_oblock && f == NULL)
                        break;
                }
                /* ... */
            }
        }
        /* put zeros at the end */
        if (!size_only && len < n*size1)
            init_putz(sec, c + len, n*size1 - len);
        /* ... */
        /* patch type size if needed, which happens only for array types */
        if (n < 0)
            s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
    } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
        /* ... */
        if (first || tok == '{') {
            /* ... */
        }
        /* ... */
    } else if (tok == '{') {
        /* ... */
        decl_initializer(type, sec, c, first, size_only);
        /* ... */
    } else if (size_only) {
        /* If we supported only ISO C we wouldn't have to accept calling
           this on anything other than an array size_only==1 (and even then
           only on the outermost level, so no recursion would be needed),
           because initializing a flex array member isn't supported.
           But GNU C supports it, so we need to recurse even into
           subfields of structs and arrays when size_only is set. */
        /* just skip expression */
        skip_or_save_block(NULL);
    } else {
        /* This should happen only when we haven't parsed
           the init element above for fear of committing a
           string constant to memory too early. */
        if (tok != TOK_STR && tok != TOK_LSTR)
            expect("string constant");
        parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
        /* ... */
        init_putv(type, sec, c);
    }
}
/* parse an initializer for type 't' if 'has_init' is non zero, and
   allocate space in local or global data space ('r' is either
   VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
   variable 'v' of scope 'scope' is declared before initializers
   are parsed. If 'v' is zero, then a reference to the new object
   is put in the value stack. If 'has_init' is 2, a special parsing
   is done to handle string constants. */
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
                                   int has_init, int v, int scope)
{
    int size, align, addr;
    TokenString *init_str = NULL;
    /* ... */
    Sym *flexible_array;
    /* ... */
    int saved_nocode_wanted = nocode_wanted;
#ifdef CONFIG_TCC_BCHECK
    int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
#endif

    if (type->t & VT_STATIC)
        nocode_wanted |= NODATA_WANTED ? 0x40000000 : 0x80000000;

    flexible_array = NULL;
    if ((type->t & VT_BTYPE) == VT_STRUCT) {
        Sym *field = type->ref->next;
        if (field) {
            while (field->next)
                field = field->next;
            if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
                flexible_array = field;
        }
    }
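    /* A flexible array member is a trailing array of unknown size, e.g.
       (illustrative):
           struct str { int len; char data[]; };
           static struct str s = { 3, { 'a', 'b', 'c' } };
       Its ref->c stays < 0 until an initializer fixes the element count,
       which is why the size is recomputed below when has_init is set. */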
    size = type_size(type, &align);
    /* If unknown size, we must evaluate it before
       evaluating initializers because
       initializers can generate global data too
       (e.g. string pointers or ISOC99 compound
       literals). It also simplifies local
       initializers handling */
    if (size < 0 || (flexible_array && has_init)) {
        if (!has_init)
            tcc_error("unknown type size");
        /* get all init string */
        if (has_init == 2) {
            init_str = tok_str_alloc();
            /* only get strings */
            while (tok == TOK_STR || tok == TOK_LSTR) {
                tok_str_add_tok(init_str);
                /* ... */
            }
            tok_str_add(init_str, -1);
            tok_str_add(init_str, 0);
        } else {
            skip_or_save_block(&init_str);
        }
        /* ... */
        begin_macro(init_str, 1);
        /* ... */
        decl_initializer(type, NULL, 0, 1, 1);
        /* prepare second initializer parsing */
        macro_ptr = init_str->str;
        /* ... */
        /* if still unknown size, error */
        size = type_size(type, &align);
        if (size < 0)
            tcc_error("unknown type size");
    }
    /* If there's a flex member and it was used in the initializer,
       adjust the size accordingly */
    if (flexible_array &&
        flexible_array->type.ref->c > 0)
        size += flexible_array->type.ref->c
                * pointed_size(&flexible_array->type);
    /* take into account specified alignment if bigger */
    if (ad->a.aligned) {
        int speca = 1 << (ad->a.aligned - 1);
        /* ... */
    } else if (ad->a.packed) {
        /* ... */
    }
    /* ... */
    size = 0, align = 1;
    /* ... */
    if ((r & VT_VALMASK) == VT_LOCAL) {
        /* ... */
#ifdef CONFIG_TCC_BCHECK
        if (bcheck && (type->t & VT_ARRAY)) {
            /* ... */
        }
#endif
        loc = (loc - size) & -align;
        /* ... */
#ifdef CONFIG_TCC_BCHECK
        /* handles bounds */
        /* XXX: currently, since we do only one pass, we cannot track
           '&' operators, so we add only arrays */
        if (bcheck && (type->t & VT_ARRAY)) {
            /* ... */
            /* add padding between regions */
            /* ... */
            /* then add local bound info */
            bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = addr;
            bounds_ptr[1] = size;
        }
#endif
        /* ... */
        /* local variable */
#ifdef CONFIG_TCC_ASM
        if (ad->asm_label) {
            int reg = asm_parse_regvar(ad->asm_label);
            if (reg >= 0)
                r = (r & ~VT_VALMASK) | reg;
        }
#endif
        if (v) {
            sym = sym_push(v, type, r, addr);
            /* ... */
        } else {
            /* push local reference */
            vset(type, r, addr);
        }
    } else {
        if (v && scope == VT_CONST) {
            /* see if the symbol was already defined */
            /* ... */
            patch_storage(sym, ad, type);
            /* we accept several definitions of the same global variable. */
            if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
                /* ... */
        }
        /* allocate symbol in corresponding section */
        /* ... */
        else if (tcc_state->nocommon)
            /* ... */
        if (sec) {
            addr = section_add(sec, size, align);
#ifdef CONFIG_TCC_BCHECK
            /* add padding if bound check */
            if (bcheck)
                section_add(sec, 1, 1);
#endif
        } else {
            addr = align; /* SHN_COMMON is special, symbol value is align */
            sec = common_section;
        }
        /* ... */
        if (v) {
            sym = sym_push(v, type, r | VT_SYM, 0);
            patch_storage(sym, ad, NULL);
            /* ... */
            /* Local statics have a scope until now (for
               warnings), remove it here. */
            /* ... */
            /* update symbol definition */
            put_extern_sym(sym, sec, addr, size);
        } else {
            /* push global reference */
            sym = get_sym_ref(type, sec, addr, size);
            vpushsym(type, sym);
            /* ... */
        }
#ifdef CONFIG_TCC_BCHECK
        /* handles bounds now because the symbol must be defined
           before for the relocation */
        /* ... */
        greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
        /* then add global bound info */
        bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
        bounds_ptr[0] = 0; /* relocated */
        bounds_ptr[1] = size;
#endif
    }
    /* ... */
    if (type->t & VT_VLA) {
        /* ... */
        /* save current stack pointer */
        if (vlas_in_scope == 0) {
            if (vla_sp_root_loc == -1)
                vla_sp_root_loc = (loc -= PTR_SIZE);
            gen_vla_sp_save(vla_sp_root_loc);
        }
        /* ... */
        vla_runtime_type_size(type, &a);
        gen_vla_alloc(type, a);
#if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
        /* on _WIN64, because of the function args scratch area, the
           result of alloca differs from RSP and is returned in RAX. */
        gen_vla_result(addr), addr = (loc -= PTR_SIZE);
#endif
        gen_vla_sp_save(addr);
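        /* VLA example (illustrative):
               void f(int n) { int a[n]; ... }
           The first VLA in a scope saves the stack pointer at
           vla_sp_root_loc so the whole allocation can be undone when the
           scope ends or when a goto/continue leaves it
           (vla_sp_restore_root() in block() above). */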
        /* ... */
    } else if (has_init) {
        size_t oldreloc_offset = 0;
        if (sec && sec->reloc)
            oldreloc_offset = sec->reloc->data_offset;
        decl_initializer(type, sec, addr, 1, 0);
        if (sec && sec->reloc)
            squeeze_multi_relocs(sec, oldreloc_offset);
        /* patch flexible array member size back to -1, */
        /* for possible subsequent similar declarations */
        if (flexible_array)
            flexible_array->type.ref->c = -1;
    }
    /* ... */
    /* restore parse state if needed */
    /* ... */
    nocode_wanted = saved_nocode_wanted;
}
/* parse a function defined by symbol 'sym' and generate its code in
   'cur_text_section' */
static void gen_function(Sym *sym)
{
    /* ... */
    ind = cur_text_section->data_offset;
    /* NOTE: we patch the symbol size later */
    put_extern_sym(sym, cur_text_section, ind, 0);
    funcname = get_tok_str(sym->v, NULL);
    /* ... */
    /* Initialize VLA state */
    /* ... */
    vla_sp_root_loc = -1;
    /* put debug symbol */
    tcc_debug_funcstart(tcc_state, sym);
    /* push a dummy symbol to enable local sym storage */
    sym_push2(&local_stack, SYM_FIELD, 0, 0);
    local_scope = 1; /* for function parameters */
    gfunc_prolog(&sym->type);
    /* ... */
    block(NULL, NULL, 0);
    /* ... */
    cur_text_section->data_offset = ind;
    label_pop(&global_label_stack, NULL, 0);
    /* reset local stack */
    /* ... */
    sym_pop(&local_stack, NULL, 0);
    /* end of function */
    /* patch symbol size */
    elfsym(sym)->st_size = ind - func_ind;
    tcc_debug_funcend(tcc_state, ind - func_ind);
    /* It's better to crash than to generate wrong code */
    cur_text_section = NULL;
    funcname = ""; /* for safety */
    func_vt.t = VT_VOID; /* for safety */
    func_var = 0; /* for safety */
    ind = 0; /* for safety */
    nocode_wanted = 0x80000000;
    /* ... */
}
static void gen_inline_functions(TCCState *s)
{
    /* ... */
    int inline_generated, i, ln;
    struct InlineFunc *fn;
    /* ... */
    ln = file->line_num;
    /* iterate while inline functions are referenced */
    do {
        inline_generated = 0;
        for (i = 0; i < s->nb_inline_fns; ++i) {
            fn = s->inline_fns[i];
            /* ... */
            if (sym && sym->c) {
                /* the function was used: generate its code and
                   convert it to a normal function */
                /* ... */
                pstrcpy(file->filename, sizeof file->filename, fn->filename);
                sym->type.t &= ~VT_INLINE;
                /* ... */
                begin_macro(fn->func_str, 1);
                /* ... */
                cur_text_section = text_section;
                /* ... */
                inline_generated = 1;
            }
        }
    } while (inline_generated);
    file->line_num = ln;
}
ST_FUNC void free_inline_functions(TCCState *s)
{
    int i;
    /* free tokens of unused inline functions */
    for (i = 0; i < s->nb_inline_fns; ++i) {
        struct InlineFunc *fn = s->inline_fns[i];
        /* ... */
        tok_str_free(fn->func_str);
    }
    dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
}
/* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
   if parsing old style parameter decl list (and FUNC_SYM is set then) */
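/* An old style parameter decl list is the K&R form, e.g. (illustrative):
       int f(a, b)
           int a;
           char *b;
       { return a + *b; }
   decl0() is re-entered with l == VT_CMP and func_sym set to parse the
   "int a; char *b;" declarations between the ')' and the '{'. */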
static int decl0(int l, int is_for_loop_init, Sym *func_sym)
{
    /* ... */
    while (1) {
        if (!parse_btype(&btype, &ad)) {
            if (is_for_loop_init)
                return 0;
            /* skip redundant ';' if not in old parameter decl scope */
            if (tok == ';' && l != VT_CMP) {
                /* ... */
            }
            if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
                /* global asm block */
                /* ... */
            }
            if (tok >= TOK_UIDENT) {
                /* special test for old K&R protos without explicit int
                   type. Only accepted when defining global data */
                /* ... */
            } else {
                /* ... */
                expect("declaration");
            }
        }
        /* ... */
        if ((btype.t & VT_BTYPE) == VT_STRUCT) {
            int v = btype.ref->v;
            if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
                tcc_warning("unnamed struct/union that defines no instances");
            /* ... */
        }
        if (IS_ENUM(btype.t)) {
            /* ... */
        }
        /* ... */
        while (1) { /* iterate thru each declaration */
            /* ... */
            /* If the base type itself was an array type of unspecified
               size (like in 'typedef int arr[]; arr x = {1};') then
               we will overwrite the unknown size by the real one for
               this decl. We need to unshare the ref symbol holding
               that size. */
            if ((type.t & VT_ARRAY) && type.ref->c < 0) {
                type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
            }
            type_decl(&type, &ad, &v, TYPE_DIRECT);
            /* ... */
            type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
            printf("type = '%s'\n", buf);
            /* ... */
            if ((type.t & VT_BTYPE) == VT_FUNC) {
                if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
                    tcc_error("function without file scope cannot be static");
                }
                /* if old style function prototype, we accept a
                   declaration list */
                /* ... */
                if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
                    decl0(VT_CMP, 0, sym);
            }
            /* ... */
            if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                ad.asm_label = asm_label_instr();
                /* parse one last attribute list, after asm label */
                parse_attribute(&ad);
                /* ... */
            }
            /* ... */
#ifdef TCC_TARGET_PE
            if (ad.a.dllimport || ad.a.dllexport) {
                if (type.t & (VT_STATIC|VT_TYPEDEF))
                    tcc_error("cannot have dll linkage with static or typedef");
                if (ad.a.dllimport) {
                    if ((type.t & VT_BTYPE) == VT_FUNC)
                        /* ... */
                    else
                        type.t |= VT_EXTERN;
                }
                /* ... */
            }
#endif
            if (tok == '{') {
                /* ... */
                if (l != VT_CONST)
                    tcc_error("cannot use local functions");
                if ((type.t & VT_BTYPE) != VT_FUNC)
                    expect("function definition");
                /* ... */
                /* reject abstract declarators in function definition
                   make old style params without decl have int type */
                /* ... */
                while ((sym = sym->next) != NULL) {
                    if (!(sym->v & ~SYM_FIELD))
                        expect("identifier");
                    if (sym->type.t == VT_VOID)
                        sym->type = int_type;
                }
                /* ... */
                /* XXX: cannot do better now: convert extern inline to static inline */
                if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
                    type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
                /* ... */
                /* put function symbol */
                sym = external_global_sym(v, &type, 0);
                type.t &= ~VT_EXTERN;
                patch_storage(sym, &ad, &type);
                /* ... */
                /* static inline functions are just recorded as a kind
                   of macro. Their code will be emitted at the end of
                   the compilation unit only if they are used */
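                /* E.g. (illustrative):
                       static inline int sq(int x) { return x * x; }
                   The body is saved as a token string below; its code is
                   only emitted later by gen_inline_functions() if sq() is
                   actually referenced, and unused bodies are freed in
                   free_inline_functions(). */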
                if ((type.t & (VT_INLINE | VT_STATIC)) ==
                    (VT_INLINE | VT_STATIC)) {
                    struct InlineFunc *fn;
                    const char *filename;
                    /* ... */
                    filename = file ? file->filename : "";
                    fn = tcc_malloc(sizeof *fn + strlen(filename));
                    strcpy(fn->filename, filename);
                    /* ... */
                    skip_or_save_block(&fn->func_str);
                    dynarray_add(&tcc_state->inline_fns,
                                 &tcc_state->nb_inline_fns, fn);
                    /* ... */
                } else {
                    /* compute text section */
                    cur_text_section = ad.section;
                    if (!cur_text_section)
                        cur_text_section = text_section;
                    /* ... */
                }
                /* ... */
            } else {
                if (l == VT_CMP) {
                    /* ... */
                    /* find parameter in function parameter list */
                    for (sym = func_sym->next; sym; sym = sym->next)
                        if ((sym->v & ~SYM_FIELD) == v)
                            /* ... */
                    tcc_error("declaration for parameter '%s' but no such parameter",
                              get_tok_str(v, NULL));
                    /* ... */
                    if (type.t & VT_STORAGE) /* 'register' is okay */
                        tcc_error("storage class specified for '%s'",
                                  get_tok_str(v, NULL));
                    if (sym->type.t != VT_VOID)
                        tcc_error("redefinition of parameter '%s'",
                                  get_tok_str(v, NULL));
                    convert_parameter_type(&type);
                    /* ... */
                } else if (type.t & VT_TYPEDEF) {
                    /* save typedefed type */
                    /* XXX: test storage specifiers ? */
                    /* ... */
                    if (sym && sym->sym_scope == local_scope) {
                        if (!is_compatible_types(&sym->type, &type)
                            || !(sym->type.t & VT_TYPEDEF))
                            tcc_error("incompatible redefinition of '%s'",
                                      get_tok_str(v, NULL));
                        /* ... */
                    } else {
                        sym = sym_push(v, &type, 0, 0);
                        /* ... */
                    }
                    /* ... */
                } else {
                    /* ... */
                    if ((type.t & VT_BTYPE) == VT_FUNC) {
                        /* external function definition */
                        /* specific case for func_call attribute */
                        /* ... */
                    } else if (!(type.t & VT_ARRAY)) {
                        /* not lvalue if array */
                        r |= lvalue_type(type.t);
                    }
                    has_init = (tok == '=');
                    if (has_init && (type.t & VT_VLA))
                        tcc_error("variable length array cannot be initialized");
                    if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
                        ((type.t & VT_BTYPE) == VT_FUNC) ||
                        ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
                         !has_init && l == VT_CONST && type.ref->c < 0)) {
                        /* external variable or function */
                        /* NOTE: as GCC, uninitialized global static
                           arrays of null size are considered as
                           extern */
                        type.t |= VT_EXTERN;
                        sym = external_sym(v, &type, r, &ad);
                        if (ad.alias_target) {
                            /* ... */
                            alias_target = sym_find(ad.alias_target);
                            esym = elfsym(alias_target);
                            if (!esym)
                                tcc_error("unsupported forward __alias__ attribute");
                            /* Local statics have a scope until now (for
                               warnings), remove it here. */
                            /* ... */
                            put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
                        }
                    } else {
                        if (type.t & VT_STATIC)
                            /* ... */
                        else if (l == VT_CONST)
                            /* uninitialized global variables may be overridden */
                            type.t |= VT_EXTERN;
                        /* ... */
                        decl_initializer_alloc(&type, &ad, r, has_init, v, l);
                    }
                }
                /* ... */
                if (is_for_loop_init)
                    return 1;
                /* ... */
            }
        }
    }
    /* ... */
}

static void decl(int l)
{
    decl0(l, 0, NULL);
}

/* ------------------------------------------------------------------------- */