2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
29 anon_sym: anonymous symbol index
31 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Sym
*sym_free_first
;
34 ST_DATA
void **sym_pools
;
35 ST_DATA
int nb_sym_pools
;
37 ST_DATA Sym
*global_stack
;
38 ST_DATA Sym
*local_stack
;
39 ST_DATA Sym
*define_stack
;
40 ST_DATA Sym
*global_label_stack
;
41 ST_DATA Sym
*local_label_stack
;
42 static int local_scope
;
44 static int section_sym
;
46 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
47 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
52 ST_DATA
int const_wanted
; /* true if constant wanted */
53 ST_DATA
int nocode_wanted
; /* true if no code generation wanted for an expression */
54 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
55 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
56 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
58 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
59 ST_DATA
const char *funcname
;
61 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
;
63 ST_DATA
struct switch_t
{
67 } **p
; int n
; /* list of case ranges */
68 int def_sym
; /* default symbol */
69 } *cur_switch
; /* current switch */
71 /* ------------------------------------------------------------------------- */
73 static void gen_cast(CType
*type
);
74 static void gen_cast_s(int t
);
75 static inline CType
*pointed_type(CType
*type
);
76 static int is_compatible_types(CType
*type1
, CType
*type2
);
77 static int parse_btype(CType
*type
, AttributeDef
*ad
);
78 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
79 static void parse_expr_type(CType
*type
);
80 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
81 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
82 static void block(int *bsym
, int *csym
, int is_expr
);
83 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
84 static int decl0(int l
, int is_for_loop_init
, Sym
*);
85 static void expr_eq(void);
86 static void vla_runtime_type_size(CType
*type
, int *a
);
87 static void vla_sp_restore(void);
88 static void vla_sp_restore_root(void);
89 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
90 static inline int64_t expr_const64(void);
91 ST_FUNC
void vpush64(int ty
, unsigned long long v
);
92 ST_FUNC
void vpush(CType
*type
);
93 ST_FUNC
int gvtst(int inv
, int t
);
94 static void gen_inline_functions(TCCState
*s
);
95 static void skip_or_save_block(TokenString
**str
);
97 ST_INLN
int is_float(int t
)
101 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
104 /* we use our own 'finite' function to avoid potential problems with
105 non standard math libs */
106 /* XXX: endianness dependent */
107 ST_FUNC
int ieee_finite(double d
)
110 memcpy(p
, &d
, sizeof(double));
111 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
114 ST_FUNC
void test_lvalue(void)
116 if (!(vtop
->r
& VT_LVAL
))
120 ST_FUNC
void check_vstack(void)
123 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
126 /* ------------------------------------------------------------------------- */
127 /* vstack debugging aid */
130 void pv (const char *lbl
, int a
, int b
)
133 for (i
= a
; i
< a
+ b
; ++i
) {
134 SValue
*p
= &vtop
[-i
];
135 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
136 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
141 /* ------------------------------------------------------------------------- */
142 /* start of translation unit info */
143 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
148 /* file info: full path + filename */
149 section_sym
= put_elf_sym(symtab_section
, 0, 0,
150 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
151 text_section
->sh_num
, NULL
);
152 getcwd(buf
, sizeof(buf
));
154 normalize_slashes(buf
);
156 pstrcat(buf
, sizeof(buf
), "/");
157 put_stabs_r(buf
, N_SO
, 0, 0,
158 text_section
->data_offset
, text_section
, section_sym
);
159 put_stabs_r(file
->filename
, N_SO
, 0, 0,
160 text_section
->data_offset
, text_section
, section_sym
);
165 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
166 symbols can be safely used */
167 put_elf_sym(symtab_section
, 0, 0,
168 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
169 SHN_ABS
, file
->filename
);
172 /* put end of translation unit info */
173 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
177 put_stabs_r(NULL
, N_SO
, 0, 0,
178 text_section
->data_offset
, text_section
, section_sym
);
182 /* generate line number info */
183 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
187 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
188 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
190 last_line_num
= file
->line_num
;
194 /* put function symbol */
195 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
203 /* XXX: we put here a dummy type */
204 snprintf(buf
, sizeof(buf
), "%s:%c1",
205 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
206 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
207 cur_text_section
, sym
->c
);
208 /* //gr gdb wants a line at the function */
209 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
215 /* put function size */
216 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
220 put_stabn(N_FUN
, 0, 0, size
);
223 /* ------------------------------------------------------------------------- */
224 ST_FUNC
void tccgen_start(TCCState
*s1
)
226 cur_text_section
= NULL
;
228 anon_sym
= SYM_FIRST_ANOM
;
233 /* define some often used types */
235 char_pointer_type
.t
= VT_BYTE
;
236 mk_pointer(&char_pointer_type
);
238 size_type
.t
= VT_INT
;
240 size_type
.t
= VT_LLONG
;
242 func_old_type
.t
= VT_FUNC
;
243 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
244 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
245 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
249 #ifdef TCC_TARGET_ARM
254 ST_FUNC
void tccgen_end(TCCState
*s1
)
256 gen_inline_functions(s1
);
258 /* end of translation unit info */
262 /* ------------------------------------------------------------------------- */
263 /* apply storage attributes to Elf symbol */
265 static void update_storage(Sym
*sym
)
270 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
271 if (sym
->a
.visibility
)
272 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
275 esym
->st_info
= ELFW(ST_INFO
)(STB_WEAK
, ELFW(ST_TYPE
)(esym
->st_info
));
277 if (sym
->a
.dllimport
)
278 esym
->st_other
|= ST_PE_IMPORT
;
279 if (sym
->a
.dllexport
)
280 esym
->st_other
|= ST_PE_EXPORT
;
283 printf("storage %s: vis=%d weak=%d exp=%d imp=%d\n",
284 get_tok_str(sym
->v
, NULL
),
293 /* ------------------------------------------------------------------------- */
294 /* update sym->c so that it points to an external symbol in section
295 'section' with value 'value' */
297 ST_FUNC
void put_extern_sym2(Sym
*sym
, Section
*section
,
298 addr_t value
, unsigned long size
,
299 int can_add_underscore
)
301 int sym_type
, sym_bind
, sh_num
, info
, other
, t
;
305 #ifdef CONFIG_TCC_BCHECK
311 else if (section
== SECTION_ABS
)
314 sh_num
= section
->sh_num
;
317 name
= get_tok_str(sym
->v
, NULL
);
318 #ifdef CONFIG_TCC_BCHECK
319 if (tcc_state
->do_bounds_check
) {
320 /* XXX: avoid doing that for statics ? */
321 /* if bound checking is activated, we change some function
322 names by adding the "__bound" prefix */
325 /* XXX: we rely only on malloc hooks */
338 strcpy(buf
, "__bound_");
346 if ((t
& VT_BTYPE
) == VT_FUNC
) {
348 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
349 sym_type
= STT_NOTYPE
;
351 sym_type
= STT_OBJECT
;
354 sym_bind
= STB_LOCAL
;
356 sym_bind
= STB_GLOBAL
;
359 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
360 Sym
*ref
= sym
->type
.ref
;
361 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
362 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
364 other
|= ST_PE_STDCALL
;
365 can_add_underscore
= 0;
369 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
371 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
375 name
= get_tok_str(sym
->asm_label
, NULL
);
376 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
377 sym
->c
= set_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
379 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
380 esym
->st_value
= value
;
381 esym
->st_size
= size
;
382 esym
->st_shndx
= sh_num
;
387 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
388 addr_t value
, unsigned long size
)
390 put_extern_sym2(sym
, section
, value
, size
, 1);
393 /* add a new relocation entry to symbol 'sym' in section 's' */
394 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
399 if (nocode_wanted
&& s
== cur_text_section
)
404 put_extern_sym(sym
, NULL
, 0, 0);
408 /* now we can add ELF relocation info */
409 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
413 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
415 greloca(s
, sym
, offset
, type
, 0);
419 /* ------------------------------------------------------------------------- */
420 /* symbol allocator */
421 static Sym
*__sym_malloc(void)
423 Sym
*sym_pool
, *sym
, *last_sym
;
426 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
427 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
429 last_sym
= sym_free_first
;
431 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
432 sym
->next
= last_sym
;
436 sym_free_first
= last_sym
;
440 static inline Sym
*sym_malloc(void)
444 sym
= sym_free_first
;
446 sym
= __sym_malloc();
447 sym_free_first
= sym
->next
;
450 sym
= tcc_malloc(sizeof(Sym
));
455 ST_INLN
void sym_free(Sym
*sym
)
458 sym
->next
= sym_free_first
;
459 sym_free_first
= sym
;
465 /* push, without hashing */
466 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
471 memset(s
, 0, sizeof *s
);
481 /* find a symbol and return its associated structure. 's' is the top
482 of the symbol stack */
483 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
495 /* structure lookup */
496 ST_INLN Sym
*struct_find(int v
)
499 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
501 return table_ident
[v
]->sym_struct
;
504 /* find an identifier */
505 ST_INLN Sym
*sym_find(int v
)
508 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
510 return table_ident
[v
]->sym_identifier
;
513 /* push a given symbol on the symbol stack */
514 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
523 s
= sym_push2(ps
, v
, type
->t
, c
);
524 s
->type
.ref
= type
->ref
;
526 /* don't record fields or anonymous symbols */
528 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
529 /* record symbol in token array */
530 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
532 ps
= &ts
->sym_struct
;
534 ps
= &ts
->sym_identifier
;
537 s
->sym_scope
= local_scope
;
538 if (s
->prev_tok
&& s
->prev_tok
->sym_scope
== s
->sym_scope
)
539 tcc_error("redeclaration of '%s'",
540 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
545 /* push a global identifier */
546 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
549 s
= sym_push2(&global_stack
, v
, t
, c
);
550 /* don't record anonymous symbol */
551 if (v
< SYM_FIRST_ANOM
) {
552 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
553 /* modify the top most local identifier, so that
554 sym_identifier will point to 's' when popped */
556 ps
= &(*ps
)->prev_tok
;
563 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
564 pop them yet from the list, but do remove them from the token array. */
565 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
575 /* remove symbol in token array */
577 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
578 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
580 ps
= &ts
->sym_struct
;
582 ps
= &ts
->sym_identifier
;
593 /* ------------------------------------------------------------------------- */
595 static void vsetc(CType
*type
, int r
, CValue
*vc
)
599 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
600 tcc_error("memory full (vstack)");
601 /* cannot let cpu flags if other instruction are generated. Also
602 avoid leaving VT_JMP anywhere except on the top of the stack
603 because it would complicate the code generator.
605 Don't do this when nocode_wanted. vtop might come from
606 !nocode_wanted regions (see 88_codeopt.c) and transforming
607 it to a register without actually generating code is wrong
608 as their value might still be used for real. All values
609 we push under nocode_wanted will eventually be popped
610 again, so that the VT_CMP/VT_JMP value will be in vtop
611 when code is unsuppressed again.
613 Same logic below in vswap(); */
614 if (vtop
>= vstack
&& !nocode_wanted
) {
615 v
= vtop
->r
& VT_VALMASK
;
616 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
628 ST_FUNC
void vswap(void)
631 /* cannot vswap cpu flags. See comment at vsetc() above */
632 if (vtop
>= vstack
&& !nocode_wanted
) {
633 int v
= vtop
->r
& VT_VALMASK
;
634 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
642 /* pop stack value */
643 ST_FUNC
void vpop(void)
646 v
= vtop
->r
& VT_VALMASK
;
647 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
648 /* for x86, we need to pop the FP stack */
650 o(0xd8dd); /* fstp %st(0) */
653 if (v
== VT_JMP
|| v
== VT_JMPI
) {
654 /* need to put correct jump if && or || without test */
660 /* push constant of type "type" with useless value */
661 ST_FUNC
void vpush(CType
*type
)
663 vset(type
, VT_CONST
, 0);
666 /* push integer constant */
667 ST_FUNC
void vpushi(int v
)
671 vsetc(&int_type
, VT_CONST
, &cval
);
674 /* push a pointer sized constant */
675 static void vpushs(addr_t v
)
679 vsetc(&size_type
, VT_CONST
, &cval
);
682 /* push arbitrary 64bit constant */
683 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
690 vsetc(&ctype
, VT_CONST
, &cval
);
693 /* push long long constant */
694 static inline void vpushll(long long v
)
696 vpush64(VT_LLONG
, v
);
699 ST_FUNC
void vset(CType
*type
, int r
, int v
)
704 vsetc(type
, r
, &cval
);
707 static void vseti(int r
, int v
)
715 ST_FUNC
void vpushv(SValue
*v
)
717 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
718 tcc_error("memory full (vstack)");
723 static void vdup(void)
728 /* rotate n first stack elements to the bottom
729 I1 ... In -> I2 ... In I1 [top is right]
731 ST_FUNC
void vrotb(int n
)
742 /* rotate the n elements before entry e towards the top
743 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
745 ST_FUNC
void vrote(SValue
*e
, int n
)
751 for(i
= 0;i
< n
- 1; i
++)
756 /* rotate n first stack elements to the top
757 I1 ... In -> In I1 ... I(n-1) [top is right]
759 ST_FUNC
void vrott(int n
)
764 /* push a symbol value of TYPE */
765 static inline void vpushsym(CType
*type
, Sym
*sym
)
769 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
773 /* Return a static symbol pointing to a section */
774 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
780 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
781 sym
->type
.ref
= type
->ref
;
782 sym
->r
= VT_CONST
| VT_SYM
;
783 put_extern_sym(sym
, sec
, offset
, size
);
787 /* push a reference to a section offset by adding a dummy symbol */
788 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
790 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
793 /* define a new external reference to a symbol 'v' of type 'u' */
794 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
800 /* push forward reference */
801 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
802 s
->type
.ref
= type
->ref
;
803 s
->r
= r
| VT_CONST
| VT_SYM
;
808 /* Merge some storage attributes. */
809 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
811 if (type
&& !is_compatible_types(&sym
->type
, type
))
812 tcc_error("incompatible types for redefinition of '%s'",
813 get_tok_str(sym
->v
, NULL
));
815 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
816 tcc_error("incompatible dll linkage for redefinition of '%s'",
817 get_tok_str(sym
->v
, NULL
));
819 sym
->a
.dllexport
|= ad
->a
.dllexport
;
820 sym
->a
.weak
|= ad
->a
.weak
;
821 if (ad
->a
.visibility
) {
822 int vis
= sym
->a
.visibility
;
823 int vis2
= ad
->a
.visibility
;
824 if (vis
== STV_DEFAULT
)
826 else if (vis2
!= STV_DEFAULT
)
827 vis
= (vis
< vis2
) ? vis
: vis2
;
828 sym
->a
.visibility
= vis
;
831 sym
->a
.aligned
= ad
->a
.aligned
;
833 sym
->asm_label
= ad
->asm_label
;
837 /* define a new external reference to a symbol 'v' */
838 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
843 /* push forward reference */
844 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
845 s
->type
.t
|= VT_EXTERN
;
849 if (s
->type
.ref
== func_old_type
.ref
) {
850 s
->type
.ref
= type
->ref
;
851 s
->r
= r
| VT_CONST
| VT_SYM
;
852 s
->type
.t
|= VT_EXTERN
;
854 patch_storage(s
, ad
, type
);
859 /* push a reference to global symbol v */
860 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
862 vpushsym(type
, external_global_sym(v
, type
, 0));
865 /* save registers up to (vtop - n) stack entry */
866 ST_FUNC
void save_regs(int n
)
869 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
873 /* save r to the memory stack, and mark it as being free */
874 ST_FUNC
void save_reg(int r
)
876 save_reg_upstack(r
, 0);
879 /* save r to the memory stack, and mark it as being free,
880 if seen up to (vtop - n) stack entry */
881 ST_FUNC
void save_reg_upstack(int r
, int n
)
883 int l
, saved
, size
, align
;
887 if ((r
&= VT_VALMASK
) >= VT_CONST
)
892 /* modify all stack values */
895 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
896 if ((p
->r
& VT_VALMASK
) == r
||
897 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
898 /* must save value on stack if not already done */
900 /* NOTE: must reload 'r' because r might be equal to r2 */
901 r
= p
->r
& VT_VALMASK
;
902 /* store register in the stack */
904 if ((p
->r
& VT_LVAL
) ||
905 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
907 type
= &char_pointer_type
;
911 size
= type_size(type
, &align
);
912 loc
= (loc
- size
) & -align
;
914 sv
.r
= VT_LOCAL
| VT_LVAL
;
917 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
918 /* x86 specific: need to pop fp register ST0 if saved */
920 o(0xd8dd); /* fstp %st(0) */
924 /* special long long case */
925 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
933 /* mark that stack entry as being saved on the stack */
934 if (p
->r
& VT_LVAL
) {
935 /* also clear the bounded flag because the
936 relocation address of the function was stored in
938 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
940 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
948 #ifdef TCC_TARGET_ARM
949 /* find a register of class 'rc2' with at most one reference on stack.
950 * If none, call get_reg(rc) */
951 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
956 for(r
=0;r
<NB_REGS
;r
++) {
957 if (reg_classes
[r
] & rc2
) {
960 for(p
= vstack
; p
<= vtop
; p
++) {
961 if ((p
->r
& VT_VALMASK
) == r
||
962 (p
->r2
& VT_VALMASK
) == r
)
973 /* find a free register of class 'rc'. If none, save one register */
974 ST_FUNC
int get_reg(int rc
)
979 /* find a free register */
980 for(r
=0;r
<NB_REGS
;r
++) {
981 if (reg_classes
[r
] & rc
) {
984 for(p
=vstack
;p
<=vtop
;p
++) {
985 if ((p
->r
& VT_VALMASK
) == r
||
986 (p
->r2
& VT_VALMASK
) == r
)
994 /* no register left : free the first one on the stack (VERY
995 IMPORTANT to start from the bottom to ensure that we don't
996 spill registers used in gen_opi()) */
997 for(p
=vstack
;p
<=vtop
;p
++) {
998 /* look at second register (if long long) */
999 r
= p
->r2
& VT_VALMASK
;
1000 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1002 r
= p
->r
& VT_VALMASK
;
1003 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1009 /* Should never comes here */
1013 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1015 static void move_reg(int r
, int s
, int t
)
1029 /* get address of vtop (vtop MUST BE an lvalue) */
1030 ST_FUNC
void gaddrof(void)
1032 vtop
->r
&= ~VT_LVAL
;
1033 /* tricky: if saved lvalue, then we can go back to lvalue */
1034 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1035 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1040 #ifdef CONFIG_TCC_BCHECK
1041 /* generate lvalue bound code */
1042 static void gbound(void)
1047 vtop
->r
&= ~VT_MUSTBOUND
;
1048 /* if lvalue, then use checking code before dereferencing */
1049 if (vtop
->r
& VT_LVAL
) {
1050 /* if not VT_BOUNDED value, then make one */
1051 if (!(vtop
->r
& VT_BOUNDED
)) {
1052 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1053 /* must save type because we must set it to int to get pointer */
1055 vtop
->type
.t
= VT_PTR
;
1058 gen_bounded_ptr_add();
1059 vtop
->r
|= lval_type
;
1062 /* then check for dereferencing */
1063 gen_bounded_ptr_deref();
1068 /* store vtop a register belonging to class 'rc'. lvalues are
1069 converted to values. Cannot be used if cannot be converted to
1070 register value (such as structures). */
1071 ST_FUNC
int gv(int rc
)
1073 int r
, bit_pos
, bit_size
, size
, align
;
1076 /* NOTE: get_reg can modify vstack[] */
1077 if (vtop
->type
.t
& VT_BITFIELD
) {
1080 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
1081 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
1082 /* remove bit field info to avoid loops */
1083 vtop
->type
.t
&= ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
1084 /* cast to int to propagate signedness in following ops */
1085 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1090 if((vtop
->type
.t
& VT_UNSIGNED
)
1091 || (vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1092 type
.t
|= VT_UNSIGNED
;
1094 /* generate shifts */
1095 vpushi(bits
- (bit_pos
+ bit_size
));
1097 vpushi(bits
- bit_size
);
1098 /* NOTE: transformed to SHR if unsigned */
1102 if (is_float(vtop
->type
.t
) &&
1103 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1104 unsigned long offset
;
1105 /* CPUs usually cannot use float constants, so we store them
1106 generically in data segment */
1107 size
= type_size(&vtop
->type
, &align
);
1108 offset
= section_add(data_section
, size
, align
);
1109 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1111 init_putv(&vtop
->type
, data_section
, offset
);
1114 #ifdef CONFIG_TCC_BCHECK
1115 if (vtop
->r
& VT_MUSTBOUND
)
1119 r
= vtop
->r
& VT_VALMASK
;
1120 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1121 #ifndef TCC_TARGET_ARM64
1124 #ifdef TCC_TARGET_X86_64
1125 else if (rc
== RC_FRET
)
1129 /* need to reload if:
1131 - lvalue (need to dereference pointer)
1132 - already a register, but not in the right class */
1134 || (vtop
->r
& VT_LVAL
)
1135 || !(reg_classes
[r
] & rc
)
1137 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1138 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1140 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1146 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1147 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1149 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1150 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1151 unsigned long long ll
;
1153 int r2
, original_type
;
1154 original_type
= vtop
->type
.t
;
1155 /* two register type load : expand to two words
1158 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1161 vtop
->c
.i
= ll
; /* first word */
1163 vtop
->r
= r
; /* save register value */
1164 vpushi(ll
>> 32); /* second word */
1167 if (vtop
->r
& VT_LVAL
) {
1168 /* We do not want to modifier the long long
1169 pointer here, so the safest (and less
1170 efficient) is to save all the other registers
1171 in the stack. XXX: totally inefficient. */
1175 /* lvalue_save: save only if used further down the stack */
1176 save_reg_upstack(vtop
->r
, 1);
1178 /* load from memory */
1179 vtop
->type
.t
= load_type
;
1182 vtop
[-1].r
= r
; /* save register value */
1183 /* increment pointer to get second word */
1184 vtop
->type
.t
= addr_type
;
1189 vtop
->type
.t
= load_type
;
1191 /* move registers */
1194 vtop
[-1].r
= r
; /* save register value */
1195 vtop
->r
= vtop
[-1].r2
;
1197 /* Allocate second register. Here we rely on the fact that
1198 get_reg() tries first to free r2 of an SValue. */
1202 /* write second register */
1204 vtop
->type
.t
= original_type
;
1205 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1207 /* lvalue of scalar type : need to use lvalue type
1208 because of possible cast */
1211 /* compute memory access type */
1212 if (vtop
->r
& VT_LVAL_BYTE
)
1214 else if (vtop
->r
& VT_LVAL_SHORT
)
1216 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1220 /* restore wanted type */
1223 /* one register type load */
1228 #ifdef TCC_TARGET_C67
1229 /* uses register pairs for doubles */
1230 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1237 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1238 ST_FUNC
void gv2(int rc1
, int rc2
)
1242 /* generate more generic register first. But VT_JMP or VT_CMP
1243 values must be generated first in all cases to avoid possible
1245 v
= vtop
[0].r
& VT_VALMASK
;
1246 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1251 /* test if reload is needed for first register */
1252 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1262 /* test if reload is needed for first register */
1263 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1269 #ifndef TCC_TARGET_ARM64
1270 /* wrapper around RC_FRET to return a register by type */
1271 static int rc_fret(int t
)
1273 #ifdef TCC_TARGET_X86_64
1274 if (t
== VT_LDOUBLE
) {
1282 /* wrapper around REG_FRET to return a register by type */
1283 static int reg_fret(int t
)
1285 #ifdef TCC_TARGET_X86_64
1286 if (t
== VT_LDOUBLE
) {
1294 /* expand 64bit on stack in two ints */
1295 static void lexpand(void)
1298 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1299 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1300 if (v
== VT_CONST
) {
1303 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1309 vtop
[0].r
= vtop
[-1].r2
;
1310 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1312 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1316 #ifdef TCC_TARGET_ARM
1317 /* expand long long on stack */
1318 ST_FUNC
void lexpand_nr(void)
1322 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1324 vtop
->r2
= VT_CONST
;
1325 vtop
->type
.t
= VT_INT
| u
;
1326 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1327 if (v
== VT_CONST
) {
1328 vtop
[-1].c
.i
= vtop
->c
.i
;
1329 vtop
->c
.i
= vtop
->c
.i
>> 32;
1331 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1333 vtop
->r
= vtop
[-1].r
;
1334 } else if (v
> VT_CONST
) {
1338 vtop
->r
= vtop
[-1].r2
;
1339 vtop
[-1].r2
= VT_CONST
;
1340 vtop
[-1].type
.t
= VT_INT
| u
;
1345 /* build a long long from two ints */
1346 static void lbuild(int t
)
1348 gv2(RC_INT
, RC_INT
);
1349 vtop
[-1].r2
= vtop
[0].r
;
1350 vtop
[-1].type
.t
= t
;
1355 /* convert stack entry to register and duplicate its value in another
1357 static void gv_dup(void)
1364 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1371 /* stack: H L L1 H1 */
1381 /* duplicate value */
1386 #ifdef TCC_TARGET_X86_64
1387 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1397 load(r1
, &sv
); /* move r to r1 */
1399 /* duplicates value */
1405 /* Generate value test
1407 * Generate a test for any value (jump, comparison and integers) */
1408 ST_FUNC
int gvtst(int inv
, int t
)
1410 int v
= vtop
->r
& VT_VALMASK
;
1411 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1415 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1416 /* constant jmp optimization */
1417 if ((vtop
->c
.i
!= 0) != inv
)
1422 return gtst(inv
, t
);
1426 /* generate CPU independent (unsigned) long long operations */
1427 static void gen_opl(int op
)
1429 int t
, a
, b
, op1
, c
, i
;
1431 unsigned short reg_iret
= REG_IRET
;
1432 unsigned short reg_lret
= REG_LRET
;
1438 func
= TOK___divdi3
;
1441 func
= TOK___udivdi3
;
1444 func
= TOK___moddi3
;
1447 func
= TOK___umoddi3
;
1454 /* call generic long long function */
1455 vpush_global_sym(&func_old_type
, func
);
1460 vtop
->r2
= reg_lret
;
1468 //pv("gen_opl A",0,2);
1474 /* stack: L1 H1 L2 H2 */
1479 vtop
[-2] = vtop
[-3];
1482 /* stack: H1 H2 L1 L2 */
1483 //pv("gen_opl B",0,4);
1489 /* stack: H1 H2 L1 L2 ML MH */
1492 /* stack: ML MH H1 H2 L1 L2 */
1496 /* stack: ML MH H1 L2 H2 L1 */
1501 /* stack: ML MH M1 M2 */
1504 } else if (op
== '+' || op
== '-') {
1505 /* XXX: add non carry method too (for MIPS or alpha) */
1511 /* stack: H1 H2 (L1 op L2) */
1514 gen_op(op1
+ 1); /* TOK_xxxC2 */
1517 /* stack: H1 H2 (L1 op L2) */
1520 /* stack: (L1 op L2) H1 H2 */
1522 /* stack: (L1 op L2) (H1 op H2) */
1530 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1531 t
= vtop
[-1].type
.t
;
1535 /* stack: L H shift */
1537 /* constant: simpler */
1538 /* NOTE: all comments are for SHL. the other cases are
1539 done by swapping words */
1550 if (op
!= TOK_SAR
) {
1583 /* XXX: should provide a faster fallback on x86 ? */
1586 func
= TOK___ashrdi3
;
1589 func
= TOK___lshrdi3
;
1592 func
= TOK___ashldi3
;
1598 /* compare operations */
1604 /* stack: L1 H1 L2 H2 */
1606 vtop
[-1] = vtop
[-2];
1608 /* stack: L1 L2 H1 H2 */
1611 /* when values are equal, we need to compare low words. since
1612 the jump is inverted, we invert the test too. */
1615 else if (op1
== TOK_GT
)
1617 else if (op1
== TOK_ULT
)
1619 else if (op1
== TOK_UGT
)
1629 /* generate non equal test */
1635 /* compare low. Always unsigned */
1639 else if (op1
== TOK_LE
)
1641 else if (op1
== TOK_GT
)
1643 else if (op1
== TOK_GE
)
/* Perform a signed 64-bit division using only unsigned arithmetic, so
   that overflow-prone operands (e.g. INT64_MIN) never trigger undefined
   behavior.  Both operands arrive as raw uint64_t bit patterns holding
   two's-complement signed values. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    /* take the magnitude of each operand (top bit is the sign) */
    uint64_t ua = (a >> 63) ? -a : a;
    uint64_t ub = (b >> 63) ? -b : b;
    uint64_t q = ua / ub;

    /* the quotient is negative iff the operand signs differ */
    return ((a ^ b) >> 63) ? -q : q;
}
/* Signed 64-bit "less than" on raw uint64_t bit patterns.  Flipping the
   sign bit maps the signed ordering onto the unsigned ordering, so a
   plain unsigned compare yields the signed result without relying on
   implementation-defined conversions. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
1665 /* handle integer constant optimizations and various machine
1667 static void gen_opic(int op
)
1669 SValue
*v1
= vtop
- 1;
1671 int t1
= v1
->type
.t
& VT_BTYPE
;
1672 int t2
= v2
->type
.t
& VT_BTYPE
;
1673 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1674 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1675 uint64_t l1
= c1
? v1
->c
.i
: 0;
1676 uint64_t l2
= c2
? v2
->c
.i
: 0;
1677 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1679 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1680 l1
= ((uint32_t)l1
|
1681 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1682 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1683 l2
= ((uint32_t)l2
|
1684 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1688 case '+': l1
+= l2
; break;
1689 case '-': l1
-= l2
; break;
1690 case '&': l1
&= l2
; break;
1691 case '^': l1
^= l2
; break;
1692 case '|': l1
|= l2
; break;
1693 case '*': l1
*= l2
; break;
1700 /* if division by zero, generate explicit division */
1703 tcc_error("division by zero in constant");
1707 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1708 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1709 case TOK_UDIV
: l1
= l1
/ l2
; break;
1710 case TOK_UMOD
: l1
= l1
% l2
; break;
1713 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1714 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1716 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1719 case TOK_ULT
: l1
= l1
< l2
; break;
1720 case TOK_UGE
: l1
= l1
>= l2
; break;
1721 case TOK_EQ
: l1
= l1
== l2
; break;
1722 case TOK_NE
: l1
= l1
!= l2
; break;
1723 case TOK_ULE
: l1
= l1
<= l2
; break;
1724 case TOK_UGT
: l1
= l1
> l2
; break;
1725 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1726 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1727 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1728 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1730 case TOK_LAND
: l1
= l1
&& l2
; break;
1731 case TOK_LOR
: l1
= l1
|| l2
; break;
1735 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1736 l1
= ((uint32_t)l1
|
1737 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1741 /* if commutative ops, put c2 as constant */
1742 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1743 op
== '|' || op
== '*')) {
1745 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1746 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1748 if (!const_wanted
&&
1750 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1751 (l1
== -1 && op
== TOK_SAR
))) {
1752 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1754 } else if (!const_wanted
&&
1755 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1757 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
1758 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1759 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1764 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1767 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1768 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1771 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
1772 /* filter out NOP operations like x*1, x-0, x&-1... */
1774 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1775 /* try to use shifts instead of muls or divs */
1776 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1785 else if (op
== TOK_PDIV
)
1791 } else if (c2
&& (op
== '+' || op
== '-') &&
1792 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1793 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1794 /* symbol + constant case */
1798 /* The backends can't always deal with addends to symbols
1799 larger than +-1<<31. Don't construct such. */
1806 /* call low level op generator */
1807 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
1808 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
1816 /* generate a floating point operation with constant propagation */
1817 static void gen_opif(int op
)
1821 #if defined _MSC_VER && defined _AMD64_
1822 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
1829 /* currently, we cannot do computations with forward symbols */
1830 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1831 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1833 if (v1
->type
.t
== VT_FLOAT
) {
1836 } else if (v1
->type
.t
== VT_DOUBLE
) {
1844 /* NOTE: we only do constant propagation if finite number (not
1845 NaN or infinity) (ANSI spec) */
1846 if (!ieee_finite(f1
) || !ieee_finite(f2
))
1850 case '+': f1
+= f2
; break;
1851 case '-': f1
-= f2
; break;
1852 case '*': f1
*= f2
; break;
1856 tcc_error("division by zero in constant");
1861 /* XXX: also handles tests ? */
1865 /* XXX: overflow test ? */
1866 if (v1
->type
.t
== VT_FLOAT
) {
1868 } else if (v1
->type
.t
== VT_DOUBLE
) {
1880 static int pointed_size(CType
*type
)
1883 return type_size(pointed_type(type
), &align
);
1886 static void vla_runtime_pointed_size(CType
*type
)
1889 vla_runtime_type_size(pointed_type(type
), &align
);
1892 static inline int is_null_pointer(SValue
*p
)
1894 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
1896 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
1897 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
1898 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
1899 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
1902 static inline int is_integer_btype(int bt
)
1904 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
1905 bt
== VT_INT
|| bt
== VT_LLONG
);
1908 /* check types for comparison or subtraction of pointers */
1909 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
1911 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
1914 /* null pointers are accepted for all comparisons as gcc */
1915 if (is_null_pointer(p1
) || is_null_pointer(p2
))
1919 bt1
= type1
->t
& VT_BTYPE
;
1920 bt2
= type2
->t
& VT_BTYPE
;
1921 /* accept comparison between pointer and integer with a warning */
1922 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
1923 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
1924 tcc_warning("comparison between pointer and integer");
1928 /* both must be pointers or implicit function pointers */
1929 if (bt1
== VT_PTR
) {
1930 type1
= pointed_type(type1
);
1931 } else if (bt1
!= VT_FUNC
)
1932 goto invalid_operands
;
1934 if (bt2
== VT_PTR
) {
1935 type2
= pointed_type(type2
);
1936 } else if (bt2
!= VT_FUNC
) {
1938 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
1940 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
1941 (type2
->t
& VT_BTYPE
) == VT_VOID
)
1945 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1946 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1947 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
1948 /* gcc-like error if '-' is used */
1950 goto invalid_operands
;
1952 tcc_warning("comparison of distinct pointer types lacks a cast");
1956 /* generic gen_op: handles types problems */
1957 ST_FUNC
void gen_op(int op
)
1959 int u
, t1
, t2
, bt1
, bt2
, t
;
1963 t1
= vtop
[-1].type
.t
;
1964 t2
= vtop
[0].type
.t
;
1965 bt1
= t1
& VT_BTYPE
;
1966 bt2
= t2
& VT_BTYPE
;
1968 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
1969 tcc_error("operation on a struct");
1970 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
1971 if (bt2
== VT_FUNC
) {
1972 mk_pointer(&vtop
->type
);
1975 if (bt1
== VT_FUNC
) {
1977 mk_pointer(&vtop
->type
);
1982 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
1983 /* at least one operand is a pointer */
1984 /* relational op: must be both pointers */
1985 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
1986 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
1987 /* pointers are handled are unsigned */
1989 t
= VT_LLONG
| VT_UNSIGNED
;
1991 t
= VT_INT
| VT_UNSIGNED
;
1995 /* if both pointers, then it must be the '-' op */
1996 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
1998 tcc_error("cannot use pointers here");
1999 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2000 /* XXX: check that types are compatible */
2001 if (vtop
[-1].type
.t
& VT_VLA
) {
2002 vla_runtime_pointed_size(&vtop
[-1].type
);
2004 vpushi(pointed_size(&vtop
[-1].type
));
2008 /* set to integer type */
2010 vtop
->type
.t
= VT_LLONG
;
2012 vtop
->type
.t
= VT_INT
;
2017 /* exactly one pointer : must be '+' or '-'. */
2018 if (op
!= '-' && op
!= '+')
2019 tcc_error("cannot use pointers here");
2020 /* Put pointer as first operand */
2021 if (bt2
== VT_PTR
) {
2023 t
= t1
, t1
= t2
, t2
= t
;
2026 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2027 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2030 type1
= vtop
[-1].type
;
2031 type1
.t
&= ~VT_ARRAY
;
2032 if (vtop
[-1].type
.t
& VT_VLA
)
2033 vla_runtime_pointed_size(&vtop
[-1].type
);
2035 u
= pointed_size(&vtop
[-1].type
);
2037 tcc_error("unknown array element size");
2041 /* XXX: cast to int ? (long long case) */
2047 /* #ifdef CONFIG_TCC_BCHECK
2048 The main reason to removing this code:
2055 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2056 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2058 When this code is on. then the output looks like
2060 v+(i-j) = 0xbff84000
2062 /* if evaluating constant expression, no code should be
2063 generated, so no bound check */
2064 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2065 /* if bounded pointers, we generate a special code to
2072 gen_bounded_ptr_add();
2078 /* put again type if gen_opic() swaped operands */
2081 } else if (is_float(bt1
) || is_float(bt2
)) {
2082 /* compute bigger type and do implicit casts */
2083 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2085 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2090 /* floats can only be used for a few operations */
2091 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2092 (op
< TOK_ULT
|| op
> TOK_GT
))
2093 tcc_error("invalid operands for binary operation");
2095 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2096 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2097 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2100 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2101 /* cast to biggest op */
2103 /* convert to unsigned if it does not fit in a long long */
2104 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2105 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2109 /* integer operations */
2111 /* convert to unsigned if it does not fit in an integer */
2112 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2113 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2116 /* XXX: currently, some unsigned operations are explicit, so
2117 we modify them here */
2118 if (t
& VT_UNSIGNED
) {
2125 else if (op
== TOK_LT
)
2127 else if (op
== TOK_GT
)
2129 else if (op
== TOK_LE
)
2131 else if (op
== TOK_GE
)
2139 /* special case for shifts and long long: we keep the shift as
2141 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2148 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2149 /* relational op: the result is an int */
2150 vtop
->type
.t
= VT_INT
;
2155 // Make sure that we have converted to an rvalue:
2156 if (vtop
->r
& VT_LVAL
)
2157 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2160 #ifndef TCC_TARGET_ARM
2161 /* generic itof for unsigned long long case */
2162 static void gen_cvt_itof1(int t
)
2164 #ifdef TCC_TARGET_ARM64
2167 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2168 (VT_LLONG
| VT_UNSIGNED
)) {
2171 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2172 #if LDOUBLE_SIZE != 8
2173 else if (t
== VT_LDOUBLE
)
2174 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2177 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2181 vtop
->r
= reg_fret(t
);
2189 /* generic ftoi for unsigned long long case */
2190 static void gen_cvt_ftoi1(int t
)
2192 #ifdef TCC_TARGET_ARM64
2197 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2198 /* not handled natively */
2199 st
= vtop
->type
.t
& VT_BTYPE
;
2201 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2202 #if LDOUBLE_SIZE != 8
2203 else if (st
== VT_LDOUBLE
)
2204 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2207 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2212 vtop
->r2
= REG_LRET
;
2219 /* force char or short cast */
2220 static void force_charshort_cast(int t
)
2224 /* XXX: add optimization if lvalue : just change type and offset */
2229 if (t
& VT_UNSIGNED
) {
2230 vpushi((1 << bits
) - 1);
2233 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2239 /* result must be signed or the SAR is converted to an SHL
2240 This was not the case when "t" was a signed short
2241 and the last value on the stack was an unsigned int */
2242 vtop
->type
.t
&= ~VT_UNSIGNED
;
2248 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2249 static void gen_cast_s(int t
)
2257 static void gen_cast(CType
*type
)
2259 int sbt
, dbt
, sf
, df
, c
, p
;
2261 /* special delayed cast for char/short */
2262 /* XXX: in some cases (multiple cascaded casts), it may still
2264 if (vtop
->r
& VT_MUSTCAST
) {
2265 vtop
->r
&= ~VT_MUSTCAST
;
2266 force_charshort_cast(vtop
->type
.t
);
2269 /* bitfields first get cast to ints */
2270 if (vtop
->type
.t
& VT_BITFIELD
) {
2274 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2275 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2280 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2281 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2283 /* constant case: we can do it now */
2284 /* XXX: in ISOC, cannot do it if error in convert */
2285 if (sbt
== VT_FLOAT
)
2286 vtop
->c
.ld
= vtop
->c
.f
;
2287 else if (sbt
== VT_DOUBLE
)
2288 vtop
->c
.ld
= vtop
->c
.d
;
2291 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2292 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2293 vtop
->c
.ld
= vtop
->c
.i
;
2295 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2297 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2298 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2300 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2303 if (dbt
== VT_FLOAT
)
2304 vtop
->c
.f
= (float)vtop
->c
.ld
;
2305 else if (dbt
== VT_DOUBLE
)
2306 vtop
->c
.d
= (double)vtop
->c
.ld
;
2307 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2308 vtop
->c
.i
= vtop
->c
.ld
;
2309 } else if (sf
&& dbt
== VT_BOOL
) {
2310 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2313 vtop
->c
.i
= vtop
->c
.ld
;
2314 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2316 else if (sbt
& VT_UNSIGNED
)
2317 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2319 else if (sbt
== VT_PTR
)
2322 else if (sbt
!= VT_LLONG
)
2323 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2324 -(vtop
->c
.i
& 0x80000000));
2326 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2328 else if (dbt
== VT_BOOL
)
2329 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2331 else if (dbt
== VT_PTR
)
2334 else if (dbt
!= VT_LLONG
) {
2335 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2336 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2339 if (!(dbt
& VT_UNSIGNED
))
2340 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2343 } else if (p
&& dbt
== VT_BOOL
) {
2347 /* non constant case: generate code */
2349 /* convert from fp to fp */
2352 /* convert int to fp */
2355 /* convert fp to int */
2356 if (dbt
== VT_BOOL
) {
2360 /* we handle char/short/etc... with generic code */
2361 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2362 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2366 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2367 /* additional cast for char/short... */
2373 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2374 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2375 /* scalar to long long */
2376 /* machine independent conversion */
2378 /* generate high word */
2379 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2383 if (sbt
== VT_PTR
) {
2384 /* cast from pointer to int before we apply
2385 shift operation, which pointers don't support*/
2392 /* patch second register */
2393 vtop
[-1].r2
= vtop
->r
;
2397 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2398 (dbt
& VT_BTYPE
) == VT_PTR
||
2399 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2400 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2401 (sbt
& VT_BTYPE
) != VT_PTR
&&
2402 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2403 /* need to convert from 32bit to 64bit */
2405 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2406 #if defined(TCC_TARGET_ARM64)
2408 #elif defined(TCC_TARGET_X86_64)
2410 /* x86_64 specific: movslq */
2412 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2419 } else if (dbt
== VT_BOOL
) {
2420 /* scalar to bool */
2423 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2424 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2425 if (sbt
== VT_PTR
) {
2426 vtop
->type
.t
= VT_INT
;
2427 tcc_warning("nonportable conversion from pointer to char/short");
2429 force_charshort_cast(dbt
);
2431 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2433 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2434 /* from long long: just take low order word */
2438 /* if lvalue and single word type, nothing to do because
2439 the lvalue already contains the real type size (see
2440 VT_LVAL_xxx constants) */
2444 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2445 /* if we are casting between pointer types,
2446 we must update the VT_LVAL_xxx size */
2447 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2448 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2453 /* return type size as known at compile time. Put alignment at 'a' */
2454 ST_FUNC
int type_size(CType
*type
, int *a
)
2459 bt
= type
->t
& VT_BTYPE
;
2460 if (bt
== VT_STRUCT
) {
2465 } else if (bt
== VT_PTR
) {
2466 if (type
->t
& VT_ARRAY
) {
2470 ts
= type_size(&s
->type
, a
);
2472 if (ts
< 0 && s
->c
< 0)
2480 } else if (IS_ENUM(type
->t
) && type
->ref
->c
== -1) {
2481 return -1; /* incomplete enum */
2482 } else if (bt
== VT_LDOUBLE
) {
2484 return LDOUBLE_SIZE
;
2485 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2486 #ifdef TCC_TARGET_I386
2487 #ifdef TCC_TARGET_PE
2492 #elif defined(TCC_TARGET_ARM)
2502 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2505 } else if (bt
== VT_SHORT
) {
2508 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2512 /* char, void, function, _Bool */
2518 /* push type size as known at runtime time on top of value stack. Put
2520 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2522 if (type
->t
& VT_VLA
) {
2523 type_size(&type
->ref
->type
, a
);
2524 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2526 vpushi(type_size(type
, a
));
2530 static void vla_sp_restore(void) {
2531 if (vlas_in_scope
) {
2532 gen_vla_sp_restore(vla_sp_loc
);
2536 static void vla_sp_restore_root(void) {
2537 if (vlas_in_scope
) {
2538 gen_vla_sp_restore(vla_sp_root_loc
);
2542 /* return the pointed type of t */
2543 static inline CType
*pointed_type(CType
*type
)
2545 return &type
->ref
->type
;
2548 /* modify type so that its it is a pointer to type. */
2549 ST_FUNC
void mk_pointer(CType
*type
)
2552 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2553 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2557 /* compare function types. OLD functions match any new functions */
2558 static int is_compatible_func(CType
*type1
, CType
*type2
)
2564 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2566 /* check func_call */
2567 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2569 /* XXX: not complete */
2570 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2572 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2574 while (s1
!= NULL
) {
2577 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2587 /* return true if type1 and type2 are the same. If unqualified is
2588 true, qualifiers on the types are ignored.
2590 - enums are not checked as gcc __builtin_types_compatible_p ()
2592 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2596 t1
= type1
->t
& VT_TYPE
;
2597 t2
= type2
->t
& VT_TYPE
;
2599 /* strip qualifiers before comparing */
2600 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2601 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2603 /* Default Vs explicit signedness only matters for char */
2604 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2609 /* XXX: bitfields ? */
2612 /* test more complicated cases */
2613 bt1
= t1
& VT_BTYPE
;
2614 if (bt1
== VT_PTR
) {
2615 type1
= pointed_type(type1
);
2616 type2
= pointed_type(type2
);
2617 return is_compatible_types(type1
, type2
);
2618 } else if (bt1
== VT_STRUCT
) {
2619 return (type1
->ref
== type2
->ref
);
2620 } else if (bt1
== VT_FUNC
) {
2621 return is_compatible_func(type1
, type2
);
2627 /* return true if type1 and type2 are exactly the same (including
2630 static int is_compatible_types(CType
*type1
, CType
*type2
)
2632 return compare_types(type1
,type2
,0);
2635 /* return true if type1 and type2 are the same (ignoring qualifiers).
2637 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2639 return compare_types(type1
,type2
,1);
2642 /* print a type. If 'varstr' is not NULL, then the variable is also
2643 printed in the type */
2645 /* XXX: add array and function pointers */
2646 static void type_to_str(char *buf
, int buf_size
,
2647 CType
*type
, const char *varstr
)
2657 if (t
& VT_CONSTANT
)
2658 pstrcat(buf
, buf_size
, "const ");
2659 if (t
& VT_VOLATILE
)
2660 pstrcat(buf
, buf_size
, "volatile ");
2661 if ((t
& (VT_DEFSIGN
| VT_UNSIGNED
)) == (VT_DEFSIGN
| VT_UNSIGNED
))
2662 pstrcat(buf
, buf_size
, "unsigned ");
2663 else if (t
& VT_DEFSIGN
)
2664 pstrcat(buf
, buf_size
, "signed ");
2666 pstrcat(buf
, buf_size
, "extern ");
2668 pstrcat(buf
, buf_size
, "static ");
2670 pstrcat(buf
, buf_size
, "typedef ");
2672 pstrcat(buf
, buf_size
, "inline ");
2673 buf_size
-= strlen(buf
);
2675 if (IS_ENUM(type
->t
)) {
2708 tstr
= "long double";
2710 pstrcat(buf
, buf_size
, tstr
);
2717 pstrcat(buf
, buf_size
, tstr
);
2718 v
= type
->ref
->v
& ~SYM_STRUCT
;
2719 if (v
>= SYM_FIRST_ANOM
)
2720 pstrcat(buf
, buf_size
, "<anonymous>");
2722 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2726 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2727 pstrcat(buf
, buf_size
, "(");
2729 while (sa
!= NULL
) {
2730 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2731 pstrcat(buf
, buf_size
, buf1
);
2734 pstrcat(buf
, buf_size
, ", ");
2736 pstrcat(buf
, buf_size
, ")");
2741 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2742 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2745 pstrcpy(buf1
, sizeof(buf1
), "*");
2746 if (t
& VT_CONSTANT
)
2747 pstrcat(buf1
, buf_size
, "const ");
2748 if (t
& VT_VOLATILE
)
2749 pstrcat(buf1
, buf_size
, "volatile ");
2751 pstrcat(buf1
, sizeof(buf1
), varstr
);
2752 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2756 pstrcat(buf
, buf_size
, " ");
2757 pstrcat(buf
, buf_size
, varstr
);
2762 /* verify type compatibility to store vtop in 'dt' type, and generate
2764 static void gen_assign_cast(CType
*dt
)
2766 CType
*st
, *type1
, *type2
, tmp_type1
, tmp_type2
;
2767 char buf1
[256], buf2
[256];
2770 st
= &vtop
->type
; /* source type */
2771 dbt
= dt
->t
& VT_BTYPE
;
2772 sbt
= st
->t
& VT_BTYPE
;
2773 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
2774 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
2776 It is Ok if both are void
2782 gcc accepts this program
2785 tcc_error("cannot cast from/to void");
2787 if (dt
->t
& VT_CONSTANT
)
2788 tcc_warning("assignment of read-only location");
2791 /* special cases for pointers */
2792 /* '0' can also be a pointer */
2793 if (is_null_pointer(vtop
))
2795 /* accept implicit pointer to integer cast with warning */
2796 if (is_integer_btype(sbt
)) {
2797 tcc_warning("assignment makes pointer from integer without a cast");
2800 type1
= pointed_type(dt
);
2801 /* a function is implicitly a function pointer */
2802 if (sbt
== VT_FUNC
) {
2803 if ((type1
->t
& VT_BTYPE
) != VT_VOID
&&
2804 !is_compatible_types(pointed_type(dt
), st
))
2805 tcc_warning("assignment from incompatible pointer type");
2810 type2
= pointed_type(st
);
2811 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2812 (type2
->t
& VT_BTYPE
) == VT_VOID
) {
2813 /* void * can match anything */
2815 //printf("types %08x %08x\n", type1->t, type2->t);
2816 /* exact type match, except for qualifiers */
2817 if (!is_compatible_unqualified_types(type1
, type2
)) {
2818 /* Like GCC don't warn by default for merely changes
2819 in pointer target signedness. Do warn for different
2820 base types, though, in particular for unsigned enums
2821 and signed int targets. */
2822 if ((type1
->t
& VT_BTYPE
) != (type2
->t
& VT_BTYPE
)
2823 || IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)
2825 tcc_warning("assignment from incompatible pointer type");
2828 /* check const and volatile */
2829 if ((!(type1
->t
& VT_CONSTANT
) && (type2
->t
& VT_CONSTANT
)) ||
2830 (!(type1
->t
& VT_VOLATILE
) && (type2
->t
& VT_VOLATILE
)))
2831 tcc_warning("assignment discards qualifiers from pointer target type");
2837 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
2838 tcc_warning("assignment makes integer from pointer without a cast");
2839 } else if (sbt
== VT_STRUCT
) {
2840 goto case_VT_STRUCT
;
2842 /* XXX: more tests */
2848 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2849 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2850 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2852 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2853 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2854 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
2862 /* store vtop in lvalue pushed on stack */
2863 ST_FUNC
void vstore(void)
2865 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
2867 ft
= vtop
[-1].type
.t
;
2868 sbt
= vtop
->type
.t
& VT_BTYPE
;
2869 dbt
= ft
& VT_BTYPE
;
2870 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
2871 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
2872 && !(vtop
->type
.t
& VT_BITFIELD
)) {
2873 /* optimize char/short casts */
2874 delayed_cast
= VT_MUSTCAST
;
2875 vtop
->type
.t
= ft
& VT_TYPE
;
2876 /* XXX: factorize */
2877 if (ft
& VT_CONSTANT
)
2878 tcc_warning("assignment of read-only location");
2881 if (!(ft
& VT_BITFIELD
))
2882 gen_assign_cast(&vtop
[-1].type
);
2885 if (sbt
== VT_STRUCT
) {
2886 /* if structure, only generate pointer */
2887 /* structure assignment : generate memcpy */
2888 /* XXX: optimize if small size */
2889 size
= type_size(&vtop
->type
, &align
);
2893 vtop
->type
.t
= VT_PTR
;
2896 /* address of memcpy() */
2899 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
2900 else if(!(align
& 3))
2901 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
2904 /* Use memmove, rather than memcpy, as dest and src may be same: */
2905 vpush_global_sym(&func_old_type
, TOK_memmove
);
2910 vtop
->type
.t
= VT_PTR
;
2916 /* leave source on stack */
2917 } else if (ft
& VT_BITFIELD
) {
2918 /* bitfield store handling */
2920 /* save lvalue as expression result (example: s.b = s.a = n;) */
2921 vdup(), vtop
[-1] = vtop
[-2];
2923 bit_pos
= (ft
>> VT_STRUCT_SHIFT
) & 0x3f;
2924 bit_size
= (ft
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
2925 /* remove bit field info to avoid loops */
2926 vtop
[-1].type
.t
= ft
& ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
2928 if((ft
& VT_BTYPE
) == VT_BOOL
) {
2929 gen_cast(&vtop
[-1].type
);
2930 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
2933 /* duplicate destination */
2935 vtop
[-1] = vtop
[-2];
2937 /* mask and shift source */
2938 if((ft
& VT_BTYPE
) != VT_BOOL
) {
2939 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2940 vpushll((1ULL << bit_size
) - 1ULL);
2942 vpushi((1 << bit_size
) - 1);
2948 /* load destination, mask and or with source */
2950 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2951 vpushll(~(((1ULL << bit_size
) - 1ULL) << bit_pos
));
2953 vpushi(~(((1 << bit_size
) - 1) << bit_pos
));
2959 /* ... and discard */
2963 #ifdef CONFIG_TCC_BCHECK
2964 /* bound check case */
2965 if (vtop
[-1].r
& VT_MUSTBOUND
) {
2974 #ifdef TCC_TARGET_X86_64
2975 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
2977 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
2982 r
= gv(rc
); /* generate value */
2983 /* if lvalue was saved on stack, must read it */
2984 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
2986 t
= get_reg(RC_INT
);
2992 sv
.r
= VT_LOCAL
| VT_LVAL
;
2993 sv
.c
.i
= vtop
[-1].c
.i
;
2995 vtop
[-1].r
= t
| VT_LVAL
;
2997 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
2999 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3000 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3002 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3003 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3005 vtop
[-1].type
.t
= load_type
;
3008 /* convert to int to increment easily */
3009 vtop
->type
.t
= addr_type
;
3015 vtop
[-1].type
.t
= load_type
;
3016 /* XXX: it works because r2 is spilled last ! */
3017 store(vtop
->r2
, vtop
- 1);
3023 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3024 vtop
->r
|= delayed_cast
;
3028 /* post defines POST/PRE add. c is the token ++ or -- */
3029 ST_FUNC
void inc(int post
, int c
)
3032 vdup(); /* save lvalue */
3034 gv_dup(); /* duplicate value */
3039 vpushi(c
- TOK_MID
);
3041 vstore(); /* store value */
3043 vpop(); /* if post op, return saved value */
3046 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3048 /* read the string */
3052 while (tok
== TOK_STR
) {
3053 /* XXX: add \0 handling too ? */
3054 cstr_cat(astr
, tokc
.str
.data
, -1);
3057 cstr_ccat(astr
, '\0');
3060 /* If I is >= 1 and a power of two, returns log2(i)+1.
3061 If I is 0 returns 0. */
3062 static int exact_log2p1(int i
)
3067 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3078 /* Parse __attribute__((...)) GNUC extension. */
3079 static void parse_attribute(AttributeDef
*ad
)
3085 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3090 while (tok
!= ')') {
3091 if (tok
< TOK_IDENT
)
3092 expect("attribute name");
3099 parse_mult_str(&astr
, "section name");
3100 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3107 parse_mult_str(&astr
, "alias(\"target\")");
3108 ad
->alias_target
= /* save string as token, for later */
3109 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3113 case TOK_VISIBILITY1
:
3114 case TOK_VISIBILITY2
:
3116 parse_mult_str(&astr
,
3117 "visibility(\"default|hidden|internal|protected\")");
3118 if (!strcmp (astr
.data
, "default"))
3119 ad
->a
.visibility
= STV_DEFAULT
;
3120 else if (!strcmp (astr
.data
, "hidden"))
3121 ad
->a
.visibility
= STV_HIDDEN
;
3122 else if (!strcmp (astr
.data
, "internal"))
3123 ad
->a
.visibility
= STV_INTERNAL
;
3124 else if (!strcmp (astr
.data
, "protected"))
3125 ad
->a
.visibility
= STV_PROTECTED
;
3127 expect("visibility(\"default|hidden|internal|protected\")");
3136 if (n
<= 0 || (n
& (n
- 1)) != 0)
3137 tcc_error("alignment must be a positive power of two");
3142 ad
->a
.aligned
= exact_log2p1(n
);
3143 if (n
!= 1 << (ad
->a
.aligned
- 1))
3144 tcc_error("alignment of %d is larger than implemented", n
);
3156 /* currently, no need to handle it because tcc does not
3157 track unused objects */
3161 /* currently, no need to handle it because tcc does not
3162 track unused objects */
3167 ad
->f
.func_call
= FUNC_CDECL
;
3172 ad
->f
.func_call
= FUNC_STDCALL
;
3174 #ifdef TCC_TARGET_I386
3184 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3190 ad
->f
.func_call
= FUNC_FASTCALLW
;
3197 ad
->attr_mode
= VT_LLONG
+ 1;
3200 ad
->attr_mode
= VT_BYTE
+ 1;
3203 ad
->attr_mode
= VT_SHORT
+ 1;
3207 ad
->attr_mode
= VT_INT
+ 1;
3210 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3217 ad
->a
.dllexport
= 1;
3220 ad
->a
.dllimport
= 1;
3223 if (tcc_state
->warn_unsupported
)
3224 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3225 /* skip parameters */
3227 int parenthesis
= 0;
3231 else if (tok
== ')')
3234 } while (parenthesis
&& tok
!= -1);
3247 static Sym
* find_field (CType
*type
, int v
)
3251 while ((s
= s
->next
) != NULL
) {
3252 if ((s
->v
& SYM_FIELD
) &&
3253 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3254 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3255 Sym
*ret
= find_field (&s
->type
, v
);
3265 static void struct_add_offset (Sym
*s
, int offset
)
3267 while ((s
= s
->next
) != NULL
) {
3268 if ((s
->v
& SYM_FIELD
) &&
3269 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3270 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3271 struct_add_offset(s
->type
.ref
, offset
);
3277 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3279 int align
, maxalign
, offset
, c
, bit_pos
, bt
, prevbt
, prev_bit_size
;
3280 int pcc
= !tcc_state
->ms_bitfields
;
3281 int packwarn
= tcc_state
->warn_gcc_compat
;
3282 int typealign
, bit_size
, size
;
3286 maxalign
= 1 << (ad
->a
.aligned
- 1);
3292 prevbt
= VT_STRUCT
; /* make it never match */
3296 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3297 size
= type_size(&f
->type
, &typealign
);
3298 if (f
->type
.t
& VT_BITFIELD
)
3299 bit_size
= (f
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
3302 if (bit_size
== 0 && pcc
) {
3303 /* Zero-width bit-fields in PCC mode aren't affected
3304 by any packing (attribute or pragma). */
3306 } else if (f
->r
> 1) {
3308 } else if (ad
->a
.packed
|| f
->r
== 1) {
3310 /* Packed fields or packed records don't let the base type
3311 influence the records type alignment. */
3316 if (type
->ref
->type
.t
== VT_UNION
) {
3317 if (pcc
&& bit_size
>= 0)
3318 size
= (bit_size
+ 7) >> 3;
3319 /* Bit position is already zero from our caller. */
3323 } else if (bit_size
< 0) {
3324 int addbytes
= pcc
? (bit_pos
+ 7) >> 3 : 0;
3327 c
= (c
+ addbytes
+ align
- 1) & -align
;
3333 /* A bit-field. Layout is more complicated. There are two
3334 options TCC implements: PCC compatible and MS compatible
3335 (PCC compatible is what GCC uses for almost all targets).
3336 In PCC layout the overall size of the struct (in c) is
3337 _excluding_ the current run of bit-fields (that is,
3338 there's at least additional bit_pos bits after c). In
3339 MS layout c does include the current run of bit-fields.
3341 This matters for calculating the natural alignment buckets
3344 /* 'align' will be used to influence records alignment,
3345 so it's the max of specified and type alignment, except
3346 in certain cases that depend on the mode. */
3347 if (align
< typealign
)
3350 /* In PCC layout a non-packed bit-field is placed adjacent
3351 to the preceding bit-fields, except if it would overflow
3352 its container (depending on base type) or it's a zero-width
3353 bit-field. Packed non-zero-width bit-fields always are
3355 int ofs
= (c
* 8 + bit_pos
) % (typealign
* 8);
3356 int ofs2
= ofs
+ bit_size
+ (typealign
* 8) - 1;
3357 if (bit_size
== 0 ||
3359 (ofs2
/ (typealign
* 8)) > (size
/typealign
))) {
3360 c
= (c
+ ((bit_pos
+ 7) >> 3) + typealign
- 1) & -typealign
;
3362 } else if (bit_pos
+ bit_size
> size
* 8) {
3365 if (bit_pos
+ bit_size
> size
* 8) {
3366 c
+= 1, bit_pos
= 0;
3367 if ((ad
->a
.packed
|| f
->r
) && packwarn
) {
3368 tcc_warning("struct layout not compatible with GCC (internal limitation)");
3374 /* In PCC layout named bit-fields influence the alignment
3375 of the containing struct using the base types alignment,
3376 except for packed fields (which here have correct
3377 align/typealign). */
3378 if ((f
->v
& SYM_FIRST_ANOM
))
3381 bt
= f
->type
.t
& VT_BTYPE
;
3382 if ((bit_pos
+ bit_size
> size
* 8) ||
3383 (bit_size
> 0) == (bt
!= prevbt
)) {
3384 c
= (c
+ typealign
- 1) & -typealign
;
3387 /* In MS bitfield mode a bit-field run always uses
3388 at least as many bits as the underlying type.
3389 To start a new run it's also required that this
3390 or the last bit-field had non-zero width. */
3391 if (bit_size
|| prev_bit_size
)
3394 /* In MS layout the records alignment is normally
3395 influenced by the field, except for a zero-width
3396 field at the start of a run (but by further zero-width
3397 fields it is again). */
3398 if (bit_size
== 0 && prevbt
!= bt
)
3401 prev_bit_size
= bit_size
;
3403 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3404 | (bit_pos
<< VT_STRUCT_SHIFT
);
3405 bit_pos
+= bit_size
;
3406 if (pcc
&& bit_pos
>= size
* 8) {
3408 bit_pos
-= size
* 8;
3411 if (align
> maxalign
)
3414 printf("set field %s offset=%d",
3415 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
);
3416 if (f
->type
.t
& VT_BITFIELD
) {
3417 printf(" pos=%d size=%d",
3418 (f
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f,
3419 (f
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f);
3424 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3426 /* An anonymous struct/union. Adjust member offsets
3427 to reflect the real offset of our containing struct.
3428 Also set the offset of this anon member inside
3429 the outer struct to be zero. Via this it
3430 works when accessing the field offset directly
3431 (from base object), as well as when recursing
3432 members in initializer handling. */
3433 int v2
= f
->type
.ref
->v
;
3434 if (!(v2
& SYM_FIELD
) &&
3435 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3437 /* This happens only with MS extensions. The
3438 anon member has a named struct type, so it
3439 potentially is shared with other references.
3440 We need to unshare members so we can modify
3443 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3444 &f
->type
.ref
->type
, 0,
3446 pps
= &f
->type
.ref
->next
;
3447 while ((ass
= ass
->next
) != NULL
) {
3448 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3449 pps
= &((*pps
)->next
);
3453 struct_add_offset(f
->type
.ref
, offset
);
3461 /* store size and alignment */
3462 type
->ref
->c
= (c
+ (pcc
? (bit_pos
+ 7) >> 3 : 0)
3463 + maxalign
- 1) & -maxalign
;
3464 type
->ref
->r
= maxalign
;
3465 if (offset
+ size
> type
->ref
->c
&& type
->ref
->c
)
3466 tcc_warning("will touch memory past end of the struct (internal limitation)");
3469 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3470 static void struct_decl(CType
*type
, int u
)
3472 int v
, c
, size
, align
, flexible
, alignoverride
;
3473 int bit_size
, bsize
, bt
;
3475 AttributeDef ad
, ad1
;
3478 memset(&ad
, 0, sizeof ad
);
3480 parse_attribute(&ad
);
3484 /* struct already defined ? return it */
3486 expect("struct/union/enum name");
3488 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3491 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3493 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3498 /* Record the original enum/struct/union token. */
3501 /* we put an undefined size for struct/union */
3502 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3503 s
->r
= 0; /* default alignment is zero as gcc */
3505 type
->t
= s
->type
.t
;
3511 tcc_error("struct/union/enum already defined");
3512 /* cannot be empty */
3513 /* non empty enums are not allowed */
3516 long long ll
= 0, pl
= 0, nl
= 0;
3519 /* enum symbols have static storage */
3520 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3524 expect("identifier");
3526 if (ss
&& !local_stack
)
3527 tcc_error("redefinition of enumerator '%s'",
3528 get_tok_str(v
, NULL
));
3532 ll
= expr_const64();
3534 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3536 *ps
= ss
, ps
= &ss
->next
;
3545 /* NOTE: we accept a trailing comma */
3550 /* set integral type of the enum */
3553 if (pl
!= (unsigned)pl
)
3556 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
3558 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
3560 /* set type for enum members */
3561 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
3563 if (ll
== (int)ll
) /* default is int if it fits */
3565 if (t
.t
& VT_UNSIGNED
) {
3566 ss
->type
.t
|= VT_UNSIGNED
;
3567 if (ll
== (unsigned)ll
)
3570 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
) | VT_LLONG
;
3575 while (tok
!= '}') {
3576 if (!parse_btype(&btype
, &ad1
)) {
3582 tcc_error("flexible array member '%s' not at the end of struct",
3583 get_tok_str(v
, NULL
));
3589 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
3591 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3592 expect("identifier");
3594 int v
= btype
.ref
->v
;
3595 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3596 if (tcc_state
->ms_extensions
== 0)
3597 expect("identifier");
3601 if (type_size(&type1
, &align
) < 0) {
3602 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3605 tcc_error("field '%s' has incomplete type",
3606 get_tok_str(v
, NULL
));
3608 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3609 (type1
.t
& VT_STORAGE
))
3610 tcc_error("invalid type for '%s'",
3611 get_tok_str(v
, NULL
));
3615 bit_size
= expr_const();
3616 /* XXX: handle v = 0 case for messages */
3618 tcc_error("negative width in bit-field '%s'",
3619 get_tok_str(v
, NULL
));
3620 if (v
&& bit_size
== 0)
3621 tcc_error("zero width for bit-field '%s'",
3622 get_tok_str(v
, NULL
));
3623 parse_attribute(&ad1
);
3625 size
= type_size(&type1
, &align
);
3626 /* Only remember non-default alignment. */
3628 if (ad1
.a
.aligned
) {
3629 int speca
= 1 << (ad1
.a
.aligned
- 1);
3630 alignoverride
= speca
;
3631 } else if (ad1
.a
.packed
|| ad
.a
.packed
) {
3633 } else if (*tcc_state
->pack_stack_ptr
) {
3634 if (align
>= *tcc_state
->pack_stack_ptr
)
3635 alignoverride
= *tcc_state
->pack_stack_ptr
;
3637 if (bit_size
>= 0) {
3638 bt
= type1
.t
& VT_BTYPE
;
3644 tcc_error("bitfields must have scalar type");
3646 if (bit_size
> bsize
) {
3647 tcc_error("width of '%s' exceeds its type",
3648 get_tok_str(v
, NULL
));
3649 } else if (bit_size
== bsize
) {
3650 /* no need for bit fields */
3653 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
3655 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
3658 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3659 /* Remember we've seen a real field to check
3660 for placement of flexible array member. */
3663 /* If member is a struct or bit-field, enforce
3664 placing into the struct (as anonymous). */
3666 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
3671 ss
= sym_push(v
| SYM_FIELD
, &type1
, alignoverride
, 0);
3675 if (tok
== ';' || tok
== TOK_EOF
)
3682 parse_attribute(&ad
);
3683 struct_layout(type
, &ad
);
3688 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3689 are added to the element type, copied because it could be a typedef. */
3690 static void parse_btype_qualify(CType
*type
, int qualifiers
)
3692 while (type
->t
& VT_ARRAY
) {
3693 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
3694 type
= &type
->ref
->type
;
3696 type
->t
|= qualifiers
;
3699 /* return 0 if no type declaration. otherwise, return the basic type
3702 static int parse_btype(CType
*type
, AttributeDef
*ad
)
3704 int t
, u
, bt
, st
, type_found
, typespec_found
, g
;
3708 memset(ad
, 0, sizeof(AttributeDef
));
3718 /* currently, we really ignore extension */
3728 if (u
== VT_SHORT
|| u
== VT_LONG
) {
3729 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
3730 tmbt
: tcc_error("too many basic types");
3733 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
3738 t
= (t
& ~VT_BTYPE
) | u
;
3751 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
3752 #ifndef TCC_TARGET_PE
3753 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3755 } else if ((t
& VT_BTYPE
) == VT_LONG
) {
3756 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3763 #ifdef TCC_TARGET_ARM64
3765 /* GCC's __uint128_t appears in some Linux header files. Make it a
3766 synonym for long double to get the size and alignment right. */
3777 if ((t
& VT_BTYPE
) == VT_LONG
) {
3778 #ifdef TCC_TARGET_PE
3779 t
= (t
& ~VT_BTYPE
) | VT_DOUBLE
;
3781 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3790 struct_decl(&type1
, VT_ENUM
);
3793 type
->ref
= type1
.ref
;
3796 struct_decl(&type1
, VT_STRUCT
);
3799 struct_decl(&type1
, VT_UNION
);
3802 /* type modifiers */
3807 parse_btype_qualify(type
, VT_CONSTANT
);
3815 parse_btype_qualify(type
, VT_VOLATILE
);
3822 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
3823 tcc_error("signed and unsigned modifier");
3836 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
3837 tcc_error("signed and unsigned modifier");
3838 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
3854 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
3855 tcc_error("multiple storage classes");
3866 /* GNUC attribute */
3867 case TOK_ATTRIBUTE1
:
3868 case TOK_ATTRIBUTE2
:
3869 parse_attribute(ad
);
3870 if (ad
->attr_mode
) {
3871 u
= ad
->attr_mode
-1;
3872 t
= (t
& ~VT_BTYPE
) | u
;
3880 parse_expr_type(&type1
);
3881 /* remove all storage modifiers except typedef */
3882 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
3888 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
3891 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
3892 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
3893 type
->ref
= s
->type
.ref
;
3895 parse_btype_qualify(type
, t
);
3897 /* get attributes from typedef */
3898 if (s
->a
.aligned
&& 0 == ad
->a
.aligned
)
3899 ad
->a
.aligned
= s
->a
.aligned
;
3900 if (s
->f
.func_call
&& 0 == ad
->f
.func_call
)
3901 ad
->f
.func_call
= s
->f
.func_call
;
3912 if (tcc_state
->char_is_unsigned
) {
3913 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
3917 /* long is never used as type */
3918 if ((t
& VT_BTYPE
) == VT_LONG
)
3919 #if PTR_SIZE == 8 && !defined TCC_TARGET_PE
3920 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3922 t
= (t
& ~VT_BTYPE
) | VT_INT
;
3928 /* convert a function parameter type (array to pointer and function to
3929 function pointer) */
3930 static inline void convert_parameter_type(CType
*pt
)
3932 /* remove const and volatile qualifiers (XXX: const could be used
3933 to indicate a const function parameter */
3934 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3935 /* array must be transformed to pointer according to ANSI C */
3937 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
3942 ST_FUNC
void parse_asm_str(CString
*astr
)
3945 parse_mult_str(astr
, "string constant");
3948 /* Parse an asm label and return the token */
3949 static int asm_label_instr(void)
3955 parse_asm_str(&astr
);
3958 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
3960 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
3965 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
3967 int n
, l
, t1
, arg_size
, align
;
3968 Sym
**plast
, *s
, *first
;
3973 /* function type, or recursive declarator (return if so) */
3975 if (td
&& !(td
& TYPE_ABSTRACT
))
3979 else if (parse_btype(&pt
, &ad1
))
3990 /* read param name and compute offset */
3991 if (l
!= FUNC_OLD
) {
3992 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
3994 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
3995 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
3996 tcc_error("parameter declared as void");
3997 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4001 expect("identifier");
4002 pt
.t
= VT_VOID
; /* invalid type */
4005 convert_parameter_type(&pt
);
4006 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4012 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4017 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4018 tcc_error("invalid type");
4021 /* if no parameters, then old type prototype */
4024 /* NOTE: const is ignored in returned type as it has a special
4025 meaning in gcc / C++ */
4026 type
->t
&= ~VT_CONSTANT
;
4027 /* some ancient pre-K&R C allows a function to return an array
4028 and the array brackets to be put after the arguments, such
4029 that "int c()[]" means something like "int[] c()" */
4032 skip(']'); /* only handle simple "[]" */
4035 /* we push a anonymous symbol which will contain the function prototype */
4036 ad
->f
.func_args
= arg_size
;
4037 ad
->f
.func_type
= l
;
4038 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4044 } else if (tok
== '[') {
4045 int saved_nocode_wanted
= nocode_wanted
;
4046 /* array definition */
4048 if (tok
== TOK_RESTRICT1
)
4053 if (!local_stack
|| (storage
& VT_STATIC
))
4054 vpushi(expr_const());
4056 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4057 length must always be evaluated, even under nocode_wanted,
4058 so that its size slot is initialized (e.g. under sizeof
4063 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4066 tcc_error("invalid array size");
4068 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4069 tcc_error("size of variable length array should be an integer");
4074 /* parse next post type */
4075 post_type(type
, ad
, storage
, 0);
4076 if (type
->t
== VT_FUNC
)
4077 tcc_error("declaration of an array of functions");
4078 t1
|= type
->t
& VT_VLA
;
4081 loc
-= type_size(&int_type
, &align
);
4085 vla_runtime_type_size(type
, &align
);
4087 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4093 nocode_wanted
= saved_nocode_wanted
;
4095 /* we push an anonymous symbol which will contain the array
4097 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4098 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4104 /* Parse a type declarator (except basic type), and return the type
4105 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4106 expected. 'type' should contain the basic type. 'ad' is the
4107 attribute definition of the basic type. It can be modified by
4108 type_decl(). If this (possibly abstract) declarator is a pointer chain
4109 it returns the innermost pointed to type (equals *type, but is a different
4110 pointer), otherwise returns type itself, that's used for recursive calls. */
4111 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4114 int qualifiers
, storage
;
4116 /* recursive type, remove storage bits first, apply them later again */
4117 storage
= type
->t
& VT_STORAGE
;
4118 type
->t
&= ~VT_STORAGE
;
4121 while (tok
== '*') {
4129 qualifiers
|= VT_CONSTANT
;
4134 qualifiers
|= VT_VOLATILE
;
4140 /* XXX: clarify attribute handling */
4141 case TOK_ATTRIBUTE1
:
4142 case TOK_ATTRIBUTE2
:
4143 parse_attribute(ad
);
4147 type
->t
|= qualifiers
;
4149 /* innermost pointed to type is the one for the first derivation */
4150 ret
= pointed_type(type
);
4154 /* This is possibly a parameter type list for abstract declarators
4155 ('int ()'), use post_type for testing this. */
4156 if (!post_type(type
, ad
, 0, td
)) {
4157 /* It's not, so it's a nested declarator, and the post operations
4158 apply to the innermost pointed to type (if any). */
4159 /* XXX: this is not correct to modify 'ad' at this point, but
4160 the syntax is not clear */
4161 parse_attribute(ad
);
4162 post
= type_decl(type
, ad
, v
, td
);
4165 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4166 /* type identifier */
4170 if (!(td
& TYPE_ABSTRACT
))
4171 expect("identifier");
4174 post_type(post
, ad
, storage
, 0);
4175 parse_attribute(ad
);
4180 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4181 ST_FUNC
int lvalue_type(int t
)
4186 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4188 else if (bt
== VT_SHORT
)
4192 if (t
& VT_UNSIGNED
)
4193 r
|= VT_LVAL_UNSIGNED
;
4197 /* indirection with full error checking and bound check */
4198 ST_FUNC
void indir(void)
4200 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4201 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4205 if (vtop
->r
& VT_LVAL
)
4207 vtop
->type
= *pointed_type(&vtop
->type
);
4208 /* Arrays and functions are never lvalues */
4209 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4210 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4211 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4212 /* if bound checking, the referenced pointer must be checked */
4213 #ifdef CONFIG_TCC_BCHECK
4214 if (tcc_state
->do_bounds_check
)
4215 vtop
->r
|= VT_MUSTBOUND
;
4220 /* pass a parameter to a function and do type checking and casting */
4221 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4226 func_type
= func
->f
.func_type
;
4227 if (func_type
== FUNC_OLD
||
4228 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4229 /* default casting : only need to convert float to double */
4230 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4231 gen_cast_s(VT_DOUBLE
);
4232 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4233 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4234 type
.ref
= vtop
->type
.ref
;
4237 } else if (arg
== NULL
) {
4238 tcc_error("too many arguments to function");
4241 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4242 gen_assign_cast(&type
);
4246 /* parse an expression and return its type without any side effect.
4247 If UNRY we parse an unary expression, otherwise a full one. */
4248 static void expr_type(CType
*type
, int unry
)
4260 /* parse an expression of the form '(type)' or '(expr)' and return its
4262 static void parse_expr_type(CType
*type
)
4268 if (parse_btype(type
, &ad
)) {
4269 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4276 static void parse_type(CType
*type
)
4281 if (!parse_btype(type
, &ad
)) {
4284 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4287 static void parse_builtin_params(int nc
, const char *args
)
4294 while ((c
= *args
++)) {
4298 case 'e': expr_eq(); continue;
4299 case 't': parse_type(&t
); vpush(&t
); continue;
4300 default: tcc_error("internal error"); break;
4308 ST_FUNC
void unary(void)
4310 int n
, t
, align
, size
, r
, sizeof_caller
;
4315 sizeof_caller
= in_sizeof
;
4318 /* XXX: GCC 2.95.3 does not generate a table although it should be
4331 vsetc(&type
, VT_CONST
, &tokc
);
4335 t
= VT_INT
| VT_UNSIGNED
;
4341 t
= VT_LLONG
| VT_UNSIGNED
;
4353 case TOK___FUNCTION__
:
4355 goto tok_identifier
;
4361 /* special function name identifier */
4362 len
= strlen(funcname
) + 1;
4363 /* generate char[len] type */
4368 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4369 ptr
= section_ptr_add(data_section
, len
);
4370 memcpy(ptr
, funcname
, len
);
4375 #ifdef TCC_TARGET_PE
4376 t
= VT_SHORT
| VT_UNSIGNED
;
4382 /* string parsing */
4384 if (tcc_state
->char_is_unsigned
)
4385 t
= VT_BYTE
| VT_UNSIGNED
;
4387 if (tcc_state
->warn_write_strings
)
4392 memset(&ad
, 0, sizeof(AttributeDef
));
4393 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4398 if (parse_btype(&type
, &ad
)) {
4399 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4401 /* check ISOC99 compound literal */
4403 /* data is allocated locally by default */
4408 /* all except arrays are lvalues */
4409 if (!(type
.t
& VT_ARRAY
))
4410 r
|= lvalue_type(type
.t
);
4411 memset(&ad
, 0, sizeof(AttributeDef
));
4412 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4414 if (sizeof_caller
) {
4421 } else if (tok
== '{') {
4422 int saved_nocode_wanted
= nocode_wanted
;
4424 tcc_error("expected constant");
4425 /* save all registers */
4427 /* statement expression : we do not accept break/continue
4428 inside as GCC does. We do retain the nocode_wanted state,
4429 as statement expressions can't ever be entered from the
4430 outside, so any reactivation of code emission (from labels
4431 or loop heads) can be disabled again after the end of it. */
4432 block(NULL
, NULL
, 1);
4433 nocode_wanted
= saved_nocode_wanted
;
4448 /* functions names must be treated as function pointers,
4449 except for unary '&' and sizeof. Since we consider that
4450 functions are not lvalues, we only have to handle it
4451 there and in function calls. */
4452 /* arrays can also be used although they are not lvalues */
4453 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4454 !(vtop
->type
.t
& VT_ARRAY
))
4456 mk_pointer(&vtop
->type
);
4462 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4463 gen_cast_s(VT_BOOL
);
4464 vtop
->c
.i
= !vtop
->c
.i
;
4465 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4469 vseti(VT_JMP
, gvtst(1, 0));
4481 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4482 tcc_error("pointer not accepted for unary plus");
4483 /* In order to force cast, we add zero, except for floating point
4484 where we really need an noop (otherwise -0.0 will be transformed
4486 if (!is_float(vtop
->type
.t
)) {
4497 expr_type(&type
, 1); // Perform a in_sizeof = 0;
4498 size
= type_size(&type
, &align
);
4499 if (t
== TOK_SIZEOF
) {
4500 if (!(type
.t
& VT_VLA
)) {
4502 tcc_error("sizeof applied to an incomplete type");
4505 vla_runtime_type_size(&type
, &align
);
4510 vtop
->type
.t
|= VT_UNSIGNED
;
4513 case TOK_builtin_expect
:
4514 /* __builtin_expect is a no-op for now */
4515 parse_builtin_params(0, "ee");
4518 case TOK_builtin_types_compatible_p
:
4519 parse_builtin_params(0, "tt");
4520 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4521 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4522 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4526 case TOK_builtin_choose_expr
:
4553 case TOK_builtin_constant_p
:
4554 parse_builtin_params(1, "e");
4555 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4559 case TOK_builtin_frame_address
:
4560 case TOK_builtin_return_address
:
4566 if (tok
!= TOK_CINT
) {
4567 tcc_error("%s only takes positive integers",
4568 tok1
== TOK_builtin_return_address
?
4569 "__builtin_return_address" :
4570 "__builtin_frame_address");
4572 level
= (uint32_t)tokc
.i
;
4577 vset(&type
, VT_LOCAL
, 0); /* local frame */
4579 mk_pointer(&vtop
->type
);
4580 indir(); /* -> parent frame */
4582 if (tok1
== TOK_builtin_return_address
) {
4583 // assume return address is just above frame pointer on stack
4586 mk_pointer(&vtop
->type
);
4591 #ifdef TCC_TARGET_X86_64
4592 #ifdef TCC_TARGET_PE
4593 case TOK_builtin_va_start
:
4594 parse_builtin_params(0, "ee");
4595 r
= vtop
->r
& VT_VALMASK
;
4599 tcc_error("__builtin_va_start expects a local variable");
4601 vtop
->type
= char_pointer_type
;
4606 case TOK_builtin_va_arg_types
:
4607 parse_builtin_params(0, "t");
4608 vpushi(classify_x86_64_va_arg(&vtop
->type
));
4615 #ifdef TCC_TARGET_ARM64
4616 case TOK___va_start
: {
4617 parse_builtin_params(0, "ee");
4621 vtop
->type
.t
= VT_VOID
;
4624 case TOK___va_arg
: {
4625 parse_builtin_params(0, "et");
4633 case TOK___arm64_clear_cache
: {
4634 parse_builtin_params(0, "ee");
4637 vtop
->type
.t
= VT_VOID
;
4641 /* pre operations */
4652 t
= vtop
->type
.t
& VT_BTYPE
;
4654 /* In IEEE negate(x) isn't subtract(0,x), but rather
4658 vtop
->c
.f
= -1.0 * 0.0;
4659 else if (t
== VT_DOUBLE
)
4660 vtop
->c
.d
= -1.0 * 0.0;
4662 vtop
->c
.ld
= -1.0 * 0.0;
4670 goto tok_identifier
;
4672 /* allow to take the address of a label */
4673 if (tok
< TOK_UIDENT
)
4674 expect("label identifier");
4675 s
= label_find(tok
);
4677 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
4679 if (s
->r
== LABEL_DECLARED
)
4680 s
->r
= LABEL_FORWARD
;
4683 s
->type
.t
= VT_VOID
;
4684 mk_pointer(&s
->type
);
4685 s
->type
.t
|= VT_STATIC
;
4687 vpushsym(&s
->type
, s
);
4693 CType controlling_type
;
4694 int has_default
= 0;
4697 AttributeDef ad_tmp
;
4699 TokenString
*str
= NULL
;
4700 ParseState saved_parse_state
;
4704 expr_type(&controlling_type
, 1);
4705 if (controlling_type
.t
& VT_ARRAY
)
4706 controlling_type
.t
= VT_PTR
;
4707 controlling_type
.t
&= ~VT_CONSTANT
;
4711 if (tok
== TOK_DEFAULT
) {
4713 tcc_error("too many 'default'");
4722 parse_btype(&cur_type
, &ad_tmp
);
4723 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
4724 if (compare_types(&controlling_type
, &cur_type
, 0)) {
4726 tcc_error("type march twice");
4736 skip_or_save_block(&str
);
4738 skip_or_save_block(NULL
);
4742 else if (tok
== ')')
4745 if (!has_match
&& !has_default
) {
4748 type_to_str(buf
, 256, &controlling_type
, NULL
);
4749 tcc_error("_Generic sellector of type '%s' is not compatible with any assosiation",
4753 save_parse_state(&saved_parse_state
);
4754 begin_macro(str
, 1);
4758 restore_parse_state(&saved_parse_state
);
4761 // special qnan , snan and infinity values
4763 vpush64(VT_DOUBLE
, 0x7ff8000000000000ULL
);
4767 vpush64(VT_DOUBLE
, 0x7ff0000000000001ULL
);
4771 vpush64(VT_DOUBLE
, 0x7ff0000000000000ULL
);
4780 expect("identifier");
4783 const char *name
= get_tok_str(t
, NULL
);
4785 tcc_error("'%s' undeclared", name
);
4786 /* for simple function calls, we tolerate undeclared
4787 external reference to int() function */
4788 if (tcc_state
->warn_implicit_function_declaration
4789 #ifdef TCC_TARGET_PE
4790 /* people must be warned about using undeclared WINAPI functions
4791 (which usually start with uppercase letter) */
4792 || (name
[0] >= 'A' && name
[0] <= 'Z')
4795 tcc_warning("implicit declaration of function '%s'", name
);
4796 s
= external_global_sym(t
, &func_old_type
, 0);
4800 /* A symbol that has a register is a local register variable,
4801 which starts out as VT_LOCAL value. */
4802 if ((r
& VT_VALMASK
) < VT_CONST
)
4803 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
4805 vset(&s
->type
, r
, s
->c
);
4806 /* Point to s as backpointer (even without r&VT_SYM).
4807 Will be used by at least the x86 inline asm parser for
4813 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
4814 vtop
->c
.i
= s
->enum_val
;
4819 /* post operations */
4821 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
4824 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
4827 if (tok
== TOK_ARROW
)
4829 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
4832 /* expect pointer on structure */
4833 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
4834 expect("struct or union");
4835 if (tok
== TOK_CDOUBLE
)
4836 expect("field name");
4838 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
4839 expect("field name");
4840 s
= find_field(&vtop
->type
, tok
);
4842 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
4843 /* add field offset to pointer */
4844 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
4847 /* change type to field type, and set to lvalue */
4848 vtop
->type
= s
->type
;
4849 vtop
->type
.t
|= qualifiers
;
4850 /* an array is never an lvalue */
4851 if (!(vtop
->type
.t
& VT_ARRAY
)) {
4852 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4853 #ifdef CONFIG_TCC_BCHECK
4854 /* if bound checking, the referenced pointer must be checked */
4855 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
4856 vtop
->r
|= VT_MUSTBOUND
;
4860 } else if (tok
== '[') {
4866 } else if (tok
== '(') {
4869 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
4872 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4873 /* pointer test (no array accepted) */
4874 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
4875 vtop
->type
= *pointed_type(&vtop
->type
);
4876 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4880 expect("function pointer");
4883 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
4885 /* get return type */
4888 sa
= s
->next
; /* first parameter */
4889 nb_args
= regsize
= 0;
4891 /* compute first implicit argument if a structure is returned */
4892 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
4893 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
4894 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
4895 &ret_align
, ®size
);
4897 /* get some space for the returned structure */
4898 size
= type_size(&s
->type
, &align
);
4899 #ifdef TCC_TARGET_ARM64
4900 /* On arm64, a small struct is return in registers.
4901 It is much easier to write it to memory if we know
4902 that we are allowed to write some extra bytes, so
4903 round the allocated space up to a power of 2: */
4905 while (size
& (size
- 1))
4906 size
= (size
| (size
- 1)) + 1;
4908 loc
= (loc
- size
) & -align
;
4910 ret
.r
= VT_LOCAL
| VT_LVAL
;
4911 /* pass it as 'int' to avoid structure arg passing
4913 vseti(VT_LOCAL
, loc
);
4923 /* return in register */
4924 if (is_float(ret
.type
.t
)) {
4925 ret
.r
= reg_fret(ret
.type
.t
);
4926 #ifdef TCC_TARGET_X86_64
4927 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
4931 #ifndef TCC_TARGET_ARM64
4932 #ifdef TCC_TARGET_X86_64
4933 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
4935 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
4946 gfunc_param_typed(s
, sa
);
4956 tcc_error("too few arguments to function");
4958 gfunc_call(nb_args
);
4961 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
4962 vsetc(&ret
.type
, r
, &ret
.c
);
4963 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
4966 /* handle packed struct return */
4967 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
4970 size
= type_size(&s
->type
, &align
);
4971 /* We're writing whole regs often, make sure there's enough
4972 space. Assume register size is power of 2. */
4973 if (regsize
> align
)
4975 loc
= (loc
- size
) & -align
;
4979 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
4983 if (--ret_nregs
== 0)
4987 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
4995 ST_FUNC
void expr_prod(void)
5000 while (tok
== '*' || tok
== '/' || tok
== '%') {
5008 ST_FUNC
void expr_sum(void)
5013 while (tok
== '+' || tok
== '-') {
5021 static void expr_shift(void)
5026 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5034 static void expr_cmp(void)
5039 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5040 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5048 static void expr_cmpeq(void)
5053 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5061 static void expr_and(void)
5064 while (tok
== '&') {
5071 static void expr_xor(void)
5074 while (tok
== '^') {
5081 static void expr_or(void)
5084 while (tok
== '|') {
5091 static void expr_land(void)
5094 if (tok
== TOK_LAND
) {
5097 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5098 gen_cast_s(VT_BOOL
);
5103 while (tok
== TOK_LAND
) {
5119 if (tok
!= TOK_LAND
) {
5132 static void expr_lor(void)
5135 if (tok
== TOK_LOR
) {
5138 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5139 gen_cast_s(VT_BOOL
);
5144 while (tok
== TOK_LOR
) {
5160 if (tok
!= TOK_LOR
) {
5173 /* Assuming vtop is a value used in a conditional context
5174 (i.e. compared with zero) return 0 if it's false, 1 if
5175 true and -1 if it can't be statically determined. */
5176 static int condition_3way(void)
5179 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5180 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5182 gen_cast_s(VT_BOOL
);
5189 static void expr_cond(void)
5191 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5193 CType type
, type1
, type2
;
5198 c
= condition_3way();
5199 g
= (tok
== ':' && gnu_ext
);
5201 /* needed to avoid having different registers saved in
5203 if (is_float(vtop
->type
.t
)) {
5205 #ifdef TCC_TARGET_X86_64
5206 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5231 sv
= *vtop
; /* save value to handle it later */
5232 vtop
--; /* no vpop so that FP stack is not flushed */
5250 bt1
= t1
& VT_BTYPE
;
5252 bt2
= t2
& VT_BTYPE
;
5255 /* cast operands to correct type according to ISOC rules */
5256 if (is_float(bt1
) || is_float(bt2
)) {
5257 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5258 type
.t
= VT_LDOUBLE
;
5260 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5265 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5266 /* cast to biggest op */
5268 /* convert to unsigned if it does not fit in a long long */
5269 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5270 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5271 type
.t
|= VT_UNSIGNED
;
5272 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5273 /* If one is a null ptr constant the result type
5275 if (is_null_pointer (vtop
))
5277 else if (is_null_pointer (&sv
))
5279 /* XXX: test pointer compatibility, C99 has more elaborate
5283 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
5284 /* XXX: test function pointer compatibility */
5285 type
= bt1
== VT_FUNC
? type1
: type2
;
5286 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5287 /* XXX: test structure compatibility */
5288 type
= bt1
== VT_STRUCT
? type1
: type2
;
5289 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5290 /* NOTE: as an extension, we accept void on only one side */
5293 /* integer operations */
5295 /* convert to unsigned if it does not fit in an integer */
5296 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5297 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5298 type
.t
|= VT_UNSIGNED
;
5300 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5301 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5302 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5305 /* now we convert second operand */
5309 mk_pointer(&vtop
->type
);
5311 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5316 if (is_float(type
.t
)) {
5318 #ifdef TCC_TARGET_X86_64
5319 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5323 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5324 /* for long longs, we use fixed registers to avoid having
5325 to handle a complicated move */
5336 /* this is horrible, but we must also convert first
5342 mk_pointer(&vtop
->type
);
5344 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5350 move_reg(r2
, r1
, type
.t
);
5360 static void expr_eq(void)
5366 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5367 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5368 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5383 ST_FUNC
void gexpr(void)
5394 /* parse a constant expression and return value in vtop. */
5395 static void expr_const1(void)
5402 /* parse an integer constant and return its value. */
5403 static inline int64_t expr_const64(void)
5407 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5408 expect("constant expression");
5414 /* parse an integer constant and return its value.
5415 Complain if it doesn't fit 32bit (signed or unsigned). */
5416 ST_FUNC
int expr_const(void)
5419 int64_t wc
= expr_const64();
5421 if (c
!= wc
&& (unsigned)c
!= wc
)
5422 tcc_error("constant exceeds 32 bit");
5426 /* return the label token if current token is a label, otherwise
5428 static int is_label(void)
5432 /* fast test first */
5433 if (tok
< TOK_UIDENT
)
5435 /* no need to save tokc because tok is an identifier */
5441 unget_tok(last_tok
);
5446 #ifndef TCC_TARGET_ARM64
5447 static void gfunc_return(CType
*func_type
)
5449 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5450 CType type
, ret_type
;
5451 int ret_align
, ret_nregs
, regsize
;
5452 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5453 &ret_align
, ®size
);
5454 if (0 == ret_nregs
) {
5455 /* if returning structure, must copy it to implicit
5456 first pointer arg location */
5459 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5462 /* copy structure value to pointer */
5465 /* returning structure packed into registers */
5466 int r
, size
, addr
, align
;
5467 size
= type_size(func_type
,&align
);
5468 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5469 (vtop
->c
.i
& (ret_align
-1)))
5470 && (align
& (ret_align
-1))) {
5471 loc
= (loc
- size
) & -ret_align
;
5474 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5478 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5480 vtop
->type
= ret_type
;
5481 if (is_float(ret_type
.t
))
5482 r
= rc_fret(ret_type
.t
);
5493 if (--ret_nregs
== 0)
5495 /* We assume that when a structure is returned in multiple
5496 registers, their classes are consecutive values of the
5499 vtop
->c
.i
+= regsize
;
5503 } else if (is_float(func_type
->t
)) {
5504 gv(rc_fret(func_type
->t
));
5508 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5512 static int case_cmp(const void *pa
, const void *pb
)
5514 int64_t a
= (*(struct case_t
**) pa
)->v1
;
5515 int64_t b
= (*(struct case_t
**) pb
)->v1
;
5516 return a
< b
? -1 : a
> b
;
5519 static void gcase(struct case_t
**base
, int len
, int *bsym
)
5523 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
5541 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
5543 gcase(base
, len
/2, bsym
);
5544 if (cur_switch
->def_sym
)
5545 gjmp_addr(cur_switch
->def_sym
);
5547 *bsym
= gjmp(*bsym
);
5551 base
+= e
; len
-= e
;
5561 if (p
->v1
== p
->v2
) {
5563 gtst_addr(0, p
->sym
);
5573 gtst_addr(0, p
->sym
);
5579 static void block(int *bsym
, int *csym
, int is_expr
)
5581 int a
, b
, c
, d
, cond
;
5584 /* generate line number info */
5585 if (tcc_state
->do_debug
)
5586 tcc_debug_line(tcc_state
);
5589 /* default return value is (void) */
5591 vtop
->type
.t
= VT_VOID
;
5594 if (tok
== TOK_IF
) {
5596 int saved_nocode_wanted
= nocode_wanted
;
5601 cond
= condition_3way();
5607 nocode_wanted
|= 0x20000000;
5608 block(bsym
, csym
, 0);
5610 nocode_wanted
= saved_nocode_wanted
;
5612 if (c
== TOK_ELSE
) {
5617 nocode_wanted
|= 0x20000000;
5618 block(bsym
, csym
, 0);
5619 gsym(d
); /* patch else jmp */
5621 nocode_wanted
= saved_nocode_wanted
;
5624 } else if (tok
== TOK_WHILE
) {
5625 int saved_nocode_wanted
;
5626 nocode_wanted
&= ~0x20000000;
5636 saved_nocode_wanted
= nocode_wanted
;
5638 nocode_wanted
= saved_nocode_wanted
;
5643 } else if (tok
== '{') {
5645 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
5648 /* record local declaration stack position */
5650 llabel
= local_label_stack
;
5653 /* handle local labels declarations */
5654 if (tok
== TOK_LABEL
) {
5657 if (tok
< TOK_UIDENT
)
5658 expect("label identifier");
5659 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
5669 while (tok
!= '}') {
5670 if ((a
= is_label()))
5677 block(bsym
, csym
, is_expr
);
5680 /* pop locally defined labels */
5681 label_pop(&local_label_stack
, llabel
);
5682 /* pop locally defined symbols */
5684 /* In the is_expr case (a statement expression is finished here),
5685 vtop might refer to symbols on the local_stack. Either via the
5686 type or via vtop->sym. We can't pop those nor any that in turn
5687 might be referred to. To make it easier we don't roll back
5688 any symbols in that case; some upper level call to block() will
5689 do that. We do have to remove such symbols from the lookup
5690 tables, though. sym_pop will do that. */
5691 sym_pop(&local_stack
, s
, is_expr
);
5693 /* Pop VLA frames and restore stack pointer if required */
5694 if (vlas_in_scope
> saved_vlas_in_scope
) {
5695 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
5698 vlas_in_scope
= saved_vlas_in_scope
;
5701 } else if (tok
== TOK_RETURN
) {
5705 gen_assign_cast(&func_vt
);
5706 gfunc_return(&func_vt
);
5709 /* jump unless last stmt in top-level block */
5710 if (tok
!= '}' || local_scope
!= 1)
5712 nocode_wanted
|= 0x20000000;
5713 } else if (tok
== TOK_BREAK
) {
5716 tcc_error("cannot break");
5717 *bsym
= gjmp(*bsym
);
5720 nocode_wanted
|= 0x20000000;
5721 } else if (tok
== TOK_CONTINUE
) {
5724 tcc_error("cannot continue");
5725 vla_sp_restore_root();
5726 *csym
= gjmp(*csym
);
5729 } else if (tok
== TOK_FOR
) {
5731 int saved_nocode_wanted
;
5732 nocode_wanted
&= ~0x20000000;
5738 /* c99 for-loop init decl? */
5739 if (!decl0(VT_LOCAL
, 1, NULL
)) {
5740 /* no, regular for-loop init expr */
5766 saved_nocode_wanted
= nocode_wanted
;
5768 nocode_wanted
= saved_nocode_wanted
;
5773 sym_pop(&local_stack
, s
, 0);
5776 if (tok
== TOK_DO
) {
5777 int saved_nocode_wanted
;
5778 nocode_wanted
&= ~0x20000000;
5784 saved_nocode_wanted
= nocode_wanted
;
5792 nocode_wanted
= saved_nocode_wanted
;
5797 if (tok
== TOK_SWITCH
) {
5798 struct switch_t
*saved
, sw
;
5799 int saved_nocode_wanted
= nocode_wanted
;
5805 switchval
= *vtop
--;
5807 b
= gjmp(0); /* jump to first case */
5808 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
5812 nocode_wanted
= saved_nocode_wanted
;
5813 a
= gjmp(a
); /* add implicit break */
5816 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
5817 for (b
= 1; b
< sw
.n
; b
++)
5818 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
5819 tcc_error("duplicate case value");
5820 /* Our switch table sorting is signed, so the compared
5821 value needs to be as well when it's 64bit. */
5822 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5823 switchval
.type
.t
&= ~VT_UNSIGNED
;
5825 gcase(sw
.p
, sw
.n
, &a
);
5828 gjmp_addr(sw
.def_sym
);
5829 dynarray_reset(&sw
.p
, &sw
.n
);
5834 if (tok
== TOK_CASE
) {
5835 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
5838 nocode_wanted
&= ~0x20000000;
5840 cr
->v1
= cr
->v2
= expr_const64();
5841 if (gnu_ext
&& tok
== TOK_DOTS
) {
5843 cr
->v2
= expr_const64();
5844 if (cr
->v2
< cr
->v1
)
5845 tcc_warning("empty case range");
5848 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
5851 goto block_after_label
;
5853 if (tok
== TOK_DEFAULT
) {
5858 if (cur_switch
->def_sym
)
5859 tcc_error("too many 'default'");
5860 cur_switch
->def_sym
= ind
;
5862 goto block_after_label
;
5864 if (tok
== TOK_GOTO
) {
5866 if (tok
== '*' && gnu_ext
) {
5870 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
5873 } else if (tok
>= TOK_UIDENT
) {
5874 s
= label_find(tok
);
5875 /* put forward definition if needed */
5877 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5879 if (s
->r
== LABEL_DECLARED
)
5880 s
->r
= LABEL_FORWARD
;
5882 vla_sp_restore_root();
5883 if (s
->r
& LABEL_FORWARD
)
5884 s
->jnext
= gjmp(s
->jnext
);
5886 gjmp_addr(s
->jnext
);
5889 expect("label identifier");
5892 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
5901 if (s
->r
== LABEL_DEFINED
)
5902 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
5904 s
->r
= LABEL_DEFINED
;
5906 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
5910 /* we accept this, but it is a mistake */
5912 nocode_wanted
&= ~0x20000000;
5914 tcc_warning("deprecated use of label at end of compound statement");
5918 block(bsym
, csym
, is_expr
);
5921 /* expression case */
5936 /* This skips over a stream of tokens containing balanced {} and ()
5937 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
5938 with a '{'). If STR then allocates and stores the skipped tokens
5939 in *STR. This doesn't check if () and {} are nested correctly,
5940 i.e. "({)}" is accepted. */
5941 static void skip_or_save_block(TokenString
**str
)
5943 int braces
= tok
== '{';
5946 *str
= tok_str_alloc();
5948 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
5950 if (tok
== TOK_EOF
) {
5951 if (str
|| level
> 0)
5952 tcc_error("unexpected end of file");
5957 tok_str_add_tok(*str
);
5960 if (t
== '{' || t
== '(') {
5962 } else if (t
== '}' || t
== ')') {
5964 if (level
== 0 && braces
&& t
== '}')
5969 tok_str_add(*str
, -1);
5970 tok_str_add(*str
, 0);
5974 #define EXPR_CONST 1
5977 static void parse_init_elem(int expr_type
)
5979 int saved_global_expr
;
5982 /* compound literals must be allocated globally in this case */
5983 saved_global_expr
= global_expr
;
5986 global_expr
= saved_global_expr
;
5987 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
5988 (compound literals). */
5989 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
5990 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
5991 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
5992 #ifdef TCC_TARGET_PE
5993 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
5996 tcc_error("initializer element is not constant");
6004 /* put zeros for variable based init */
6005 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6008 /* nothing to do because globals are already set to zero */
6010 vpush_global_sym(&func_old_type
, TOK_memset
);
6012 #ifdef TCC_TARGET_ARM
6023 /* t is the array or struct type. c is the array or struct
6024 address. cur_field is the pointer to the current
6025 field, for arrays the 'c' member contains the current start
6026 index. 'size_only' is true if only size info is needed (only used
6027 in arrays). al contains the already initialized length of the
6028 current container (starting at c). This returns the new length of that. */
6029 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6030 Sym
**cur_field
, int size_only
, int al
)
6033 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6034 unsigned long corig
= c
;
6038 if (gnu_ext
&& (l
= is_label()) != 0)
6040 /* NOTE: we only support ranges for last designator */
6041 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6043 if (!(type
->t
& VT_ARRAY
))
6044 expect("array type");
6046 index
= index_last
= expr_const();
6047 if (tok
== TOK_DOTS
&& gnu_ext
) {
6049 index_last
= expr_const();
6053 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6055 tcc_error("invalid index");
6057 (*cur_field
)->c
= index_last
;
6058 type
= pointed_type(type
);
6059 elem_size
= type_size(type
, &align
);
6060 c
+= index
* elem_size
;
6061 nb_elems
= index_last
- index
+ 1;
6067 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6068 expect("struct/union type");
6069 f
= find_field(type
, l
);
6082 } else if (!gnu_ext
) {
6086 if (type
->t
& VT_ARRAY
) {
6087 index
= (*cur_field
)->c
;
6088 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6089 tcc_error("index too large");
6090 type
= pointed_type(type
);
6091 c
+= index
* type_size(type
, &align
);
6094 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6095 *cur_field
= f
= f
->next
;
6097 tcc_error("too many field init");
6102 /* must put zero in holes (note that doing it that way
6103 ensures that it even works with designators) */
6104 if (!size_only
&& c
- corig
> al
)
6105 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6106 decl_initializer(type
, sec
, c
, 0, size_only
);
6108 /* XXX: make it more general */
6109 if (!size_only
&& nb_elems
> 1) {
6110 unsigned long c_end
;
6115 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6116 for (i
= 1; i
< nb_elems
; i
++) {
6117 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6123 c_end
= c
+ nb_elems
* elem_size
;
6124 if (c_end
> sec
->data_allocated
)
6125 section_realloc(sec
, c_end
);
6126 src
= sec
->data
+ c
;
6128 for(i
= 1; i
< nb_elems
; i
++) {
6130 memcpy(dst
, src
, elem_size
);
6134 c
+= nb_elems
* type_size(type
, &align
);
6140 /* store a value or an expression directly in global data or in local array */
6141 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6143 int bt
, bit_pos
, bit_size
;
6145 unsigned long long bit_mask
;
6149 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6153 /* XXX: not portable */
6154 /* XXX: generate error if incorrect relocation */
6155 gen_assign_cast(&dtype
);
6156 bt
= type
->t
& VT_BTYPE
;
6157 size
= type_size(type
, &align
);
6158 section_reserve(sec
, c
+ size
);
6159 ptr
= sec
->data
+ c
;
6160 /* XXX: make code faster ? */
6161 if (!(type
->t
& VT_BITFIELD
)) {
6163 bit_size
= PTR_SIZE
* 8;
6166 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
6167 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
6168 bit_mask
= (1LL << bit_size
) - 1;
6170 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6171 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6172 /* XXX This rejects compound literals like
6173 '(void *){ptr}'. The problem is that '&sym' is
6174 represented the same way, which would be ruled out
6175 by the SYM_FIRST_ANOM check above, but also '"string"'
6176 in 'char *p = "string"' is represented the same
6177 with the type being VT_PTR and the symbol being an
6178 anonymous one. That is, there's no difference in vtop
6179 between '(void *){x}' and '&(void *){x}'. Ignore
6180 pointer typed entities here. Hopefully no real code
6181 will every use compound literals with scalar type. */
6182 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6183 /* These come from compound literals, memcpy stuff over. */
6187 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[vtop
->sym
->c
];
6188 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6189 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6191 /* We need to copy over all memory contents, and that
6192 includes relocations. Use the fact that relocs are
6193 created it order, so look from the end of relocs
6194 until we hit one before the copied region. */
6195 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6196 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6197 while (num_relocs
--) {
6199 if (rel
->r_offset
>= esym
->st_value
+ size
)
6201 if (rel
->r_offset
< esym
->st_value
)
6203 /* Note: if the same fields are initialized multiple
6204 times (possible with designators) then we possibly
6205 add multiple relocations for the same offset here.
6206 That would lead to wrong code, the last reloc needs
6207 to win. We clean this up later after the whole
6208 initializer is parsed. */
6209 put_elf_reloca(symtab_section
, sec
,
6210 c
+ rel
->r_offset
- esym
->st_value
,
6211 ELFW(R_TYPE
)(rel
->r_info
),
6212 ELFW(R_SYM
)(rel
->r_info
),
6222 if ((vtop
->r
& VT_SYM
) &&
6228 (bt
== VT_LLONG
&& bit_size
!= 64) ||
6232 (bt
== VT_INT
&& bit_size
!= 32)
6235 tcc_error("initializer element is not computable at load time");
6237 /* XXX: when cross-compiling we assume that each type has the
6238 same representation on host and target, which is likely to
6239 be wrong in the case of long double */
6241 vtop
->c
.i
= (vtop
->c
.i
!= 0);
6243 *(char *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6246 *(short *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6249 *(float*)ptr
= vtop
->c
.f
;
6252 *(double *)ptr
= vtop
->c
.d
;
6255 if (sizeof(long double) == LDOUBLE_SIZE
)
6256 *(long double *)ptr
= vtop
->c
.ld
;
6257 else if (sizeof(double) == LDOUBLE_SIZE
)
6258 *(double *)ptr
= (double)vtop
->c
.ld
;
6259 #if (defined __i386__ || defined __x86_64__) && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
6260 else if (sizeof (long double) >= 10)
6261 memcpy(memset(ptr
, 0, LDOUBLE_SIZE
), &vtop
->c
.ld
, 10);
6263 else if (sizeof (long double) == sizeof (double))
6264 __asm__("fldl %1\nfstpt %0\n" : "=m"
6265 (memset(ptr
, 0, LDOUBLE_SIZE
), ptr
) : "m" (vtop
->c
.ld
));
6269 tcc_error("can't cross compile long double constants");
6273 *(long long *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6280 addr_t val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6282 if (vtop
->r
& VT_SYM
)
6283 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6285 *(addr_t
*)ptr
|= val
;
6287 if (vtop
->r
& VT_SYM
)
6288 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6289 *(addr_t
*)ptr
|= val
;
6295 int val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
6297 if (vtop
->r
& VT_SYM
)
6298 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6302 if (vtop
->r
& VT_SYM
)
6303 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6312 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6319 /* 't' contains the type and storage info. 'c' is the offset of the
6320 object in section 'sec'. If 'sec' is NULL, it means stack based
6321 allocation. 'first' is true if array '{' must be read (multi
6322 dimension implicit array init handling). 'size_only' is true if
6323 size only evaluation is wanted (only for arrays). */
6324 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6325 int first
, int size_only
)
6327 int len
, n
, no_oblock
, nb
, i
;
6334 /* If we currently are at an '}' or ',' we have read an initializer
6335 element in one of our callers, and not yet consumed it. */
6336 have_elem
= tok
== '}' || tok
== ',';
6337 if (!have_elem
&& tok
!= '{' &&
6338 /* In case of strings we have special handling for arrays, so
6339 don't consume them as initializer value (which would commit them
6340 to some anonymous symbol). */
6341 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6343 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6348 !(type
->t
& VT_ARRAY
) &&
6349 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6350 The source type might have VT_CONSTANT set, which is
6351 of course assignable to non-const elements. */
6352 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6353 init_putv(type
, sec
, c
);
6354 } else if (type
->t
& VT_ARRAY
) {
6357 t1
= pointed_type(type
);
6358 size1
= type_size(t1
, &align1
);
6361 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6364 tcc_error("character array initializer must be a literal,"
6365 " optionally enclosed in braces");
6370 /* only parse strings here if correct type (otherwise: handle
6371 them as ((w)char *) expressions */
6372 if ((tok
== TOK_LSTR
&&
6373 #ifdef TCC_TARGET_PE
6374 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6376 (t1
->t
& VT_BTYPE
) == VT_INT
6378 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6380 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6383 /* compute maximum number of chars wanted */
6385 cstr_len
= tokc
.str
.size
;
6387 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6390 if (n
>= 0 && nb
> (n
- len
))
6394 tcc_warning("initializer-string for array is too long");
6395 /* in order to go faster for common case (char
6396 string in global variable, we handle it
6398 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6399 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
6403 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6405 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6407 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
6414 /* only add trailing zero if enough storage (no
6415 warning in this case since it is standard) */
6416 if (n
< 0 || len
< n
) {
6419 init_putv(t1
, sec
, c
+ (len
* size1
));
6430 while (tok
!= '}' || have_elem
) {
6431 len
= decl_designator(type
, sec
, c
, &f
, size_only
, len
);
6433 if (type
->t
& VT_ARRAY
) {
6435 /* special test for multi dimensional arrays (may not
6436 be strictly correct if designators are used at the
6438 if (no_oblock
&& len
>= n
*size1
)
6441 if (s
->type
.t
== VT_UNION
)
6445 if (no_oblock
&& f
== NULL
)
6454 /* put zeros at the end */
6455 if (!size_only
&& len
< n
*size1
)
6456 init_putz(sec
, c
+ len
, n
*size1
- len
);
6459 /* patch type size if needed, which happens only for array types */
6461 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
6462 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6465 if (first
|| tok
== '{') {
6473 } else if (tok
== '{') {
6475 decl_initializer(type
, sec
, c
, first
, size_only
);
6477 } else if (size_only
) {
6478 /* If we supported only ISO C we wouldn't have to accept calling
6479 this on anything than an array size_only==1 (and even then
6480 only on the outermost level, so no recursion would be needed),
6481 because initializing a flex array member isn't supported.
6482 But GNU C supports it, so we need to recurse even into
6483 subfields of structs and arrays when size_only is set. */
6484 /* just skip expression */
6485 skip_or_save_block(NULL
);
6488 /* This should happen only when we haven't parsed
6489 the init element above for fear of committing a
6490 string constant to memory too early. */
6491 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
6492 expect("string constant");
6493 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6495 init_putv(type
, sec
, c
);
6499 /* parse an initializer for type 't' if 'has_init' is non zero, and
6500 allocate space in local or global data space ('r' is either
6501 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6502 variable 'v' of scope 'scope' is declared before initializers
6503 are parsed. If 'v' is zero, then a reference to the new object
6504 is put in the value stack. If 'has_init' is 2, a special parsing
6505 is done to handle string constants. */
6506 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
6507 int has_init
, int v
, int scope
)
6509 int size
, align
, addr
;
6510 ParseState saved_parse_state
= {0};
6511 TokenString
*init_str
= NULL
;
6513 Sym
*flexible_array
;
6516 flexible_array
= NULL
;
6517 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6518 Sym
*field
= type
->ref
->next
;
6521 field
= field
->next
;
6522 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
6523 flexible_array
= field
;
6527 size
= type_size(type
, &align
);
6528 /* If unknown size, we must evaluate it before
6529 evaluating initializers because
6530 initializers can generate global data too
6531 (e.g. string pointers or ISOC99 compound
6532 literals). It also simplifies local
6533 initializers handling */
6534 if (size
< 0 || (flexible_array
&& has_init
)) {
6536 tcc_error("unknown type size");
6537 /* get all init string */
6538 if (has_init
== 2) {
6539 init_str
= tok_str_alloc();
6540 /* only get strings */
6541 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6542 tok_str_add_tok(init_str
);
6545 tok_str_add(init_str
, -1);
6546 tok_str_add(init_str
, 0);
6548 skip_or_save_block(&init_str
);
6552 save_parse_state(&saved_parse_state
);
6554 begin_macro(init_str
, 1);
6556 decl_initializer(type
, NULL
, 0, 1, 1);
6557 /* prepare second initializer parsing */
6558 macro_ptr
= init_str
->str
;
6561 /* if still unknown size, error */
6562 size
= type_size(type
, &align
);
6564 tcc_error("unknown type size");
6566 /* If there's a flex member and it was used in the initializer
6568 if (flexible_array
&&
6569 flexible_array
->type
.ref
->c
> 0)
6570 size
+= flexible_array
->type
.ref
->c
6571 * pointed_size(&flexible_array
->type
);
6572 /* take into account specified alignment if bigger */
6573 if (ad
->a
.aligned
) {
6574 int speca
= 1 << (ad
->a
.aligned
- 1);
6577 } else if (ad
->a
.packed
) {
6580 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
6582 #ifdef CONFIG_TCC_BCHECK
6583 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
6587 loc
= (loc
- size
) & -align
;
6589 #ifdef CONFIG_TCC_BCHECK
6590 /* handles bounds */
6591 /* XXX: currently, since we do only one pass, we cannot track
6592 '&' operators, so we add only arrays */
6593 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
6595 /* add padding between regions */
6597 /* then add local bound info */
6598 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
6599 bounds_ptr
[0] = addr
;
6600 bounds_ptr
[1] = size
;
6604 /* local variable */
6605 #ifdef CONFIG_TCC_ASM
6606 if (ad
->asm_label
) {
6607 int reg
= asm_parse_regvar(ad
->asm_label
);
6609 r
= (r
& ~VT_VALMASK
) | reg
;
6612 sym
= sym_push(v
, type
, r
, addr
);
6615 /* push local reference */
6616 vset(type
, r
, addr
);
6619 if (v
&& scope
== VT_CONST
) {
6620 /* see if the symbol was already defined */
6623 patch_storage(sym
, ad
, type
);
6624 if (sym
->type
.t
& VT_EXTERN
) {
6625 /* if the variable is extern, it was not allocated */
6626 sym
->type
.t
&= ~VT_EXTERN
;
6627 /* set array size if it was omitted in extern
6629 if ((sym
->type
.t
& VT_ARRAY
) &&
6630 sym
->type
.ref
->c
< 0 &&
6632 sym
->type
.ref
->c
= type
->ref
->c
;
6633 } else if (!has_init
) {
6634 /* we accept several definitions of the same
6635 global variable. this is tricky, because we
6636 must play with the SHN_COMMON type of the symbol */
6637 /* no init data, we won't add more to the symbol */
6639 } else if (sym
->c
) {
6641 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
6642 if (esym
->st_shndx
== data_section
->sh_num
)
6643 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6648 /* allocate symbol in corresponding section */
6653 else if (tcc_state
->nocommon
)
6658 addr
= section_add(sec
, size
, align
);
6659 #ifdef CONFIG_TCC_BCHECK
6660 /* add padding if bound check */
6661 if (tcc_state
->do_bounds_check
)
6662 section_add(sec
, 1, 1);
6665 addr
= align
; /* SHN_COMMON is special, symbol value is align */
6666 sec
= common_section
;
6671 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
6672 patch_storage(sym
, ad
, NULL
);
6674 /* update symbol definition */
6675 put_extern_sym(sym
, sec
, addr
, size
);
6677 /* push global reference */
6678 sym
= get_sym_ref(type
, sec
, addr
, size
);
6679 vpushsym(type
, sym
);
6683 #ifdef CONFIG_TCC_BCHECK
6684 /* handles bounds now because the symbol must be defined
6685 before for the relocation */
6686 if (tcc_state
->do_bounds_check
) {
6689 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
6690 /* then add global bound info */
6691 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
6692 bounds_ptr
[0] = 0; /* relocated */
6693 bounds_ptr
[1] = size
;
6698 if (type
->t
& VT_VLA
) {
6701 /* save current stack pointer */
6702 if (vlas_in_scope
== 0) {
6703 if (vla_sp_root_loc
== -1)
6704 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
6705 gen_vla_sp_save(vla_sp_root_loc
);
6708 vla_runtime_type_size(type
, &a
);
6709 gen_vla_alloc(type
, a
);
6710 gen_vla_sp_save(addr
);
6714 } else if (has_init
) {
6715 size_t oldreloc_offset
= 0;
6716 if (sec
&& sec
->reloc
)
6717 oldreloc_offset
= sec
->reloc
->data_offset
;
6718 decl_initializer(type
, sec
, addr
, 1, 0);
6719 if (sec
&& sec
->reloc
)
6720 squeeze_multi_relocs(sec
, oldreloc_offset
);
6721 /* patch flexible array member size back to -1, */
6722 /* for possible subsequent similar declarations */
6724 flexible_array
->type
.ref
->c
= -1;
6728 /* restore parse state if needed */
6731 restore_parse_state(&saved_parse_state
);
6735 /* parse a function defined by symbol 'sym' and generate its code in
6736 'cur_text_section' */
6737 static void gen_function(Sym
*sym
)
6740 ind
= cur_text_section
->data_offset
;
6741 /* NOTE: we patch the symbol size later */
6742 put_extern_sym(sym
, cur_text_section
, ind
, 0);
6743 funcname
= get_tok_str(sym
->v
, NULL
);
6745 /* Initialize VLA state */
6747 vla_sp_root_loc
= -1;
6748 /* put debug symbol */
6749 tcc_debug_funcstart(tcc_state
, sym
);
6750 /* push a dummy symbol to enable local sym storage */
6751 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
6752 local_scope
= 1; /* for function parameters */
6753 gfunc_prolog(&sym
->type
);
6756 block(NULL
, NULL
, 0);
6760 cur_text_section
->data_offset
= ind
;
6761 label_pop(&global_label_stack
, NULL
);
6762 /* reset local stack */
6764 sym_pop(&local_stack
, NULL
, 0);
6765 /* end of function */
6766 /* patch symbol size */
6767 ((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
].st_size
=
6769 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
6770 /* It's better to crash than to generate wrong code */
6771 cur_text_section
= NULL
;
6772 funcname
= ""; /* for safety */
6773 func_vt
.t
= VT_VOID
; /* for safety */
6774 func_var
= 0; /* for safety */
6775 ind
= 0; /* for safety */
6780 static void gen_inline_functions(TCCState
*s
)
6783 int inline_generated
, i
, ln
;
6784 struct InlineFunc
*fn
;
6786 ln
= file
->line_num
;
6787 /* iterate while inline function are referenced */
6789 inline_generated
= 0;
6790 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
6791 fn
= s
->inline_fns
[i
];
6793 if (sym
&& sym
->c
) {
6794 /* the function was used: generate its code and
6795 convert it to a normal function */
6798 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
6799 sym
->type
.t
&= ~VT_INLINE
;
6801 begin_macro(fn
->func_str
, 1);
6803 cur_text_section
= text_section
;
6807 inline_generated
= 1;
6810 if (!inline_generated
)
6813 file
->line_num
= ln
;
/* Release the saved token streams of inline functions that were never
   emitted, then free the InlineFunc array itself.
   NOTE(review): damaged extraction -- the declaration of 'i', braces,
   a likely guard before tok_str_free (original line 6822, presumably
   'if (fn->func_str)'), and a tcc_free(fn) (line 6824) are elided.
   Code left byte-identical, comments only. */
6816 ST_FUNC
void free_inline_functions(TCCState
*s
)
6819 /* free tokens of unused inline functions */
6820 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
6821 struct InlineFunc
*fn
= s
->inline_fns
[i
];
6823 tok_str_free(fn
->func_str
);
/* release the dynamic array holding the InlineFunc pointers */
6825 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
6828 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
6829 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* NOTE(review): damaged extraction -- original line numbers are fused
   into the text and large runs of lines are elided throughout (local
   declarations, braces, return statements, guard conditions).  Code is
   left byte-identical; only comments were added.  Verify every hedged
   note below against the upstream file. */
6830 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
/* no type specifier could be parsed at the current token */
6838 if (!parse_btype(&btype
, &ad
)) {
6839 if (is_for_loop_init
)
6841 /* skip redundant ';' if not in old parameter decl scope */
6842 if (tok
== ';' && l
!= VT_CMP
) {
/* file-scope asm("...") statement */
6846 if (l
== VT_CONST
&&
6847 (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
6848 /* global asm block */
6852 /* special test for old K&R protos without explicit int
6853 type. Only accepted when defining global data */
6854 if (l
!= VT_CONST
|| tok
< TOK_UIDENT
)
/* declaration that only introduces a struct/union/enum tag */
6859 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
6860 int v
= btype
.ref
->v
;
/* anonymous tag declaring no object is almost surely a mistake */
6861 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
6862 tcc_warning("unnamed struct/union that defines no instances");
6866 if (IS_ENUM(btype
.t
)) {
6871 while (1) { /* iterate thru each declaration */
6873 /* If the base type itself was an array type of unspecified
6874 size (like in 'typedef int arr[]; arr x = {1};') then
6875 we will overwrite the unknown size by the real one for
6876 this decl. We need to unshare the ref symbol holding
6878 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
6879 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
/* parse the declarator; 'v' receives the declared identifier */
6881 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed type; NOTE(review): the guard (if any,
   e.g. a verbosity check) around this printf is elided -- confirm */
6885 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
6886 printf("type = '%s'\n", buf
);
/* function declarator */
6889 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
6890 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
6891 tcc_error("function without file scope cannot be static");
6893 /* if old style function prototype, we accept a
6896 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
/* recurse to parse the K&R parameter declaration list */
6897 decl0(VT_CMP
, 0, sym
);
/* GNU asm label after the declarator */
6900 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
6901 ad
.asm_label
= asm_label_instr();
6902 /* parse one last attribute list, after asm label */
6903 parse_attribute(&ad
);
6908 #ifdef TCC_TARGET_PE
6909 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
6910 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
6911 tcc_error("cannot have dll linkage with static or typedef");
6912 if (ad
.a
.dllimport
) {
6913 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
/* dllimported data must be accessed as extern */
6916 type
.t
|= VT_EXTERN
;
/* NOTE(review): the guards preceding these two errors (presumably a
   'tok == {' function-definition test and an 'l == VT_LOCAL' check)
   are elided from this chunk */
6922 tcc_error("cannot use local functions");
6923 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
6924 expect("function definition");
6926 /* reject abstract declarators in function definition
6927 make old style params without decl have int type */
6929 while ((sym
= sym
->next
) != NULL
) {
6930 if (!(sym
->v
& ~SYM_FIELD
))
6931 expect("identifier");
6932 if (sym
->type
.t
== VT_VOID
)
6933 sym
->type
= int_type
;
6936 /* XXX: cannot do better now: convert extern line to static inline */
6937 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
6938 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
/* merge this definition with an earlier prototype; NOTE(review):
   the lookup that re-assigns 'sym' to the previously declared symbol
   is on an elided line -- confirm upstream */
6943 if ((sym
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6946 ref
= sym
->type
.ref
;
6948 /* use func_call from prototype if not defined */
6949 if (ref
->f
.func_call
!= FUNC_CDECL
6950 && type
.ref
->f
.func_call
== FUNC_CDECL
)
6951 type
.ref
->f
.func_call
= ref
->f
.func_call
;
6953 /* use static from prototype */
6954 if (sym
->type
.t
& VT_STATIC
)
6955 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
6957 /* If the definition has no visibility use the
6958 one from prototype. */
6959 if (!type
.ref
->a
.visibility
)
6960 type
.ref
->a
.visibility
= ref
->a
.visibility
;
6961 /* apply other storage attributes from prototype */
6962 type
.ref
->a
.dllexport
|= ref
->a
.dllexport
;
6963 type
.ref
->a
.weak
|= ref
->a
.weak
;
6965 if (!is_compatible_types(&sym
->type
, &type
)) {
6967 tcc_error("incompatible types for redefinition of '%s'",
6968 get_tok_str(v
, NULL
));
6970 if (ref
->f
.func_body
)
6971 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6972 /* if symbol is already defined, then put complete type */
6976 /* put function symbol */
6977 sym
= global_identifier_push(v
, type
.t
, 0);
6978 sym
->type
.ref
= type
.ref
;
/* mark the symbol as carrying a definition, not just a prototype */
6981 sym
->type
.ref
->f
.func_body
= 1;
6982 sym
->r
= VT_SYM
| VT_CONST
;
6983 patch_storage(sym
, &ad
, NULL
);
6985 /* static inline functions are just recorded as a kind
6986 of macro. Their code will be emitted at the end of
6987 the compilation unit only if they are used */
6988 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
6989 (VT_INLINE
| VT_STATIC
)) {
6990 struct InlineFunc
*fn
;
6991 const char *filename
;
6993 filename
= file
? file
->filename
: "";
/* InlineFunc ends with a variable-length filename buffer, hence the
   '+ strlen(filename)' in the allocation size */
6994 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
6995 strcpy(fn
->filename
, filename
);
/* record (or skip) the token stream of the body for later replay */
6997 skip_or_save_block(&fn
->func_str
);
6998 dynarray_add(&tcc_state
->inline_fns
,
6999 &tcc_state
->nb_inline_fns
, fn
);
7001 /* compute text section */
7002 cur_text_section
= ad
.section
;
7003 if (!cur_text_section
)
7004 cur_text_section
= text_section
;
/* old-style (K&R) parameter declaration branch: l == VT_CMP */
7010 /* find parameter in function parameter list */
7011 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7012 if ((sym
->v
& ~SYM_FIELD
) == v
)
7014 tcc_error("declaration for parameter '%s' but no such parameter",
7015 get_tok_str(v
, NULL
));
7017 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7018 tcc_error("storage class specified for '%s'",
7019 get_tok_str(v
, NULL
));
/* parameters start out typed VT_VOID; anything else means the
   parameter was already declared in this list */
7020 if (sym
->type
.t
!= VT_VOID
)
7021 tcc_error("redefinition of parameter '%s'",
7022 get_tok_str(v
, NULL
));
7023 convert_parameter_type(&type
);
7025 } else if (type
.t
& VT_TYPEDEF
) {
7026 /* save typedefed type */
7027 /* XXX: test storage specifiers ? */
/* a same-scope redefinition must match the earlier typedef */
7029 if (sym
&& sym
->sym_scope
== local_scope
) {
7030 if (!is_compatible_types(&sym
->type
, &type
)
7031 || !(sym
->type
.t
& VT_TYPEDEF
))
7032 tcc_error("incompatible redefinition of '%s'",
7033 get_tok_str(v
, NULL
));
7036 sym
= sym_push(v
, &type
, 0, 0);
/* plain object or function declaration */
7041 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7042 /* external function definition */
7043 /* specific case for func_call attribute */
7045 } else if (!(type
.t
& VT_ARRAY
)) {
7046 /* not lvalue if array */
7047 r
|= lvalue_type(type
.t
);
7049 has_init
= (tok
== '=');
7050 if (has_init
&& (type
.t
& VT_VLA
))
7051 tcc_error("variable length array cannot be initialized");
7052 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
7053 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7054 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7055 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7056 /* external variable or function */
7057 /* NOTE: as GCC, uninitialized global static
7058 arrays of null size are considered as
7060 sym
= external_sym(v
, &type
, r
, &ad
);
/* __attribute__((alias("target"))) handling */
7061 if (ad
.alias_target
) {
7065 alias_target
= sym_find(ad
.alias_target
);
/* the alias target must already have an ELF symbol (c != 0) */
7066 if (!alias_target
|| !alias_target
->c
)
7067 tcc_error("unsupported forward __alias__ attribute");
7068 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[alias_target
->c
];
7069 tsec
.sh_num
= esym
->st_shndx
;
/* emit the alias with the same value/size as its target */
7070 put_extern_sym2(sym
, &tsec
, esym
->st_value
, esym
->st_size
, 0);
7073 if (type
.t
& VT_STATIC
)
/* allocate storage and parse the optional initializer */
7079 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7083 if (is_for_loop_init
)
/* Public entry point for parsing declarations.
   NOTE(review): only the signature survived extraction; the body
   (original lines 7097-7100, presumably a thin wrapper delegating to
   decl0) is missing from this chunk -- confirm against upstream. */
7096 ST_FUNC
void decl(int l
)
7101 /* ------------------------------------------------------------------------- */