/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   rsym: return symbol
   anon_sym: anonymous symbol index
*/
ST_DATA int rsym, anon_sym, ind, loc;

ST_DATA Sym *sym_free_first;
ST_DATA void **sym_pools;
ST_DATA int nb_sym_pools;

ST_DATA Sym *global_stack;
ST_DATA Sym *local_stack;
ST_DATA Sym *define_stack;
ST_DATA Sym *global_label_stack;
ST_DATA Sym *local_label_stack;
static int local_scope;
static int section_sym;

ST_DATA int vlas_in_scope; /* number of VLAs that are currently in scope */
ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
ST_DATA int vla_sp_loc; /* pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */

ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;

ST_DATA int const_wanted; /* true if constant wanted */
ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
ST_DATA int global_expr;  /* true if compound literals must be allocated globally (used during initializers parsing) */
ST_DATA CType func_vt; /* current function return type (used by return instruction) */
ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
ST_DATA int last_line_num, last_ind, func_ind; /* debug info: last line number and pc */
ST_DATA const char *funcname;

ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;

ST_DATA struct switch_t {
    struct case_t {
        int64_t v1, v2;
        int sym;
    } **p; int n; /* list of case ranges */
    int def_sym; /* default symbol */
} *cur_switch; /* current switch */
/* ------------------------------------------------------------------------- */

static void gen_cast(CType *type);
static void gen_cast_s(int t);
static inline CType *pointed_type(CType *type);
static int is_compatible_types(CType *type1, CType *type2);
static int parse_btype(CType *type, AttributeDef *ad);
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
static void parse_expr_type(CType *type);
static void init_putv(CType *type, Section *sec, unsigned long c);
static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
static void block(int *bsym, int *csym, int is_expr);
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
static int decl0(int l, int is_for_loop_init, Sym *);
static void expr_eq(void);
static void vla_runtime_type_size(CType *type, int *a);
static void vla_sp_restore(void);
static void vla_sp_restore_root(void);
static int is_compatible_parameter_types(CType *type1, CType *type2);
static inline int64_t expr_const64(void);
ST_FUNC void vpush64(int ty, unsigned long long v);
ST_FUNC void vpush(CType *type);
ST_FUNC int gvtst(int inv, int t);
static void gen_inline_functions(TCCState *s);
static void skip_or_save_block(TokenString **str);
ST_INLN int is_float(int t)
{
    int bt;
    bt = t & VT_BTYPE;
    return bt == VT_LDOUBLE || bt == VT_DOUBLE || bt == VT_FLOAT || bt == VT_QFLOAT;
}
/* we use our own 'finite' function to avoid potential problems with
   non standard math libs */
/* XXX: endianness dependent */
ST_FUNC int ieee_finite(double d)
{
    int p[4];
    memcpy(p, &d, sizeof(double));
    return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
}
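/* On a little-endian host p[1] holds the high 32 bits of the IEEE-754 double:
   the sign bit, the 11 exponent bits and the top 20 mantissa bits.  OR-ing
   with 0x800fffff sets every bit except the exponent field, so adding 1
   carries out of bit 31 (wrapping the word to 0) only when all exponent bits
   are set, i.e. the value is an Inf or a NaN.  The final shift thus yields 1
   for finite values and 0 for non-finite ones. */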
ST_FUNC void test_lvalue(void)
{
    if (!(vtop->r & VT_LVAL))
        expect("lvalue");
}

ST_FUNC void check_vstack(void)
{
    if (vtop != pvtop)
        tcc_error("internal compiler error: vstack leak (%d)", vtop - pvtop);
}
/* ------------------------------------------------------------------------- */
/* vstack debugging aid */
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x  r:%04x  r2:%04x  c.i:%d\n",
               lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
/* ------------------------------------------------------------------------- */
/* start of translation unit info */
ST_FUNC void tcc_debug_start(TCCState *s1)
{
    if (s1->do_debug) {
        char buf[512];

        /* file info: full path + filename */
        section_sym = put_elf_sym(symtab_section, 0, 0,
                                  ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
                                  text_section->sh_num, NULL);
        getcwd(buf, sizeof(buf));
#ifdef _WIN32
        normalize_slashes(buf);
#endif
        pstrcat(buf, sizeof(buf), "/");
        put_stabs_r(buf, N_SO, 0, 0,
                    text_section->data_offset, text_section, section_sym);
        put_stabs_r(file->filename, N_SO, 0, 0,
                    text_section->data_offset, text_section, section_sym);
        last_ind = 0;
        last_line_num = 0;
    }

    /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
       symbols can be safely used */
    put_elf_sym(symtab_section, 0, 0,
                ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
                SHN_ABS, file->filename);
}
/* put end of translation unit info */
ST_FUNC void tcc_debug_end(TCCState *s1)
{
    if (!s1->do_debug)
        return;
    put_stabs_r(NULL, N_SO, 0, 0,
                text_section->data_offset, text_section, section_sym);
}
/* generate line number info */
ST_FUNC void tcc_debug_line(TCCState *s1)
{
    if (!s1->do_debug)
        return;
    if ((last_line_num != file->line_num || last_ind != ind)) {
        put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
        last_ind = ind;
        last_line_num = file->line_num;
    }
}
/* put function symbol */
ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
{
    char buf[512];

    if (!s1->do_debug)
        return;

    /* stabs info */
    /* XXX: we put here a dummy type */
    snprintf(buf, sizeof(buf), "%s:%c1",
             funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
    put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
                cur_text_section, sym->c);
    /* //gr gdb wants a line at the function */
    put_stabn(N_SLINE, 0, file->line_num, 0);

    last_ind = 0;
    last_line_num = 0;
}
/* put function size */
ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
{
    if (!s1->do_debug)
        return;
    put_stabn(N_FUN, 0, 0, size);
}
/* ------------------------------------------------------------------------- */
ST_FUNC void tccgen_start(TCCState *s1)
{
    cur_text_section = NULL;
    anon_sym = SYM_FIRST_ANOM;

    /* define some often used types */
    int_type.t = VT_INT;

    char_pointer_type.t = VT_BYTE;
    mk_pointer(&char_pointer_type);
#if PTR_SIZE == 4
    size_type.t = VT_INT;
#else
    size_type.t = VT_LLONG;
#endif
    func_old_type.t = VT_FUNC;
    func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
    func_old_type.ref->f.func_call = FUNC_CDECL;
    func_old_type.ref->f.func_type = FUNC_OLD;

    tcc_debug_start(s1);

#ifdef TCC_TARGET_ARM
    arm_init(s1);
#endif
}
ST_FUNC void tccgen_end(TCCState *s1)
{
    gen_inline_functions(s1);
    check_vstack();
    /* end of translation unit info */
    tcc_debug_end(s1);
}
/* ------------------------------------------------------------------------- */
/* apply storage attributes to Elf symbol */

static void update_storage(Sym *sym)
{
    ElfW(Sym) *esym;

    if (0 == sym->c)
        return;

    esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
    if (sym->a.visibility)
        esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
            | sym->a.visibility;
    if (sym->a.weak)
        esym->st_info = ELFW(ST_INFO)(STB_WEAK, ELFW(ST_TYPE)(esym->st_info));
#ifdef TCC_TARGET_PE
    if (sym->a.dllimport)
        esym->st_other |= ST_PE_IMPORT;
    if (sym->a.dllexport)
        esym->st_other |= ST_PE_EXPORT;
#endif
#if 0
    printf("storage %s: vis=%d weak=%d exp=%d imp=%d\n",
           get_tok_str(sym->v, NULL),
           sym->a.visibility,
           sym->a.weak,
           sym->a.dllexport,
           sym->a.dllimport);
#endif
}
/* ------------------------------------------------------------------------- */
/* update sym->c so that it points to an external symbol in section
   'section' with value 'value' */

ST_FUNC void put_extern_sym2(Sym *sym, Section *section,
                             addr_t value, unsigned long size,
                             int can_add_underscore)
{
    int sym_type, sym_bind, sh_num, info, other, t;
    ElfW(Sym) *esym;
    const char *name;
    char buf1[256];
#ifdef CONFIG_TCC_BCHECK
    char buf[32];
#endif

    if (section == NULL)
        sh_num = SHN_UNDEF;
    else if (section == SECTION_ABS)
        sh_num = SHN_ABS;
    else
        sh_num = section->sh_num;

    if (!sym->c) {
        name = get_tok_str(sym->v, NULL);
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check) {
            /* XXX: avoid doing that for statics ? */
            /* if bound checking is activated, we change some function
               names by adding the "__bound" prefix */
            switch(sym->v) {
            /* XXX: we rely only on malloc hooks */
            case TOK_malloc:
            case TOK_free:
            case TOK_realloc:
            case TOK_memalign:
            case TOK_calloc:
            case TOK_memcpy:
            case TOK_memmove:
            case TOK_memset:
            case TOK_strlen:
            case TOK_strcpy:
            case TOK_alloca:
                strcpy(buf, "__bound_");
                strcat(buf, name);
                name = buf;
                break;
            }
        }
#endif
        t = sym->type.t;
        if ((t & VT_BTYPE) == VT_FUNC) {
            sym_type = STT_FUNC;
        } else if ((t & VT_BTYPE) == VT_VOID) {
            sym_type = STT_NOTYPE;
        } else {
            sym_type = STT_OBJECT;
        }
        if (t & VT_STATIC)
            sym_bind = STB_LOCAL;
        else
            sym_bind = STB_GLOBAL;
        other = 0;
#ifdef TCC_TARGET_PE
        if (sym_type == STT_FUNC && sym->type.ref) {
            Sym *ref = sym->type.ref;
            if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
                sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
                name = buf1;
                other |= ST_PE_STDCALL;
                can_add_underscore = 0;
            }
        }
#endif
        if (tcc_state->leading_underscore && can_add_underscore) {
            buf1[0] = '_';
            pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
            name = buf1;
        }
        if (sym->asm_label)
            name = get_tok_str(sym->asm_label, NULL);
        info = ELFW(ST_INFO)(sym_bind, sym_type);
        sym->c = set_elf_sym(symtab_section, value, size, info, other, sh_num, name);
    } else {
        esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
        esym->st_value = value;
        esym->st_size = size;
        esym->st_shndx = sh_num;
    }
    update_storage(sym);
}
ST_FUNC void put_extern_sym(Sym *sym, Section *section,
                            addr_t value, unsigned long size)
{
    put_extern_sym2(sym, section, value, size, 1);
}
/* add a new relocation entry to symbol 'sym' in section 's' */
ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
                     addr_t addend)
{
    int c = 0;

    if (nocode_wanted && s == cur_text_section)
        return;

    if (sym) {
        if (0 == sym->c)
            put_extern_sym(sym, NULL, 0, 0);
        c = sym->c;
    }

    /* now we can add ELF relocation info */
    put_elf_reloca(symtab_section, s, offset, type, c, addend);
}

ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
/* ------------------------------------------------------------------------- */
/* symbol allocator */
static Sym *__sym_malloc(void)
{
    Sym *sym_pool, *sym, *last_sym;
    int i;

    sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
    dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);

    last_sym = sym_free_first;
    sym = sym_pool;
    for(i = 0; i < SYM_POOL_NB; i++) {
        sym->next = last_sym;
        last_sym = sym;
        sym++;
    }
    sym_free_first = last_sym;
    return last_sym;
}
static inline Sym *sym_malloc(void)
{
    Sym *sym;
#ifndef SYM_DEBUG
    sym = sym_free_first;
    if (!sym)
        sym = __sym_malloc();
    sym_free_first = sym->next;
    return sym;
#else
    sym = tcc_malloc(sizeof(Sym));
    return sym;
#endif
}
ST_INLN void sym_free(Sym *sym)
{
#ifndef SYM_DEBUG
    sym->next = sym_free_first;
    sym_free_first = sym;
#else
    tcc_free(sym);
#endif
}
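/* Sym objects are recycled through a single free list (sym_free_first) that
   threads spare entries via their 'next' field; __sym_malloc refills the list
   one SYM_POOL_NB-sized pool at a time, and the pools recorded in sym_pools
   stay allocated for the lifetime of the compilation state. */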
/* push, without hashing */
ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
{
    Sym *s;

    s = sym_malloc();
    memset(s, 0, sizeof *s);
    s->v = v;
    s->type.t = t;
    s->c = c;
    /* add in stack */
    s->prev = *ps;
    *ps = s;
    return s;
}
/* find a symbol and return its associated structure. 's' is the top
   of the symbol stack */
ST_FUNC Sym *sym_find2(Sym *s, int v)
{
    while (s) {
        if (s->v == v)
            return s;
        s = s->prev;
    }
    return NULL;
}

/* structure lookup */
ST_INLN Sym *struct_find(int v)
{
    v -= TOK_IDENT;
    if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
        return NULL;
    return table_ident[v]->sym_struct;
}

/* find an identifier */
ST_INLN Sym *sym_find(int v)
{
    v -= TOK_IDENT;
    if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
        return NULL;
    return table_ident[v]->sym_identifier;
}
/* push a given symbol on the symbol stack */
ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
{
    Sym *s, **ps;
    TokenSym *ts;

    if (local_stack)
        ps = &local_stack;
    else
        ps = &global_stack;
    s = sym_push2(ps, v, type->t, c);
    s->type.ref = type->ref;
    s->r = r;
    /* don't record fields or anonymous symbols */
    /* XXX: simplify */
    if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
        /* record symbol in token array */
        ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
        if (v & SYM_STRUCT)
            ps = &ts->sym_struct;
        else
            ps = &ts->sym_identifier;
        s->prev_tok = *ps;
        *ps = s;
        s->sym_scope = local_scope;
        if (s->prev_tok && s->prev_tok->sym_scope == s->sym_scope)
            tcc_error("redeclaration of '%s'",
                      get_tok_str(v & ~SYM_STRUCT, NULL));
    }
    return s;
}
/* push a global identifier */
ST_FUNC Sym *global_identifier_push(int v, int t, int c)
{
    Sym *s, **ps;

    s = sym_push2(&global_stack, v, t, c);
    /* don't record anonymous symbol */
    if (v < SYM_FIRST_ANOM) {
        ps = &table_ident[v - TOK_IDENT]->sym_identifier;
        /* modify the top most local identifier, so that
           sym_identifier will point to 's' when popped */
        while (*ps != NULL)
            ps = &(*ps)->prev_tok;
        s->prev_tok = NULL;
        *ps = s;
    }
    return s;
}
/* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
   pop them yet from the list, but do remove them from the token array. */
ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
{
    Sym *s, *ss, **ps;
    TokenSym *ts;
    int v;

    s = *ptop;
    while (s != b) {
        ss = s->prev;
        v = s->v;
        /* remove symbol in token array */
        /* XXX: simplify */
        if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
            ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
            if (v & SYM_STRUCT)
                ps = &ts->sym_struct;
            else
                ps = &ts->sym_identifier;
            *ps = s->prev_tok;
        }
        if (!keep)
            sym_free(s);
        s = ss;
    }
    if (!keep)
        *ptop = b;
}
/* ------------------------------------------------------------------------- */

static void vsetc(CType *type, int r, CValue *vc)
{
    int v;

    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator.

       Don't do this when nocode_wanted.  vtop might come from
       !nocode_wanted regions (see 88_codeopt.c) and transforming
       it to a register without actually generating code is wrong
       as their value might still be used for real.  All values
       we push under nocode_wanted will eventually be popped
       again, so that the VT_CMP/VT_JMP value will be in vtop
       when code is unsuppressed again.

       Same logic below in vswap(); */
    if (vtop >= vstack && !nocode_wanted) {
        v = vtop->r & VT_VALMASK;
        if (v == VT_CMP || (v & ~1) == VT_JMP)
            gv(RC_INT);
    }

    vtop++;
    vtop->type = *type;
    vtop->r = r;
    vtop->r2 = VT_CONST;
    vtop->c = *vc;
}
ST_FUNC void vswap(void)
{
    SValue tmp;

    /* cannot vswap cpu flags. See comment at vsetc() above */
    if (vtop >= vstack && !nocode_wanted) {
        int v = vtop->r & VT_VALMASK;
        if (v == VT_CMP || (v & ~1) == VT_JMP)
            gv(RC_INT);
    }
    tmp = vtop[0];
    vtop[0] = vtop[-1];
    vtop[-1] = tmp;
}

/* pop stack value */
ST_FUNC void vpop(void)
{
    int v;

    v = vtop->r & VT_VALMASK;
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
    /* for x86, we need to pop the FP stack */
    if (v == TREG_ST0) {
        o(0xd8dd); /* fstp %st(0) */
    } else
#endif
    if (v == VT_JMP || v == VT_JMPI) {
        /* need to put correct jump if && or || without test */
        gsym(vtop->c.i);
    }
    vtop--;
}
/* push constant of type "type" with useless value */
ST_FUNC void vpush(CType *type)
{
    vset(type, VT_CONST, 0);
}

/* push integer constant */
ST_FUNC void vpushi(int v)
{
    CValue cval;

    cval.i = v;
    vsetc(&int_type, VT_CONST, &cval);
}

/* push a pointer sized constant */
static void vpushs(addr_t v)
{
    CValue cval;

    cval.i = v;
    vsetc(&size_type, VT_CONST, &cval);
}

/* push arbitrary 64bit constant */
ST_FUNC void vpush64(int ty, unsigned long long v)
{
    CValue cval;
    CType ctype;

    ctype.t = ty;
    ctype.ref = NULL;
    cval.i = v;
    vsetc(&ctype, VT_CONST, &cval);
}

/* push long long constant */
static inline void vpushll(long long v)
{
    vpush64(VT_LLONG, v);
}

ST_FUNC void vset(CType *type, int r, int v)
{
    CValue cval;

    cval.i = v;
    vsetc(type, r, &cval);
}

static void vseti(int r, int v)
{
    CType type;

    type.t = VT_INT;
    type.ref = NULL;
    vset(&type, r, v);
}

ST_FUNC void vpushv(SValue *v)
{
    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    vtop++;
    *vtop = *v;
}
static void vdup(void)
{
    vpushv(vtop);
}

/* rotate n first stack elements to the bottom
   I1 ... In -> I2 ... In I1 [top is right]
*/
ST_FUNC void vrotb(int n)
{
    int i;
    SValue tmp;

    tmp = vtop[-n + 1];
    for(i = -n + 1; i != 0; i++)
        vtop[i] = vtop[i + 1];
    vtop[0] = tmp;
}

/* rotate the n elements before entry e towards the top
   I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
*/
ST_FUNC void vrote(SValue *e, int n)
{
    int i;
    SValue tmp;

    tmp = *e;
    for(i = 0; i < n - 1; i++)
        e[-i] = e[-i - 1];
    e[-n + 1] = tmp;
}

/* rotate n first stack elements to the top
   I1 ... In -> In I1 ... I(n-1)  [top is right]
*/
ST_FUNC void vrott(int n)
{
    vrote(vtop, n);
}
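/* Example with three entries and n == 3 (top is rightmost):
   before: A B C ; vrotb(3) -> B C A ; vrott(3) -> C A B.
   vrote() performs the same rotation as vrott() but relative to an
   arbitrary stack entry 'e' instead of vtop. */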
/* push a symbol value of TYPE */
static inline void vpushsym(CType *type, Sym *sym)
{
    CValue cval;

    cval.i = 0;
    vsetc(type, VT_CONST | VT_SYM, &cval);
    vtop->sym = sym;
}

/* Return a static symbol pointing to a section */
ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    int v;
    Sym *sym;

    v = anon_sym++;
    sym = global_identifier_push(v, type->t | VT_STATIC, 0);
    sym->type.ref = type->ref;
    sym->r = VT_CONST | VT_SYM;
    put_extern_sym(sym, sec, offset, size);
    return sym;
}

/* push a reference to a section offset by adding a dummy symbol */
static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    vpushsym(type, get_sym_ref(type, sec, offset, size));
}
/* define a new external reference to a symbol 'v' of type 'u' */
ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t | VT_EXTERN, 0);
        s->type.ref = type->ref;
        s->r = r | VT_CONST | VT_SYM;
    }
    return s;
}
/* Merge some storage attributes. */
static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
{
    if (type && !is_compatible_types(&sym->type, type))
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));
#ifdef TCC_TARGET_PE
    if (sym->a.dllimport != ad->a.dllimport)
        tcc_error("incompatible dll linkage for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));
#endif
    sym->a.dllexport |= ad->a.dllexport;
    sym->a.weak |= ad->a.weak;
    if (ad->a.visibility) {
        int vis = sym->a.visibility;
        int vis2 = ad->a.visibility;
        if (vis == STV_DEFAULT)
            vis = vis2;
        else if (vis2 != STV_DEFAULT)
            vis = (vis < vis2) ? vis : vis2;
        sym->a.visibility = vis;
    }
    if (ad->a.aligned)
        sym->a.aligned = ad->a.aligned;
    if (ad->asm_label)
        sym->asm_label = ad->asm_label;
    update_storage(sym);
}
/* define a new external reference to a symbol 'v' */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
        s->type.t |= VT_EXTERN;
        s->a = ad->a;
        s->sym_scope = 0;
    } else {
        if (s->type.ref == func_old_type.ref) {
            s->type.ref = type->ref;
            s->r = r | VT_CONST | VT_SYM;
            s->type.t |= VT_EXTERN;
        }
        patch_storage(s, ad, type);
    }
    return s;
}
/* push a reference to global symbol v */
ST_FUNC void vpush_global_sym(CType *type, int v)
{
    vpushsym(type, external_global_sym(v, type, 0));
}
/* save registers up to (vtop - n) stack entry */
ST_FUNC void save_regs(int n)
{
    SValue *p, *p1;

    for(p = vstack, p1 = vtop - n; p <= p1; p++)
        save_reg(p->r);
}

/* save r to the memory stack, and mark it as being free */
ST_FUNC void save_reg(int r)
{
    save_reg_upstack(r, 0);
}
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, saved, size, align;
    SValue *p, *p1, sv;
    CType *type;

    if ((r &= VT_VALMASK) >= VT_CONST)
        return;
    if (nocode_wanted)
        return;

    /* modify all stack values */
    saved = 0;
    l = 0;
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r ||
            ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
            /* must save value on stack if not already done */
            if (!saved) {
                /* NOTE: must reload 'r' because r might be equal to r2 */
                r = p->r & VT_VALMASK;
                /* store register in the stack */
                type = &p->type;
                if ((p->r & VT_LVAL) ||
                    (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
                    type = &char_pointer_type;
                size = type_size(type, &align);
                loc = (loc - size) & -align;
                sv.type.t = type->t;
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = loc;
                store(r, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
                /* special long long case */
                if ((type->t & VT_BTYPE) == VT_LLONG) {
                    sv.c.i += 4;
                    store(p->r2, &sv);
                }
                l = loc;
                saved = 1;
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = lvalue_type(p->type.t) | VT_LOCAL;
            }
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for(r = 0; r < NB_REGS; r++) {
        if (reg_classes[r] & rc2) {
            int n = 0;
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    return get_reg(rc);
}
#endif
/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r = 0; r < NB_REGS; r++) {
        if (reg_classes[r] & rc) {
            if (nocode_wanted)
                return r;
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p = vstack; p <= vtop; p++) {
        /* look at second register (if long long) */
        r = p->r2 & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* should never come here */
    return -1;
}
/* move register 's' (of type 't') to 'r', and flush previous value of r
   to memory if needed */
static void move_reg(int r, int s, int t)
{
    SValue sv;

    if (r != s) {
        save_reg(r);
        sv.type.t = t;
        sv.type.ref = NULL;
        sv.r = s;
        sv.c.i = 0;
        load(r, &sv);
    }
}

/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate lvalue bound code */
static void gbound(void)
{
    int lval_type;
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            vpushi(0);
            gen_bounded_ptr_add();
            vtop->r |= lval_type;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
#endif
1068 /* store vtop a register belonging to class 'rc'. lvalues are
1069 converted to values. Cannot be used if cannot be converted to
1070 register value (such as structures). */
1071 ST_FUNC
int gv(int rc
)
1073 int r
, bit_pos
, bit_size
, size
, align
;
1076 /* NOTE: get_reg can modify vstack[] */
1077 if (vtop
->type
.t
& VT_BITFIELD
) {
1080 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
1081 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
1082 /* remove bit field info to avoid loops */
1083 vtop
->type
.t
&= ~VT_BITFIELD
& ((1 << VT_STRUCT_SHIFT
) - 1);
1084 /* cast to int to propagate signedness in following ops */
1085 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1090 if((vtop
->type
.t
& VT_UNSIGNED
) ||
1091 (vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
||
1092 (((vtop
->type
.t
& VT_BTYPE
) == VT_ENUM
) &&
1093 vtop
->type
.ref
->a
.unsigned_enum
))
1094 type
.t
|= VT_UNSIGNED
;
1096 /* generate shifts */
1097 vpushi(bits
- (bit_pos
+ bit_size
));
1099 vpushi(bits
- bit_size
);
1100 /* NOTE: transformed to SHR if unsigned */
1104 if (is_float(vtop
->type
.t
) &&
1105 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1106 unsigned long offset
;
1107 /* CPUs usually cannot use float constants, so we store them
1108 generically in data segment */
1109 size
= type_size(&vtop
->type
, &align
);
1110 offset
= section_add(data_section
, size
, align
);
1111 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1113 init_putv(&vtop
->type
, data_section
, offset
);
1116 #ifdef CONFIG_TCC_BCHECK
1117 if (vtop
->r
& VT_MUSTBOUND
)
1121 r
= vtop
->r
& VT_VALMASK
;
1122 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1123 #ifndef TCC_TARGET_ARM64
1126 #ifdef TCC_TARGET_X86_64
1127 else if (rc
== RC_FRET
)
1131 /* need to reload if:
1133 - lvalue (need to dereference pointer)
1134 - already a register, but not in the right class */
1136 || (vtop
->r
& VT_LVAL
)
1137 || !(reg_classes
[r
] & rc
)
1139 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1140 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1142 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1148 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1149 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1151 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1152 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1153 unsigned long long ll
;
1155 int r2
, original_type
;
1156 original_type
= vtop
->type
.t
;
1157 /* two register type load : expand to two words
1160 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1163 vtop
->c
.i
= ll
; /* first word */
1165 vtop
->r
= r
; /* save register value */
1166 vpushi(ll
>> 32); /* second word */
1169 if (vtop
->r
& VT_LVAL
) {
1170 /* We do not want to modifier the long long
1171 pointer here, so the safest (and less
1172 efficient) is to save all the other registers
1173 in the stack. XXX: totally inefficient. */
1177 /* lvalue_save: save only if used further down the stack */
1178 save_reg_upstack(vtop
->r
, 1);
1180 /* load from memory */
1181 vtop
->type
.t
= load_type
;
1184 vtop
[-1].r
= r
; /* save register value */
1185 /* increment pointer to get second word */
1186 vtop
->type
.t
= addr_type
;
1191 vtop
->type
.t
= load_type
;
1193 /* move registers */
1196 vtop
[-1].r
= r
; /* save register value */
1197 vtop
->r
= vtop
[-1].r2
;
1199 /* Allocate second register. Here we rely on the fact that
1200 get_reg() tries first to free r2 of an SValue. */
1204 /* write second register */
1206 vtop
->type
.t
= original_type
;
1207 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1209 /* lvalue of scalar type : need to use lvalue type
1210 because of possible cast */
1213 /* compute memory access type */
1214 if (vtop
->r
& VT_LVAL_BYTE
)
1216 else if (vtop
->r
& VT_LVAL_SHORT
)
1218 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1222 /* restore wanted type */
1225 /* one register type load */
1230 #ifdef TCC_TARGET_C67
1231 /* uses register pairs for doubles */
1232 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1239 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1240 ST_FUNC
void gv2(int rc1
, int rc2
)
1244 /* generate more generic register first. But VT_JMP or VT_CMP
1245 values must be generated first in all cases to avoid possible
1247 v
= vtop
[0].r
& VT_VALMASK
;
1248 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1253 /* test if reload is needed for first register */
1254 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1264 /* test if reload is needed for first register */
1265 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1271 #ifndef TCC_TARGET_ARM64
1272 /* wrapper around RC_FRET to return a register by type */
1273 static int rc_fret(int t
)
1275 #ifdef TCC_TARGET_X86_64
1276 if (t
== VT_LDOUBLE
) {
1284 /* wrapper around REG_FRET to return a register by type */
1285 static int reg_fret(int t
)
1287 #ifdef TCC_TARGET_X86_64
1288 if (t
== VT_LDOUBLE
) {
1296 /* expand 64bit on stack in two ints */
1297 static void lexpand(void)
1300 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1301 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1302 if (v
== VT_CONST
) {
1305 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1311 vtop
[0].r
= vtop
[-1].r2
;
1312 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1314 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1318 #ifdef TCC_TARGET_ARM
1319 /* expand long long on stack */
1320 ST_FUNC
void lexpand_nr(void)
1324 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1326 vtop
->r2
= VT_CONST
;
1327 vtop
->type
.t
= VT_INT
| u
;
1328 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1329 if (v
== VT_CONST
) {
1330 vtop
[-1].c
.i
= vtop
->c
.i
;
1331 vtop
->c
.i
= vtop
->c
.i
>> 32;
1333 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1335 vtop
->r
= vtop
[-1].r
;
1336 } else if (v
> VT_CONST
) {
1340 vtop
->r
= vtop
[-1].r2
;
1341 vtop
[-1].r2
= VT_CONST
;
1342 vtop
[-1].type
.t
= VT_INT
| u
;
1347 /* build a long long from two ints */
1348 static void lbuild(int t
)
1350 gv2(RC_INT
, RC_INT
);
1351 vtop
[-1].r2
= vtop
[0].r
;
1352 vtop
[-1].type
.t
= t
;
1357 /* convert stack entry to register and duplicate its value in another
1359 static void gv_dup(void)
1366 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1373 /* stack: H L L1 H1 */
1383 /* duplicate value */
1388 #ifdef TCC_TARGET_X86_64
1389 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1399 load(r1
, &sv
); /* move r to r1 */
1401 /* duplicates value */
1407 /* Generate value test
1409 * Generate a test for any value (jump, comparison and integers) */
1410 ST_FUNC
int gvtst(int inv
, int t
)
1412 int v
= vtop
->r
& VT_VALMASK
;
1413 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1417 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1418 /* constant jmp optimization */
1419 if ((vtop
->c
.i
!= 0) != inv
)
1424 return gtst(inv
, t
);
1428 /* generate CPU independent (unsigned) long long operations */
1429 static void gen_opl(int op
)
1431 int t
, a
, b
, op1
, c
, i
;
1433 unsigned short reg_iret
= REG_IRET
;
1434 unsigned short reg_lret
= REG_LRET
;
1440 func
= TOK___divdi3
;
1443 func
= TOK___udivdi3
;
1446 func
= TOK___moddi3
;
1449 func
= TOK___umoddi3
;
1456 /* call generic long long function */
1457 vpush_global_sym(&func_old_type
, func
);
1462 vtop
->r2
= reg_lret
;
1470 //pv("gen_opl A",0,2);
1476 /* stack: L1 H1 L2 H2 */
1481 vtop
[-2] = vtop
[-3];
1484 /* stack: H1 H2 L1 L2 */
1485 //pv("gen_opl B",0,4);
1491 /* stack: H1 H2 L1 L2 ML MH */
1494 /* stack: ML MH H1 H2 L1 L2 */
1498 /* stack: ML MH H1 L2 H2 L1 */
1503 /* stack: ML MH M1 M2 */
1506 } else if (op
== '+' || op
== '-') {
1507 /* XXX: add non carry method too (for MIPS or alpha) */
1513 /* stack: H1 H2 (L1 op L2) */
1516 gen_op(op1
+ 1); /* TOK_xxxC2 */
1519 /* stack: H1 H2 (L1 op L2) */
1522 /* stack: (L1 op L2) H1 H2 */
1524 /* stack: (L1 op L2) (H1 op H2) */
1532 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1533 t
= vtop
[-1].type
.t
;
1537 /* stack: L H shift */
1539 /* constant: simpler */
1540 /* NOTE: all comments are for SHL. the other cases are
1541 done by swapping words */
1552 if (op
!= TOK_SAR
) {
1585 /* XXX: should provide a faster fallback on x86 ? */
1588 func
= TOK___ashrdi3
;
1591 func
= TOK___lshrdi3
;
1594 func
= TOK___ashldi3
;
1600 /* compare operations */
1606 /* stack: L1 H1 L2 H2 */
1608 vtop
[-1] = vtop
[-2];
1610 /* stack: L1 L2 H1 H2 */
1613 /* when values are equal, we need to compare low words. since
1614 the jump is inverted, we invert the test too. */
1617 else if (op1
== TOK_GT
)
1619 else if (op1
== TOK_ULT
)
1621 else if (op1
== TOK_UGT
)
1631 /* generate non equal test */
1637 /* compare low. Always unsigned */
1641 else if (op1
== TOK_LE
)
1643 else if (op1
== TOK_GT
)
1645 else if (op1
== TOK_GE
)
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
    return (a ^ b) >> 63 ? -x : x;
}
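/* Signed 64-bit division done on unsigned operands: negate any negative
   input (two's complement), perform the unsigned divide, then negate the
   quotient when exactly one operand was negative.  This reproduces C's
   truncation toward zero, e.g. -7 / 2 -> 7 / 2 = 3, sign restored -> -3. */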
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
}
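/* Signed "less than" on unsigned operands: XOR-ing each value with 1<<63
   flips the sign bit, which biases the values by 2^63 and maps the signed
   range [-2^63, 2^63-1] monotonically onto [0, 2^64-1], so a plain unsigned
   compare yields the signed ordering (e.g. -1 becomes 0x7fff...f and
   correctly compares below 0, which becomes 0x8000...0). */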
1667 /* handle integer constant optimizations and various machine
1669 static void gen_opic(int op
)
1671 SValue
*v1
= vtop
- 1;
1673 int t1
= v1
->type
.t
& VT_BTYPE
;
1674 int t2
= v2
->type
.t
& VT_BTYPE
;
1675 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1676 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1677 uint64_t l1
= c1
? v1
->c
.i
: 0;
1678 uint64_t l2
= c2
? v2
->c
.i
: 0;
1679 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1681 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1682 l1
= ((uint32_t)l1
|
1683 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1684 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1685 l2
= ((uint32_t)l2
|
1686 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1690 case '+': l1
+= l2
; break;
1691 case '-': l1
-= l2
; break;
1692 case '&': l1
&= l2
; break;
1693 case '^': l1
^= l2
; break;
1694 case '|': l1
|= l2
; break;
1695 case '*': l1
*= l2
; break;
1702 /* if division by zero, generate explicit division */
1705 tcc_error("division by zero in constant");
1709 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1710 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1711 case TOK_UDIV
: l1
= l1
/ l2
; break;
1712 case TOK_UMOD
: l1
= l1
% l2
; break;
1715 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1716 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1718 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1721 case TOK_ULT
: l1
= l1
< l2
; break;
1722 case TOK_UGE
: l1
= l1
>= l2
; break;
1723 case TOK_EQ
: l1
= l1
== l2
; break;
1724 case TOK_NE
: l1
= l1
!= l2
; break;
1725 case TOK_ULE
: l1
= l1
<= l2
; break;
1726 case TOK_UGT
: l1
= l1
> l2
; break;
1727 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1728 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1729 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1730 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1732 case TOK_LAND
: l1
= l1
&& l2
; break;
1733 case TOK_LOR
: l1
= l1
|| l2
; break;
1737 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1738 l1
= ((uint32_t)l1
|
1739 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1743 /* if commutative ops, put c2 as constant */
1744 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1745 op
== '|' || op
== '*')) {
1747 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1748 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1750 if (!const_wanted
&&
1752 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1753 (l1
== -1 && op
== TOK_SAR
))) {
1754 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1756 } else if (!const_wanted
&&
1757 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1759 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
1760 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1761 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1766 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1769 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1770 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1773 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
1774 /* filter out NOP operations like x*1, x-0, x&-1... */
1776 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1777 /* try to use shifts instead of muls or divs */
1778 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1787 else if (op
== TOK_PDIV
)
1793 } else if (c2
&& (op
== '+' || op
== '-') &&
1794 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1795 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1796 /* symbol + constant case */
1800 /* The backends can't always deal with addends to symbols
1801 larger than +-1<<31. Don't construct such. */
1808 /* call low level op generator */
1809 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
1810 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
1818 /* generate a floating point operation with constant propagation */
1819 static void gen_opif(int op
)
1823 #if defined _MSC_VER && defined _AMD64_
1824 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
1831 /* currently, we cannot do computations with forward symbols */
1832 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1833 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1835 if (v1
->type
.t
== VT_FLOAT
) {
1838 } else if (v1
->type
.t
== VT_DOUBLE
) {
1846 /* NOTE: we only do constant propagation if finite number (not
1847 NaN or infinity) (ANSI spec) */
1848 if (!ieee_finite(f1
) || !ieee_finite(f2
))
1852 case '+': f1
+= f2
; break;
1853 case '-': f1
-= f2
; break;
1854 case '*': f1
*= f2
; break;
1858 tcc_error("division by zero in constant");
1863 /* XXX: also handles tests ? */
1867 /* XXX: overflow test ? */
1868 if (v1
->type
.t
== VT_FLOAT
) {
1870 } else if (v1
->type
.t
== VT_DOUBLE
) {
1882 static int pointed_size(CType
*type
)
1885 return type_size(pointed_type(type
), &align
);
1888 static void vla_runtime_pointed_size(CType
*type
)
1891 vla_runtime_type_size(pointed_type(type
), &align
);
1894 static inline int is_null_pointer(SValue
*p
)
1896 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
1898 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
1899 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
1900 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
1901 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
1904 static inline int is_integer_btype(int bt
)
1906 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
1907 bt
== VT_INT
|| bt
== VT_LLONG
);
1910 /* check types for comparison or subtraction of pointers */
1911 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
1913 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
1916 /* null pointers are accepted for all comparisons as gcc */
1917 if (is_null_pointer(p1
) || is_null_pointer(p2
))
1921 bt1
= type1
->t
& VT_BTYPE
;
1922 bt2
= type2
->t
& VT_BTYPE
;
1923 /* accept comparison between pointer and integer with a warning */
1924 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
1925 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
1926 tcc_warning("comparison between pointer and integer");
1930 /* both must be pointers or implicit function pointers */
1931 if (bt1
== VT_PTR
) {
1932 type1
= pointed_type(type1
);
1933 } else if (bt1
!= VT_FUNC
)
1934 goto invalid_operands
;
1936 if (bt2
== VT_PTR
) {
1937 type2
= pointed_type(type2
);
1938 } else if (bt2
!= VT_FUNC
) {
1940 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
1942 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
1943 (type2
->t
& VT_BTYPE
) == VT_VOID
)
1947 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1948 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1949 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
1950 /* gcc-like error if '-' is used */
1952 goto invalid_operands
;
1954 tcc_warning("comparison of distinct pointer types lacks a cast");
1958 /* generic gen_op: handles types problems */
1959 ST_FUNC
void gen_op(int op
)
1961 int u
, t1
, t2
, bt1
, bt2
, t
;
1965 t1
= vtop
[-1].type
.t
;
1966 t2
= vtop
[0].type
.t
;
1967 bt1
= t1
& VT_BTYPE
;
1968 bt2
= t2
& VT_BTYPE
;
1970 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
1971 tcc_error("operation on a struct");
1972 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
1973 if (bt2
== VT_FUNC
) {
1974 mk_pointer(&vtop
->type
);
1977 if (bt1
== VT_FUNC
) {
1979 mk_pointer(&vtop
->type
);
1984 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
1985 /* at least one operand is a pointer */
1986 /* relational op: must be both pointers */
1987 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
1988 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
1989 /* pointers are handled are unsigned */
1991 t
= VT_LLONG
| VT_UNSIGNED
;
1993 t
= VT_INT
| VT_UNSIGNED
;
1997 /* if both pointers, then it must be the '-' op */
1998 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2000 tcc_error("cannot use pointers here");
2001 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2002 /* XXX: check that types are compatible */
2003 if (vtop
[-1].type
.t
& VT_VLA
) {
2004 vla_runtime_pointed_size(&vtop
[-1].type
);
2006 vpushi(pointed_size(&vtop
[-1].type
));
2010 /* set to integer type */
2012 vtop
->type
.t
= VT_LLONG
;
2014 vtop
->type
.t
= VT_INT
;
2019 /* exactly one pointer : must be '+' or '-'. */
2020 if (op
!= '-' && op
!= '+')
2021 tcc_error("cannot use pointers here");
2022 /* Put pointer as first operand */
2023 if (bt2
== VT_PTR
) {
2025 t
= t1
, t1
= t2
, t2
= t
;
2028 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2029 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2032 type1
= vtop
[-1].type
;
2033 type1
.t
&= ~VT_ARRAY
;
2034 if (vtop
[-1].type
.t
& VT_VLA
)
2035 vla_runtime_pointed_size(&vtop
[-1].type
);
2037 u
= pointed_size(&vtop
[-1].type
);
2039 tcc_error("unknown array element size");
2043 /* XXX: cast to int ? (long long case) */
2049 /* #ifdef CONFIG_TCC_BCHECK
2050 The main reason to removing this code:
2057 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2058 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2060 When this code is on. then the output looks like
2062 v+(i-j) = 0xbff84000
2064 /* if evaluating constant expression, no code should be
2065 generated, so no bound check */
2066 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2067 /* if bounded pointers, we generate a special code to
2074 gen_bounded_ptr_add();
2080 /* put again type if gen_opic() swaped operands */
2083 } else if (is_float(bt1
) || is_float(bt2
)) {
2084 /* compute bigger type and do implicit casts */
2085 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2087 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2092 /* floats can only be used for a few operations */
2093 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2094 (op
< TOK_ULT
|| op
> TOK_GT
))
2095 tcc_error("invalid operands for binary operation");
2097 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2098 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2099 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2102 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2103 /* cast to biggest op */
2105 /* convert to unsigned if it does not fit in a long long */
2106 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2107 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2111 /* integer operations */
2113 /* convert to unsigned if it does not fit in an integer */
2114 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2115 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2118 /* XXX: currently, some unsigned operations are explicit, so
2119 we modify them here */
2120 if (t
& VT_UNSIGNED
) {
2127 else if (op
== TOK_LT
)
2129 else if (op
== TOK_GT
)
2131 else if (op
== TOK_LE
)
2133 else if (op
== TOK_GE
)
2141 /* special case for shifts and long long: we keep the shift as
2143 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2150 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2151 /* relational op: the result is an int */
2152 vtop
->type
.t
= VT_INT
;
2157 // Make sure that we have converted to an rvalue:
2158 if (vtop
->r
& VT_LVAL
)
2159 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2162 #ifndef TCC_TARGET_ARM
2163 /* generic itof for unsigned long long case */
2164 static void gen_cvt_itof1(int t
)
2166 #ifdef TCC_TARGET_ARM64
2169 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2170 (VT_LLONG
| VT_UNSIGNED
)) {
2173 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2174 #if LDOUBLE_SIZE != 8
2175 else if (t
== VT_LDOUBLE
)
2176 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2179 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2183 vtop
->r
= reg_fret(t
);
2191 /* generic ftoi for unsigned long long case */
2192 static void gen_cvt_ftoi1(int t
)
2194 #ifdef TCC_TARGET_ARM64
2199 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2200 /* not handled natively */
2201 st
= vtop
->type
.t
& VT_BTYPE
;
2203 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2204 #if LDOUBLE_SIZE != 8
2205 else if (st
== VT_LDOUBLE
)
2206 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2209 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2214 vtop
->r2
= REG_LRET
;
2221 /* force char or short cast */
2222 static void force_charshort_cast(int t
)
2226 /* XXX: add optimization if lvalue : just change type and offset */
2231 if (t
& VT_UNSIGNED
) {
2232 vpushi((1 << bits
) - 1);
2235 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2241 /* result must be signed or the SAR is converted to an SHL
2242 This was not the case when "t" was a signed short
2243 and the last value on the stack was an unsigned int */
2244 vtop
->type
.t
&= ~VT_UNSIGNED
;
2250 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2251 static void gen_cast_s(int t
)
2259 static void gen_cast(CType
*type
)
2261 int sbt
, dbt
, sf
, df
, c
, p
;
2263 /* special delayed cast for char/short */
2264 /* XXX: in some cases (multiple cascaded casts), it may still
2266 if (vtop
->r
& VT_MUSTCAST
) {
2267 vtop
->r
&= ~VT_MUSTCAST
;
2268 force_charshort_cast(vtop
->type
.t
);
2271 /* bitfields first get cast to ints */
2272 if (vtop
->type
.t
& VT_BITFIELD
) {
2276 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2277 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2282 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2283 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2285 /* constant case: we can do it now */
2286 /* XXX: in ISOC, cannot do it if error in convert */
2287 if (sbt
== VT_FLOAT
)
2288 vtop
->c
.ld
= vtop
->c
.f
;
2289 else if (sbt
== VT_DOUBLE
)
2290 vtop
->c
.ld
= vtop
->c
.d
;
2293 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2294 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2295 vtop
->c
.ld
= vtop
->c
.i
;
2297 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2299 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2300 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2302 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2305 if (dbt
== VT_FLOAT
)
2306 vtop
->c
.f
= (float)vtop
->c
.ld
;
2307 else if (dbt
== VT_DOUBLE
)
2308 vtop
->c
.d
= (double)vtop
->c
.ld
;
2309 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2310 vtop
->c
.i
= vtop
->c
.ld
;
2311 } else if (sf
&& dbt
== VT_BOOL
) {
2312 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2315 vtop
->c
.i
= vtop
->c
.ld
;
2316 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2318 else if (sbt
& VT_UNSIGNED
)
2319 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2321 else if (sbt
== VT_PTR
)
2324 else if (sbt
!= VT_LLONG
)
2325 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2326 -(vtop
->c
.i
& 0x80000000));
2328 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2330 else if (dbt
== VT_BOOL
)
2331 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2333 else if (dbt
== VT_PTR
)
2336 else if (dbt
!= VT_LLONG
) {
2337 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2338 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2341 if (!(dbt
& VT_UNSIGNED
))
2342 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2345 } else if (p
&& dbt
== VT_BOOL
) {
2349 /* non constant case: generate code */
2351 /* convert from fp to fp */
2354 /* convert int to fp */
2357 /* convert fp to int */
2358 if (dbt
== VT_BOOL
) {
2362 /* we handle char/short/etc... with generic code */
2363 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2364 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2368 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2369 /* additional cast for char/short... */
2375 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2376 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2377 /* scalar to long long */
2378 /* machine independent conversion */
2380 /* generate high word */
2381 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2385 if (sbt
== VT_PTR
) {
2386 /* cast from pointer to int before we apply
2387 shift operation, which pointers don't support*/
2394 /* patch second register */
2395 vtop
[-1].r2
= vtop
->r
;
2399 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2400 (dbt
& VT_BTYPE
) == VT_PTR
||
2401 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2402 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2403 (sbt
& VT_BTYPE
) != VT_PTR
&&
2404 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2405 /* need to convert from 32bit to 64bit */
2407 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2408 #if defined(TCC_TARGET_ARM64)
2410 #elif defined(TCC_TARGET_X86_64)
2412 /* x86_64 specific: movslq */
2414 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2421 } else if (dbt
== VT_BOOL
) {
2422 /* scalar to bool */
2425 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2426 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2427 if (sbt
== VT_PTR
) {
2428 vtop
->type
.t
= VT_INT
;
2429 tcc_warning("nonportable conversion from pointer to char/short");
2431 force_charshort_cast(dbt
);
2433 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2435 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2436 /* from long long: just take low order word */
2440 /* if lvalue and single word type, nothing to do because
2441 the lvalue already contains the real type size (see
2442 VT_LVAL_xxx constants) */
2446 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2447 /* if we are casting between pointer types,
2448 we must update the VT_LVAL_xxx size */
2449 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2450 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2455 /* return type size as known at compile time. Put alignment at 'a' */
2456 ST_FUNC
int type_size(CType
*type
, int *a
)
2461 bt
= type
->t
& VT_BTYPE
;
2462 if (bt
== VT_STRUCT
) {
2467 } else if (bt
== VT_PTR
) {
2468 if (type
->t
& VT_ARRAY
) {
2472 ts
= type_size(&s
->type
, a
);
2474 if (ts
< 0 && s
->c
< 0)
2482 } else if (bt
== VT_LDOUBLE
) {
2484 return LDOUBLE_SIZE
;
2485 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2486 #ifdef TCC_TARGET_I386
2487 #ifdef TCC_TARGET_PE
2492 #elif defined(TCC_TARGET_ARM)
2502 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2505 } else if (bt
== VT_SHORT
) {
2508 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2511 } else if (bt
== VT_ENUM
) {
2513 /* Enums might be incomplete, so don't just return '4' here. */
2514 return type
->ref
->c
;
2516 /* char, void, function, _Bool */
2522 /* push type size as known at runtime time on top of value stack. Put
2524 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2526 if (type
->t
& VT_VLA
) {
2527 type_size(&type
->ref
->type
, a
);
2528 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2530 vpushi(type_size(type
, a
));
2534 static void vla_sp_restore(void) {
2535 if (vlas_in_scope
) {
2536 gen_vla_sp_restore(vla_sp_loc
);
2540 static void vla_sp_restore_root(void) {
2541 if (vlas_in_scope
) {
2542 gen_vla_sp_restore(vla_sp_root_loc
);
2546 /* return the pointed type of t */
2547 static inline CType
*pointed_type(CType
*type
)
2549 return &type
->ref
->type
;
2552 /* modify type so that its it is a pointer to type. */
2553 ST_FUNC
void mk_pointer(CType
*type
)
2556 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2557 type
->t
= VT_PTR
| (type
->t
& ~VT_TYPE
);
2561 /* compare function types. OLD functions match any new functions */
2562 static int is_compatible_func(CType
*type1
, CType
*type2
)
2568 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2570 /* check func_call */
2571 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2573 /* XXX: not complete */
2574 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2576 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2578 while (s1
!= NULL
) {
2581 if (!is_compatible_parameter_types(&s1
->type
, &s2
->type
))
2591 /* return true if type1 and type2 are the same. If unqualified is
2592 true, qualifiers on the types are ignored.
2594 - enums are not checked as gcc __builtin_types_compatible_p ()
2596 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2600 t1
= type1
->t
& VT_TYPE
;
2601 t2
= type2
->t
& VT_TYPE
;
2603 /* strip qualifiers before comparing */
2604 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2605 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2607 /* Default Vs explicit signedness only matters for char */
2608 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2612 /* An enum is compatible with (unsigned) int. Ideally we would
2613 store the enums signedness in type->ref.a.<some_bit> and
2614 only accept unsigned enums with unsigned int and vice versa.
2615 But one of our callers (gen_assign_cast) always strips VT_UNSIGNED
2616 from pointer target types, so we can't add it here either. */
2617 if ((t1
& VT_BTYPE
) == VT_ENUM
) {
2619 if (type1
->ref
->a
.unsigned_enum
)
2622 if ((t2
& VT_BTYPE
) == VT_ENUM
) {
2624 if (type2
->ref
->a
.unsigned_enum
)
2627 /* XXX: bitfields ? */
2630 /* test more complicated cases */
2631 bt1
= t1
& VT_BTYPE
;
2632 if (bt1
== VT_PTR
) {
2633 type1
= pointed_type(type1
);
2634 type2
= pointed_type(type2
);
2635 return is_compatible_types(type1
, type2
);
2636 } else if (bt1
== VT_STRUCT
) {
2637 return (type1
->ref
== type2
->ref
);
2638 } else if (bt1
== VT_FUNC
) {
2639 return is_compatible_func(type1
, type2
);
2645 /* return true if type1 and type2 are exactly the same (including
2648 static int is_compatible_types(CType
*type1
, CType
*type2
)
2650 return compare_types(type1
,type2
,0);
2653 /* return true if type1 and type2 are the same (ignoring qualifiers).
2655 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
)
2657 return compare_types(type1
,type2
,1);
2660 /* print a type. If 'varstr' is not NULL, then the variable is also
2661 printed in the type */
2663 /* XXX: add array and function pointers */
2664 static void type_to_str(char *buf
, int buf_size
,
2665 CType
*type
, const char *varstr
)
2675 if (t
& VT_CONSTANT
)
2676 pstrcat(buf
, buf_size
, "const ");
2677 if (t
& VT_VOLATILE
)
2678 pstrcat(buf
, buf_size
, "volatile ");
2679 if ((t
& (VT_DEFSIGN
| VT_UNSIGNED
)) == (VT_DEFSIGN
| VT_UNSIGNED
))
2680 pstrcat(buf
, buf_size
, "unsigned ");
2681 else if (t
& VT_DEFSIGN
)
2682 pstrcat(buf
, buf_size
, "signed ");
2684 pstrcat(buf
, buf_size
, "extern ");
2686 pstrcat(buf
, buf_size
, "static ");
2688 pstrcat(buf
, buf_size
, "typedef ");
2690 pstrcat(buf
, buf_size
, "inline ");
2691 buf_size
-= strlen(buf
);
2722 tstr
= "long double";
2724 pstrcat(buf
, buf_size
, tstr
);
2728 if (bt
== VT_STRUCT
)
2732 pstrcat(buf
, buf_size
, tstr
);
2733 v
= type
->ref
->v
& ~SYM_STRUCT
;
2734 if (v
>= SYM_FIRST_ANOM
)
2735 pstrcat(buf
, buf_size
, "<anonymous>");
2737 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2741 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2742 pstrcat(buf
, buf_size
, "(");
2744 while (sa
!= NULL
) {
2745 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2746 pstrcat(buf
, buf_size
, buf1
);
2749 pstrcat(buf
, buf_size
, ", ");
2751 pstrcat(buf
, buf_size
, ")");
2756 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2757 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2760 pstrcpy(buf1
, sizeof(buf1
), "*");
2761 if (t
& VT_CONSTANT
)
2762 pstrcat(buf1
, buf_size
, "const ");
2763 if (t
& VT_VOLATILE
)
2764 pstrcat(buf1
, buf_size
, "volatile ");
2766 pstrcat(buf1
, sizeof(buf1
), varstr
);
2767 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2771 pstrcat(buf
, buf_size
, " ");
2772 pstrcat(buf
, buf_size
, varstr
);
/* verify type compatibility to store vtop in 'dt' type, and generate
   casts if needed */
static void gen_assign_cast(CType *dt)
    CType *st, *type1, *type2, tmp_type1, tmp_type2;
    char buf1[256], buf2[256];
    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (sbt == VT_VOID || dbt == VT_VOID) {
        if (sbt == VT_VOID && dbt == VT_VOID)
            /* It is OK if both are void */
            /* gcc accepts this program */
            tcc_error("cannot cast from/to void");
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    /* special cases for pointers */
    /* '0' can also be a pointer */
    if (is_null_pointer(vtop))
    /* accept implicit pointer to integer cast with warning */
    if (is_integer_btype(sbt)) {
        tcc_warning("assignment makes pointer from integer without a cast");
    type1 = pointed_type(dt);
    /* a function is implicitly a function pointer */
    if (sbt == VT_FUNC) {
        if ((type1->t & VT_BTYPE) != VT_VOID &&
            !is_compatible_types(pointed_type(dt), st))
            tcc_warning("assignment from incompatible pointer type");
    type2 = pointed_type(st);
    if ((type1->t & VT_BTYPE) == VT_VOID ||
        (type2->t & VT_BTYPE) == VT_VOID) {
        /* void * can match anything */
        /* exact type match, except for qualifiers */
        tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
        tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
        if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
            /* Like GCC, don't warn by default for mere changes
               in pointer target signedness.  Do warn for different
               base types, though, in particular for unsigned enums
               and signed int targets. */
            if ((tmp_type1.t & (VT_DEFSIGN | VT_UNSIGNED)) !=
                (tmp_type2.t & (VT_DEFSIGN | VT_UNSIGNED)) &&
                (tmp_type1.t & VT_BTYPE) == (tmp_type2.t & VT_BTYPE))
                tcc_warning("assignment from incompatible pointer type");
    /* check const and volatile */
    if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
        (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
        tcc_warning("assignment discards qualifiers from pointer target type");
    if (sbt == VT_PTR || sbt == VT_FUNC) {
        tcc_warning("assignment makes integer from pointer without a cast");
    } else if (sbt == VT_STRUCT) {
        goto case_VT_STRUCT;
    /* XXX: more tests */
    tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
    tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
    if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
        type_to_str(buf1, sizeof(buf1), st, NULL);
        type_to_str(buf2, sizeof(buf2), dt, NULL);
        tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
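
/* Illustrative example (not part of tcc itself): C input that triggers
   the diagnostics implemented in gen_assign_cast() above, assuming the
   default warning settings. */
#if 0
int demo_assign_cast(void)
{
    int n = 1;
    int *p = n;        /* "assignment makes pointer from integer without a cast" */
    const int k = 2;
    int *q = (int *)0;
    q = &k;            /* "assignment discards qualifiers from pointer target type" */
    void *v = &n;      /* accepted: void * matches any object pointer */
    return p == q || v == 0;
}
#endif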
/* store vtop in lvalue pushed on stack */
ST_FUNC void vstore(void)
    int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;
    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
         (sbt == VT_INT && dbt == VT_SHORT))
        && !(vtop->type.t & VT_BITFIELD)) {
        /* optimize char/short casts */
        delayed_cast = VT_MUSTCAST;
        vtop->type.t = ft & VT_TYPE;
        /* XXX: factorize */
        if (ft & VT_CONSTANT)
            tcc_warning("assignment of read-only location");
    if (!(ft & VT_BITFIELD))
        gen_assign_cast(&vtop[-1].type);
    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        /* XXX: optimize if small size */
        size = type_size(&vtop->type, &align);
        vtop->type.t = VT_PTR;
        /* address of memcpy() */
            vpush_global_sym(&func_old_type, TOK_memcpy8);
        else if (!(align & 3))
            vpush_global_sym(&func_old_type, TOK_memcpy4);
        /* Use memmove, rather than memcpy, as dest and src may be same: */
        vpush_global_sym(&func_old_type, TOK_memmove);
        vtop->type.t = VT_PTR;
        /* leave source on stack */
    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */
        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];
        bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
        bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_BITFIELD & ((1 << VT_STRUCT_SHIFT) - 1);
        if ((ft & VT_BTYPE) == VT_BOOL) {
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        /* duplicate destination */
        vtop[-1] = vtop[-2];
        /* mask and shift source */
        if ((ft & VT_BTYPE) != VT_BOOL) {
            if ((ft & VT_BTYPE) == VT_LLONG) {
                vpushll((1ULL << bit_size) - 1ULL);
                vpushi((1 << bit_size) - 1);
        /* load destination, mask and or with source */
        if ((ft & VT_BTYPE) == VT_LLONG) {
            vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
            vpushi(~(((1 << bit_size) - 1) << bit_pos));
        /* ... and discard */
#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
#ifdef TCC_TARGET_X86_64
        if ((ft & VT_BTYPE) == VT_LDOUBLE) {
        } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
        r = gv(rc); /* generate value */
        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            t = get_reg(RC_INT);
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            vtop[-1].r = t | VT_LVAL;
        /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
        if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
            int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
        if ((ft & VT_BTYPE) == VT_LLONG) {
            int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
            vtop[-1].type.t = load_type;
            /* convert to int to increment easily */
            vtop->type.t = addr_type;
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    vtop->r |= delayed_cast;
/* post defines POST/PRE add. c is the token ++ or -- */
ST_FUNC void inc(int post, int c)
    vdup(); /* save lvalue */
    gv_dup(); /* duplicate value */
    vpushi(c - TOK_MID);
    vstore(); /* store value */
    vpop(); /* if post op, return saved value */
ST_FUNC void parse_mult_str (CString *astr, const char *msg)
    /* read the string */
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(astr, tokc.str.data, -1);
    cstr_ccat(astr, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0. */
static int exact_log2p1(int i)
    for (ret = 1; i >= 1 << 8; ret += 8)
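
/* A minimal standalone sketch (not tcc's implementation) of the same
   contract, kept for reference: it returns log2(i)+1 when i is a power
   of two and 0 when i is 0, e.g. 1 -> 1, 8 -> 4, 16 -> 5. */
#if 0
static int exact_log2p1_ref(int i)
{
    int ret = 0;
    while (i > 0) {
        ret++;       /* one step per remaining bit position */
        i >>= 1;
    }
    return ret;      /* for a power of two this is log2(i) + 1 */
}
#endif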
/* Parse __attribute__((...)) GNUC extension. */
static void parse_attribute(AttributeDef *ad)
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
            parse_mult_str(&astr, "section name");
            ad->section = find_section(tcc_state, (char *)astr.data);
            parse_mult_str(&astr, "alias(\"target\")");
            ad->alias_target = /* save string as token, for later */
                tok_alloc((char*)astr.data, astr.size-1)->tok;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            parse_mult_str(&astr,
                           "visibility(\"default|hidden|internal|protected\")");
            if (!strcmp (astr.data, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr.data, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr.data, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr.data, "protected"))
                ad->a.visibility = STV_PROTECTED;
                expect("visibility(\"default|hidden|internal|protected\")");
            if (n <= 0 || (n & (n - 1)) != 0)
                tcc_error("alignment must be a positive power of two");
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            /* currently, no need to handle it because tcc does not
               track unused objects */
            /* currently, no need to handle it because tcc does not
               track unused objects */
            ad->f.func_call = FUNC_CDECL;
            ad->f.func_call = FUNC_STDCALL;
#ifdef TCC_TARGET_I386
            ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            ad->f.func_call = FUNC_FASTCALLW;
                ad->attr_mode = VT_LLONG + 1;
                ad->attr_mode = VT_BYTE + 1;
                ad->attr_mode = VT_SHORT + 1;
                ad->attr_mode = VT_INT + 1;
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
            ad->a.dllexport = 1;
            ad->a.dllimport = 1;
            if (tcc_state->warn_unsupported)
                tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
                int parenthesis = 0;
                    else if (tok == ')')
                } while (parenthesis && tok != -1);
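
/* Illustrative example (not part of tcc): attribute spellings accepted
   by parse_attribute() above, assuming the matching targets/extensions
   are enabled in this build. */
#if 0
static int counter __attribute__((section(".mydata"), aligned(16)));
struct tag_hdr { char tag; int value; } __attribute__((packed));
int fast_add(int a, int b) __attribute__((fastcall));        /* i386 only */
void internal_fn(void) __attribute__((visibility("hidden")));
typedef int byte_int __attribute__((mode(QI)));              /* __mode__ mapping */
#endif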
static Sym * find_field (CType *type, int v)
    while ((s = s->next) != NULL) {
        if ((s->v & SYM_FIELD) &&
            (s->type.t & VT_BTYPE) == VT_STRUCT &&
            (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
            Sym *ret = find_field (&s->type, v);
static void struct_add_offset (Sym *s, int offset)
    while ((s = s->next) != NULL) {
        if ((s->v & SYM_FIELD) &&
            (s->type.t & VT_BTYPE) == VT_STRUCT &&
            (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
            struct_add_offset(s->type.ref, offset);
static void struct_layout(CType *type, AttributeDef *ad)
    int align, maxalign, offset, c, bit_pos, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    int packwarn = tcc_state->warn_gcc_compat;
    int typealign, bit_size, size;
    maxalign = 1 << (ad->a.aligned - 1);
    prevbt = VT_STRUCT; /* make it never match */
    for (f = type->ref->next; f; f = f->next) {
        size = type_size(&f->type, &typealign);
        if (f->type.t & VT_BITFIELD)
            bit_size = (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
        if (bit_size == 0 && pcc) {
            /* Zero-width bit-fields in PCC mode aren't affected
               by any packing (attribute or pragma). */
        } else if (f->r > 1) {
        } else if (ad->a.packed || f->r == 1) {
            /* Packed fields or packed records don't let the base type
               influence the records type alignment. */
        if (type->ref->type.t == VT_UNION) {
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
            /* Bit position is already zero from our caller. */
        } else if (bit_size < 0) {
            int addbytes = pcc ? (bit_pos + 7) >> 3 : 0;
            c = (c + addbytes + align - 1) & -align;
            /* A bit-field.  Layout is more complicated.  There are two
               options TCC implements: PCC compatible and MS compatible
               (PCC compatible is what GCC uses for almost all targets).
               In PCC layout the overall size of the struct (in c) is
               _excluding_ the current run of bit-fields (that is,
               there's at least additional bit_pos bits after c).  In
               MS layout c does include the current run of bit-fields.
               This matters for calculating the natural alignment buckets. */
            /* 'align' will be used to influence records alignment,
               so it's the max of specified and type alignment, except
               in certain cases that depend on the mode. */
            if (align < typealign)
            /* In PCC layout a non-packed bit-field is placed adjacent
               to the preceding bit-fields, except if it would overflow
               its container (depending on base type) or it's a zero-width
               bit-field.  Packed non-zero-width bit-fields are always
               placed adjacent. */
                int ofs = (c * 8 + bit_pos) % (typealign * 8);
                int ofs2 = ofs + bit_size + (typealign * 8) - 1;
                if (bit_size == 0 ||
                    (ofs2 / (typealign * 8)) > (size / typealign))) {
                    c = (c + ((bit_pos + 7) >> 3) + typealign - 1) & -typealign;
                } else if (bit_pos + bit_size > size * 8) {
                if (bit_pos + bit_size > size * 8) {
                    c += 1, bit_pos = 0;
                    if ((ad->a.packed || f->r) && packwarn) {
                        tcc_warning("struct layout not compatible with GCC (internal limitation)");
            /* In PCC layout named bit-fields influence the alignment
               of the containing struct using the base types alignment,
               except for packed fields (which here have correct
               align/typealign). */
            if ((f->v & SYM_FIRST_ANOM))
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8) ||
                    (bit_size > 0) == (bt != prevbt)) {
                    c = (c + typealign - 1) & -typealign;
                /* In MS bitfield mode a bit-field run always uses
                   at least as many bits as the underlying type.
                   To start a new run it's also required that this
                   or the last bit-field had non-zero width. */
                if (bit_size || prev_bit_size)
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again). */
                if (bit_size == 0 && prevbt != bt)
                prev_bit_size = bit_size;
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
            if (pcc && bit_pos >= size * 8) {
                bit_pos -= size * 8;
        if (align > maxalign)
        printf("set field %s offset=%d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos=%d size=%d",
                   (f->type.t >> VT_STRUCT_SHIFT) & 0x3f,
                   (f->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
        if (f->v & SYM_FIRST_ANOM && (f->type.t & VT_BTYPE) == VT_STRUCT) {
            /* An anonymous struct/union.  Adjust member offsets
               to reflect the real offset of our containing struct.
               Also set the offset of this anon member inside
               the outer struct to be zero.  Via this it
               works when accessing the field offset directly
               (from base object), as well as when recursing
               members in initializer handling. */
            int v2 = f->type.ref->v;
            if (!(v2 & SYM_FIELD) &&
                (v2 & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                /* This happens only with MS extensions.  The
                   anon member has a named struct type, so it
                   potentially is shared with other references.
                   We need to unshare members so we can modify
                   them. */
                f->type.ref = sym_push(anon_sym++ | SYM_FIELD,
                                       &f->type.ref->type, 0,
                                       f->type.ref->c);
                pps = &f->type.ref->next;
                while ((ass = ass->next) != NULL) {
                    *pps = sym_push(ass->v, &ass->type, 0, ass->c);
                    pps = &((*pps)->next);
            struct_add_offset(f->type.ref, offset);
    /* store size and alignment */
    type->ref->c = (c + (pcc ? (bit_pos + 7) >> 3 : 0)
                    + maxalign - 1) & -maxalign;
    type->ref->r = maxalign;
    if (offset + size > type->ref->c && type->ref->c)
        tcc_warning("will touch memory past end of the struct (internal limitation)");
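
/* Illustrative example (not part of tcc): a bit-field struct where the
   PCC (gcc-compatible) and MS layout modes handled above typically
   differ.  With pcc layout 'b' can be packed next to 'a'; with
   ms_bitfields a change of base type starts a new, int-aligned run. */
#if 0
struct bf_demo {
    char a : 3;
    int  b : 5;
    char c : 2;
};
#endif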
/* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
static void struct_decl(CType *type, int u)
    int v, c, size, align, flexible, alignoverride;
    int bit_size, bsize, bt;
    AttributeDef ad, ad1;
    memset(&ad, 0, sizeof ad);
    parse_attribute(&ad);
    /* struct already defined ? return it */
        expect("struct/union/enum name");
    if (s && (s->sym_scope == local_scope || tok != '{')) {
        if ((s->type.t & (VT_BTYPE|VT_UNION)) != u)
            tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
    /* Record the original enum/struct/union token. */
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
    type->t = u & VT_BTYPE; /* VT_UNION becomes VT_STRUCT */
            tcc_error("struct/union/enum already defined");
        /* cannot be empty */
            /* non empty enums are not allowed */
            CType *t = &int_type;
                expect("identifier");
            if (ss && !local_stack)
                tcc_error("redefinition of enumerator '%s'",
                          get_tok_str(v, NULL));
            /* We really want to support long long enums
               on i386 as well, but the Sym structure only
               holds a 'long' for associated constants,
               and enlarging it would bump its size (no
               available padding).  So punt for now. */
            if (c != (int)c && (unsigned long)c != (unsigned int)c)
                seen_wide = 1, t = &size_type;
            /* enum symbols have static storage */
            ss = sym_push(v, t, VT_CONST, c);
            ss->type.t |= VT_STATIC;
            /* NOTE: we accept a trailing comma */
            s->a.unsigned_enum = 1;
        s->c = type_size(seen_wide ? &size_type : &int_type, &align);
        while (tok != '}') {
            if (!parse_btype(&btype, &ad1)) {
                tcc_error("flexible array member '%s' not at the end of struct",
                          get_tok_str(v, NULL));
                type_decl(&type1, &ad1, &v, TYPE_DIRECT);
                    if ((type1.t & VT_BTYPE) != VT_STRUCT)
                        expect("identifier");
                        int v = btype.ref->v;
                        if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                            if (tcc_state->ms_extensions == 0)
                                expect("identifier");
                if (type_size(&type1, &align) < 0) {
                    if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
                        tcc_error("field '%s' has incomplete type",
                                  get_tok_str(v, NULL));
                if ((type1.t & VT_BTYPE) == VT_FUNC ||
                    (type1.t & VT_STORAGE))
                    tcc_error("invalid type for '%s'",
                              get_tok_str(v, NULL));
                bit_size = expr_const();
                /* XXX: handle v = 0 case for messages */
                    tcc_error("negative width in bit-field '%s'",
                              get_tok_str(v, NULL));
                if (v && bit_size == 0)
                    tcc_error("zero width for bit-field '%s'",
                              get_tok_str(v, NULL));
                parse_attribute(&ad1);
            size = type_size(&type1, &align);
            /* Only remember non-default alignment. */
            if (ad1.a.aligned) {
                int speca = 1 << (ad1.a.aligned - 1);
                alignoverride = speca;
            } else if (ad1.a.packed || ad.a.packed) {
            } else if (*tcc_state->pack_stack_ptr) {
                if (align >= *tcc_state->pack_stack_ptr)
                    alignoverride = *tcc_state->pack_stack_ptr;
            if (bit_size >= 0) {
                bt = type1.t & VT_BTYPE;
                    tcc_error("bitfields must have scalar type");
                if (bit_size > bsize) {
                    tcc_error("width of '%s' exceeds its type",
                              get_tok_str(v, NULL));
                } else if (bit_size == bsize) {
                    /* no need for bit fields */
                    type1.t |= VT_BITFIELD |
                        (0 << VT_STRUCT_SHIFT) |
                        (bit_size << (VT_STRUCT_SHIFT + 6));
            if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                /* Remember we've seen a real field to check
                   for placement of flexible array member. */
                /* If member is a struct or bit-field, enforce
                   placing into the struct (as anonymous). */
                ((type1.t & VT_BTYPE) == VT_STRUCT ||
                ss = sym_push(v | SYM_FIELD, &type1, alignoverride, 0);
            if (tok == ';' || tok == TOK_EOF)
        parse_attribute(&ad);
        struct_layout(type, &ad);
/* Add type qualifiers to a type. If the type is an array then the qualifiers
   are added to the element type, copied because it could be a typedef. */
static void parse_btype_qualify(CType *type, int qualifiers)
    while (type->t & VT_ARRAY) {
        type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
        type = &type->ref->type;
    type->t |= qualifiers;
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it. */
static int parse_btype(CType *type, AttributeDef *ad)
    int t, u, bt, st, type_found, typespec_found, g;
    memset(ad, 0, sizeof(AttributeDef));
        /* currently, we really ignore extension */
            if (u == VT_SHORT || u == VT_LONG) {
                if (st != -1 || (bt != -1 && bt != VT_INT))
                    tmbt: tcc_error("too many basic types");
                if (bt != -1 || (st != -1 && u != VT_INT))
            t = (t & ~VT_BTYPE) | u;
            if ((t & VT_BTYPE) == VT_DOUBLE) {
#ifndef TCC_TARGET_PE
                t = (t & ~VT_BTYPE) | VT_LDOUBLE;
            } else if ((t & VT_BTYPE) == VT_LONG) {
                t = (t & ~VT_BTYPE) | VT_LLONG;
#ifdef TCC_TARGET_ARM64
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            if ((t & VT_BTYPE) == VT_LONG) {
#ifdef TCC_TARGET_PE
                t = (t & ~VT_BTYPE) | VT_DOUBLE;
                t = (t & ~VT_BTYPE) | VT_LDOUBLE;
            struct_decl(&type1, VT_ENUM);
            type->ref = type1.ref;
            struct_decl(&type1, VT_STRUCT);
            struct_decl(&type1, VT_UNION);
        /* type modifiers */
            parse_btype_qualify(type, VT_CONSTANT);
            parse_btype_qualify(type, VT_VOLATILE);
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
                tcc_error("multiple storage classes");
        /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->attr_mode) {
                u = ad->attr_mode -1;
                t = (t & ~VT_BTYPE) | u;
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            if (!s || !(s->type.t & VT_TYPEDEF))
            u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
            type->t = (s->type.t & ~VT_TYPEDEF) | u;
            type->ref = s->type.ref;
                parse_btype_qualify(type, t);
            /* get attributes from typedef */
            if (s->a.aligned && 0 == ad->a.aligned)
                ad->a.aligned = s->a.aligned;
            if (s->f.func_call && 0 == ad->f.func_call)
                ad->f.func_call = s->f.func_call;
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
    /* long is never used as type */
    if ((t & VT_BTYPE) == VT_LONG)
#if PTR_SIZE == 8 && !defined TCC_TARGET_PE
        t = (t & ~VT_BTYPE) | VT_LLONG;
        t = (t & ~VT_BTYPE) | VT_INT;
/* convert a function parameter type (array to pointer and function to
   function pointer) */
static inline void convert_parameter_type(CType *pt)
    /* remove const and volatile qualifiers (XXX: const could be used
       to indicate a const function parameter) */
    pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
    /* array must be transformed to pointer according to ANSI C */
    if ((pt->t & VT_BTYPE) == VT_FUNC) {
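
/* Illustrative example (not part of tcc): the two prototypes below are
   equivalent after the parameter adjustments performed above - the
   array parameter decays to a pointer and the function parameter to a
   function pointer. */
#if 0
void takes_params(int a[10], int cb(void));
void takes_params(int *a, int (*cb)(void));
#endif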
ST_FUNC void parse_asm_str(CString *astr)
    parse_mult_str(astr, "string constant");
/* Parse an asm label and return the token */
static int asm_label_instr(void)
    parse_asm_str(&astr);
    printf("asm_alias: \"%s\"\n", (char *)astr.data);
    v = tok_alloc(astr.data, astr.size - 1)->tok;
static int post_type(CType *type, AttributeDef *ad, int storage, int td)
    int n, l, t1, arg_size, align;
    Sym **plast, *s, *first;
        /* function type, or recursive declarator (return if so) */
        if (td && !(td & TYPE_ABSTRACT))
        else if (parse_btype(&pt, &ad1))
                /* read param name and compute offset */
                if (l != FUNC_OLD) {
                    if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
                    type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
                    if ((pt.t & VT_BTYPE) == VT_VOID)
                        tcc_error("parameter declared as void");
                    arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
                        expect("identifier");
                    pt.t = VT_VOID; /* invalid type */
                convert_parameter_type(&pt);
                s = sym_push(n | SYM_FIELD, &pt, 0, 0);
                if (l == FUNC_NEW && tok == TOK_DOTS) {
                if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
                    tcc_error("invalid type");
        /* if no parameters, then old type prototype */
        /* NOTE: const is ignored in returned type as it has a special
           meaning in gcc / C++ */
        type->t &= ~VT_CONSTANT;
        /* some ancient pre-K&R C allows a function to return an array
           and the array brackets to be put after the arguments, such
           that "int c()[]" means something like "int[] c()" */
            skip(']'); /* only handle simple "[]" */
        /* we push an anonymous symbol which will contain the function prototype */
        ad->f.func_args = arg_size;
        ad->f.func_type = l;
        s = sym_push(SYM_FIELD, type, 0, 0);
    } else if (tok == '[') {
        int saved_nocode_wanted = nocode_wanted;
        /* array definition */
        if (tok == TOK_RESTRICT1)
            if (!local_stack || (storage & VT_STATIC))
                vpushi(expr_const());
                /* VLAs (which can only happen with local_stack && !VT_STATIC)
                   length must always be evaluated, even under nocode_wanted,
                   so that its size slot is initialized (e.g. under sizeof). */
            if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
                    tcc_error("invalid array size");
                if (!is_integer_btype(vtop->type.t & VT_BTYPE))
                    tcc_error("size of variable length array should be an integer");
        /* parse next post type */
        post_type(type, ad, storage, 0);
        if (type->t == VT_FUNC)
            tcc_error("declaration of an array of functions");
        t1 |= type->t & VT_VLA;
            loc -= type_size(&int_type, &align);
            vla_runtime_type_size(type, &align);
            vset(&int_type, VT_LOCAL|VT_LVAL, n);
        nocode_wanted = saved_nocode_wanted;
        /* we push an anonymous symbol which will contain the array
           element type */
        s = sym_push(SYM_FIELD, type, 0, n);
        type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
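
/* Illustrative example (not part of tcc): declarator forms handled by
   post_type()/type_decl() - parameter lists, nested array postfixes and
   run-time sized VLAs. */
#if 0
int (*handler)(int code, char *msg);   /* pointer to function */
int matrix[3][4];                      /* array postfixes applied right to left */
void use_vla(int n) { int buf[n]; (void)buf; }  /* VLA: length evaluated at run time */
#endif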
/* Parse a type declarator (except basic type), and return the type
   in 'type'. 'td' is a bitmask indicating which kind of type decl is
   expected. 'type' should contain the basic type. 'ad' is the
   attribute definition of the basic type. It can be modified by
   type_decl().  If this (possibly abstract) declarator is a pointer chain
   it returns the innermost pointed to type (equals *type, but is a different
   pointer), otherwise returns type itself, that's used for recursive calls. */
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
    int qualifiers, storage;
    /* recursive type, remove storage bits first, apply them later again */
    storage = type->t & VT_STORAGE;
    type->t &= ~VT_STORAGE;
    while (tok == '*') {
                qualifiers |= VT_CONSTANT;
                qualifiers |= VT_VOLATILE;
            /* XXX: clarify attribute handling */
            case TOK_ATTRIBUTE1:
            case TOK_ATTRIBUTE2:
                parse_attribute(ad);
        type->t |= qualifiers;
        /* innermost pointed to type is the one for the first derivation */
        ret = pointed_type(type);
        /* This is possibly a parameter type list for abstract declarators
           ('int ()'), use post_type for testing this. */
        if (!post_type(type, ad, 0, td)) {
            /* It's not, so it's a nested declarator, and the post operations
               apply to the innermost pointed to type (if any). */
            /* XXX: this is not correct to modify 'ad' at this point, but
               the syntax is not clear */
            parse_attribute(ad);
            post = type_decl(type, ad, v, td);
    } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
        /* type identifier */
        if (!(td & TYPE_ABSTRACT))
            expect("identifier");
    post_type(post, ad, storage, 0);
    parse_attribute(ad);
/* compute the lvalue VT_LVAL_xxx needed to match type t. */
ST_FUNC int lvalue_type(int t)
    if (bt == VT_BYTE || bt == VT_BOOL)
    else if (bt == VT_SHORT)
    if (t & VT_UNSIGNED)
        r |= VT_LVAL_UNSIGNED;
/* indirection with full error checking and bound check */
ST_FUNC void indir(void)
    if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
    if (vtop->r & VT_LVAL)
    vtop->type = *pointed_type(&vtop->type);
    /* Arrays and functions are never lvalues */
    if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
        && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
        vtop->r |= lvalue_type(vtop->type.t);
        /* if bound checking, the referenced pointer must be checked */
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check)
            vtop->r |= VT_MUSTBOUND;
/* pass a parameter to a function and do type checking and casting */
static void gfunc_param_typed(Sym *func, Sym *arg)
    func_type = func->f.func_type;
    if (func_type == FUNC_OLD ||
        (func_type == FUNC_ELLIPSIS && arg == NULL)) {
        /* default casting : only need to convert float to double */
        if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
            gen_cast_s(VT_DOUBLE);
        } else if (vtop->type.t & VT_BITFIELD) {
            type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
            type.ref = vtop->type.ref;
    } else if (arg == NULL) {
        tcc_error("too many arguments to function");
        type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
        gen_assign_cast(&type);
/* parse an expression and return its type without any side effect.
   If UNRY we parse a unary expression, otherwise a full one. */
static void expr_type(CType *type, int unry)
/* parse an expression of the form '(type)' or '(expr)' and return its
   type. */
static void parse_expr_type(CType *type)
    if (parse_btype(type, &ad)) {
        type_decl(type, &ad, &n, TYPE_ABSTRACT);
static void parse_type(CType *type)
    if (!parse_btype(type, &ad)) {
    type_decl(type, &ad, &n, TYPE_ABSTRACT);
static void parse_builtin_params(int nc, const char *args)
    while ((c = *args++)) {
        case 'e': expr_eq(); continue;
        case 't': parse_type(&t); vpush(&t); continue;
        default: tcc_error("internal error"); break;
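
/* Illustrative example (not part of tcc): how the format string above
   maps to builtin arguments - 't' parses a type, 'e' an expression,
   matching the uses in unary() below. */
#if 0
int same    = __builtin_types_compatible_p(unsigned, unsigned int); /* "tt" -> 1 */
int isconst = __builtin_constant_p(6 * 7);                          /* "e"  -> 1 */
#endif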
ST_FUNC void unary(void)
    int n, t, align, size, r, sizeof_caller;
    sizeof_caller = in_sizeof;
    /* XXX: GCC 2.95.3 does not generate a table although it should be
       better here */
        vsetc(&type, VT_CONST, &tokc);
        t = VT_INT | VT_UNSIGNED;
        t = VT_LLONG | VT_UNSIGNED;
    case TOK___FUNCTION__:
        goto tok_identifier;
        /* special function name identifier */
        len = strlen(funcname) + 1;
        /* generate char[len] type */
        vpush_ref(&type, data_section, data_section->data_offset, len);
        ptr = section_ptr_add(data_section, len);
        memcpy(ptr, funcname, len);
#ifdef TCC_TARGET_PE
        t = VT_SHORT | VT_UNSIGNED;
        /* string parsing */
        if (tcc_state->char_is_unsigned)
            t = VT_BYTE | VT_UNSIGNED;
        if (tcc_state->warn_write_strings)
        memset(&ad, 0, sizeof(AttributeDef));
        decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
        if (parse_btype(&type, &ad)) {
            type_decl(&type, &ad, &n, TYPE_ABSTRACT);
            /* check ISOC99 compound literal */
                /* data is allocated locally by default */
                /* all except arrays are lvalues */
                if (!(type.t & VT_ARRAY))
                    r |= lvalue_type(type.t);
                memset(&ad, 0, sizeof(AttributeDef));
                decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
                if (sizeof_caller) {
        } else if (tok == '{') {
            int saved_nocode_wanted = nocode_wanted;
                tcc_error("expected constant");
            /* save all registers */
            /* statement expression : we do not accept break/continue
               inside as GCC does.  We do retain the nocode_wanted state,
               as statement expressions can't ever be entered from the
               outside, so any reactivation of code emission (from labels
               or loop heads) can be disabled again after the end of it. */
            block(NULL, NULL, 1);
            nocode_wanted = saved_nocode_wanted;
        /* functions names must be treated as function pointers,
           except for unary '&' and sizeof. Since we consider that
           functions are not lvalues, we only have to handle it
           there and in function calls. */
        /* arrays can also be used although they are not lvalues */
        if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
            !(vtop->type.t & VT_ARRAY))
        mk_pointer(&vtop->type);
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            gen_cast_s(VT_BOOL);
            vtop->c.i = !vtop->c.i;
        } else if ((vtop->r & VT_VALMASK) == VT_CMP)
            vseti(VT_JMP, gvtst(1, 0));
        if ((vtop->type.t & VT_BTYPE) == VT_PTR)
            tcc_error("pointer not accepted for unary plus");
        /* In order to force cast, we add zero, except for floating point
           where we really need a noop (otherwise -0.0 will be transformed
           into +0.0). */
        if (!is_float(vtop->type.t)) {
        expr_type(&type, 1); // Perform an in_sizeof = 0;
        size = type_size(&type, &align);
        if (t == TOK_SIZEOF) {
            if (!(type.t & VT_VLA)) {
                    tcc_error("sizeof applied to an incomplete type");
                vla_runtime_type_size(&type, &align);
        vtop->type.t |= VT_UNSIGNED;
    case TOK_builtin_expect:
        /* __builtin_expect is a no-op for now */
        parse_builtin_params(0, "ee");
    case TOK_builtin_types_compatible_p:
        parse_builtin_params(0, "tt");
        vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
        vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
        n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
    case TOK_builtin_choose_expr:
    case TOK_builtin_constant_p:
        parse_builtin_params(1, "e");
        n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    case TOK_builtin_frame_address:
    case TOK_builtin_return_address:
        if (tok != TOK_CINT) {
            tcc_error("%s only takes positive integers",
                      tok1 == TOK_builtin_return_address ?
                      "__builtin_return_address" :
                      "__builtin_frame_address");
        level = (uint32_t)tokc.i;
        vset(&type, VT_LOCAL, 0); /* local frame */
            mk_pointer(&vtop->type);
            indir(); /* -> parent frame */
        if (tok1 == TOK_builtin_return_address) {
            // assume return address is just above frame pointer on stack
            mk_pointer(&vtop->type);
#ifdef TCC_TARGET_X86_64
#ifdef TCC_TARGET_PE
    case TOK_builtin_va_start:
        parse_builtin_params(0, "ee");
        r = vtop->r & VT_VALMASK;
            tcc_error("__builtin_va_start expects a local variable");
        vtop->type = char_pointer_type;
    case TOK_builtin_va_arg_types:
        parse_builtin_params(0, "t");
        vpushi(classify_x86_64_va_arg(&vtop->type));
#ifdef TCC_TARGET_ARM64
    case TOK___va_start: {
        parse_builtin_params(0, "ee");
        vtop->type.t = VT_VOID;
    case TOK___va_arg: {
        parse_builtin_params(0, "et");
    case TOK___arm64_clear_cache: {
        parse_builtin_params(0, "ee");
        vtop->type.t = VT_VOID;
    /* pre operations */
        t = vtop->type.t & VT_BTYPE;
            /* In IEEE negate(x) isn't subtract(0,x), but rather
               subtract(-0, x). */
                vtop->c.f = -1.0 * 0.0;
            else if (t == VT_DOUBLE)
                vtop->c.d = -1.0 * 0.0;
                vtop->c.ld = -1.0 * 0.0;
        goto tok_identifier;
        /* allow to take the address of a label */
        if (tok < TOK_UIDENT)
            expect("label identifier");
        s = label_find(tok);
            s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;
            s->type.t = VT_VOID;
            mk_pointer(&s->type);
            s->type.t |= VT_STATIC;
        vpushsym(&s->type, s);
        CType controlling_type;
        int has_default = 0;
        AttributeDef ad_tmp;
        TokenString *str = NULL;
        ParseState saved_parse_state;
        expr_type(&controlling_type, 1);
        if (controlling_type.t & VT_ARRAY)
            controlling_type.t = VT_PTR;
        controlling_type.t &= ~VT_CONSTANT;
            if (tok == TOK_DEFAULT) {
                    tcc_error("too many 'default'");
                parse_btype(&cur_type, &ad_tmp);
                type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
                if (compare_types(&controlling_type, &cur_type, 0)) {
                        tcc_error("type match twice");
                skip_or_save_block(&str);
                skip_or_save_block(NULL);
            else if (tok == ')')
        if (!has_match && !has_default) {
            type_to_str(buf, 256, &controlling_type, NULL);
            tcc_error("_Generic selector of type '%s' is not compatible with any association",
        save_parse_state(&saved_parse_state);
        begin_macro(str, 1);
        restore_parse_state(&saved_parse_state);
    // special qnan, snan and infinity values
        vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
        vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
        vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
            expect("identifier");
            const char *name = get_tok_str(t, NULL);
                tcc_error("'%s' undeclared", name);
            /* for simple function calls, we tolerate undeclared
               external reference to int() function */
            if (tcc_state->warn_implicit_function_declaration
#ifdef TCC_TARGET_PE
                /* people must be warned about using undeclared WINAPI functions
                   (which usually start with uppercase letter) */
                || (name[0] >= 'A' && name[0] <= 'Z')
                tcc_warning("implicit declaration of function '%s'", name);
            s = external_global_sym(t, &func_old_type, 0);
        /* A symbol that has a register is a local register variable,
           which starts out as VT_LOCAL value. */
        if ((r & VT_VALMASK) < VT_CONST)
            r = (r & ~VT_VALMASK) | VT_LOCAL;
        vset(&s->type, r, s->c);
        /* Point to s as backpointer (even without r&VT_SYM).
           Will be used by at least the x86 inline asm parser for
           regvars. */
        if (vtop->r & VT_SYM) {
    /* post operations */
        if (tok == TOK_INC || tok == TOK_DEC) {
        } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
            if (tok == TOK_ARROW)
            qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
            /* expect pointer on structure */
            if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
                expect("struct or union");
            if (tok == TOK_CDOUBLE)
                expect("field name");
            if (tok == TOK_CINT || tok == TOK_CUINT)
                expect("field name");
            s = find_field(&vtop->type, tok);
                tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
            /* add field offset to pointer */
            vtop->type = char_pointer_type; /* change type to 'char *' */
            /* change type to field type, and set to lvalue */
            vtop->type = s->type;
            vtop->type.t |= qualifiers;
            /* an array is never an lvalue */
            if (!(vtop->type.t & VT_ARRAY)) {
                vtop->r |= lvalue_type(vtop->type.t);
#ifdef CONFIG_TCC_BCHECK
                /* if bound checking, the referenced pointer must be checked */
                if (tcc_state->do_bounds_check && (vtop->r & VT_VALMASK) != VT_LOCAL)
                    vtop->r |= VT_MUSTBOUND;
        } else if (tok == '[') {
        } else if (tok == '(') {
            int nb_args, ret_nregs, ret_align, regsize, variadic;
            if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
                /* pointer test (no array accepted) */
                if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
                    vtop->type = *pointed_type(&vtop->type);
                    if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
                    expect("function pointer");
                vtop->r &= ~VT_LVAL; /* no lvalue */
            /* get return type */
            sa = s->next; /* first parameter */
            nb_args = regsize = 0;
            /* compute first implicit argument if a structure is returned */
            if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
                variadic = (s->f.func_type == FUNC_ELLIPSIS);
                ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
                                       &ret_align, &regsize);
                    /* get some space for the returned structure */
                    size = type_size(&s->type, &align);
#ifdef TCC_TARGET_ARM64
                    /* On arm64, a small struct is returned in registers.
                       It is much easier to write it to memory if we know
                       that we are allowed to write some extra bytes, so
                       round the allocated space up to a power of 2: */
                    while (size & (size - 1))
                        size = (size | (size - 1)) + 1;
                    loc = (loc - size) & -align;
                    ret.r = VT_LOCAL | VT_LVAL;
                    /* pass it as 'int' to avoid structure arg passing
                       problems */
                    vseti(VT_LOCAL, loc);
                /* return in register */
                if (is_float(ret.type.t)) {
                    ret.r = reg_fret(ret.type.t);
#ifdef TCC_TARGET_X86_64
                    if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
#ifndef TCC_TARGET_ARM64
#ifdef TCC_TARGET_X86_64
                    if ((ret.type.t & VT_BTYPE) == VT_QLONG)
                    if ((ret.type.t & VT_BTYPE) == VT_LLONG)
                    gfunc_param_typed(s, sa);
                tcc_error("too few arguments to function");
            gfunc_call(nb_args);
            for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
                vsetc(&ret.type, r, &ret.c);
                vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
            /* handle packed struct return */
            if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
                size = type_size(&s->type, &align);
                /* We're writing whole regs often, make sure there's enough
                   space.  Assume register size is power of 2. */
                if (regsize > align)
                loc = (loc - size) & -align;
                    vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
                    if (--ret_nregs == 0)
                vset(&s->type, VT_LOCAL | VT_LVAL, addr);
ST_FUNC void expr_prod(void)
    while (tok == '*' || tok == '/' || tok == '%') {
ST_FUNC void expr_sum(void)
    while (tok == '+' || tok == '-') {
static void expr_shift(void)
    while (tok == TOK_SHL || tok == TOK_SAR) {
static void expr_cmp(void)
    while ((tok >= TOK_ULE && tok <= TOK_GT) ||
           tok == TOK_ULT || tok == TOK_UGE) {
static void expr_cmpeq(void)
    while (tok == TOK_EQ || tok == TOK_NE) {
static void expr_and(void)
    while (tok == '&') {
static void expr_xor(void)
    while (tok == '^') {
static void expr_or(void)
    while (tok == '|') {
static void expr_land(void)
    if (tok == TOK_LAND) {
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            gen_cast_s(VT_BOOL);
        while (tok == TOK_LAND) {
            if (tok != TOK_LAND) {
static void expr_lor(void)
    if (tok == TOK_LOR) {
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            gen_cast_s(VT_BOOL);
        while (tok == TOK_LOR) {
            if (tok != TOK_LOR) {
/* Assuming vtop is a value used in a conditional context
   (i.e. compared with zero) return 0 if it's false, 1 if
   true and -1 if it can't be statically determined. */
static int condition_3way(void)
    if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
        (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
        gen_cast_s(VT_BOOL);
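
/* Illustrative example (not part of tcc): how the 3-way result is
   typically consumed - constant conditions let a branch be parsed with
   code generation suppressed, a -1 result emits a real test. */
#if 0
#include <stdio.h>
void cond_demo(int x)
{
    if (0) puts("never emitted");  /* condition_3way() == 0 */
    if (1) puts("always taken");   /* condition_3way() == 1 */
    if (x) puts("runtime test");   /* condition_3way() == -1 */
}
#endif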
static void expr_cond(void)
    int tt, u, r1, r2, rc, t1, t2, bt1, bt2, islv, c, g;
    CType type, type1, type2;
    c = condition_3way();
    g = (tok == ':' && gnu_ext);
    /* needed to avoid having different registers saved in
       each branch */
    if (is_float(vtop->type.t)) {
#ifdef TCC_TARGET_X86_64
        if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
    sv = *vtop; /* save value to handle it later */
    vtop--; /* no vpop so that FP stack is not flushed */
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;
    /* cast operands to correct type according to ISOC rules */
    if (is_float(bt1) || is_float(bt2)) {
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            type.t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* If one is a null ptr constant the result type
           is the other. */
        if (is_null_pointer (vtop))
        else if (is_null_pointer (&sv))
        /* XXX: test pointer compatibility, C99 has more elaborate
           rules here */
    } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        /* XXX: test function pointer compatibility */
        type = bt1 == VT_FUNC ? type1 : type2;
    } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        /* XXX: test structure compatibility */
        type = bt1 == VT_STRUCT ? type1 : type2;
    } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
        /* NOTE: as an extension, we accept void on only one side */
        /* integer operations */
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
       that `(expr ? a : b).mem` does not error with "lvalue expected" */
    islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
    /* now we convert second operand */
        mk_pointer(&vtop->type);
    } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
    if (is_float(type.t)) {
#ifdef TCC_TARGET_X86_64
        if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
    } else if ((type.t & VT_BTYPE) == VT_LLONG) {
        /* for long longs, we use fixed registers to avoid having
           to handle a complicated move */
        /* this is horrible, but we must also convert first
           operand */
            mk_pointer(&vtop->type);
        } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
        move_reg(r2, r1, type.t);
static void expr_eq(void)
        (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
        tok == TOK_A_XOR || tok == TOK_A_OR ||
        tok == TOK_A_SHL || tok == TOK_A_SAR) {
ST_FUNC void gexpr(void)
/* parse a constant expression and return value in vtop. */
static void expr_const1(void)
/* parse an integer constant and return its value. */
static inline int64_t expr_const64(void)
    if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
        expect("constant expression");
/* parse an integer constant and return its value.
   Complain if it doesn't fit 32bit (signed or unsigned). */
ST_FUNC int expr_const(void)
    int64_t wc = expr_const64();
    if (c != wc && (unsigned)c != wc)
        tcc_error("constant exceeds 32 bit");
/* return the label token if current token is a label, otherwise
   return zero */
static int is_label(void)
    /* fast test first */
    if (tok < TOK_UIDENT)
    /* no need to save tokc because tok is an identifier */
    unget_tok(last_tok);
#ifndef TCC_TARGET_ARM64
static void gfunc_return(CType *func_type)
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            /* copy structure value to pointer */
            /* returning structure packed into registers */
            int r, size, addr, align;
            size = type_size(func_type, &align);
            if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
                 (vtop->c.i & (ret_align-1)))
                && (align & (ret_align-1))) {
                loc = (loc - size) & -ret_align;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
            vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            vtop->type = ret_type;
            if (is_float(ret_type.t))
                r = rc_fret(ret_type.t);
                if (--ret_nregs == 0)
                /* We assume that when a structure is returned in multiple
                   registers, their classes are consecutive values of the
                   suite s(n) = 2^n */
                vtop->c.i += regsize;
    } else if (is_float(func_type->t)) {
        gv(rc_fret(func_type->t));
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
static int case_cmp(const void *pa, const void *pb)
    int64_t a = (*(struct case_t **) pa)->v1;
    int64_t b = (*(struct case_t **) pb)->v1;
    return a < b ? -1 : a > b;
static void gcase(struct case_t **base, int len, int *bsym)
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
        gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        gcase(base, len/2, bsym);
        if (cur_switch->def_sym)
            gjmp_addr(cur_switch->def_sym);
            *bsym = gjmp(*bsym);
        base += e; len -= e;
        if (p->v1 == p->v2) {
            gtst_addr(0, p->sym);
            gtst_addr(0, p->sym);
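
/* A minimal standalone sketch (not tcc's code generator) of the same
   dispatch idea used by gcase(): case ranges sorted by v1 and known to
   be non-overlapping, searched by binary search. */
#if 0
struct range { long long v1, v2; int target; };

static int find_case(const struct range *r, int n, long long x)
{
    int lo = 0, hi = n;
    while (lo < hi) {
        int mid = lo + (hi - lo) / 2;
        if (x < r[mid].v1)
            hi = mid;
        else if (x > r[mid].v2)
            lo = mid + 1;
        else
            return r[mid].target;  /* v1 <= x <= v2 */
    }
    return -1;                     /* no case matched: go to default */
}
#endif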
static void block(int *bsym, int *csym, int is_expr)
    int a, b, c, d, cond;
    /* generate line number info */
    if (tcc_state->do_debug)
        tcc_debug_line(tcc_state);
        /* default return value is (void) */
        vtop->type.t = VT_VOID;
    if (tok == TOK_IF) {
        int saved_nocode_wanted = nocode_wanted;
        cond = condition_3way();
            nocode_wanted |= 0x20000000;
        block(bsym, csym, 0);
            nocode_wanted = saved_nocode_wanted;
        if (c == TOK_ELSE) {
                nocode_wanted |= 0x20000000;
            block(bsym, csym, 0);
            gsym(d); /* patch else jmp */
                nocode_wanted = saved_nocode_wanted;
    } else if (tok == TOK_WHILE) {
        int saved_nocode_wanted;
        nocode_wanted &= ~0x20000000;
        saved_nocode_wanted = nocode_wanted;
        nocode_wanted = saved_nocode_wanted;
    } else if (tok == '{') {
        int block_vla_sp_loc = vla_sp_loc, saved_vlas_in_scope = vlas_in_scope;
        /* record local declaration stack position */
        llabel = local_label_stack;
        /* handle local labels declarations */
        if (tok == TOK_LABEL) {
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
        while (tok != '}') {
            if ((a = is_label()))
                block(bsym, csym, is_expr);
        /* pop locally defined labels */
        label_pop(&local_label_stack, llabel);
        /* pop locally defined symbols */
        /* In the is_expr case (a statement expression is finished here),
           vtop might refer to symbols on the local_stack.  Either via the
           type or via vtop->sym.  We can't pop those nor any that in turn
           might be referred to.  To make it easier we don't roll back
           any symbols in that case; some upper level call to block() will
           do that.  We do have to remove such symbols from the lookup
           tables, though.  sym_pop will do that. */
        sym_pop(&local_stack, s, is_expr);
        /* Pop VLA frames and restore stack pointer if required */
        if (vlas_in_scope > saved_vlas_in_scope) {
            vla_sp_loc = saved_vlas_in_scope ? block_vla_sp_loc : vla_sp_root_loc;
            vlas_in_scope = saved_vlas_in_scope;
    } else if (tok == TOK_RETURN) {
            gen_assign_cast(&func_vt);
            gfunc_return(&func_vt);
        /* jump unless last stmt in top-level block */
        if (tok != '}' || local_scope != 1)
        nocode_wanted |= 0x20000000;
    } else if (tok == TOK_BREAK) {
            tcc_error("cannot break");
        *bsym = gjmp(*bsym);
        nocode_wanted |= 0x20000000;
    } else if (tok == TOK_CONTINUE) {
            tcc_error("cannot continue");
        vla_sp_restore_root();
        *csym = gjmp(*csym);
    } else if (tok == TOK_FOR) {
        int saved_nocode_wanted;
        nocode_wanted &= ~0x20000000;
        /* c99 for-loop init decl? */
        if (!decl0(VT_LOCAL, 1, NULL)) {
            /* no, regular for-loop init expr */
        saved_nocode_wanted = nocode_wanted;
        nocode_wanted = saved_nocode_wanted;
        sym_pop(&local_stack, s, 0);
    if (tok == TOK_DO) {
        int saved_nocode_wanted;
        nocode_wanted &= ~0x20000000;
        saved_nocode_wanted = nocode_wanted;
        nocode_wanted = saved_nocode_wanted;
    if (tok == TOK_SWITCH) {
        struct switch_t *saved, sw;
        int saved_nocode_wanted = nocode_wanted;
        switchval = *vtop--;
        b = gjmp(0); /* jump to first case */
        sw.p = NULL; sw.n = 0; sw.def_sym = 0;
        nocode_wanted = saved_nocode_wanted;
        a = gjmp(a); /* add implicit break */
        qsort(sw.p, sw.n, sizeof(void*), case_cmp);
        for (b = 1; b < sw.n; b++)
            if (sw.p[b - 1]->v2 >= sw.p[b]->v1)
                tcc_error("duplicate case value");
        /* Our switch table sorting is signed, so the compared
           value needs to be as well when it's 64bit. */
        if ((switchval.type.t & VT_BTYPE) == VT_LLONG)
            switchval.type.t &= ~VT_UNSIGNED;
        gcase(sw.p, sw.n, &a);
            gjmp_addr(sw.def_sym);
        dynarray_reset(&sw.p, &sw.n);
    if (tok == TOK_CASE) {
        struct case_t *cr = tcc_malloc(sizeof(struct case_t));
        nocode_wanted &= ~0x20000000;
        cr->v1 = cr->v2 = expr_const64();
        if (gnu_ext && tok == TOK_DOTS) {
            cr->v2 = expr_const64();
            if (cr->v2 < cr->v1)
                tcc_warning("empty case range");
        dynarray_add(&cur_switch->p, &cur_switch->n, cr);
        goto block_after_label;
    if (tok == TOK_DEFAULT) {
        if (cur_switch->def_sym)
            tcc_error("too many 'default'");
        cur_switch->def_sym = ind;
        goto block_after_label;
    if (tok == TOK_GOTO) {
        if (tok == '*' && gnu_ext) {
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
                if (s->r == LABEL_DECLARED)
                    s->r = LABEL_FORWARD;
            vla_sp_restore_root();
            if (s->r & LABEL_FORWARD)
                s->jnext = gjmp(s->jnext);
                gjmp_addr(s->jnext);
            expect("label identifier");
    } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
            if (s->r == LABEL_DEFINED)
                tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
            s->r = LABEL_DEFINED;
            s = label_push(&global_label_stack, b, LABEL_DEFINED);
        /* we accept this, but it is a mistake */
        nocode_wanted &= ~0x20000000;
            tcc_warning("deprecated use of label at end of compound statement");
            block(bsym, csym, is_expr);
    /* expression case */
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
   with a '{').  If STR then allocates and stores the skipped tokens
   in *STR.  This doesn't check if () and {} are nested correctly,
   i.e. "({)}" is accepted. */
static void skip_or_save_block(TokenString **str)
    int braces = tok == '{';
        *str = tok_str_alloc();
    while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
        if (tok == TOK_EOF) {
            if (str || level > 0)
                tcc_error("unexpected end of file");
            tok_str_add_tok(*str);
        if (t == '{' || t == '(') {
        } else if (t == '}' || t == ')') {
            if (level == 0 && braces && t == '}')
        tok_str_add(*str, -1);
        tok_str_add(*str, 0);
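
/* Illustrative example (not part of tcc): a _Generic expression whose
   unselected associations are consumed by skip_or_save_block() above -
   note the balanced braces inside the skipped branch. */
#if 0
int pick = _Generic(1.0,
                    double: 1,                /* selected: tokens saved and replayed */
                    float:  (int){ 2 + 3 },   /* skipped, braces kept balanced */
                    default: 0);
#endif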
#define EXPR_CONST 1
static void parse_init_elem(int expr_type)
    int saved_global_expr;
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals). */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
            || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
            tcc_error("initializer element is not constant");
/* put zeros for variable based init */
static void init_putz(Section *sec, unsigned long c, int size)
        /* nothing to do because globals are already set to zero */
        vpush_global_sym(&func_old_type, TOK_memset);
#ifdef TCC_TARGET_ARM
/* t is the array or struct type. c is the array or struct
   address. cur_field is the pointer to the current
   field, for arrays the 'c' member contains the current start
   index. 'size_only' is true if only size info is needed (only used
   in arrays). al contains the already initialized length of the
   current container (starting at c). This returns the new length of that. */
static int decl_designator(CType *type, Section *sec, unsigned long c,
                           Sym **cur_field, int size_only, int al)
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;
    if (gnu_ext && (l = is_label()) != 0)
    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            index = index_last = expr_const();
            if (tok == TOK_DOTS && gnu_ext) {
                index_last = expr_const();
            if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
                tcc_error("invalid index");
                (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            f = find_field(type, l);
    } else if (!gnu_ext) {
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            if (type->ref->c >= 0 && index >= type->ref->c)
                tcc_error("index too large");
            type = pointed_type(type);
            c += index * type_size(type, &align);
            while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
                *cur_field = f = f->next;
                tcc_error("too many field init");
    /* must put zero in holes (note that doing it that way
       ensures that it even works with designators) */
    if (!size_only && c - corig > al)
        init_putz(sec, corig + al, c - corig - al);
    decl_initializer(type, sec, c, 0, size_only);
    /* XXX: make it more general */
    if (!size_only && nb_elems > 1) {
        unsigned long c_end;
            vset(type, VT_LOCAL|VT_LVAL, c);
            for (i = 1; i < nb_elems; i++) {
                vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
            c_end = c + nb_elems * elem_size;
            if (c_end > sec->data_allocated)
                section_realloc(sec, c_end);
            src = sec->data + c;
            for (i = 1; i < nb_elems; i++) {
                memcpy(dst, src, elem_size);
    c += nb_elems * type_size(type, &align);
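
/* Illustrative example (not part of tcc): designators handled above,
   including the GNU range extension accepted when gnu_ext is set. */
#if 0
struct pt { int x, y; };
static struct pt origin = { .y = 2, .x = 1 };
static int bits[8] = { [0] = 1, [7] = 1 };
static int ones[8] = { [0 ... 7] = 1 };   /* GNU '...' range designator */
#endif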
/* store a value or an expression directly in global data or in local array */
static void init_putv(CType *type, Section *sec, unsigned long c)
{
    int bt, bit_pos, bit_size;
    unsigned long long bit_mask;

    dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */

    if (sec) {
        /* XXX: not portable */
        /* XXX: generate error if incorrect relocation */
        gen_assign_cast(&dtype);
        bt = type->t & VT_BTYPE;
        size = type_size(type, &align);
        section_reserve(sec, c + size);
        ptr = sec->data + c;
        /* XXX: make code faster ? */
        if (!(type->t & VT_BITFIELD)) {
            bit_size = PTR_SIZE * 8;
        } else {
            bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
            bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
            bit_mask = (1LL << bit_size) - 1;
        }
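        /* Example (illustration only): when initializing
               struct { unsigned a:3, b:5; } s = { 5, 9 };
           the field's bit position and width are encoded in the value type
           above VT_STRUCT_SHIFT (for b: bit_pos 3, bit_size 5), and bit_mask
           keeps only those bits before they are or-ed into the target bytes. */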
        if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
            vtop->sym->v >= SYM_FIRST_ANOM &&
            /* XXX This rejects compound literals like
               '(void *){ptr}'. The problem is that '&sym' is
               represented the same way, which would be ruled out
               by the SYM_FIRST_ANOM check above, but also '"string"'
               in 'char *p = "string"' is represented the same
               with the type being VT_PTR and the symbol being an
               anonymous one. That is, there's no difference in vtop
               between '(void *){x}' and '&(void *){x}'. Ignore
               pointer typed entities here. Hopefully no real code
               will ever use compound literals with scalar type. */
            (vtop->type.t & VT_BTYPE) != VT_PTR) {
            /* These come from compound literals, memcpy stuff over. */
            Section *ssec;
            ElfW(Sym) *esym;
            ElfW_Rel *rel;
            esym = &((ElfW(Sym) *)symtab_section->data)[vtop->sym->c];
            ssec = tcc_state->sections[esym->st_shndx];
            memmove (ptr, ssec->data + esym->st_value, size);
            /* We need to copy over all memory contents, and that
               includes relocations. Use the fact that relocs are
               created in order, so look from the end of relocs
               until we hit one before the copied region. */
            int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
            rel = (ElfW_Rel *)(ssec->reloc->data + ssec->reloc->data_offset);
            while (num_relocs--) {
                rel--;
                if (rel->r_offset >= esym->st_value + size)
                    continue;
                if (rel->r_offset < esym->st_value)
                    break;
                /* Note: if the same fields are initialized multiple
                   times (possible with designators) then we possibly
                   add multiple relocations for the same offset here.
                   That would lead to wrong code, the last reloc needs
                   to win. We clean this up later after the whole
                   initializer is parsed. */
                put_elf_reloca(symtab_section, sec,
                               c + rel->r_offset - esym->st_value,
                               ELFW(R_TYPE)(rel->r_info),
                               ELFW(R_SYM)(rel->r_info),
                               rel->r_addend);
            }
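            /* Example (illustration only): for an initializer that is itself
               an anonymous object, e.g.
                   static int x;
                   static struct S { int *p; } s = (struct S){ &x };
               the literal's bytes were copied above, and every relocation
               emitted for it (here: the address of x) is re-emitted at the
               copied offset so that s.p still gets patched at link time. */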
        } else {
            if ((vtop->r & VT_SYM) &&
                ((bt == VT_LLONG && bit_size != 64) ||
                 (bt == VT_INT && bit_size != 32)))
                tcc_error("initializer element is not computable at load time");
            switch(bt) {
                /* XXX: when cross-compiling we assume that each type has the
                   same representation on host and target, which is likely to
                   be wrong in the case of long double */
            case VT_BOOL:
                vtop->c.i = (vtop->c.i != 0);
            case VT_BYTE:
                *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
                break;
            case VT_SHORT:
                *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
                break;
            case VT_FLOAT:
                *(float*)ptr = vtop->c.f;
                break;
            case VT_DOUBLE:
                *(double *)ptr = vtop->c.d;
                break;
            case VT_LDOUBLE:
                if (sizeof(long double) == LDOUBLE_SIZE)
                    *(long double *)ptr = vtop->c.ld;
                else if (sizeof(double) == LDOUBLE_SIZE)
                    *(double *)ptr = (double)vtop->c.ld;
#if (defined __i386__ || defined __x86_64__) && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
                else if (sizeof (long double) >= 10)
                    memcpy(memset(ptr, 0, LDOUBLE_SIZE), &vtop->c.ld, 10);
                else if (sizeof (long double) == sizeof (double))
                    __asm__("fldl %1\nfstpt %0\n" : "=m"
                        (memset(ptr, 0, LDOUBLE_SIZE), ptr) : "m" (vtop->c.ld));
#endif
                else
                    tcc_error("can't cross compile long double constants");
                break;
            case VT_LLONG:
                *(long long *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
                break;
            case VT_PTR:
                {
                    addr_t val = (vtop->c.i & bit_mask) << bit_pos;
                    if (vtop->r & VT_SYM)
                        greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                    else
                        *(addr_t *)ptr |= val;

                    if (vtop->r & VT_SYM)
                        greloc(sec, vtop->sym, c, R_DATA_PTR);
                    *(addr_t *)ptr |= val;
                    break;
                }
            default:
                {
                    int val = (vtop->c.i & bit_mask) << bit_pos;
                    if (vtop->r & VT_SYM)
                        greloca(sec, vtop->sym, c, R_DATA_PTR, val);

                    if (vtop->r & VT_SYM)
                        greloc(sec, vtop->sym, c, R_DATA_PTR);
                    break;
                }
            }
        }
    } else {
        vset(&dtype, VT_LOCAL|VT_LVAL, c);
    }
}
/* 't' contains the type and storage info. 'c' is the offset of the
   object in section 'sec'. If 'sec' is NULL, it means stack based
   allocation. 'first' is true if array '{' must be read (multi
   dimension implicit array init handling). 'size_only' is true if
   size only evaluation is wanted (only for arrays). */
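/* Example (illustration only): the same entry point handles scalars,
   nested aggregates and strings, e.g.
       int x = 3;
       int m[2][3] = { {1, 2, 3}, {4, 5, 6} };   recurses per dimension
       char s[] = "hi";                          string case handled below
   For locals (sec == NULL) values go through the value stack, for globals
   they are written directly into the section data. */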
static void decl_initializer(CType *type, Section *sec, unsigned long c,
                             int first, int size_only)
{
    int len, n, no_oblock, nb, i;
    /* If we currently are at an '}' or ',' we have read an initializer
       element in one of our callers, and not yet consumed it. */
    have_elem = tok == '}' || tok == ',';
    if (!have_elem && tok != '{' &&
        /* In case of strings we have special handling for arrays, so
           don't consume them as initializer value (which would commit them
           to some anonymous symbol). */
        tok != TOK_LSTR && tok != TOK_STR &&
        !size_only) {
        parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
        have_elem = 1;
    }

    if (have_elem &&
        !(type->t & VT_ARRAY) &&
        /* Use i_c_parameter_t, to strip toplevel qualifiers.
           The source type might have VT_CONSTANT set, which is
           of course assignable to non-const elements. */
        is_compatible_parameter_types(type, &vtop->type)) {
        init_putv(type, sec, c);
    } else if (type->t & VT_ARRAY) {
        t1 = pointed_type(type);
        size1 = type_size(t1, &align1);

        no_oblock = 1;
        if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
            tok == '{') {
            if (tok != '{')
                tcc_error("character array initializer must be a literal,"
                          " optionally enclosed in braces");
            skip('{');
            no_oblock = 0;
        }

        /* only parse strings here if correct type (otherwise: handle
           them as ((w)char *) expressions */
        if ((tok == TOK_LSTR &&
#ifdef TCC_TARGET_PE
             (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
#else
             (t1->t & VT_BTYPE) == VT_INT
#endif
            ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
            len = 0;
            while (tok == TOK_STR || tok == TOK_LSTR) {
                /* compute maximum number of chars wanted */
                if (tok == TOK_STR)
                    cstr_len = tokc.str.size;
                else
                    cstr_len = tokc.str.size / sizeof(nwchar_t);
                nb = cstr_len;
                if (n >= 0 && nb > (n - len))
                    nb = n - len;
                if (cstr_len > nb)
                    tcc_warning("initializer-string for array is too long");
                /* in order to go faster for the common case (char
                   string in global variable), we handle it
                   specifically */
                if (sec && tok == TOK_STR && size1 == 1) {
                    memcpy(sec->data + c + len, tokc.str.data, nb);
                } else {
                    for (i = 0; i < nb; i++) {
                        if (tok == TOK_STR)
                            ch = ((unsigned char *)tokc.str.data)[i];
                        else
                            ch = ((nwchar_t *)tokc.str.data)[i];
                        vpushi(ch);
                        init_putv(t1, sec, c + (len + i) * size1);
                    }
                }
                len += nb;
                next();
            }
            /* only add trailing zero if enough storage (no
               warning in this case since it is standard) */
            if (n < 0 || len < n) {
                vpushi(0);
                init_putv(t1, sec, c + (len * size1));
            }
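            /* Example (illustration only):
                   char s1[] = "abc";    4 bytes, trailing 0 stored
                   char s2[3] = "abc";   fits exactly, no 0, no warning (standard)
                   char s3[2] = "abc";   triggers the "too long" warning
               wide strings (L"...") take the same path with
               size1 == sizeof(nwchar_t). */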
        } else {
            while (tok != '}' || have_elem) {
                len = decl_designator(type, sec, c, &f, size_only, len);
                have_elem = 0;
                if (type->t & VT_ARRAY) {
                    /* special test for multi dimensional arrays (may not
                       be strictly correct if designators are used at the
                       same time) */
                    if (no_oblock && len >= n*size1)
                        break;
                } else {
                    if (s->type.t == VT_UNION)
                        f = NULL;
                    if (no_oblock && f == NULL)
                        break;
                }
            }
        }
        /* put zeros at the end */
        if (!size_only && len < n*size1)
            init_putz(sec, c + len, n*size1 - len);
        /* patch type size if needed, which happens only for array types */
        if (n < 0)
            s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
    } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
        if (first || tok == '{') {
        }
    } else if (tok == '{') {
        decl_initializer(type, sec, c, first, size_only);
    } else if (size_only) {
        /* If we supported only ISO C we wouldn't have to accept calling
           this on anything other than an array when size_only==1 (and even
           then only on the outermost level, so no recursion would be needed),
           because initializing a flex array member isn't supported.
           But GNU C supports it, so we need to recurse even into
           subfields of structs and arrays when size_only is set. */
        /* just skip expression */
        skip_or_save_block(NULL);
    } else {
        if (!have_elem) {
            /* This should happen only when we haven't parsed
               the init element above for fear of committing a
               string constant to memory too early. */
            if (tok != TOK_STR && tok != TOK_LSTR)
                expect("string constant");
            parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
        }
        init_putv(type, sec, c);
    }
}
/* parse an initializer for type 't' if 'has_init' is non zero, and
   allocate space in local or global data space ('r' is either
   VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
   variable 'v' of scope 'scope' is declared before initializers
   are parsed. If 'v' is zero, then a reference to the new object
   is put in the value stack. If 'has_init' is 2, a special parsing
   is done to handle string constants. */
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
                                   int has_init, int v, int scope)
{
    int size, align, addr;
    ParseState saved_parse_state = {0};
    TokenString *init_str = NULL;
    Sym *flexible_array;

    flexible_array = NULL;
    if ((type->t & VT_BTYPE) == VT_STRUCT) {
        Sym *field = type->ref->next;
        if (field) {
            while (field->next)
                field = field->next;
            if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
                flexible_array = field;
        }
    }
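    /* Example (illustration only): a flexible array member as in
           struct msg { int len; char data[]; };
           static struct msg m = { 3, "hi" };
       leaves data's element count negative until the initializer is parsed;
       it is detected here so the object size can be fixed up below. */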
    size = type_size(type, &align);
    /* If unknown size, we must evaluate it before
       evaluating initializers because
       initializers can generate global data too
       (e.g. string pointers or ISOC99 compound
       literals). It also simplifies local
       initializers handling */
    if (size < 0 || (flexible_array && has_init)) {
        if (!has_init)
            tcc_error("unknown type size");
        /* get all init string */
        if (has_init == 2) {
            init_str = tok_str_alloc();
            /* only get strings */
            while (tok == TOK_STR || tok == TOK_LSTR) {
                tok_str_add_tok(init_str);
                next();
            }
            tok_str_add(init_str, -1);
            tok_str_add(init_str, 0);
        } else {
            skip_or_save_block(&init_str);
        }
        save_parse_state(&saved_parse_state);

        begin_macro(init_str, 1);
        decl_initializer(type, NULL, 0, 1, 1);
        /* prepare second initializer parsing */
        macro_ptr = init_str->str;

        /* if still unknown size, error */
        size = type_size(type, &align);
        if (size < 0)
            tcc_error("unknown type size");
    }
    /* If there's a flex member and it was used in the initializer
       adjust size. */
    if (flexible_array &&
        flexible_array->type.ref->c > 0)
        size += flexible_array->type.ref->c
                * pointed_size(&flexible_array->type);
    /* take into account specified alignment if bigger */
    if (ad->a.aligned) {
        int speca = 1 << (ad->a.aligned - 1);
        if (speca > align)
            align = speca;
    } else if (ad->a.packed) {
        align = 1;
    }
    if ((r & VT_VALMASK) == VT_LOCAL) {
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
        }
#endif
        loc = (loc - size) & -align;
        addr = loc;
#ifdef CONFIG_TCC_BCHECK
        /* handles bounds */
        /* XXX: currently, since we do only one pass, we cannot track
           '&' operators, so we add only arrays */
        if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
            /* add padding between regions */
            /* then add local bound info */
            bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = addr;
            bounds_ptr[1] = size;
        }
#endif
        if (v) {
            /* local variable */
#ifdef CONFIG_TCC_ASM
            if (ad->asm_label) {
                int reg = asm_parse_regvar(ad->asm_label);
                if (reg >= 0)
                    r = (r & ~VT_VALMASK) | reg;
            }
#endif
            sym = sym_push(v, type, r, addr);
        } else {
            /* push local reference */
            vset(type, r, addr);
        }
    } else {
        if (v && scope == VT_CONST) {
            /* see if the symbol was already defined */
            sym = sym_find(v);
            if (sym) {
                patch_storage(sym, ad, type);
                if (sym->type.t & VT_EXTERN) {
                    /* if the variable is extern, it was not allocated */
                    sym->type.t &= ~VT_EXTERN;
                    /* set array size if it was omitted in extern
                       declaration */
                    if ((sym->type.t & VT_ARRAY) &&
                        sym->type.ref->c < 0 &&
                        type->ref->c >= 0)
                        sym->type.ref->c = type->ref->c;
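                    /* Example (illustration only):
                           extern int tab[];   size still unknown, ref->c < 0
                           int tab[16];        definition supplies the size
                       the earlier extern symbol is reused and its element
                       count is patched from the defining declaration. */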
                } else if (!has_init) {
                    /* we accept several definitions of the same
                       global variable. this is tricky, because we
                       must play with the SHN_COMMON type of the symbol */
                    /* no init data, we won't add more to the symbol */
                } else if (sym->c) {
                    esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
                    if (esym->st_shndx == data_section->sh_num)
                        tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
                }
            }
        }
        /* allocate symbol in corresponding section */
        sec = ad->section;
        if (!sec) {
            if (has_init)
                sec = data_section;
            else if (tcc_state->nocommon)
                sec = bss_section;
        }
        if (sec) {
            addr = section_add(sec, size, align);
#ifdef CONFIG_TCC_BCHECK
            /* add padding if bound check */
            if (tcc_state->do_bounds_check)
                section_add(sec, 1, 1);
#endif
        } else {
            addr = align; /* SHN_COMMON is special, symbol value is align */
            sec = common_section;
        }
        if (v) {
            sym = sym_push(v, type, r | VT_SYM, 0);
            patch_storage(sym, ad, NULL);
            /* update symbol definition */
            put_extern_sym(sym, sec, addr, size);
        } else {
            /* push global reference */
            sym = get_sym_ref(type, sec, addr, size);
            vpushsym(type, sym);
        }
#ifdef CONFIG_TCC_BCHECK
        /* handles bounds now because the symbol must be defined
           before for the relocation */
        if (tcc_state->do_bounds_check) {
            greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
            /* then add global bound info */
            bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = 0; /* relocated */
            bounds_ptr[1] = size;
        }
#endif
    }
    if (type->t & VT_VLA) {
        int a;

        /* save current stack pointer */
        if (vlas_in_scope == 0) {
            if (vla_sp_root_loc == -1)
                vla_sp_root_loc = (loc -= PTR_SIZE);
            gen_vla_sp_save(vla_sp_root_loc);
        }

        vla_runtime_type_size(type, &a);
        gen_vla_alloc(type, a);
        gen_vla_sp_save(addr);
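        /* Example (illustration only): for
               void f(int n) { int buf[n]; ... }
           the stack pointer is saved once per scope before the first VLA,
           the runtime size n * sizeof(int) is computed, the stack is grown
           by that amount and the new pointer is saved so it can be restored
           when the VLA goes out of scope. */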
    } else if (has_init) {
        size_t oldreloc_offset = 0;
        if (sec && sec->reloc)
            oldreloc_offset = sec->reloc->data_offset;
        decl_initializer(type, sec, addr, 1, 0);
        if (sec && sec->reloc)
            squeeze_multi_relocs(sec, oldreloc_offset);
        /* patch flexible array member size back to -1, */
        /* for possible subsequent similar declarations */
        if (flexible_array)
            flexible_array->type.ref->c = -1;
    }

    /* restore parse state if needed */
    if (init_str) {
        restore_parse_state(&saved_parse_state);
    }
}
/* parse a function defined by symbol 'sym' and generate its code in
   'cur_text_section' */
static void gen_function(Sym *sym)
{
    ind = cur_text_section->data_offset;
    /* NOTE: we patch the symbol size later */
    put_extern_sym(sym, cur_text_section, ind, 0);
    funcname = get_tok_str(sym->v, NULL);
    /* Initialize VLA state */
    vla_sp_root_loc = -1;
    /* put debug symbol */
    tcc_debug_funcstart(tcc_state, sym);
    /* push a dummy symbol to enable local sym storage */
    sym_push2(&local_stack, SYM_FIELD, 0, 0);
    local_scope = 1; /* for function parameters */
    gfunc_prolog(&sym->type);
    block(NULL, NULL, 0);
    cur_text_section->data_offset = ind;
    label_pop(&global_label_stack, NULL);
    /* reset local stack */
    sym_pop(&local_stack, NULL, 0);
    /* end of function */
    /* patch symbol size */
    ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
        ind - func_ind;
    tcc_debug_funcend(tcc_state, ind - func_ind);
    /* It's better to crash than to generate wrong code */
    cur_text_section = NULL;
    funcname = ""; /* for safety */
    func_vt.t = VT_VOID; /* for safety */
    func_var = 0; /* for safety */
    ind = 0; /* for safety */
}
static void gen_inline_functions(TCCState *s)
{
    Sym *sym;
    int inline_generated, i, ln;
    struct InlineFunc *fn;

    ln = file->line_num;
    /* iterate while inline functions are referenced */
    for (;;) {
        inline_generated = 0;
        for (i = 0; i < s->nb_inline_fns; ++i) {
            fn = s->inline_fns[i];
            sym = fn->sym;
            if (sym && sym->c) {
                /* the function was used: generate its code and
                   convert it to a normal function */
                pstrcpy(file->filename, sizeof file->filename, fn->filename);
                sym->type.t &= ~VT_INLINE;

                begin_macro(fn->func_str, 1);
                cur_text_section = text_section;
                gen_function(sym);
                inline_generated = 1;
            }
        }
        if (!inline_generated)
            break;
    }
    file->line_num = ln;
}
ST_FUNC void free_inline_functions(TCCState *s)
{
    int i;
    /* free tokens of unused inline functions */
    for (i = 0; i < s->nb_inline_fns; ++i) {
        struct InlineFunc *fn = s->inline_fns[i];
        tok_str_free(fn->func_str);
    }
    dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
}
/* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
   if parsing old style parameter decl list (and FUNC_SYM is set then) */
static int decl0(int l, int is_for_loop_init, Sym *func_sym)
{
        if (!parse_btype(&btype, &ad)) {
            if (is_for_loop_init)
                return 0;
            /* skip redundant ';' if not in old parameter decl scope */
            if (tok == ';' && l != VT_CMP) {
                next();
                continue;
            }
            if (l == VT_CONST &&
                (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                /* global asm block */
            }
            /* special test for old K&R protos without explicit int
               type. Only accepted when defining global data */
            if (l != VT_CONST || tok < TOK_UIDENT)
                break;
        }
        if (((btype.t & VT_BTYPE) == VT_ENUM ||
             (btype.t & VT_BTYPE) == VT_STRUCT) &&
            tok == ';') {
            if ((btype.t & VT_BTYPE) == VT_STRUCT) {
                int v = btype.ref->v;
                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
                    tcc_warning("unnamed struct/union that defines no instances");
            }
        }
        while (1) { /* iterate thru each declaration */
            /* If the base type itself was an array type of unspecified
               size (like in 'typedef int arr[]; arr x = {1};') then
               we will overwrite the unknown size by the real one for
               this decl.  We need to unshare the ref symbol holding
               that size. */
            if ((type.t & VT_ARRAY) && type.ref->c < 0) {
                type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
            }
            type_decl(&type, &ad, &v, TYPE_DIRECT);
            type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
            printf("type = '%s'\n", buf);
            if ((type.t & VT_BTYPE) == VT_FUNC) {
                if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
                    tcc_error("function without file scope cannot be static");
                }
                /* if old style function prototype, we accept a
                   declaration list */
                sym = type.ref;
                if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
                    decl0(VT_CMP, 0, sym);
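                /* Example (illustration only, pre-ANSI style): in
                       int add(a, b)
                       int a, b;
                       { return a + b; }
                   the declaration list between ')' and '{' is parsed by this
                   recursive decl0(VT_CMP, ...) call against the old-style
                   (FUNC_OLD) prototype. */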
            if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                ad.asm_label = asm_label_instr();
                /* parse one last attribute list, after asm label */
                parse_attribute(&ad);
            }
#ifdef TCC_TARGET_PE
            if (ad.a.dllimport || ad.a.dllexport) {
                if (type.t & (VT_STATIC|VT_TYPEDEF))
                    tcc_error("cannot have dll linkage with static or typedef");
                if (ad.a.dllimport) {
                    if ((type.t & VT_BTYPE) == VT_FUNC)
                        ad.a.dllimport = 0;
                    else
                        type.t |= VT_EXTERN;
                }
            }
#endif
            if (tok == '{') {
                if (l != VT_CONST)
                    tcc_error("cannot use local functions");
                if ((type.t & VT_BTYPE) != VT_FUNC)
                    expect("function definition");
                /* reject abstract declarators in function definition
                   make old style params without decl have int type */
                sym = type.ref;
                while ((sym = sym->next) != NULL) {
                    if (!(sym->v & ~SYM_FIELD))
                        expect("identifier");
                    if (sym->type.t == VT_VOID)
                        sym->type = int_type;
                }

                /* XXX: cannot do better now: convert extern inline to static inline */
                if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
                    type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
                if (sym) {
                    Sym *ref;
                    if ((sym->type.t & VT_BTYPE) != VT_FUNC)
                        goto func_error1;

                    ref = sym->type.ref;

                    /* use func_call from prototype if not defined */
                    if (ref->f.func_call != FUNC_CDECL
                        && type.ref->f.func_call == FUNC_CDECL)
                        type.ref->f.func_call = ref->f.func_call;

                    /* use static from prototype */
                    if (sym->type.t & VT_STATIC)
                        type.t = (type.t & ~VT_EXTERN) | VT_STATIC;

                    /* If the definition has no visibility use the
                       one from prototype. */
                    if (!type.ref->a.visibility)
                        type.ref->a.visibility = ref->a.visibility;
                    /* apply other storage attributes from prototype */
                    type.ref->a.dllexport |= ref->a.dllexport;
                    type.ref->a.weak |= ref->a.weak;

                    if (!is_compatible_types(&sym->type, &type)) {
                    func_error1:
                        tcc_error("incompatible types for redefinition of '%s'",
                                  get_tok_str(v, NULL));
                    }
                    if (ref->f.func_body)
                        tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
                    /* if symbol is already defined, then put complete type */
                    sym->type = type;
                } else {
                    /* put function symbol */
                    sym = global_identifier_push(v, type.t, 0);
                    sym->type.ref = type.ref;
                }

                sym->type.ref->f.func_body = 1;
                sym->r = VT_SYM | VT_CONST;
                patch_storage(sym, &ad, NULL);

                /* static inline functions are just recorded as a kind
                   of macro. Their code will be emitted at the end of
                   the compilation unit only if they are used */
                if ((type.t & (VT_INLINE | VT_STATIC)) ==
                    (VT_INLINE | VT_STATIC)) {
                    struct InlineFunc *fn;
                    const char *filename;

                    filename = file ? file->filename : "";
                    fn = tcc_malloc(sizeof *fn + strlen(filename));
                    strcpy(fn->filename, filename);
                    skip_or_save_block(&fn->func_str);
                    dynarray_add(&tcc_state->inline_fns,
                                 &tcc_state->nb_inline_fns, fn);
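                    /* Example (illustration only): for
                           static inline int sq(int x) { return x * x; }
                       the body is only tokenized and stored here; its code is
                       emitted later by gen_inline_functions(), and only if
                       some use of sq() referenced the symbol. */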
                } else {
                    /* compute text section */
                    cur_text_section = ad.section;
                    if (!cur_text_section)
                        cur_text_section = text_section;
                    gen_function(sym);
                }
                break;
            } else {
                if (l == VT_CMP) {
                    /* find parameter in function parameter list */
                    for (sym = func_sym->next; sym; sym = sym->next)
                        if ((sym->v & ~SYM_FIELD) == v)
                            goto found;
                    tcc_error("declaration for parameter '%s' but no such parameter",
                              get_tok_str(v, NULL));
                found:
                    if (type.t & VT_STORAGE) /* 'register' is okay */
                        tcc_error("storage class specified for '%s'",
                                  get_tok_str(v, NULL));
                    if (sym->type.t != VT_VOID)
                        tcc_error("redefinition of parameter '%s'",
                                  get_tok_str(v, NULL));
                    convert_parameter_type(&type);
                } else if (type.t & VT_TYPEDEF) {
                    /* save typedefed type */
                    /* XXX: test storage specifiers ? */
                    sym = sym_find(v);
                    if (sym && sym->sym_scope == local_scope) {
                        if (!is_compatible_types(&sym->type, &type)
                            || !(sym->type.t & VT_TYPEDEF))
                            tcc_error("incompatible redefinition of '%s'",
                                      get_tok_str(v, NULL));
                    } else {
                        sym = sym_push(v, &type, 0, 0);
                    }
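                    /* Example (illustration only): at the same scope
                           typedef unsigned uint; typedef unsigned uint;   accepted, compatible
                           typedef int uint;                               incompatible redefinition
                       a redefinition is only allowed when the new type is
                       compatible and is itself a typedef. */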
                } else {
                    if ((type.t & VT_BTYPE) == VT_FUNC) {
                        /* external function definition */
                        /* specific case for func_call attribute */
                    } else if (!(type.t & VT_ARRAY)) {
                        /* not lvalue if array */
                        r |= lvalue_type(type.t);
                    }
                    has_init = (tok == '=');
                    if (has_init && (type.t & VT_VLA))
                        tcc_error("variable length array cannot be initialized");
                    if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST)) ||
                        ((type.t & VT_BTYPE) == VT_FUNC) ||
                        ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
                         !has_init && l == VT_CONST && type.ref->c < 0)) {
                        /* external variable or function */
                        /* NOTE: as GCC, uninitialized global static
                           arrays of null size are considered as
                           extern */
                        sym = external_sym(v, &type, r, &ad);
                        if (ad.alias_target) {
                            Section tsec;
                            ElfW(Sym) *esym;
                            Sym *alias_target;

                            alias_target = sym_find(ad.alias_target);
                            if (!alias_target || !alias_target->c)
                                tcc_error("unsupported forward __alias__ attribute");
                            esym = &((ElfW(Sym) *)symtab_section->data)[alias_target->c];
                            tsec.sh_num = esym->st_shndx;
                            put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
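                            /* Example (illustration only, GNU extension):
                                   int real_impl(void) { return 1; }
                                   int alias_fn(void) __attribute__((alias("real_impl")));
                               alias_fn reuses real_impl's section, value and size;
                               the target must already be defined, otherwise the
                               "unsupported forward __alias__ attribute" error fires. */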
                        }
                    } else {
                        if (type.t & VT_STATIC)
                            r |= VT_CONST;
                        decl_initializer_alloc(&type, &ad, r, has_init, v, l);
                    }
                }
                if (is_for_loop_init)
                    return 1;
            }
        }
    }
    return 0;
}

ST_FUNC void decl(int l)
{
    decl0(l, 0, NULL);
}

/* ------------------------------------------------------------------------- */