/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   anon_sym: anonymous symbol index
*/
ST_DATA int rsym, anon_sym, ind, loc;

ST_DATA Section *text_section, *data_section, *bss_section; /* predefined sections */
ST_DATA Section *cur_text_section; /* current section where function code is generated */
ST_DATA Section *last_text_section; /* to handle .previous asm directive */
#ifdef CONFIG_TCC_BCHECK
/* bound check related sections */
ST_DATA Section *bounds_section; /* contains global data bound description */
ST_DATA Section *lbounds_section; /* contains local data bound description */
#endif
ST_DATA Section *symtab_section, *strtab_section;
ST_DATA Section *stab_section, *stabstr_section;
ST_DATA Sym *sym_free_first;
ST_DATA void **sym_pools;
ST_DATA int nb_sym_pools;

ST_DATA Sym *global_stack;
ST_DATA Sym *local_stack;
ST_DATA Sym *scope_stack_bottom;
ST_DATA Sym *define_stack;
ST_DATA Sym *global_label_stack;
ST_DATA Sym *local_label_stack;

ST_DATA int vla_sp_loc_tmp; /* vla_sp_loc is set to this when the value won't be needed later */
ST_DATA int vla_sp_root_loc; /* vla_sp_loc for SP before any VLAs were pushed */
ST_DATA int *vla_sp_loc; /* pointer to the variable holding the stack slot where the stack pointer is saved when it is modified */
ST_DATA int vla_flags; /* VLA_* flags */

ST_DATA SValue __vstack[1 + VSTACK_SIZE], *vtop;

ST_DATA int const_wanted; /* true if constant wanted */
ST_DATA int nocode_wanted; /* true if no code generation wanted for an expression */
ST_DATA int global_expr;  /* true if compound literals must be allocated globally (used during initializers parsing) */
ST_DATA CType func_vt; /* current function return type (used by return instruction) */
ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
ST_DATA char *funcname;

ST_DATA CType char_pointer_type, func_old_type, int_type, size_type;
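
/* NOTE: code generation works on a compile-time value stack: 'vtop'
   points to the current top of '__vstack', and each SValue records a
   type plus where the value currently lives (VT_CONST, a register,
   a VT_LOCAL stack offset, VT_CMP flags or a VT_JMP chain), with
   VT_LVAL set when the value must still be dereferenced. */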
/* ------------------------------------------------------------------------- */

static void gen_cast(CType *type);
static inline CType *pointed_type(CType *type);
static int is_compatible_types(CType *type1, CType *type2);
static int parse_btype(CType *type, AttributeDef *ad);
static void type_decl(CType *type, AttributeDef *ad, int *v, int td);
static void parse_expr_type(CType *type);
static void decl_initializer(CType *type, Section *sec, unsigned long c, int first, int size_only);
static void block(int *bsym, int *csym, int *case_sym, int *def_sym, int case_reg, int is_expr);
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, char *asm_label, int scope);
static int decl0(int l, int is_for_loop_init);
static void expr_eq(void);
static void unary_type(CType *type);
static void vla_runtime_type_size(CType *type, int *a);
static void vla_sp_save(void);
static int is_compatible_parameter_types(CType *type1, CType *type2);
static void expr_type(CType *type);
ST_FUNC void vpush64(int ty, unsigned long long v);
ST_FUNC void vpush(CType *type);
ST_FUNC int gvtst(int inv, int t);
ST_FUNC int is_btype_size(int bt);
ST_INLN int is_float(int t)
{
    int bt;
    bt = t & VT_BTYPE;
    return bt == VT_LDOUBLE || bt == VT_DOUBLE ||
        bt == VT_FLOAT || bt == VT_QFLOAT;
}

/* we use our own 'finite' function to avoid potential problems with
   non standard math libs */
/* XXX: endianness dependent */
ST_FUNC int ieee_finite(double d)
{
    int p[4];
    memcpy(p, &d, sizeof(double));
    return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
}

ST_FUNC void test_lvalue(void)
{
    if (!(vtop->r & VT_LVAL))
        expect("lvalue");
}
/* ------------------------------------------------------------------------- */
/* symbol allocator */
static Sym *__sym_malloc(void)
{
    Sym *sym_pool, *sym, *last_sym;
    int i;

    sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
    dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);

    last_sym = sym_free_first;
    sym = sym_pool;
    for(i = 0; i < SYM_POOL_NB; i++) {
        sym->next = last_sym;
        last_sym = sym;
        sym++;
    }
    sym_free_first = last_sym;
    return last_sym;
}

static inline Sym *sym_malloc(void)
{
    Sym *sym;
    sym = sym_free_first;
    if (!sym)
        sym = __sym_malloc();
    sym_free_first = sym->next;
    return sym;
}

ST_INLN void sym_free(Sym *sym)
{
    sym->next = sym_free_first;
    tcc_free(sym->asm_label);
    sym_free_first = sym;
}
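
/* NOTE: symbols live on two stacks, 'global_stack' and 'local_stack'
   (see sym_push/sym_pop below).  In addition, each identifier's
   TokenSym keeps 'sym_struct'/'sym_identifier' pointers to the most
   recently pushed struct tag or ordinary symbol with that name, so
   that lookup by token number is O(1). */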
/* push, without hashing */
ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, long c)
{
    Sym *s;

    if (ps == &local_stack) {
        for (s = *ps; s && s != scope_stack_bottom; s = s->prev)
            if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM && s->v == v)
                tcc_error("incompatible types for redefinition of '%s'",
                          get_tok_str(v, NULL));
    }

/* find a symbol and return its associated structure. 's' is the top
   of the symbol stack */
ST_FUNC Sym *sym_find2(Sym *s, int v)

/* structure lookup */
ST_INLN Sym *struct_find(int v)
{
    v -= TOK_IDENT;
    if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
        return NULL;
    return table_ident[v]->sym_struct;
}

/* find an identifier */
ST_INLN Sym *sym_find(int v)
{
    v -= TOK_IDENT;
    if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
        return NULL;
    return table_ident[v]->sym_identifier;
}
/* push a given symbol on the symbol stack */
ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
{
    Sym *s, **ps;
    TokenSym *ts;

    s = sym_push2(ps, v, type->t, c);
    s->type.ref = type->ref;

    /* don't record fields or anonymous symbols */
    if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
        /* record symbol in token array */
        ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
        if (v & SYM_STRUCT)
            ps = &ts->sym_struct;
        else
            ps = &ts->sym_identifier;
    }

/* push a global identifier */
ST_FUNC Sym *global_identifier_push(int v, int t, int c)
{
    Sym *s, **ps;

    s = sym_push2(&global_stack, v, t, c);
    /* don't record anonymous symbol */
    if (v < SYM_FIRST_ANOM) {
        ps = &table_ident[v - TOK_IDENT]->sym_identifier;
        /* modify the top most local identifier, so that
           sym_identifier will point to 's' when popped */
        while (*ps != NULL)
            ps = &(*ps)->prev_tok;
    }

/* pop symbols until top reaches 'b' */
ST_FUNC void sym_pop(Sym **ptop, Sym *b)
{
    Sym *s, **ps;
    TokenSym *ts;
    int v;

        /* remove symbol in token array */
        if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
            ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
            if (v & SYM_STRUCT)
                ps = &ts->sym_struct;
            else
                ps = &ts->sym_identifier;
        }
static void weaken_symbol(Sym *sym)
{
    sym->type.t |= VT_WEAK;
    if (sym->c > 0) {
        int esym_type;
        ElfW(Sym) *esym;

        esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
        esym_type = ELFW(ST_TYPE)(esym->st_info);
        esym->st_info = ELFW(ST_INFO)(STB_WEAK, esym_type);
    }
}

static void apply_visibility(Sym *sym, CType *type)
{
    int vis = sym->type.t & VT_VIS_MASK;
    int vis2 = type->t & VT_VIS_MASK;
    if (vis == (STV_DEFAULT << VT_VIS_SHIFT))
        vis = vis2;
    else if (vis2 == (STV_DEFAULT << VT_VIS_SHIFT))
        ;
    else
        vis = (vis < vis2) ? vis : vis2;
    sym->type.t &= ~VT_VIS_MASK;
    sym->type.t |= vis;

    if (sym->c > 0) {
        ElfW(Sym) *esym;

        esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
        vis >>= VT_VIS_SHIFT;
        esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1)) | vis;
    }
}
/* ------------------------------------------------------------------------- */

ST_FUNC void swap(int *p, int *q)
{
    int t;
    t = *p;
    *p = *q;
    *q = t;
}

static void vsetc(CType *type, int r, CValue *vc)
{
    int v;

    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator. */
    if (vtop >= vstack) {
        v = vtop->r & VT_VALMASK;
        if (v == VT_CMP || (v & ~1) == VT_JMP)
            gv(RC_INT);
    }

/* push constant of type "type" with useless value */
ST_FUNC void vpush(CType *type)
{
    CValue cval;
    vsetc(type, VT_CONST, &cval);
}

/* push integer constant */
ST_FUNC void vpushi(int v)
{
    CValue cval;
    cval.i = v;
    vsetc(&int_type, VT_CONST, &cval);
}

/* push a pointer sized constant */
static void vpushs(addr_t v)
{
    CValue cval;
    cval.ptr_offset = v;
    vsetc(&size_type, VT_CONST, &cval);
}

/* push arbitrary 64bit constant */
ST_FUNC void vpush64(int ty, unsigned long long v)
{
    CValue cval;
    CType ctype;
    ctype.t = ty;
    ctype.ref = NULL;
    cval.ull = v;
    vsetc(&ctype, VT_CONST, &cval);
}
/* push long long constant */
static inline void vpushll(long long v)
{
    vpush64(VT_LLONG, v);
}

/* push a symbol value of TYPE */
static inline void vpushsym(CType *type, Sym *sym)
{
    CValue cval;
    cval.ptr_offset = 0;
    vsetc(type, VT_CONST | VT_SYM, &cval);
    vtop->sym = sym;
}

/* Return a static symbol pointing to a section */
ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    int v;
    Sym *sym;

    v = anon_sym++;
    sym = global_identifier_push(v, type->t | VT_STATIC, 0);
    sym->type.ref = type->ref;
    sym->r = VT_CONST | VT_SYM;
    put_extern_sym(sym, sec, offset, size);
    return sym;
}

/* push a reference to a section offset by adding a dummy symbol */
static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    vpushsym(type, get_sym_ref(type, sec, offset, size));
}
/* define a new external reference to a symbol 'v' of type 'u' */
ST_FUNC Sym *external_global_sym(int v, CType *type, int r)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t | VT_EXTERN, 0);
        s->type.ref = type->ref;
        s->r = r | VT_CONST | VT_SYM;
    }
    return s;
}

/* define a new external reference to a symbol 'v' with alternate asm
   name 'asm_label' of type 'u'. 'asm_label' is equal to NULL if there
   is no alternate name (most cases) */
static Sym *external_sym(int v, CType *type, int r, char *asm_label)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = sym_push(v, type, r | VT_CONST | VT_SYM, 0);
        s->asm_label = asm_label;
        s->type.t |= VT_EXTERN;
    } else if (s->type.ref == func_old_type.ref) {
        s->type.ref = type->ref;
        s->r = r | VT_CONST | VT_SYM;
        s->type.t |= VT_EXTERN;
    } else if (!is_compatible_types(&s->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(v, NULL));
    }
    /* Merge some storage attributes. */
    if (type->t & VT_WEAK)
        weaken_symbol(s);

    if (type->t & VT_VIS_MASK)
        apply_visibility(s, type);

    return s;
}

/* push a reference to global symbol v */
ST_FUNC void vpush_global_sym(CType *type, int v)
{
    vpushsym(type, external_global_sym(v, type, 0));
}
ST_FUNC void vset(CType *type, int r, int v)
{
    CValue cval;

    cval.i = v;
    vsetc(type, r, &cval);
}

static void vseti(int r, int v)
{
    CType type;
    type.t = VT_INT;
    type.ref = 0;
    vset(&type, r, v);
}

ST_FUNC void vswap(void)
{
    SValue tmp;

    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator. */
    if (vtop >= vstack) {
        int v = vtop->r & VT_VALMASK;
        if (v == VT_CMP || (v & ~1) == VT_JMP)
            gv(RC_INT);
    }
    tmp = vtop[0];
    vtop[0] = vtop[-1];
    vtop[-1] = tmp;

    /* XXX: +2% overall speed possible with optimized memswap
     *
     *  memswap(&vtop[0], &vtop[1], sizeof *vtop);
     */
}

ST_FUNC void vpushv(SValue *v)
{
    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    vtop++;
    *vtop = *v;
}

static void vdup(void)
{
    vpushv(vtop);
}
/* save r to the memory stack, and mark it as being free */
ST_FUNC void save_reg(int r)
{
    int l, saved, size, align;
    SValue *p, sv;
    CType *type;

    /* modify all stack values */
    for(p = vstack; p <= vtop; p++) {
        if ((p->r & VT_VALMASK) == r ||
            ((p->type.t & VT_BTYPE) == VT_LLONG && (p->r2 & VT_VALMASK) == r)) {
            /* must save value on stack if not already done */
            /* NOTE: must reload 'r' because r might be equal to r2 */
            r = p->r & VT_VALMASK;
            /* store register in the stack */
            if ((p->r & VT_LVAL) ||
                (!is_float(type->t) && (type->t & VT_BTYPE) != VT_LLONG))
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                type = &char_pointer_type;
#endif
            size = type_size(type, &align);
            loc = (loc - size) & -align;
            sv.r = VT_LOCAL | VT_LVAL;
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
            /* x86 specific: need to pop fp register ST0 if saved */
            o(0xd8dd); /* fstp %st(0) */
#endif
#if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
            /* special long long case */
            if ((type->t & VT_BTYPE) == VT_LLONG) {
#endif
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.ul */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else
                p->r = lvalue_type(p->type.t) | VT_LOCAL;
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for(r = 0; r < NB_REGS; r++) {
        if (reg_classes[r] & rc2) {
            int n;
            n = 0;
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    return get_reg(rc);
}
#endif

/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r = 0; r < NB_REGS; r++) {
        if (reg_classes[r] & rc) {
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    (p->r2 & VT_VALMASK) == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p = vstack; p <= vtop; p++) {
        /* look at second register (if long long) */
        r = p->r2 & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never come here */
    return -1;
}
/* save registers up to (vtop - n) stack entry */
ST_FUNC void save_regs(int n)
{
    int r;
    SValue *p, *p1;

    p1 = vtop - n;
    for(p = vstack; p <= p1; p++) {
        r = p->r & VT_VALMASK;
        if (r < VT_CONST)
            save_reg(r);
    }
}

/* move register 's' (of type 't') to 'r', and flush previous value of r to memory */
static void move_reg(int r, int s, int t)
/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    if (vtop->r & VT_REF && !nocode_wanted)
        gv(RC_INT);
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~(VT_VALMASK | VT_LVAL_TYPE)) | VT_LOCAL | VT_LVAL;
}

#ifdef CONFIG_TCC_BCHECK
/* generate lvalue bound code */
static void gbound(void)
{
    int lval_type;
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            lval_type = vtop->r & (VT_LVAL_TYPE | VT_LVAL);
            /* must save type because we must set it to int to get pointer address */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gen_bounded_ptr_add();
            vtop->r |= lval_type;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}
#endif
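
/* NOTE: with CONFIG_TCC_BCHECK, VT_MUSTBOUND marks values whose
   address arithmetic must go through the bound-checking helpers;
   gbound() above rewrites such lvalues so that the pointer is
   validated (gen_bounded_ptr_add/gen_bounded_ptr_deref) before it
   is dereferenced. */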
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures). */
ST_FUNC int gv(int rc)
{
    int r, bit_pos, bit_size, size, align, i;
    int rc2;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;
        int bits = 32;
        bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
        bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~(VT_BITFIELD | (-1 << VT_STRUCT_SHIFT));
        /* cast to int to propagate signedness in following ops */
        if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
            type.t = VT_LLONG;
            bits = 64;
        } else
            type.t = VT_INT;
        if ((vtop->type.t & VT_UNSIGNED) ||
            (vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;
        /* generate shifts */
        vpushi(bits - (bit_pos + bit_size));
        vpushi(bits - bit_size);
        /* NOTE: transformed to SHR if unsigned */
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            Sym *sym;
            int *ptr;
            unsigned long offset;
#if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
            CValue check;
#endif

            /* XXX: unify with initializers handling ? */
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            size = type_size(&vtop->type, &align);
            offset = (data_section->data_offset + align - 1) & -align;
            data_section->data_offset = offset;
            /* XXX: not portable yet */
#if defined(__i386__) || defined(__x86_64__)
            /* Zero pad x87 tenbyte long doubles */
            if (size == LDOUBLE_SIZE) {
                vtop->c.tab[2] &= 0xffff;
#if LDOUBLE_SIZE == 16
                vtop->c.tab[3] = 0;
#endif
            }
#endif
            ptr = section_ptr_add(data_section, size);
#if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
            for(i = 0; i < size; i++)
                ptr[i] = vtop->c.tab[size - 1 - i];
#else
            for(i = 0; i < size; i++)
                ptr[i] = vtop->c.tab[i];
#endif
            sym = get_sym_ref(&vtop->type, data_section, offset, size << 2);
            vtop->r |= VT_LVAL | VT_SYM;
            vtop->sym = sym;
            vtop->c.ptr_offset = 0;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        r = vtop->r & VT_VALMASK;
        rc2 = (rc & RC_FLOAT) ? RC_FLOAT : RC_INT;
#ifndef TCC_TARGET_ARM64
        if (rc == RC_IRET)
            rc2 = RC_LRET;
#ifdef TCC_TARGET_X86_64
        else if (rc == RC_FRET)
            rc2 = RC_QRET;
#endif
#endif

        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        if (r >= VT_CONST
            || (vtop->r & VT_LVAL)
            || !(reg_classes[r] & rc)
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
            || ((vtop->type.t & VT_BTYPE) == VT_QLONG && !(reg_classes[vtop->r2] & rc2))
            || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT && !(reg_classes[vtop->r2] & rc2))
#else
            || ((vtop->type.t & VT_BTYPE) == VT_LLONG && !(reg_classes[vtop->r2] & rc2))
#endif
            ) {
            r = get_reg(rc);
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
            if (((vtop->type.t & VT_BTYPE) == VT_QLONG) || ((vtop->type.t & VT_BTYPE) == VT_QFLOAT)) {
                int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
#else
            if ((vtop->type.t & VT_BTYPE) == VT_LLONG) {
                int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
                unsigned long long ll;
#endif
                int r2, original_type;
                original_type = vtop->type.t;
                /* two register type load : expand to two words
                   temporarily */
#if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    vtop->c.ui = ll; /* first word */
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else
#endif
                if (r >= VT_CONST || /* XXX: test to VT_CONST incorrect ? */
                    (vtop->r & VT_LVAL)) {
                    /* We do not want to modify the long long
                       pointer here, so the safest (and less
                       efficient) is to save all the other registers
                       in the stack. XXX: totally inefficient. */
                    save_regs(1);
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    vtop->type.t = addr_type;
                    gaddrof();
                    vpushi(load_size);
                    gen_op('+');
                    vtop->r |= VT_LVAL;
                    vtop->type.t = load_type;
                } else {
                    /* move registers */
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
                vtop->type.t = original_type;
            } else if ((vtop->r & VT_LVAL) && !is_float(vtop->type.t)) {
                int t1, t;
                /* lvalue of scalar type : need to use lvalue type
                   because of possible cast */
                t = vtop->type.t;
                t1 = t;
                /* compute memory access type */
                if (vtop->r & VT_REF)
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                    t = VT_PTR;
#else
                    t = VT_INT;
#endif
                else if (vtop->r & VT_LVAL_BYTE)
                    t = VT_BYTE;
                else if (vtop->r & VT_LVAL_SHORT)
                    t = VT_SHORT;
                if (vtop->r & VT_LVAL_UNSIGNED)
                    t |= VT_UNSIGNED;
                vtop->type.t = t;
                load(r, vtop);
                /* restore wanted type */
                vtop->type.t = t1;
            } else {
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if ((vtop->type.t & VT_BTYPE) == VT_DOUBLE)
            vtop->r2 = r + 1;
#endif
    }
    return r;
}
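
/* NOTE: 'rc' arguments are register-class bitmasks (RC_INT, RC_FLOAT,
   RC_IRET, ...), and reg_classes[r] gives the classes register r
   belongs to.  gv() above materializes vtop in some register of the
   wanted class, using r2 for the second word of two-register values. */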
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
ST_FUNC void gv2(int rc1, int rc2)
{
    int v;

    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    v = vtop[0].r & VT_VALMASK;
    if (v != VT_CMP && (v & ~1) != VT_JMP && rc1 <= rc2) {
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST)
            gv(rc2);
    }
}
#ifndef TCC_TARGET_ARM64
/* wrapper around RC_FRET to return a register by type */
static int rc_fret(int t)
{
#ifdef TCC_TARGET_X86_64
    if (t == VT_LDOUBLE) {
        return RC_ST0;
    }
#endif
    return RC_FRET;
}
#endif

/* wrapper around REG_FRET to return a register by type */
static int reg_fret(int t)
{
#ifdef TCC_TARGET_X86_64
    if (t == VT_LDOUBLE) {
        return TREG_ST0;
    }
#endif
    return REG_FRET;
}

/* expand long long on stack in two int registers */
static void lexpand(void)
{
    int u;

    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    gv(RC_INT);
    vdup();
    vtop[0].r = vtop[-1].r2;
    vtop[0].r2 = VT_CONST;
    vtop[-1].r2 = VT_CONST;
    vtop[0].type.t = VT_INT | u;
    vtop[-1].type.t = VT_INT | u;
}
#ifdef TCC_TARGET_ARM
/* expand long long on stack */
ST_FUNC void lexpand_nr(void)
{
    int u, v;

    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    vdup();
    vtop->r2 = VT_CONST;
    vtop->type.t = VT_INT | u;
    v = vtop[-1].r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        vtop[-1].c.ui = vtop->c.ull;
        vtop->c.ui = vtop->c.ull >> 32;
        vtop->r = VT_CONST;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        vtop->c.ui += 4;
        vtop->r = vtop[-1].r;
    } else if (v > VT_CONST) {
        vtop--;
        lexpand();
    } else
        vtop->r = vtop[-1].r2;
    vtop[-1].r2 = VT_CONST;
    vtop[-1].type.t = VT_INT | u;
}
#endif

/* build a long long from two ints */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
/* rotate n first stack elements to the bottom
   I1 ... In -> I2 ... In I1 [top is right]
*/
ST_FUNC void vrotb(int n)
{
    int i;
    SValue tmp;

    tmp = vtop[-n + 1];
    for(i = -n + 1; i != 0; i++)
        vtop[i] = vtop[i + 1];
    vtop[0] = tmp;
}

/* rotate the n elements before entry e towards the top
   I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
*/
ST_FUNC void vrote(SValue *e, int n)
{
    int i;
    SValue tmp;

    tmp = *e;
    for(i = 0; i < n - 1; i++)
        e[-i] = e[-i - 1];
    e[-n + 1] = tmp;
}

/* rotate n first stack elements to the top
   I1 ... In -> In I1 ... I(n-1) [top is right]
*/
ST_FUNC void vrott(int n)
{
    vrote(vtop, n);
}

/* pop stack value */
ST_FUNC void vpop(void)
{
    int v;
    v = vtop->r & VT_VALMASK;
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
    /* for x86, we need to pop the FP stack */
    if (v == TREG_ST0 && !nocode_wanted) {
        o(0xd8dd); /* fstp %st(0) */
    } else
#endif
    if (v == VT_JMP || v == VT_JMPI) {
        /* need to put correct jump if && or || without test */
    }
    vtop--;
}
/* convert stack entry to register and duplicate its value in another
   register */
static void gv_dup(void)
{
    int rc, t, r, r1;
    SValue sv;

    t = vtop->type.t;
    if ((t & VT_BTYPE) == VT_LLONG) {
        /* stack: H L L1 H1 */
    } else {
        /* duplicate value */
        rc = RC_INT;
        sv.type.t = VT_INT;
        if (is_float(t)) {
            rc = RC_FLOAT;
#ifdef TCC_TARGET_X86_64
            if ((t & VT_BTYPE) == VT_LDOUBLE) {
                rc = RC_ST0;
            }
#endif
            sv.type.t = t;
        }
        r = gv(rc);
        r1 = get_reg(rc);
        sv.r = r;
        load(r1, &sv); /* move r to r1 */
        vdup();
        /* duplicates value */
        if (r != r1)
            vtop->r = r1;
    }
}

/* Generate value test
 *
 * Generate a test for any value (jump, comparison and integers) */
ST_FUNC int gvtst(int inv, int t)
{
    int v = vtop->r & VT_VALMASK;
    if (v != VT_CMP && v != VT_JMP && v != VT_JMPI) {
        vpushi(0);
        gen_op(TOK_NE);
    }
    if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
        /* constant jmp optimization */
        if ((vtop->c.i != 0) != inv)
            t = gjmp(t);
        vtop--;
        return t;
    }
    return gtst(inv, t);
}
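
/* NOTE: on 32-bit targets long long arithmetic is synthesized from
   32-bit operations: division, modulo and wide shifts fall back to
   runtime helper calls (__divdi3, __ashldi3, ...), while add, sub,
   mul and comparisons are expanded inline on the value stack. */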
#if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
/* generate CPU independent (unsigned) long long operations */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_LRET;

        func = TOK___divdi3;

        func = TOK___udivdi3;

        func = TOK___moddi3;

        func = TOK___umoddi3;

        /* call generic long long function */
        vpush_global_sym(&func_old_type, func);
        vtop->r2 = reg_lret;

        /* stack: L1 H1 L2 H2 */
            vtop[-2] = vtop[-3];
        /* stack: H1 H2 L1 L2 */
            /* stack: H1 H2 L1 L2 ML MH */
            /* stack: ML MH H1 H2 L1 L2 */
            /* stack: ML MH H1 L2 H2 L1 */
            /* stack: ML MH M1 M2 */
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            /* stack: H1 H2 (L1 op L2) */
            gen_op(op1 + 1); /* TOK_xxxC2 */
            /* stack: H1 H2 (L1 op L2) */
            /* stack: (L1 op L2) H1 H2 */
            /* stack: (L1 op L2) (H1 op H2) */

        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            /* stack: L H shift */
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            if (op != TOK_SAR) {

        /* XXX: should provide a faster fallback on x86 ? */
            func = TOK___ashrdi3;
            func = TOK___lshrdi3;
            func = TOK___ashldi3;

        /* compare operations */
        /* stack: L1 H1 L2 H2 */
        vtop[-1] = vtop[-2];
        /* stack: L1 L2 H1 H2 */
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        else if (op1 == TOK_GT)
        else if (op1 == TOK_ULT)
        else if (op1 == TOK_UGT)
        if (op1 != TOK_NE) {
            /* generate non equal test */
            /* XXX: NOT PORTABLE yet */
#if defined(TCC_TARGET_I386)
            b = psym(0x850f, 0);
#elif defined(TCC_TARGET_ARM)
            o(0x1A000000 | encbranch(ind, 0, 1));
#elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
            tcc_error("not implemented");
#else
#error not supported
#endif
        /* compare low. Always unsigned */
        else if (op1 == TOK_LE)
        else if (op1 == TOK_GT)
        else if (op1 == TOK_GE)
/* handle integer constant optimizations and various machine
   independent opt */
static void gen_opic(int op)
{
    int c1, c2, t1, t2, n;
    SValue *v1, *v2;
    long long l1, l2;
    typedef unsigned long long U;

    v1 = vtop - 1;
    v2 = vtop;
    t1 = v1->type.t & VT_BTYPE;
    t2 = v2->type.t & VT_BTYPE;

    else if (v1->type.t & VT_UNSIGNED)
        l1 = v1->c.ui;

    else if (v2->type.t & VT_UNSIGNED)
        l2 = v2->c.ui;

    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    if (c1 && c2) {
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;
            /* if division by zero, generate explicit division */
                    tcc_error("division by zero in constant");
            default: l1 /= l2; break;
            case '%': l1 %= l2; break;
            case TOK_UDIV: l1 = (U)l1 / l2; break;
            case TOK_UMOD: l1 = (U)l1 % l2; break;
        case TOK_SHL: l1 <<= l2; break;
        case TOK_SHR: l1 = (U)l1 >> l2; break;
        case TOK_SAR: l1 >>= l2; break;
        /* tests */
        case TOK_ULT: l1 = (U)l1 < (U)l2; break;
        case TOK_UGE: l1 = (U)l1 >= (U)l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = (U)l1 <= (U)l2; break;
        case TOK_UGT: l1 = (U)l1 > (U)l2; break;
        case TOK_LT: l1 = l1 < l2; break;
        case TOK_GE: l1 = l1 >= l2; break;
        case TOK_LE: l1 = l1 <= l2; break;
        case TOK_GT: l1 = l1 > l2; break;
        /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        }
    } else {
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*')) {
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (!const_wanted &&
            c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
        } else if (!const_wanted &&
                   c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (l2 == -1 && op == '|') ||
                          (l2 == 0xffffffff && t2 != VT_LLONG && op == '|') ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                   ((op == '+' || op == '-' || op == '|' || op == '^' ||
                     op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
            /* filter out NOP operations like x*1, x-0, x&-1... */
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                else if (op == TOK_PDIV)
        } else if (c2 && (op == '+' || op == '-') &&
                   (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
                    || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
            /* symbol + constant case */
        }
        if (!nocode_wanted) {
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
/* generate a floating point operation with constant propagation */
static void gen_opif(int op)
{
    int c1, c2;
    SValue *v1, *v2;
    long double f1, f2;

    v1 = vtop - 1;
    v2 = vtop;
    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    if (c1 && c2) {
        if (v1->type.t == VT_FLOAT) {
            f1 = v1->c.f;
            f2 = v2->c.f;
        } else if (v1->type.t == VT_DOUBLE) {
            f1 = v1->c.d;
            f2 = v2->c.d;
        } else {
            f1 = v1->c.ld;
            f2 = v2->c.ld;
        }

        /* NOTE: we only do constant propagation if finite number (not
           NaN or infinity) (ANSI spec) */
        if (!ieee_finite(f1) || !ieee_finite(f2))
            goto general_case;

        switch(op) {
        case '+': f1 += f2; break;
        case '-': f1 -= f2; break;
        case '*': f1 *= f2; break;
        case '/':
            if (f2 == 0.0) {
                if (const_wanted)
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            f1 /= f2;
            break;
            /* XXX: also handles tests ? */
        default:
            goto general_case;
        }
        /* XXX: overflow test ? */
        if (v1->type.t == VT_FLOAT) {
            v1->c.f = f1;
        } else if (v1->type.t == VT_DOUBLE) {
            v1->c.d = f1;
        } else {
            v1->c.ld = f1;
        }
        vtop--;
    } else {
    general_case:
        if (!nocode_wanted) {
            gen_opf(op);
        } else {
            vtop--;
        }
    }
}

static int pointed_size(CType *type)
{
    int align;
    return type_size(pointed_type(type), &align);
}

static void vla_runtime_pointed_size(CType *type)
{
    int align;
    vla_runtime_type_size(pointed_type(type), &align);
}

static inline int is_null_pointer(SValue *p)
{
    if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
        return 0;
    return ((p->type.t & VT_BTYPE) == VT_INT && p->c.i == 0) ||
        ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.ll == 0) ||
        ((p->type.t & VT_BTYPE) == VT_PTR && p->c.ptr_offset == 0);
}

static inline int is_integer_btype(int bt)
{
    return (bt == VT_BYTE || bt == VT_SHORT ||
            bt == VT_INT || bt == VT_LLONG);
}
/* check types for comparison or subtraction of pointers */
static void check_comparison_pointer_types(SValue *p1, SValue *p2, int op)
{
    CType *type1, *type2, tmp_type1, tmp_type2;
    int bt1, bt2;

    /* null pointers are accepted for all comparisons as gcc */
    if (is_null_pointer(p1) || is_null_pointer(p2))
        return;
    type1 = &p1->type;
    type2 = &p2->type;
    bt1 = type1->t & VT_BTYPE;
    bt2 = type2->t & VT_BTYPE;
    /* accept comparison between pointer and integer with a warning */
    if ((is_integer_btype(bt1) || is_integer_btype(bt2)) && op != '-') {
        if (op != TOK_LOR && op != TOK_LAND)
            tcc_warning("comparison between pointer and integer");
        return;
    }

    /* both must be pointers or implicit function pointers */
    if (bt1 == VT_PTR) {
        type1 = pointed_type(type1);
    } else if (bt1 != VT_FUNC)
        goto invalid_operands;

    if (bt2 == VT_PTR) {
        type2 = pointed_type(type2);
    } else if (bt2 != VT_FUNC) {
    invalid_operands:
        tcc_error("invalid operands to binary %s", get_tok_str(op, NULL));
    }
    if ((type1->t & VT_BTYPE) == VT_VOID ||
        (type2->t & VT_BTYPE) == VT_VOID)
        return;
    tmp_type1 = *type1;
    tmp_type2 = *type2;
    tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
    tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT | VT_VOLATILE);
    if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
        /* gcc-like error if '-' is used */
        if (op == '-')
            goto invalid_operands;
        else
            tcc_warning("comparison of distinct pointer types lacks a cast");
    }
}
/* generic gen_op: handles types problems */
ST_FUNC void gen_op(int op)
{
    int u, t1, t2, bt1, bt2, t;
    CType type1;

    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        if (op >= TOK_ULT && op <= TOK_LOR) {
            check_comparison_pointer_types(vtop - 1, vtop, op);
            /* pointers are handled as unsigned */
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
            t = VT_LLONG | VT_UNSIGNED;
#else
            t = VT_INT | VT_UNSIGNED;
#endif
            goto std_op;
        }
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
            if (op != '-')
                tcc_error("cannot use pointers here");
            check_comparison_pointer_types(vtop - 1, vtop, op);
            /* XXX: check that types are compatible */
            if (vtop[-1].type.t & VT_VLA) {
                vla_runtime_pointed_size(&vtop[-1].type);
            } else {
                vpushi(pointed_size(&vtop[-1].type));
            }
            /* set to integer type */
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
            vtop->type.t = VT_LLONG;
#else
            vtop->type.t = VT_INT;
#endif
        } else {
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                tcc_error("cannot use pointers here");
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                vswap();
            }
            type1 = vtop[-1].type;
            type1.t &= ~VT_ARRAY;
            if (vtop[-1].type.t & VT_VLA)
                vla_runtime_pointed_size(&vtop[-1].type);
            else {
                u = pointed_size(&vtop[-1].type);
                if (u < 0)
                    tcc_error("unknown array element size");
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                vpushll(u);
#else
                /* XXX: cast to int ? (long long case) */
                vpushi(u);
#endif
            }
            /* #ifdef CONFIG_TCC_BCHECK
               The main reason for removing this code:
                   fprintf(stderr, "v+i-j = %p\n", v+i-j);
                   fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
               When this code is on, then the output looks like
                   v+(i-j) = 0xbff84000
            */
            /* if evaluating constant expression, no code should be
               generated, so no bound check */
            if (tcc_state->do_bounds_check && !const_wanted) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                gen_bounded_ptr_add();
            }
            /* put again type if gen_opic() swapped operands */
            vtop->type = type1;
        }
    } else if (is_float(bt1) || is_float(bt2)) {
        /* compute bigger type and do implicit casts */
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
            t = VT_DOUBLE;
        } else {
            t = VT_FLOAT;
        }
        /* floats can only be used for a few operations */
        if (op != '+' && op != '-' && op != '*' && op != '/' &&
            (op < TOK_ULT || op > TOK_GT))
            tcc_error("invalid operands for binary operation");
        goto std_op;
    } else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
        t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
        if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (t | VT_UNSIGNED))
            t |= VT_UNSIGNED;
        goto std_op;
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        t = VT_LLONG;
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
            t |= VT_UNSIGNED;
        goto std_op;
    } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        tcc_error("comparison of struct");
    } else {
        /* integer operations */
        t = VT_INT;
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
            t |= VT_UNSIGNED;
    std_op:
        /* XXX: currently, some unsigned operations are explicit, so
           we modify them here */
        if (t & VT_UNSIGNED) {
            if (op == TOK_SAR)
                op = TOK_SHR;
            else if (op == TOK_LT)
                op = TOK_ULT;
            else if (op == TOK_GT)
                op = TOK_UGT;
            else if (op == TOK_LE)
                op = TOK_ULE;
            else if (op == TOK_GE)
                op = TOK_UGE;
        }
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
            type1.t = VT_INT;
        if (op >= TOK_ULT && op <= TOK_GT) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
        }
    }
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL && !nocode_wanted)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
}
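
/* NOTE: unsigned 64-bit <-> floating point conversions are not
   handled natively by the code generators used elsewhere, so the
   two wrappers below route those cases through runtime helper
   functions (__floatundisf/__floatundidf/..., __fixunssfdi/...). */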
#ifndef TCC_TARGET_ARM
/* generic itof for unsigned long long case */
static void gen_cvt_itof1(int t)
{
#ifdef TCC_TARGET_ARM64
    gen_cvt_itof(t);
#else
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {

        if (t == VT_FLOAT)
            vpush_global_sym(&func_old_type, TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_global_sym(&func_old_type, TOK___floatundixf);
#endif
        else
            vpush_global_sym(&func_old_type, TOK___floatundidf);
        vtop->r = reg_fret(t);
    } else {
        gen_cvt_itof(t);
    }
#endif
}
#endif

/* generic ftoi for unsigned long long case */
static void gen_cvt_ftoi1(int t)
{
#ifdef TCC_TARGET_ARM64
    gen_cvt_ftoi(t);
#else
    int st;

    if (t == (VT_LLONG | VT_UNSIGNED)) {
        /* not handled natively */
        st = vtop->type.t & VT_BTYPE;
        if (st == VT_FLOAT)
            vpush_global_sym(&func_old_type, TOK___fixunssfdi);
#if LDOUBLE_SIZE != 8
        else if (st == VT_LDOUBLE)
            vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
#endif
        else
            vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
        vtop->r2 = REG_LRET;
    } else {
        gen_cvt_ftoi(t);
    }
#endif
}

/* force char or short cast */
static void force_charshort_cast(int t)
{
    int bits, dbt;
    dbt = t & VT_BTYPE;
    /* XXX: add optimization if lvalue : just change type and offset */
    if (dbt == VT_BYTE)
        bits = 8;
    else
        bits = 16;
    if (t & VT_UNSIGNED) {
        vpushi((1 << bits) - 1);
        gen_op('&');
    } else {
        bits = 32 - bits;
        vpushi(bits);
        gen_op(TOK_SHL);
        /* result must be signed or the SAR is converted to an SHL
           This was not the case when "t" was a signed short
           and the last value on the stack was an unsigned int */
        vtop->type.t &= ~VT_UNSIGNED;
        vpushi(bits);
        gen_op(TOK_SAR);
    }
}
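
/* NOTE: char/short values are kept in full registers; VT_MUSTCAST is
   a delayed-cast flag (set e.g. by vstore() below), and
   force_charshort_cast() performs the actual truncation (mask for
   unsigned, shift left/right for signed) only when the value is
   really needed. */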
/* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
static void gen_cast(CType *type)
{
    int sbt, dbt, sf, df, c, p;

    /* special delayed cast for char/short */
    /* XXX: in some cases (multiple cascaded casts), it may still
       be incorrect */
    if (vtop->r & VT_MUSTCAST) {
        vtop->r &= ~VT_MUSTCAST;
        force_charshort_cast(vtop->type.t);
    }

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD && !nocode_wanted) {
        gv(RC_INT);
    }

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);

    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
        p = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM);
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;

            if (df) {
                if ((sbt & VT_BTYPE) == VT_LLONG) {
                    if (sbt & VT_UNSIGNED)
                        vtop->c.ld = vtop->c.ull;
                    else
                        vtop->c.ld = vtop->c.ll;
                } else if (!sf) {
                    if (sbt & VT_UNSIGNED)
                        vtop->c.ld = vtop->c.ui;
                    else
                        vtop->c.ld = vtop->c.i;
                }
                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == (VT_LLONG|VT_UNSIGNED)) {
                vtop->c.ull = (unsigned long long)vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                if (sf)
                    vtop->c.ll = (long long)vtop->c.ld;
                else if (sbt == (VT_LLONG|VT_UNSIGNED))
                    vtop->c.ll = vtop->c.ull;
                else if (sbt & VT_UNSIGNED)
                    vtop->c.ll = vtop->c.ui;
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                else if (sbt == VT_PTR)
                    ;
#endif
                else if (sbt != VT_LLONG)
                    vtop->c.ll = vtop->c.i;

                if (dbt == (VT_LLONG|VT_UNSIGNED))
                    vtop->c.ull = vtop->c.ll;
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.ll != 0);
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                else if (dbt == VT_PTR)
                    ;
#endif
                else if (dbt != VT_LLONG) {
                    int s = 0;
                    if ((dbt & VT_BTYPE) == VT_BYTE)
                        s = 24;
                    else if ((dbt & VT_BTYPE) == VT_SHORT)
                        s = 16;
                    if (dbt & VT_UNSIGNED)
                        vtop->c.ui = ((unsigned int)vtop->c.ll << s) >> s;
                    else
                        vtop->c.i = ((int)vtop->c.ll << s) >> s;
                }
            }
        } else if (p && dbt == VT_BOOL) {
        } else if (!nocode_wanted) {
            /* non constant case: generate code */
            /* convert from fp to fp */
            /* convert int to fp */
            /* convert fp to int */
            if (dbt == VT_BOOL) {
            } else {
                /* we handle char/short/etc... with generic code */
                if (dbt != (VT_INT | VT_UNSIGNED) &&
                    dbt != (VT_LLONG | VT_UNSIGNED) &&
                    dbt != VT_LLONG)
                    dbt = VT_INT;
                if (dbt == VT_INT && (type->t & (VT_BTYPE | VT_UNSIGNED)) != dbt) {
                    /* additional cast for char/short... */
                    vtop->type.t = dbt;
                    gen_cast(type);
                }
            }
#if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
        } else if ((dbt & VT_BTYPE) == VT_LLONG) {
            if ((sbt & VT_BTYPE) != VT_LLONG && !nocode_wanted) {
                /* scalar to long long */
                /* machine independent conversion */
                /* generate high word */
                if (sbt == (VT_INT | VT_UNSIGNED)) {
                } else {
                    if (sbt == VT_PTR) {
                        /* cast from pointer to int before we apply
                           shift operation, which pointers don't support */
                        gen_cast(&int_type);
                    }
                }
                /* patch second register */
                vtop[-1].r2 = vtop->r;
            }
#else
        } else if ((dbt & VT_BTYPE) == VT_LLONG ||
                   (dbt & VT_BTYPE) == VT_PTR ||
                   (dbt & VT_BTYPE) == VT_FUNC) {
            if ((sbt & VT_BTYPE) != VT_LLONG &&
                (sbt & VT_BTYPE) != VT_PTR &&
                (sbt & VT_BTYPE) != VT_FUNC && !nocode_wanted) {
                /* need to convert from 32bit to 64bit */
                if (sbt != (VT_INT | VT_UNSIGNED)) {
#if defined(TCC_TARGET_ARM64)
#elif defined(TCC_TARGET_X86_64)
                    /* x86_64 specific: movslq */
                    o(0xc0 + (REG_VALUE(r) << 3) + REG_VALUE(r));
#endif
                }
            }
#endif
        } else if (dbt == VT_BOOL) {
            /* scalar to bool */
        } else if ((dbt & VT_BTYPE) == VT_BYTE ||
                   (dbt & VT_BTYPE) == VT_SHORT) {
            if (sbt == VT_PTR) {
                vtop->type.t = VT_INT;
                tcc_warning("nonportable conversion from pointer to char/short");
            }
            force_charshort_cast(dbt);
        } else if ((dbt & VT_BTYPE) == VT_INT) {
            if (sbt == VT_LLONG && !nocode_wanted) {
                /* from long long: just take low order word */
            }
            /* if lvalue and single word type, nothing to do because
               the lvalue already contains the real type size (see
               VT_LVAL_xxx constants) */
        }
    } else if ((dbt & VT_BTYPE) == VT_PTR && !(vtop->r & VT_LVAL)) {
        /* if we are casting between pointer types,
           we must update the VT_LVAL_xxx size */
        vtop->r = (vtop->r & ~VT_LVAL_TYPE)
            | (lvalue_type(type->ref->type.t) & VT_LVAL_TYPE);
    }
/* return type size as known at compile time. Put alignment at 'a' */
ST_FUNC int type_size(CType *type, int *a)
{
    Sym *s;
    int bt;

    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union */
        s = type->ref;
        *a = s->r;
        return s->c;
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;

            s = type->ref;
            ts = type_size(&s->type, a);

            if (ts < 0 && s->c < 0)
                ts = -ts;

            return ts * s->c;
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
        }
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
        *a = 8;
#else
        *a = 4;
#endif
#elif defined(TCC_TARGET_ARM)
#ifdef TCC_ARM_EABI
        *a = 8;
#else
        *a = 4;
#endif
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_ENUM || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
    }
}

/* push type size as known at runtime time on top of value stack. Put
   alignment at 'a' */
ST_FUNC void vla_runtime_type_size(CType *type, int *a)
{
    if (type->t & VT_VLA) {
        vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
    } else {
        vpushi(type_size(type, a));
    }
}

static void vla_sp_save(void) {
    if (!(vla_flags & VLA_SP_LOC_SET)) {
        *vla_sp_loc = (loc -= PTR_SIZE);
        vla_flags |= VLA_SP_LOC_SET;
    }
    if (!(vla_flags & VLA_SP_SAVED)) {
        gen_vla_sp_save(*vla_sp_loc);
        vla_flags |= VLA_SP_SAVED;
    }
}
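
/* NOTE: for variable length arrays the current stack pointer is saved
   in a local slot (*vla_sp_loc); VLA_SP_LOC_SET records that the slot
   has been reserved and VLA_SP_SAVED that the save code has already
   been emitted, so vla_sp_save() can be called repeatedly. */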
/* return the pointed type of t */
static inline CType *pointed_type(CType *type)
{
    return &type->ref->type;
}

/* modify type so that it is a pointer to type. */
ST_FUNC void mk_pointer(CType *type)
{
    Sym *s;
    s = sym_push(SYM_FIELD, type, 0, -1);
    type->t = VT_PTR | (type->t & ~VT_TYPE);
    type->ref = s;
}

/* compare function types. OLD functions match any new functions */
static int is_compatible_func(CType *type1, CType *type2)
{
    Sym *s1, *s2;

    s1 = type1->ref;
    s2 = type2->ref;
    if (!is_compatible_types(&s1->type, &s2->type))
        return 0;
    /* check func_call */
    if (s1->a.func_call != s2->a.func_call)
        return 0;
    /* XXX: not complete */
    if (s1->c == FUNC_OLD || s2->c == FUNC_OLD)
        return 1;
    if (s1->c != s2->c)
        return 0;
    while (s1 != NULL) {
        if (s2 == NULL)
            return 0;
        if (!is_compatible_parameter_types(&s1->type, &s2->type))
            return 0;
        s1 = s1->next;
        s2 = s2->next;
    }
    if (s2)
        return 0;
    return 1;
}
/* return true if type1 and type2 are the same.  If unqualified is
   true, qualifiers on the types are ignored.

   - enums are not checked as gcc __builtin_types_compatible_p ()
 */
static int compare_types(CType *type1, CType *type2, int unqualified)
{
    int bt1, t1, t2;

    t1 = type1->t & VT_TYPE;
    t2 = type2->t & VT_TYPE;
    if (unqualified) {
        /* strip qualifiers before comparing */
        t1 &= ~(VT_CONSTANT | VT_VOLATILE);
        t2 &= ~(VT_CONSTANT | VT_VOLATILE);
    }
    /* Default Vs explicit signedness only matters for char */
    if ((t1 & VT_BTYPE) != VT_BYTE) {
        t1 &= ~VT_DEFSIGN;
        t2 &= ~VT_DEFSIGN;
    }
    /* XXX: bitfields ? */
    if (t1 != t2)
        return 0;
    /* test more complicated cases */
    bt1 = t1 & VT_BTYPE;
    if (bt1 == VT_PTR) {
        type1 = pointed_type(type1);
        type2 = pointed_type(type2);
        return is_compatible_types(type1, type2);
    } else if (bt1 == VT_STRUCT) {
        return (type1->ref == type2->ref);
    } else if (bt1 == VT_FUNC) {
        return is_compatible_func(type1, type2);
    } else {
        return 1;
    }
}

/* return true if type1 and type2 are exactly the same (including
   qualifiers). */
static int is_compatible_types(CType *type1, CType *type2)
{
    return compare_types(type1, type2, 0);
}

/* return true if type1 and type2 are the same (ignoring qualifiers).
 */
static int is_compatible_parameter_types(CType *type1, CType *type2)
{
    return compare_types(type1, type2, 1);
}
/* print a type. If 'varstr' is not NULL, then the variable is also
   printed in the type */
/* XXX: add array and function pointers */
static void type_to_str(char *buf, int buf_size,
                        CType *type, const char *varstr)
{
    int bt, v, t;
    Sym *s, *sa;
    char buf1[256];
    const char *tstr;

    t = type->t & VT_TYPE;
    bt = t & VT_BTYPE;
    if (t & VT_CONSTANT)
        pstrcat(buf, buf_size, "const ");
    if (t & VT_VOLATILE)
        pstrcat(buf, buf_size, "volatile ");
    if ((t & (VT_DEFSIGN | VT_UNSIGNED)) == (VT_DEFSIGN | VT_UNSIGNED))
        pstrcat(buf, buf_size, "unsigned ");
    else if (t & VT_DEFSIGN)
        pstrcat(buf, buf_size, "signed ");

        tstr = "long double";
        pstrcat(buf, buf_size, tstr);

        if (bt == VT_STRUCT)
            tstr = "struct ";
        else
            tstr = "enum ";
        pstrcat(buf, buf_size, tstr);
        v = type->ref->v & ~SYM_STRUCT;
        if (v >= SYM_FIRST_ANOM)
            pstrcat(buf, buf_size, "<anonymous>");
        else
            pstrcat(buf, buf_size, get_tok_str(v, NULL));

        type_to_str(buf, buf_size, &s->type, varstr);
        pstrcat(buf, buf_size, "(");
        while (sa != NULL) {
            type_to_str(buf1, sizeof(buf1), &sa->type, NULL);
            pstrcat(buf, buf_size, buf1);
            if (sa)
                pstrcat(buf, buf_size, ", ");
        }
        pstrcat(buf, buf_size, ")");

        pstrcpy(buf1, sizeof(buf1), "*");
        if (varstr)
            pstrcat(buf1, sizeof(buf1), varstr);
        type_to_str(buf, buf_size, &s->type, buf1);

    if (varstr) {
        pstrcat(buf, buf_size, " ");
        pstrcat(buf, buf_size, varstr);
    }
/* verify type compatibility to store vtop in 'dt' type, and generate
   casts if needed. */
static void gen_assign_cast(CType *dt)
{
    CType *st, *type1, *type2, tmp_type1, tmp_type2;
    char buf1[256], buf2[256];
    int dbt, sbt;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (sbt == VT_VOID || dbt == VT_VOID) {
        if (sbt == VT_VOID && dbt == VT_VOID)
            ; /* It is Ok if both are void:
                 gcc accepts this program */
        else
            tcc_error("cannot cast from/to void");
    }
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");

        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            goto type_ok;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            goto type_ok;
        }
        type1 = pointed_type(dt);
        /* a function is implicitly a function pointer */
        if (sbt == VT_FUNC) {
            if ((type1->t & VT_BTYPE) != VT_VOID &&
                !is_compatible_types(pointed_type(dt), st))
                tcc_warning("assignment from incompatible pointer type");
            goto type_ok;
        }
        type2 = pointed_type(st);
        if ((type1->t & VT_BTYPE) == VT_VOID ||
            (type2->t & VT_BTYPE) == VT_VOID) {
            /* void * can match anything */
        } else {
            /* exact type match, except for unsigned */
            tmp_type1 = *type1;
            tmp_type2 = *type2;
            tmp_type1.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT |
                             VT_VOLATILE);
            tmp_type2.t &= ~(VT_DEFSIGN | VT_UNSIGNED | VT_CONSTANT |
                             VT_VOLATILE);
            if (!is_compatible_types(&tmp_type1, &tmp_type2))
                tcc_warning("assignment from incompatible pointer type");
        }
        /* check const and volatile */
        if ((!(type1->t & VT_CONSTANT) && (type2->t & VT_CONSTANT)) ||
            (!(type1->t & VT_VOLATILE) && (type2->t & VT_VOLATILE)))
            tcc_warning("assignment discards qualifiers from pointer target type");

        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        }
        /* XXX: more tests */

        tmp_type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
        tmp_type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
        if (!is_compatible_types(&tmp_type1, &tmp_type2)) {
            type_to_str(buf1, sizeof(buf1), st, NULL);
            type_to_str(buf2, sizeof(buf2), dt, NULL);
            tcc_error("cannot cast '%s' to '%s'", buf1, buf2);
        }
/* store vtop in lvalue pushed on stack */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, t, size, align, bit_size, bit_pos, rc, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;
    if ((((sbt == VT_INT || sbt == VT_SHORT) && dbt == VT_BYTE) ||
         (sbt == VT_INT && dbt == VT_SHORT))
        && !(vtop->type.t & VT_BITFIELD)) {
        /* optimize char/short casts */
        delayed_cast = VT_MUSTCAST;
        vtop->type.t = ft & (VT_TYPE & ~(VT_BITFIELD | (-1 << VT_STRUCT_SHIFT)));
        /* XXX: factorize */
        if (ft & VT_CONSTANT)
            tcc_warning("assignment of read-only location");
    } else {
        delayed_cast = 0;
        if (!(ft & VT_BITFIELD))
            gen_assign_cast(&vtop[-1].type);
    }

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        /* XXX: optimize if small size */
        if (!nocode_wanted) {
            size = type_size(&vtop->type, &align);
            vtop->type.t = VT_PTR;
            /* address of memcpy() */
            if (!(align & 7))
                vpush_global_sym(&func_old_type, TOK_memcpy8);
            else if (!(align & 3))
                vpush_global_sym(&func_old_type, TOK_memcpy4);
            else
                vpush_global_sym(&func_old_type, TOK_memcpy);
            vtop->type.t = VT_PTR;
        }
        /* leave source on stack */
    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = (ft >> VT_STRUCT_SHIFT) & 0x3f;
        bit_size = (ft >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~(VT_BITFIELD | (-1 << VT_STRUCT_SHIFT));

        if ((ft & VT_BTYPE) == VT_BOOL) {
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }

        /* duplicate destination */
        vdup();
        vtop[-1] = vtop[-2];

        /* mask and shift source */
        if ((ft & VT_BTYPE) != VT_BOOL) {
            if ((ft & VT_BTYPE) == VT_LLONG) {
                vpushll((1ULL << bit_size) - 1ULL);
            } else {
                vpushi((1 << bit_size) - 1);
            }
        }
        /* load destination, mask and or with source */
        if ((ft & VT_BTYPE) == VT_LLONG) {
            vpushll(~(((1ULL << bit_size) - 1ULL) << bit_pos));
        } else {
            vpushi(~(((1 << bit_size) - 1) << bit_pos));
        }
        /* ... and discard */
    } else {
        if (!nocode_wanted) {
#ifdef CONFIG_TCC_BCHECK
            /* bound check case */
            if (vtop[-1].r & VT_MUSTBOUND) {
            }
#endif
#ifdef TCC_TARGET_X86_64
            if ((ft & VT_BTYPE) == VT_LDOUBLE) {
                rc = RC_ST0;
            } else if ((ft & VT_BTYPE) == VT_QFLOAT) {
                rc = RC_FRET;
            }
#endif
            r = gv(rc); /* generate value */
            /* if lvalue was saved on stack, must read it */
            if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
                SValue sv;
                t = get_reg(RC_INT);
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                sv.type.t = VT_PTR;
#else
                sv.type.t = VT_INT;
#endif
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.ul = vtop[-1].c.ul;
                vtop[-1].r = t | VT_LVAL;
            }
            /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
            if (((ft & VT_BTYPE) == VT_QLONG) || ((ft & VT_BTYPE) == VT_QFLOAT)) {
                int addr_type = VT_LLONG, load_size = 8, load_type = ((vtop->type.t & VT_BTYPE) == VT_QLONG) ? VT_LLONG : VT_DOUBLE;
#else
            if ((ft & VT_BTYPE) == VT_LLONG) {
                int addr_type = VT_INT, load_size = 4, load_type = VT_INT;
#endif
                vtop[-1].type.t = load_type;
                /* convert to int to increment easily */
                vtop->type.t = addr_type;
                vtop[-1].type.t = load_type;
                /* XXX: it works because r2 is spilled last ! */
                store(vtop->r2, vtop - 1);
            }
        }
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
        vtop->r |= delayed_cast;
    }
}
/* post defines POST/PRE add. c is the token ++ or -- */
ST_FUNC void inc(int post, int c)
{
    vdup(); /* save lvalue */
    gv_dup(); /* duplicate value */
    vdup(); /* duplicate value */
    vpushi(c - TOK_MID);
    vstore(); /* store value */
    vpop(); /* if post op, return saved value */
}
/* Parse GNUC __attribute__ extension. Currently, the following
   extensions are recognized:
   - aligned(n) : set data/function alignment.
   - packed : force data alignment to 1
   - section(x) : generate data/code in this section.
   - unused : currently ignored, but may be used someday.
   - regparm(n) : pass function parameters in registers (i386 only)
 */
static void parse_attribute(AttributeDef *ad)
{
    int t, n;

    while (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2) {
        while (tok != ')') {
            if (tok < TOK_IDENT)
                expect("attribute name");

            expect("section name");
            ad->section = find_section(tcc_state, (char *)tokc.cstr->data);

            expect("alias(\"target\")");
            ad->alias_target = /* save string as token, for later */
                tok_alloc((char*)tokc.cstr->data, tokc.cstr->size - 1)->tok;

        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            expect("visibility(\"default|hidden|internal|protected\")");
            if (!strcmp (tokc.cstr->data, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (tokc.cstr->data, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (tokc.cstr->data, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (tokc.cstr->data, "protected"))
                ad->a.visibility = STV_PROTECTED;
            else
                expect("visibility(\"default|hidden|internal|protected\")");

            if (n <= 0 || (n & (n - 1)) != 0)
                tcc_error("alignment must be a positive power of two");

            /* currently, no need to handle it because tcc does not
               track unused objects */
            /* currently, no need to handle it because tcc does not
               track unused objects */

            ad->a.func_call = FUNC_CDECL;
            ad->a.func_call = FUNC_STDCALL;
#ifdef TCC_TARGET_I386
            ad->a.func_call = FUNC_FASTCALL1 + n - 1;
            ad->a.func_call = FUNC_FASTCALLW;
#endif
            ad->a.mode = VT_LLONG + 1;
            ad->a.mode = VT_SHORT + 1;
            ad->a.mode = VT_INT + 1;
            tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
            ad->a.func_export = 1;
            ad->a.func_import = 1;

            if (tcc_state->warn_unsupported)
                tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
            if (tok == '(') {
                int parenthesis = 0;
                do {
                    if (tok == '(')
                        parenthesis++;
                    else if (tok == ')')
                        parenthesis--;
                    next();
                } while (parenthesis && tok != -1);
            }
/* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
static void struct_decl(CType *type, int u, int tdef)
    int a, v, size, align, maxalign, c, offset, flexible;
    int bit_size, bit_pos, bsize, bt, lbit_pos, prevbt;
    Sym *s, *ss, *ass, **ps;
    a = tok; /* save decl type */
    /* struct already defined ? return it */
            expect("struct/union/enum name");
            tcc_error("invalid type");
    } else if (tok >= TOK_IDENT && !tdef)
        tcc_error("unknown struct/union/enum");
        /* we put an undefined size for struct/union */
        s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
        s->r = 0; /* default alignment is zero as gcc */
    /* put struct/union/enum name in type */
            tcc_error("struct/union/enum already defined");
        /* cannot be empty */
        /* empty enums are not allowed */
        if (a == TOK_ENUM) {
                    expect("identifier");
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                /* enum symbols have static storage */
                ss = sym_push(v, &int_type, VT_CONST, c);
                ss->type.t |= VT_STATIC;
            /* NOTE: we accept a trailing comma */
            s->c = type_size(&int_type, &align);
            while (tok != '}') {
                parse_btype(&btype, &ad);
                    tcc_error("flexible array member '%s' not at the end of struct",
                              get_tok_str(v, NULL));
                        type_decl(&type1, &ad, &v, TYPE_DIRECT | TYPE_ABSTRACT);
                            if ((type1.t & VT_BTYPE) != VT_STRUCT)
                                expect("identifier");
                            int v = btype.ref->v;
                            if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                if (tcc_state->ms_extensions == 0)
                                    expect("identifier");
                    if (type_size(&type1, &align) < 0) {
                        if ((a == TOK_STRUCT) && (type1.t & VT_ARRAY) && c)
                            tcc_error("field '%s' has incomplete type",
                                      get_tok_str(v, NULL));
                    if ((type1.t & VT_BTYPE) == VT_FUNC ||
                        (type1.t & (VT_TYPEDEF | VT_STATIC | VT_EXTERN | VT_INLINE)))
                        tcc_error("invalid type for '%s'",
                                  get_tok_str(v, NULL));
                    bit_size = expr_const();
                    /* XXX: handle v = 0 case for messages */
                        tcc_error("negative width in bit-field '%s'",
                                  get_tok_str(v, NULL));
                    if (v && bit_size == 0)
                        tcc_error("zero width for bit-field '%s'",
                                  get_tok_str(v, NULL));
                size = type_size(&type1, &align);
                    if (align < ad.a.aligned)
                        align = ad.a.aligned;
                } else if (ad.a.packed) {
                } else if (*tcc_state->pack_stack_ptr) {
                    if (align > *tcc_state->pack_stack_ptr)
                        align = *tcc_state->pack_stack_ptr;
                if (bit_size >= 0) {
                    bt = type1.t & VT_BTYPE;
                        tcc_error("bitfields must have scalar type");
                    if (bit_size > bsize) {
                        tcc_error("width of '%s' exceeds its type",
                                  get_tok_str(v, NULL));
                    } else if (bit_size == bsize) {
                        /* no need for bit fields */
                    } else if (bit_size == 0) {
                        /* XXX: what to do if only padding in a
                           structure ? */
                        /* zero size: means to pad */
                        /* we do not have enough room ?
                           did the type change?
                         */
                        if ((bit_pos + bit_size) > bsize ||
                            bt != prevbt || a == TOK_UNION)
                        /* XXX: handle LSB first */
                        type1.t |= VT_BITFIELD |
                            (bit_pos << VT_STRUCT_SHIFT) |
                            (bit_size << (VT_STRUCT_SHIFT + 6));
                        bit_pos += bit_size;
                if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                    /* add new memory data only if starting
                       bit field */
                    if (lbit_pos == 0) {
                        if (a == TOK_STRUCT) {
                            c = (c + align - 1) & -align;
                    if (align > maxalign)
                    printf("add field %s offset=%d",
                           get_tok_str(v, NULL), offset);
                    if (type1.t & VT_BITFIELD) {
                        printf(" pos=%d size=%d",
                               (type1.t >> VT_STRUCT_SHIFT) & 0x3f,
                               (type1.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f);
                    if (v == 0 && (type1.t & VT_BTYPE) == VT_STRUCT) {
                        while ((ass = ass->next) != NULL) {
                            ss = sym_push(ass->v, &ass->type, 0, offset + ass->c);
                        ss = sym_push(v | SYM_FIELD, &type1, 0, offset);
                if (tok == ';' || tok == TOK_EOF)
            /* store size and alignment */
            s->c = (c + maxalign - 1) & -maxalign;
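/* Worked example (added for illustration, not in the original source):
   bit-field members are packed using bit_pos/bit_size, which are encoded
   into the upper bits of type.t as
       (bit_pos << VT_STRUCT_SHIFT) | (bit_size << (VT_STRUCT_SHIFT + 6)).
   So for

       struct s { unsigned a : 3; unsigned b : 5; unsigned c : 30; };

   'a' gets bit_pos 0, 'b' bit_pos 3 in the same int, and 'c' no longer
   fits in the remaining bits, so a new storage unit is started and
   bit_pos resets to 0. */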
/* return 1 if basic type is a type size (short, long, long long) */
ST_FUNC int is_btype_size(int bt)
    return bt == VT_SHORT || bt == VT_LONG || bt == VT_LLONG;
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it. */
static int parse_btype(CType *type, AttributeDef *ad)
    int t, u, bt_size, complete, type_found, typespec_found;
    memset(ad, 0, sizeof(AttributeDef));
            /* currently, we really ignore extension */
                tcc_error("too many basic types");
            bt_size = is_btype_size (u & VT_BTYPE);
            if (u == VT_INT || (!bt_size && !(t & VT_TYPEDEF)))
            if ((t & VT_BTYPE) == VT_DOUBLE) {
#ifndef TCC_TARGET_PE
                t = (t & ~VT_BTYPE) | VT_LDOUBLE;
            } else if ((t & VT_BTYPE) == VT_LONG) {
                t = (t & ~VT_BTYPE) | VT_LLONG;
#ifdef TCC_TARGET_ARM64
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            if ((t & VT_BTYPE) == VT_LONG) {
#ifdef TCC_TARGET_PE
                t = (t & ~VT_BTYPE) | VT_DOUBLE;
                t = (t & ~VT_BTYPE) | VT_LDOUBLE;
            struct_decl(&type1, VT_ENUM, t & (VT_TYPEDEF | VT_EXTERN));
            type->ref = type1.ref;
            struct_decl(&type1, VT_STRUCT, t & (VT_TYPEDEF | VT_EXTERN));
            /* type modifiers */
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
                t = (t & ~VT_BTYPE) | u;
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
            if (!s || !(s->type.t & VT_TYPEDEF))
            t |= (s->type.t & ~VT_TYPEDEF);
            type->ref = s->type.ref;
            /* get attributes from typedef */
            if (0 == ad->a.aligned)
                ad->a.aligned = s->a.aligned;
            if (0 == ad->a.func_call)
                ad->a.func_call = s->a.func_call;
            ad->a.packed |= s->a.packed;
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
    /* long is never used as type */
    if ((t & VT_BTYPE) == VT_LONG)
#if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
    defined TCC_TARGET_PE
        t = (t & ~VT_BTYPE) | VT_INT;
        t = (t & ~VT_BTYPE) | VT_LLONG;
/* convert a function parameter type (array to pointer and function to
   function pointer) */
static inline void convert_parameter_type(CType *pt)
    /* remove const and volatile qualifiers (XXX: const could be used
       to indicate a const function parameter) */
    pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
    /* array must be transformed to pointer according to ANSI C */
    if ((pt->t & VT_BTYPE) == VT_FUNC) {
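/* Example (illustration only) of the adjustments described above: in

       void f(int a[10], int cmp(int, int));

   the parameter 'a' becomes 'int *' (array decays to pointer) and 'cmp'
   becomes 'int (*)(int, int)' (function type becomes pointer to function);
   top-level const/volatile on a parameter is dropped since it does not
   affect the caller. */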
ST_FUNC void parse_asm_str(CString *astr)
    /* read the string */
        expect("string constant");
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(astr, tokc.cstr->data);
    cstr_ccat(astr, '\0');

/* Parse an asm label and return the label
 * Don't forget to free the CString in the caller! */
static void asm_label_instr(CString *astr)
    parse_asm_str(astr);
    printf("asm_alias: \"%s\"\n", (char *)astr->data);
static void post_type(CType *type, AttributeDef *ad)
    int n, l, t1, arg_size, align;
    Sym **plast, *s, *first;
        /* function declaration */
                /* read param name and compute offset */
                if (l != FUNC_OLD) {
                    if (!parse_btype(&pt, &ad1)) {
                            tcc_error("invalid type");
                    if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
                    type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
                    if ((pt.t & VT_BTYPE) == VT_VOID)
                        tcc_error("parameter declared as void");
                    arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
                        expect("identifier");
                convert_parameter_type(&pt);
                s = sym_push(n | SYM_FIELD, &pt, 0, 0);
                if (l == FUNC_NEW && tok == TOK_DOTS) {
        /* if no parameters, then old type prototype */
        /* NOTE: const is ignored in returned type as it has a special
           meaning in gcc / C++ */
        type->t &= ~VT_CONSTANT;
        /* some ancient pre-K&R C allows a function to return an array
           and the array brackets to be put after the arguments, such
           that "int c()[]" means something like "int[] c()" */
            skip(']'); /* only handle simple "[]" */
        /* we push an anonymous symbol which will contain the function prototype */
        ad->a.func_args = arg_size;
        s = sym_push(SYM_FIELD, type, 0, l);
    } else if (tok == '[') {
        /* array definition */
        if (tok == TOK_RESTRICT1)
            if (!local_stack || nocode_wanted)
                vpushi(expr_const());
            if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
                    tcc_error("invalid array size");
                if (!is_integer_btype(vtop->type.t & VT_BTYPE))
                    tcc_error("size of variable length array should be an integer");
        /* parse next post type */
        post_type(type, ad);
        if (type->t == VT_FUNC)
            tcc_error("declaration of an array of functions");
        t1 |= type->t & VT_VLA;
            loc -= type_size(&int_type, &align);
            vla_runtime_type_size(type, &align);
            vset(&int_type, VT_LOCAL|VT_LVAL, loc);
        /* we push an anonymous symbol which will contain the array
           element type */
        s = sym_push(SYM_FIELD, type, 0, n);
        type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
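/* Example of the declarator suffixes handled above (added for illustration):

       int f(int a, ...);     // parameter list -> anonymous prototype symbol;
                              //   '...' marks the function as variadic
       int g();               // empty list -> old style prototype (FUNC_OLD)
       int a[10];             // constant size -> VT_ARRAY, n == 10
       void h(int n) { int b[n]; }   // non-constant size -> VT_VLA, the size
                              //   is computed at run time into a stack slot
*/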
/* Parse a type declaration (except basic type), and return the type
   in 'type'. 'td' is a bitmask indicating which kind of type decl is
   expected. 'type' should contain the basic type. 'ad' is the
   attribute definition of the basic type. It can be modified by
   type_decl(). */
static void type_decl(CType *type, AttributeDef *ad, int *v, int td)
    CType type1, *type2;
    int qualifiers, storage;
    while (tok == '*') {
                qualifiers |= VT_CONSTANT;
                qualifiers |= VT_VOLATILE;
        type->t |= qualifiers;
    /* XXX: clarify attribute handling */
    if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
        parse_attribute(ad);
        /* recursive type */
        /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
        type1.t = 0; /* XXX: same as int */
        /* XXX: this is not correct to modify 'ad' at this point, but
           the syntax is not clear */
        if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
            parse_attribute(ad);
        type_decl(&type1, ad, v, td);
        /* type identifier */
        if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
            if (!(td & TYPE_ABSTRACT))
                expect("identifier");
    storage = type->t & VT_STORAGE;
    type->t &= ~VT_STORAGE;
    if (storage & VT_STATIC) {
        int saved_nocode_wanted = nocode_wanted;
        post_type(type, ad);
        nocode_wanted = saved_nocode_wanted;
        post_type(type, ad);
    if (tok == TOK_ATTRIBUTE1 || tok == TOK_ATTRIBUTE2)
        parse_attribute(ad);
    /* append type at the end of type1 */
/* compute the lvalue VT_LVAL_xxx needed to match type t. */
ST_FUNC int lvalue_type(int t)
    if (bt == VT_BYTE || bt == VT_BOOL)
    else if (bt == VT_SHORT)
    if (t & VT_UNSIGNED)
        r |= VT_LVAL_UNSIGNED;
/* indirection with full error checking and bound check */
ST_FUNC void indir(void)
    if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
    if ((vtop->r & VT_LVAL) && !nocode_wanted)
    vtop->type = *pointed_type(&vtop->type);
    /* Arrays and functions are never lvalues */
    if (!(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_VLA)
        && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
        vtop->r |= lvalue_type(vtop->type.t);
        /* if bound checking, the referenced pointer must be checked */
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check)
            vtop->r |= VT_MUSTBOUND;
/* pass a parameter to a function and do type checking and casting */
static void gfunc_param_typed(Sym *func, Sym *arg)
    func_type = func->c;
    if (func_type == FUNC_OLD ||
        (func_type == FUNC_ELLIPSIS && arg == NULL)) {
        /* default casting : only need to convert float to double */
        if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
    } else if (vtop->type.t & VT_BITFIELD) {
        type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
    } else if (arg == NULL) {
        tcc_error("too many arguments to function");
        type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
        gen_assign_cast(&type);
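/* Illustration (not in the original source) of the default argument
   promotion performed above for old-style or variadic calls:

       int printf(const char *, ...);
       float x = 1.5f;
       printf("%f", x);    // x is passed through the '...' part, so it is
                           // converted from float to double before the call
*/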
/* parse an expression of the form '(type)' or '(expr)' and return its
   type */
static void parse_expr_type(CType *type)
    if (parse_btype(type, &ad)) {
        type_decl(type, &ad, &n, TYPE_ABSTRACT);

static void parse_type(CType *type)
    if (!parse_btype(type, &ad)) {
    type_decl(type, &ad, &n, TYPE_ABSTRACT);

static void vpush_tokc(int t)
    vsetc(&type, VT_CONST, &tokc);
ST_FUNC void unary(void)
    int n, t, align, size, r, sizeof_caller;
    static int in_sizeof = 0;
    sizeof_caller = in_sizeof;
    /* XXX: GCC 2.95.3 does not generate a table although it should be
       better here */
        vpush_tokc(VT_INT | VT_UNSIGNED);
        vpush_tokc(VT_LLONG);
        vpush_tokc(VT_LLONG | VT_UNSIGNED);
        vpush_tokc(VT_FLOAT);
        vpush_tokc(VT_DOUBLE);
        vpush_tokc(VT_LDOUBLE);
    case TOK___FUNCTION__:
        goto tok_identifier;
        /* special function name identifier */
        len = strlen(funcname) + 1;
        /* generate char[len] type */
        vpush_ref(&type, data_section, data_section->data_offset, len);
        ptr = section_ptr_add(data_section, len);
        memcpy(ptr, funcname, len);
#ifdef TCC_TARGET_PE
        t = VT_SHORT | VT_UNSIGNED;
        /* string parsing */
        if (tcc_state->warn_write_strings)
        memset(&ad, 0, sizeof(AttributeDef));
        decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, NULL, 0);
        if (parse_btype(&type, &ad)) {
            type_decl(&type, &ad, &n, TYPE_ABSTRACT);
            /* check ISOC99 compound literal */
                /* data is allocated locally by default */
                /* all except arrays are lvalues */
                if (!(type.t & VT_ARRAY))
                    r |= lvalue_type(type.t);
                memset(&ad, 0, sizeof(AttributeDef));
                decl_initializer_alloc(&type, &ad, r, 1, 0, NULL, 0);
                if (sizeof_caller) {
        } else if (tok == '{') {
            /*
                tcc_error("statement expression in global scope"); */
            /* this check breaks compilation of the linux 2.4.26 with the message:
               linux/include/net/tcp.h:945: error: statement expression in global scope */
            /* save all registers */
            /* statement expression : we do not accept break/continue
               inside as GCC does */
            block(NULL, NULL, NULL, NULL, 0, 1);
        /* functions names must be treated as function pointers,
           except for unary '&' and sizeof. Since we consider that
           functions are not lvalues, we only have to handle it
           there and in function calls. */
        /* arrays can also be used although they are not lvalues */
        if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
            !(vtop->type.t & VT_ARRAY) && !(vtop->type.t & VT_LLOCAL))
        mk_pointer(&vtop->type);
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            boolean.t = VT_BOOL;
            vtop->c.i = !vtop->c.i;
        } else if ((vtop->r & VT_VALMASK) == VT_CMP)
            vtop->c.i = vtop->c.i ^ 1;
        else if (!nocode_wanted) {
            vseti(VT_JMP, gvtst(1, 0));
        if ((vtop->type.t & VT_BTYPE) == VT_PTR)
            tcc_error("pointer not accepted for unary plus");
        /* In order to force cast, we add zero, except for floating point
           where we really need a noop (otherwise -0.0 will be transformed
           into +0.0). */
        if (!is_float(vtop->type.t)) {
        unary_type(&type); // Perform a in_sizeof = 0;
        size = type_size(&type, &align);
        if (t == TOK_SIZEOF) {
            if (!(type.t & VT_VLA)) {
                    tcc_error("sizeof applied to an incomplete type");
                vla_runtime_type_size(&type, &align);
        vtop->type.t |= VT_UNSIGNED;
    case TOK_builtin_types_compatible_p:
            type1.t &= ~(VT_CONSTANT | VT_VOLATILE);
            type2.t &= ~(VT_CONSTANT | VT_VOLATILE);
            vpushi(is_compatible_types(&type1, &type2));
    case TOK_builtin_constant_p:
            int saved_nocode_wanted, res;
            saved_nocode_wanted = nocode_wanted;
            res = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
            nocode_wanted = saved_nocode_wanted;
    case TOK_builtin_frame_address:
    case TOK_builtin_return_address:
            if (tok != TOK_CINT || tokc.i < 0) {
                tcc_error("%s only takes positive integers",
                          tok1 == TOK_builtin_return_address ?
                          "__builtin_return_address" :
                          "__builtin_frame_address");
            vset(&type, VT_LOCAL, 0); /* local frame */
                mk_pointer(&vtop->type);
                indir(); /* -> parent frame */
            if (tok1 == TOK_builtin_return_address) {
                // assume return address is just above frame pointer on stack
                mk_pointer(&vtop->type);
#ifdef TCC_TARGET_X86_64
#ifdef TCC_TARGET_PE
    case TOK_builtin_va_start:
            if ((vtop->r & VT_VALMASK) != VT_LOCAL)
                tcc_error("__builtin_va_start expects a local variable");
            vtop->r &= ~(VT_LVAL | VT_REF);
            vtop->type = char_pointer_type;
    case TOK_builtin_va_arg_types:
            vpushi(classify_x86_64_va_arg(&type));
#ifdef TCC_TARGET_ARM64
    case TOK___va_start: {
            tcc_error("statement in global scope");
        vtop->type.t = VT_VOID;
    case TOK___va_arg: {
            tcc_error("statement in global scope");
    case TOK___arm64_clear_cache: {
        vtop->type.t = VT_VOID;
    /* pre operations */
        t = vtop->type.t & VT_BTYPE;
            /* In IEEE negate(x) isn't subtract(0,x), but rather
               subtract(-0, x). */
            else if (t == VT_DOUBLE)
            goto tok_identifier;
        /* allow to take the address of a label */
        if (tok < TOK_UIDENT)
            expect("label identifier");
        s = label_find(tok);
            s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;
            s->type.t = VT_VOID;
            mk_pointer(&s->type);
            s->type.t |= VT_STATIC;
        vpushsym(&s->type, s);
    // special qnan, snan and infinity values
        vpush64(VT_DOUBLE, 0x7ff8000000000000ULL);
        vpush64(VT_DOUBLE, 0x7ff0000000000001ULL);
        vpush64(VT_DOUBLE, 0x7ff0000000000000ULL);
            expect("identifier");
        const char *name = get_tok_str(t, NULL);
            tcc_error("'%s' undeclared", name);
            /* for simple function calls, we tolerate undeclared
               external reference to int() function */
            if (tcc_state->warn_implicit_function_declaration
#ifdef TCC_TARGET_PE
                /* people must be warned about using undeclared WINAPI functions
                   (which usually start with uppercase letter) */
                || (name[0] >= 'A' && name[0] <= 'Z')
                tcc_warning("implicit declaration of function '%s'", name);
            s = external_global_sym(t, &func_old_type, 0);
        if ((s->type.t & (VT_STATIC | VT_INLINE | VT_BTYPE)) ==
            (VT_STATIC | VT_INLINE | VT_FUNC)) {
            /* if referencing an inline function, then we generate a
               symbol to it if not already done. It will have the
               effect of generating code for it at the end of the
               compilation unit. Inline functions are always
               generated in the text section. */
            put_extern_sym(s, text_section, 0, 0);
            r = VT_SYM | VT_CONST;
        vset(&s->type, r, s->c);
        /* if forward reference, we must point to s */
        if (vtop->r & VT_SYM) {
            vtop->c.ptr_offset = 0;
    /* post operations */
        if (tok == TOK_INC || tok == TOK_DEC) {
        } else if (tok == '.' || tok == TOK_ARROW) {
            if (tok == TOK_ARROW)
            qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
            /* expect pointer on structure */
            if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
                expect("struct or union");
            while ((s = s->next) != NULL) {
                tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, NULL));
            /* add field offset to pointer */
            vtop->type = char_pointer_type; /* change type to 'char *' */
            /* change type to field type, and set to lvalue */
            vtop->type = s->type;
            vtop->type.t |= qualifiers;
            /* an array is never an lvalue */
            if (!(vtop->type.t & VT_ARRAY)) {
                vtop->r |= lvalue_type(vtop->type.t);
#ifdef CONFIG_TCC_BCHECK
                /* if bound checking, the referenced pointer must be checked */
                if (tcc_state->do_bounds_check)
                    vtop->r |= VT_MUSTBOUND;
        } else if (tok == '[') {
        } else if (tok == '(') {
            int nb_args, ret_nregs, ret_align, regsize, variadic;
            if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
                /* pointer test (no array accepted) */
                if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
                    vtop->type = *pointed_type(&vtop->type);
                    if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
                    expect("function pointer");
                vtop->r &= ~VT_LVAL; /* no lvalue */
            /* get return type */
            sa = s->next; /* first parameter */
            /* compute first implicit argument if a structure is returned */
            if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
                variadic = (s->c == FUNC_ELLIPSIS);
                ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
                                       &ret_align, &regsize);
                    /* get some space for the returned structure */
                    size = type_size(&s->type, &align);
#ifdef TCC_TARGET_ARM64
                    /* On arm64, a small struct is returned in registers.
                       It is much easier to write it to memory if we know
                       that we are allowed to write some extra bytes, so
                       round the allocated space up to a power of 2: */
                    while (size & (size - 1))
                        size = (size | (size - 1)) + 1;
                    loc = (loc - size) & -align;
                    ret.r = VT_LOCAL | VT_LVAL;
                    /* pass it as 'int' to avoid structure arg passing
                       problems */
                    vseti(VT_LOCAL, loc);
                /* return in register */
                if (is_float(ret.type.t)) {
                    ret.r = reg_fret(ret.type.t);
#ifdef TCC_TARGET_X86_64
                    if ((ret.type.t & VT_BTYPE) == VT_QFLOAT)
#ifndef TCC_TARGET_ARM64
#ifdef TCC_TARGET_X86_64
                    if ((ret.type.t & VT_BTYPE) == VT_QLONG)
                    if ((ret.type.t & VT_BTYPE) == VT_LLONG)
                    gfunc_param_typed(s, sa);
                tcc_error("too few arguments to function");
            if (!nocode_wanted) {
                gfunc_call(nb_args);
                vtop -= (nb_args + 1);
                for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
                    vsetc(&ret.type, r, &ret.c);
                    vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
                /* handle packed struct return */
                if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
                    size = type_size(&s->type, &align);
                    /* We're writing whole regs often, make sure there's enough
                       space. Assume register size is power of 2. */
                    if (regsize > align)
                    loc = (loc - size) & -align;
                        vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
                        if (--ret_nregs == 0)
                    vset(&s->type, VT_LOCAL | VT_LVAL, addr);
ST_FUNC void expr_prod(void)
    while (tok == '*' || tok == '/' || tok == '%') {

ST_FUNC void expr_sum(void)
    while (tok == '+' || tok == '-') {

static void expr_shift(void)
    while (tok == TOK_SHL || tok == TOK_SAR) {

static void expr_cmp(void)
    while ((tok >= TOK_ULE && tok <= TOK_GT) ||
           tok == TOK_ULT || tok == TOK_UGE) {

static void expr_cmpeq(void)
    while (tok == TOK_EQ || tok == TOK_NE) {

static void expr_and(void)
    while (tok == '&') {

static void expr_xor(void)
    while (tok == '^') {

static void expr_or(void)
    while (tok == '|') {

/* XXX: fix this mess */
static void expr_land_const(void)
    while (tok == TOK_LAND) {

/* XXX: fix this mess */
static void expr_lor_const(void)
    while (tok == TOK_LOR) {

/* only used if non constant */
static void expr_land(void)
    if (tok == TOK_LAND) {
        if (tok != TOK_LAND) {

static void expr_lor(void)
    if (tok == TOK_LOR) {
        if (tok != TOK_LOR) {
/* XXX: better constant handling */
static void expr_cond(void)
    int tt, u, r1, r2, rc, t1, t2, bt1, bt2;
    CType type, type1, type2;
        boolean.t = VT_BOOL;
    if (tok != ':' || !gnu_ext) {
        if (vtop != vstack) {
            /* needed to avoid having different registers saved in
               each branch */
            if (is_float(vtop->type.t)) {
#ifdef TCC_TARGET_X86_64
                if ((vtop->type.t & VT_BTYPE) == VT_LDOUBLE) {
        if (tok == ':' && gnu_ext) {
            sv = *vtop; /* save value to handle it later */
            vtop--; /* no vpop so that FP stack is not flushed */
        bt1 = t1 & VT_BTYPE;
        bt2 = t2 & VT_BTYPE;
        /* cast operands to correct type according to ISOC rules */
        if (is_float(bt1) || is_float(bt2)) {
            if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
                type.t = VT_LDOUBLE;
            } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
        } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
            /* cast to biggest op */
            /* convert to unsigned if it does not fit in a long long */
            if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED) ||
                (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_LLONG | VT_UNSIGNED))
                type.t |= VT_UNSIGNED;
        } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
            /* If one is a null ptr constant the result type
               is the other. */
            if (is_null_pointer (vtop))
            else if (is_null_pointer (&sv))
            /* XXX: test pointer compatibility, C99 has more elaborate
               rules here */
        } else if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
            /* XXX: test function pointer compatibility */
            type = bt1 == VT_FUNC ? type1 : type2;
        } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
            /* XXX: test structure compatibility */
            type = bt1 == VT_STRUCT ? type1 : type2;
        } else if (bt1 == VT_VOID || bt2 == VT_VOID) {
            /* NOTE: as an extension, we accept void on only one side */
            /* integer operations */
            /* convert to unsigned if it does not fit in an integer */
            if ((t1 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED) ||
                (t2 & (VT_BTYPE | VT_UNSIGNED)) == (VT_INT | VT_UNSIGNED))
                type.t |= VT_UNSIGNED;
        /* now we convert second operand */
        if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
        if (is_float(type.t)) {
#ifdef TCC_TARGET_X86_64
            if ((type.t & VT_BTYPE) == VT_LDOUBLE) {
        } else if ((type.t & VT_BTYPE) == VT_LLONG) {
            /* for long longs, we use fixed registers to avoid having
               to handle a complicated move */
        /* this is horrible, but we must also convert first
           operand */
        /* put again first value and cast it */
        if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
        move_reg(r2, r1, type.t);
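/* Worked example (illustration only) of the result-type rules implemented
   above for '?:', which follow the usual arithmetic conversions:

       1 ? 1.0 : 2      -> double     (a floating operand wins)
       1 ? 1u : -1LL    -> long long  (largest integer rank)
       1 ? 1ULL : -1LL  -> unsigned long long (value does not fit signed)
       1 ? 0 : "x"      -> char *     (a null pointer constant takes the
                                       type of the other pointer operand)
*/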
static void expr_eq(void)
        (tok >= TOK_A_MOD && tok <= TOK_A_DIV) ||
        tok == TOK_A_XOR || tok == TOK_A_OR ||
        tok == TOK_A_SHL || tok == TOK_A_SAR) {

ST_FUNC void gexpr(void)

/* parse an expression and return its type without any side effect. */
static void expr_type(CType *type)
    int saved_nocode_wanted;
    saved_nocode_wanted = nocode_wanted;
    nocode_wanted = saved_nocode_wanted;

/* parse a unary expression and return its type without any side
   effect. */
static void unary_type(CType *type)

/* parse a constant expression and return value in vtop. */
static void expr_const1(void)

/* parse an integer constant and return its value. */
ST_FUNC int expr_const(void)
    if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
        expect("constant expression");
/* return the label token if current token is a label, otherwise
   return zero */
static int is_label(void)
    /* fast test first */
    if (tok < TOK_UIDENT)
    /* no need to save tokc because tok is an identifier */
        unget_tok(last_tok);

static void label_or_decl(int l)
    /* fast test first */
    if (tok >= TOK_UIDENT)
        /* no need to save tokc because tok is an identifier */
            unget_tok(last_tok);
        unget_tok(last_tok);
static void block(int *bsym, int *csym, int *case_sym, int *def_sym,
                  int case_reg, int is_expr)
    Sym *s, *frame_bottom;
    /* generate line number info */
    if (tcc_state->do_debug &&
        (last_line_num != file->line_num || last_ind != ind)) {
        put_stabn(N_SLINE, 0, file->line_num, ind - func_ind);
        last_line_num = file->line_num;
        /* default return value is (void) */
        vtop->type.t = VT_VOID;
    if (tok == TOK_IF) {
        block(bsym, csym, case_sym, def_sym, case_reg, 0);
        if (c == TOK_ELSE) {
            block(bsym, csym, case_sym, def_sym, case_reg, 0);
            gsym(d); /* patch else jmp */
    } else if (tok == TOK_WHILE) {
        block(&a, &b, case_sym, def_sym, case_reg, 0);
    } else if (tok == '{') {
        int block_vla_sp_loc, *saved_vla_sp_loc, saved_vla_flags;
        /* record local declaration stack position */
        frame_bottom = sym_push2(&local_stack, SYM_FIELD, 0, 0);
        frame_bottom->next = scope_stack_bottom;
        scope_stack_bottom = frame_bottom;
        llabel = local_label_stack;
        /* save VLA state */
        block_vla_sp_loc = *(saved_vla_sp_loc = vla_sp_loc);
        if (saved_vla_sp_loc != &vla_sp_root_loc)
            vla_sp_loc = &block_vla_sp_loc;
        saved_vla_flags = vla_flags;
        vla_flags |= VLA_NEED_NEW_FRAME;
        /* handle local labels declarations */
        if (tok == TOK_LABEL) {
                if (tok < TOK_UIDENT)
                    expect("label identifier");
                label_push(&local_label_stack, tok, LABEL_DECLARED);
        while (tok != '}') {
            label_or_decl(VT_LOCAL);
                block(bsym, csym, case_sym, def_sym, case_reg, is_expr);
        /* pop locally defined labels */
        label_pop(&local_label_stack, llabel);
            /* XXX: this solution makes only valgrind happy...
               triggered by gcc.c-torture/execute/20000917-1.c */
            switch(vtop->type.t & VT_BTYPE) {
            /* this breaks a compilation of the linux kernel v2.4.26 */
            /* pmd_t *new = ({ __asm__ __volatile__("ud2\n") ; ((pmd_t *)1); }); */
            /* Look at commit a80acab: Display error on statement expressions with complex return type */
            /* A pointer is not a complex return type */
                for(p=vtop->type.ref;p;p=p->prev)
                    tcc_error("unsupported expression type");
        /* pop locally defined symbols */
        scope_stack_bottom = scope_stack_bottom->next;
        sym_pop(&local_stack, s);
        /* Pop VLA frames and restore stack pointer if required */
        if (saved_vla_sp_loc != &vla_sp_root_loc)
            *saved_vla_sp_loc = block_vla_sp_loc;
        if (vla_sp_loc != (saved_vla_sp_loc == &vla_sp_root_loc ? &vla_sp_root_loc : &block_vla_sp_loc)) {
            vla_sp_loc = saved_vla_sp_loc;
            gen_vla_sp_restore(*vla_sp_loc);
        vla_flags = (vla_flags & ~VLA_SCOPE_FLAGS) | (saved_vla_flags & VLA_SCOPE_FLAGS);
    } else if (tok == TOK_RETURN) {
            gen_assign_cast(&func_vt);
#ifdef TCC_TARGET_ARM64
            // Perhaps it would be better to use this for all backends:
            if ((func_vt.t & VT_BTYPE) == VT_STRUCT) {
                CType type, ret_type;
                int ret_align, ret_nregs, regsize;
                ret_nregs = gfunc_sret(&func_vt, func_var, &ret_type,
                                       &ret_align, &regsize);
                if (0 == ret_nregs) {
                    /* if returning structure, must copy it to implicit
                       first pointer arg location */
                    vset(&type, VT_LOCAL | VT_LVAL, func_vc);
                    /* copy structure value to pointer */
                    /* returning structure packed into registers */
                    int r, size, addr, align;
                    size = type_size(&func_vt,&align);
                    if ((vtop->r != (VT_LOCAL | VT_LVAL) || (vtop->c.i & (ret_align-1)))
                        && (align & (ret_align-1))) {
                        loc = (loc - size) & -align;
                        vset(&type, VT_LOCAL | VT_LVAL, addr);
                    vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
                        vtop->type = ret_type;
                        if (is_float(ret_type.t))
                            r = rc_fret(ret_type.t);
                        if (--ret_nregs == 0)
                        /* We assume that when a structure is returned in multiple
                           registers, their classes are consecutive values of the
                         */
                        vtop->c.i += regsize;
                        vtop->r = VT_LOCAL | VT_LVAL;
            } else if (is_float(func_vt.t)) {
                gv(rc_fret(func_vt.t));
            vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
        rsym = gjmp(rsym); /* jmp */
    } else if (tok == TOK_BREAK) {
            tcc_error("cannot break");
        *bsym = gjmp(*bsym);
    } else if (tok == TOK_CONTINUE) {
            tcc_error("cannot continue");
        *csym = gjmp(*csym);
    } else if (tok == TOK_FOR) {
        frame_bottom = sym_push2(&local_stack, SYM_FIELD, 0, 0);
        frame_bottom->next = scope_stack_bottom;
        scope_stack_bottom = frame_bottom;
        /* c99 for-loop init decl? */
        if (!decl0(VT_LOCAL, 1)) {
            /* no, regular for-loop init expr */
        block(&a, &b, case_sym, def_sym, case_reg, 0);
        scope_stack_bottom = scope_stack_bottom->next;
        sym_pop(&local_stack, s);
    if (tok == TOK_DO) {
        block(&a, &b, case_sym, def_sym, case_reg, 0);
    if (tok == TOK_SWITCH) {
        /* XXX: other types than integer */
        case_reg = gv(RC_INT);
        b = gjmp(0); /* jump to first case */
        block(&a, csym, &b, &c, case_reg, 0);
        /* if no default, jmp after switch */
    if (tok == TOK_CASE) {
        if (gnu_ext && tok == TOK_DOTS) {
            tcc_warning("empty case range");
        /* since a case is like a label, we must skip it with a jmp */
            *case_sym = gtst(1, 0);
            *case_sym = gtst(1, 0);
            *case_sym = gtst(1, *case_sym);
        case_reg = gv(RC_INT);
        goto block_after_label;
    if (tok == TOK_DEFAULT) {
            tcc_error("too many 'default'");
        goto block_after_label;
    if (tok == TOK_GOTO) {
        if (tok == '*' && gnu_ext) {
            if ((vtop->type.t & VT_BTYPE) != VT_PTR)
        } else if (tok >= TOK_UIDENT) {
            s = label_find(tok);
            /* put forward definition if needed */
                s = label_push(&global_label_stack, tok, LABEL_FORWARD);
                if (s->r == LABEL_DECLARED)
                    s->r = LABEL_FORWARD;
            /* label already defined */
            if (vla_flags & VLA_IN_SCOPE) {
                /* If VLAs are in use, save the current stack pointer and
                   reset the stack pointer to what it was at function entry
                   (label will restore stack pointer in inner scopes) */
                gen_vla_sp_restore(vla_sp_root_loc);
            if (s->r & LABEL_FORWARD)
                s->jnext = gjmp(s->jnext);
                gjmp_addr(s->jnext);
            expect("label identifier");
    } else if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
        if (vla_flags & VLA_IN_SCOPE) {
            /* save/restore stack pointer across label
               this is a no-op when combined with the load immediately
               after the label unless we arrive via goto */
            if (s->r == LABEL_DEFINED)
                tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
            s->r = LABEL_DEFINED;
            s = label_push(&global_label_stack, b, LABEL_DEFINED);
        if (vla_flags & VLA_IN_SCOPE) {
            gen_vla_sp_restore(*vla_sp_loc);
            vla_flags |= VLA_NEED_NEW_FRAME;
        /* we accept this, but it is a mistake */
            tcc_warning("deprecated use of label at end of compound statement");
            block(bsym, csym, case_sym, def_sym, case_reg, is_expr);
        /* expression case */
/* t is the array or struct type. c is the array or struct
   address. cur_index/cur_field is the pointer to the current
   value. 'size_only' is true if only size info is needed (only used
   in arrays) */
static void decl_designator(CType *type, Section *sec, unsigned long c,
                            int *cur_index, Sym **cur_field,
    int notfirst, index, index_last, align, l, nb_elems, elem_size;
    if (gnu_ext && (l = is_label()) != 0)
    while (tok == '[' || tok == '.') {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            index = expr_const();
            if (index < 0 || (s->c >= 0 && index >= s->c))
                expect("invalid index");
            if (tok == TOK_DOTS && gnu_ext) {
                index_last = expr_const();
                if (index_last < 0 ||
                    (s->c >= 0 && index_last >= s->c) ||
                    expect("invalid index");
            *cur_index = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            /* NOTE: we only support ranges for last designator */
            nb_elems = index_last - index + 1;
            if (nb_elems != 1) {
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            /* XXX: fix this mess by using explicit storage field */
            type1.t |= (type->t & ~VT_TYPE);
        if (type->t & VT_ARRAY) {
            type = pointed_type(type);
            c += index * type_size(type, &align);
                tcc_error("too many field init");
            /* XXX: fix this mess by using explicit storage field */
            type1.t |= (type->t & ~VT_TYPE);
    decl_initializer(type, sec, c, 0, size_only);
    /* XXX: make it more general */
    if (!size_only && nb_elems > 1) {
        unsigned long c_end;
            tcc_error("range init not supported yet for dynamic storage");
        c_end = c + nb_elems * elem_size;
        if (c_end > sec->data_allocated)
            section_realloc(sec, c_end);
        src = sec->data + c;
        for(i = 1; i < nb_elems; i++) {
            memcpy(dst, src, elem_size);
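/* Example (illustration only) of the designators handled above, including
   the GNU range extension that the nb_elems/memcpy path implements:

       int a[8] = { [2] = 1, [5] = 2 };        // holes are zero-filled
       int b[8] = { [1 ... 3] = 7 };           // element 1 is initialized,
                                               // then copied to 2 and 3
       struct p { int x, y; } q = { .y = 4 };  // field designator
*/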
#define EXPR_CONST 1

/* store a value or an expression directly in global data or in local array */
static void init_putv(CType *type, Section *sec, unsigned long c,
                      int v, int expr_type)
    int saved_global_expr, bt, bit_pos, bit_size;
    unsigned long long bit_mask;
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted */
        if ((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST)
            tcc_error("initializer element is not constant");
    dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
        /* XXX: not portable */
        /* XXX: generate error if incorrect relocation */
        gen_assign_cast(&dtype);
        bt = type->t & VT_BTYPE;
        /* we'll write at most 16 bytes */
        if (c + 16 > sec->data_allocated) {
            section_realloc(sec, c + 16);
        ptr = sec->data + c;
        /* XXX: make code faster ? */
        if (!(type->t & VT_BITFIELD)) {
            bit_pos = (vtop->type.t >> VT_STRUCT_SHIFT) & 0x3f;
            bit_size = (vtop->type.t >> (VT_STRUCT_SHIFT + 6)) & 0x3f;
            bit_mask = (1LL << bit_size) - 1;
        if ((vtop->r & VT_SYM) &&
             (bt == VT_INT && bit_size != 32)))
            tcc_error("initializer element is not computable at load time");
            /* XXX: when cross-compiling we assume that each type has the
               same representation on host and target, which is likely to
               be wrong in the case of long double */
            vtop->c.i = (vtop->c.i != 0);
            *(char *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
            *(short *)ptr |= (vtop->c.i & bit_mask) << bit_pos;
            *(double *)ptr = vtop->c.d;
            *(long double *)ptr = vtop->c.ld;
            *(long long *)ptr |= (vtop->c.ll & bit_mask) << bit_pos;
                addr_t val = (vtop->c.ptr_offset & bit_mask) << bit_pos;
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                if (vtop->r & VT_SYM)
                    greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                *(addr_t *)ptr |= val;
                if (vtop->r & VT_SYM)
                    greloc(sec, vtop->sym, c, R_DATA_PTR);
                *(addr_t *)ptr |= val;
                int val = (vtop->c.i & bit_mask) << bit_pos;
#if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
                if (vtop->r & VT_SYM)
                    greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                if (vtop->r & VT_SYM)
                    greloc(sec, vtop->sym, c, R_DATA_PTR);
        vset(&dtype, VT_LOCAL|VT_LVAL, c);

/* put zeros for variable based init */
static void init_putz(CType *t, Section *sec, unsigned long c, int size)
        /* nothing to do because globals are already set to zero */
        vpush_global_sym(&func_old_type, TOK_memset);
#ifdef TCC_TARGET_ARM
/* 't' contains the type and storage info. 'c' is the offset of the
   object in section 'sec'. If 'sec' is NULL, it means stack based
   allocation. 'first' is true if array '{' must be read (multi
   dimension implicit array init handling). 'size_only' is true if
   size only evaluation is wanted (only for arrays). */
static void decl_initializer(CType *type, Section *sec, unsigned long c,
                             int first, int size_only)
    int index, array_length, n, no_oblock, nb, parlevel, parlevel1, i;
    int size1, align1, expr_type;
    if (type->t & VT_VLA) {
        /* save current stack pointer */
        if (vla_flags & VLA_NEED_NEW_FRAME) {
            vla_flags = VLA_IN_SCOPE;
            vla_sp_loc = &vla_sp_loc_tmp;
        vla_runtime_type_size(type, &a);
        gen_vla_alloc(type, a);
        vset(type, VT_LOCAL|VT_LVAL, c);
    } else if (type->t & VT_ARRAY) {
        t1 = pointed_type(type);
        size1 = type_size(t1, &align1);
            if ((first && tok != TOK_LSTR && tok != TOK_STR) ||
                tcc_error("character array initializer must be a literal,"
                          " optionally enclosed in braces");
        /* only parse strings here if correct type (otherwise: handle
           them as ((w)char *) expressions */
        if ((tok == TOK_LSTR &&
#ifdef TCC_TARGET_PE
             (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
#else
             (t1->t & VT_BTYPE) == VT_INT
#endif
            ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
            while (tok == TOK_STR || tok == TOK_LSTR) {
                /* compute maximum number of chars wanted */
                    cstr_len = cstr->size;
                    cstr_len = cstr->size / sizeof(nwchar_t);
                if (n >= 0 && nb > (n - array_length))
                    nb = n - array_length;
                        tcc_warning("initializer-string for array is too long");
                /* in order to go faster for the common case (char
                   string in a global variable), we handle it
                   specifically */
                if (sec && tok == TOK_STR && size1 == 1) {
                    memcpy(sec->data + c + array_length, cstr->data, nb);
                            ch = ((unsigned char *)cstr->data)[i];
                            ch = ((nwchar_t *)cstr->data)[i];
                        init_putv(t1, sec, c + (array_length + i) * size1,
            /* only add trailing zero if enough storage (no
               warning in this case since it is standard) */
            if (n < 0 || array_length < n) {
                    init_putv(t1, sec, c + (array_length * size1), 0, EXPR_VAL);
            while (tok != '}') {
                decl_designator(type, sec, c, &index, NULL, size_only);
                if (n >= 0 && index >= n)
                    tcc_error("index too large");
                /* must put zero in holes (note that doing it that way
                   ensures that it even works with designators) */
                if (!size_only && array_length < index) {
                    init_putz(t1, sec, c + array_length * size1,
                              (index - array_length) * size1);
                if (index > array_length)
                    array_length = index;
                /* special test for multi dimensional arrays (may not
                   be strictly correct if designators are used at the
                   same time) */
                if (index >= n && no_oblock)
        /* put zeros at the end */
        if (!size_only && n >= 0 && array_length < n) {
            init_putz(t1, sec, c + array_length * size1,
                      (n - array_length) * size1);
        /* patch type size if needed */
            s->c = array_length;
    } else if ((type->t & VT_BTYPE) == VT_STRUCT &&
               (sec || !first || tok == '{')) {
        /* NOTE: the previous test is a specific case for automatic
           struct/union init */
        /* XXX: union needs only one init */
        if (tcc_state->old_struct_init_code) {
            /* an old version of struct initialization.
               It has problems, but with the new version
               linux 2.4.26 can't load the ramdisk.
             */
            while (tok == '(') {
                if (!parse_btype(&type1, &ad1))
                type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
                if (!is_assignable_types(type, &type1))
                    tcc_error("invalid type for cast");
                if (!parse_btype(&type1, &ad1))
                type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
                if (!is_assignable_types(type, &type1))
                    tcc_error("invalid type for cast");
        if (first || tok == '{') {
        while (tok != '}') {
            decl_designator(type, sec, c, NULL, &f, size_only);
            if (!size_only && array_length < index) {
                init_putz(type, sec, c + array_length,
                          index - array_length);
            index = index + type_size(&f->type, &align1);
            if (index > array_length)
                array_length = index;
            /* gr: skip fields from same union - ugly. */
                int f_size = type_size(&f->type, &align);
                int f_type = (f->type.t & VT_BTYPE);
                ///printf("index: %2d %08x -- %2d %08x\n", f->c, f->type.t, f->next->c, f->next->type.t);
                /* test for same offset */
                if (f->next->c != f->c)
                if ((f_type == VT_STRUCT) && (f_size == 0)) {
                    /*
                       Let's assume a structure of size 0 can't be a member of the union.
                       This allows compiling the following code from the linux kernel v2.4.26:
                           typedef struct { } rwlock_t;
                           struct fs_struct init_fs = { { (1) }, (rwlock_t) {}, 0022, };
                       tcc-0.9.23 can successfully compile this version of the kernel.
                       gcc doesn't have problems with this code either.
                     */
                /* if yes, test for bitfield shift */
                if ((f->type.t & VT_BITFIELD) && (f->next->type.t & VT_BITFIELD)) {
                    int bit_pos_1 = (f->type.t >> VT_STRUCT_SHIFT) & 0x3f;
                    int bit_pos_2 = (f->next->type.t >> VT_STRUCT_SHIFT) & 0x3f;
                    //printf("bitfield %d %d\n", bit_pos_1, bit_pos_2);
                    if (bit_pos_1 != bit_pos_2)
        if (no_oblock && f == NULL)
        /* put zeros at the end */
        if (!size_only && array_length < n) {
            init_putz(type, sec, c + array_length,
    } else if (tok == '{') {
        decl_initializer(type, sec, c, first, size_only);
    } else if (size_only) {
        /* just skip expression */
        parlevel = parlevel1 = 0;
        while ((parlevel > 0 || parlevel1 > 0 ||
                (tok != '}' && tok != ',')) && tok != -1) {
            else if (tok == ')') {
                if (parlevel == 0 && parlevel1 == 0)
            else if (tok == '{')
            else if (tok == '}') {
                if (parlevel == 0 && parlevel1 == 0)
        /* currently, we always use constant expression for globals
           (may change for scripting case) */
            expr_type = EXPR_CONST;
            expr_type = EXPR_ANY;
        init_putv(type, sec, c, 0, expr_type);
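/* Examples (illustration only) of the initializer forms dispatched above:

       char s[] = "abc";              // string literal fills the array; the
                                      //   trailing '\0' is added when room is left
       wchar_t w[] = L"ab";           // wide string, element type checked above
       int m[2][2] = { {1,2},{3,4} }; // nested braces, one decl_initializer
                                      //   call per sub-array
       int n[4] = { 1, 2 };           // remaining elements are zero-filled
*/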
/* parse an initializer for type 't' if 'has_init' is non zero, and
   allocate space in local or global data space ('r' is either
   VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
   variable 'v' with an associated name represented by 'asm_label' of
   scope 'scope' is declared before initializers are parsed. If 'v' is
   zero, then a reference to the new object is put in the value stack.
   If 'has_init' is 2, a special parsing is done to handle string
   constants. */
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
                                   int has_init, int v, char *asm_label,
    int size, align, addr, data_offset;
    ParseState saved_parse_state = {0};
    TokenString init_str;
    Sym *flexible_array;
    flexible_array = NULL;
    if ((type->t & VT_BTYPE) == VT_STRUCT) {
        Sym *field = type->ref->next;
            field = field->next;
        if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
            flexible_array = field;
    size = type_size(type, &align);
    /* If unknown size, we must evaluate it before
       evaluating initializers because
       initializers can generate global data too
       (e.g. string pointers or ISOC99 compound
       literals). It also simplifies local
       initializers handling */
    tok_str_new(&init_str);
    if (size < 0 || (flexible_array && has_init)) {
            tcc_error("unknown type size");
        /* get all init string */
        if (has_init == 2) {
            /* only get strings */
            while (tok == TOK_STR || tok == TOK_LSTR) {
                tok_str_add_tok(&init_str);
            while (level > 0 || (tok != ',' && tok != ';')) {
                    tcc_error("unexpected end of file in initializer");
                tok_str_add_tok(&init_str);
                else if (tok == '}') {
        tok_str_add(&init_str, -1);
        tok_str_add(&init_str, 0);
        save_parse_state(&saved_parse_state);
        macro_ptr = init_str.str;
        decl_initializer(type, NULL, 0, 1, 1);
        /* prepare second initializer parsing */
        macro_ptr = init_str.str;
        /* if still unknown size, error */
        size = type_size(type, &align);
            tcc_error("unknown type size");
        size += flexible_array->type.ref->c * pointed_size(&flexible_array->type);
    /* take into account specified alignment if bigger */
    if (ad->a.aligned) {
        if (ad->a.aligned > align)
            align = ad->a.aligned;
    } else if (ad->a.packed) {
    if ((r & VT_VALMASK) == VT_LOCAL) {
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
        loc = (loc - size) & -align;
#ifdef CONFIG_TCC_BCHECK
        /* handles bounds */
        /* XXX: currently, since we do only one pass, we cannot track
           '&' operators, so we add only arrays */
        if (tcc_state->do_bounds_check && (type->t & VT_ARRAY)) {
            /* add padding between regions */
            /* then add local bound info */
            bounds_ptr = section_ptr_add(lbounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = addr;
            bounds_ptr[1] = size;
            /* local variable */
            sym_push(v, type, r, addr);
            /* push local reference */
            vset(type, r, addr);
        if (v && scope == VT_CONST) {
            /* see if the symbol was already defined */
                if (!is_compatible_types(&sym->type, type))
                    tcc_error("incompatible types for redefinition of '%s'",
                              get_tok_str(v, NULL));
                if (sym->type.t & VT_EXTERN) {
                    /* if the variable is extern, it was not allocated */
                    sym->type.t &= ~VT_EXTERN;
                    /* set array size if it was omitted in extern
                       declaration */
                    if ((sym->type.t & VT_ARRAY) &&
                        sym->type.ref->c < 0 &&
                        sym->type.ref->c = type->ref->c;
                    /* we accept several definitions of the same
                       global variable. this is tricky, because we
                       must play with the SHN_COMMON type of the symbol */
                    /* XXX: should check if the variable was already
                       initialized. It is incorrect to initialize it
                       twice */
                    /* no init data, we won't add more to the symbol */
        /* allocate symbol in corresponding section */
            else if (tcc_state->nocommon)
            data_offset = sec->data_offset;
            data_offset = (data_offset + align - 1) & -align;
            /* very important to increment global pointer at this time
               because initializers themselves can create new initializers */
            data_offset += size;
#ifdef CONFIG_TCC_BCHECK
            /* add padding if bound check */
            if (tcc_state->do_bounds_check)
            sec->data_offset = data_offset;
            /* allocate section space to put the data */
            if (sec->sh_type != SHT_NOBITS &&
                data_offset > sec->data_allocated)
                section_realloc(sec, data_offset);
            /* align section if needed */
            if (align > sec->sh_addralign)
                sec->sh_addralign = align;
            addr = 0; /* avoid warning */
        if (scope != VT_CONST || !sym) {
            sym = sym_push(v, type, r | VT_SYM, 0);
            sym->asm_label = asm_label;
            /* update symbol definition */
                put_extern_sym(sym, sec, addr, size);
                /* put a common area */
                put_extern_sym(sym, NULL, align, size);
                /* XXX: find a nicer way */
                esym = &((ElfW(Sym) *)symtab_section->data)[sym->c];
                esym->st_shndx = SHN_COMMON;
            /* push global reference */
            sym = get_sym_ref(type, sec, addr, size);
            vpushsym(type, sym);
        /* patch symbol weakness */
        if (type->t & VT_WEAK)
        apply_visibility(sym, type);
#ifdef CONFIG_TCC_BCHECK
        /* handles bounds now because the symbol must be defined
           before for the relocation */
        if (tcc_state->do_bounds_check) {
            greloc(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR);
            /* then add global bound info */
            bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = 0; /* relocated */
            bounds_ptr[1] = size;
    if (has_init || (type->t & VT_VLA)) {
        decl_initializer(type, sec, addr, 1, 0);
        /* restore parse state if needed */
            tok_str_free(init_str.str);
            restore_parse_state(&saved_parse_state);
    /* patch flexible array member size back to -1, */
    /* for possible subsequent similar declarations */
        flexible_array->type.ref->c = -1;
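/* Example (illustration only) of the flexible array member handling above:
   the member is declared with size -1, an initializer determines the real
   element count, and the count is patched back to -1 afterwards so a later
   declaration of the same type starts from the same state:

       struct msg { int len; char data[]; };
       static struct msg m = { 3, "hi" };   // storage for 'data' comes from
                                            // the initializer, not the type
*/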
static void put_func_debug(Sym *sym)
    /* XXX: we put here a dummy type */
    snprintf(buf, sizeof(buf), "%s:%c1",
             funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
    put_stabs_r(buf, N_FUN, 0, file->line_num, 0,
                cur_text_section, sym->c);
    /* //gr gdb wants a line at the function */
    put_stabn(N_SLINE, 0, file->line_num, 0);

/* parse an old style function declaration list */
/* XXX: check multiple parameter */
static void func_decl_list(Sym *func_sym)
    /* parse each declaration */
    while (tok != '{' && tok != ';' && tok != ',' && tok != TOK_EOF &&
           tok != TOK_ASM1 && tok != TOK_ASM2 && tok != TOK_ASM3) {
        if (!parse_btype(&btype, &ad))
            expect("declaration list");
        if (((btype.t & VT_BTYPE) == VT_ENUM ||
             (btype.t & VT_BTYPE) == VT_STRUCT) &&
            /* we accept no variable after */
            type_decl(&type, &ad, &v, TYPE_DIRECT);
            /* find parameter in function parameter list */
                if ((s->v & ~SYM_FIELD) == v)
                tcc_error("declaration for parameter '%s' but no such parameter",
                          get_tok_str(v, NULL));
            /* check that no storage specifier except 'register' was given */
            if (type.t & VT_STORAGE)
                tcc_error("storage class specified for '%s'", get_tok_str(v, NULL));
            convert_parameter_type(&type);
            /* we can add the type (NOTE: it could be local to the function) */
            /* accept other parameters */
/* parse a function defined by symbol 'sym' and generate its code in
   'cur_text_section' */
static void gen_function(Sym *sym)
    int saved_nocode_wanted = nocode_wanted;
    ind = cur_text_section->data_offset;
    /* NOTE: we patch the symbol size later */
    put_extern_sym(sym, cur_text_section, ind, 0);
    funcname = get_tok_str(sym->v, NULL);
    /* Initialize VLA state */
    vla_sp_loc = &vla_sp_root_loc;
    vla_flags = VLA_NEED_NEW_FRAME;
    /* put debug symbol */
    if (tcc_state->do_debug)
        put_func_debug(sym);
    /* push a dummy symbol to enable local sym storage */
    sym_push2(&local_stack, SYM_FIELD, 0, 0);
    gfunc_prolog(&sym->type);
#ifdef CONFIG_TCC_BCHECK
    if (tcc_state->do_bounds_check
        && !strcmp(get_tok_str(sym->v, NULL), "main")) {
        for (i = 0, sym = local_stack; i < 2; i++, sym = sym->prev) {
            if (sym->v & SYM_FIELD || sym->prev->v & SYM_FIELD)
            vpush_global_sym(&func_old_type, TOK___bound_main_arg);
            vset(&sym->type, sym->r, sym->c);
    block(NULL, NULL, NULL, NULL, 0, 0);
    cur_text_section->data_offset = ind;
    label_pop(&global_label_stack, NULL);
    /* reset local stack */
    scope_stack_bottom = NULL;
    sym_pop(&local_stack, NULL);
    /* end of function */
    /* patch symbol size */
    ((ElfW(Sym) *)symtab_section->data)[sym->c].st_size =
    /* patch symbol weakness (this definition overrules any prototype) */
    if (sym->type.t & VT_WEAK)
    apply_visibility(sym, &sym->type);
    if (tcc_state->do_debug) {
        put_stabn(N_FUN, 0, 0, ind - func_ind);
    /* It's better to crash than to generate wrong code */
    cur_text_section = NULL;
    funcname = ""; /* for safety */
    func_vt.t = VT_VOID; /* for safety */
    func_var = 0; /* for safety */
    ind = 0; /* for safety */
    nocode_wanted = saved_nocode_wanted;

ST_FUNC void gen_inline_functions(void)
{
    Sym *sym;
    int *str, inline_generated, i;
    struct InlineFunc *fn;

    /* iterate while inline functions are referenced */
    for(;;) {
        inline_generated = 0;
        for (i = 0; i < tcc_state->nb_inline_fns; ++i) {
            fn = tcc_state->inline_fns[i];
            sym = fn->sym;
            if (sym && sym->c) {
                /* the function was used: generate its code and
                   convert it to a normal function */
                str = fn->token_str;
                fn->sym = NULL;
                if (file)
                    pstrcpy(file->filename, sizeof file->filename, fn->filename);
                sym->r = VT_SYM | VT_CONST;
                sym->type.t &= ~VT_INLINE;

                macro_ptr = str;
                next();
                cur_text_section = text_section;
                gen_function(sym);
                macro_ptr = NULL; /* fail safe */

                inline_generated = 1;
            }
        }
        if (!inline_generated)
            break;
    }
    for (i = 0; i < tcc_state->nb_inline_fns; ++i) {
        fn = tcc_state->inline_fns[i];
        str = fn->token_str;
        tok_str_free(str);
    }
    dynarray_reset(&tcc_state->inline_fns, &tcc_state->nb_inline_fns);
}
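
/* Illustration (not part of the original source): given

       static inline int sq(int x) { return x * x; }
       int f(int y) { return sq(y); }

   sq() is only recorded as a token string when its definition is parsed; the
   reference from f() marks it used (sym->c != 0), so gen_inline_functions()
   replays the saved tokens and emits its code into text_section.  A static
   inline function that is never referenced is simply dropped when the token
   strings are freed. */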

/* 'l' is VT_LOCAL or VT_CONST to define default storage type */
static int decl0(int l, int is_for_loop_init)
{
    int v, has_init, r;
    CType type, btype;
    Sym *sym;
    AttributeDef ad;

    while (1) {
        if (!parse_btype(&btype, &ad)) {
            if (is_for_loop_init)
                return 0;
            /* skip redundant ';' */
            /* XXX: find more elegant solution */
            if (tok == ';') {
                next();
                continue;
            }
            if (l == VT_CONST &&
                (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                /* global asm block */
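                /* Illustration (not part of the original source): this branch
                   accepts a file-scope assembler statement such as

                       __asm__(".globl early_marker\nearly_marker:");

                   which is only allowed at global scope (l == VT_CONST). */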
                asm_global_instr();
                continue;
            }
            /* special test for old K&R protos without explicit int
               type. Only accepted when defining global data */
            if (l == VT_LOCAL || tok < TOK_DEFINE)
                break;
            btype.t = VT_INT;
        }
        if (((btype.t & VT_BTYPE) == VT_ENUM ||
             (btype.t & VT_BTYPE) == VT_STRUCT) &&
            tok == ';') {
            if ((btype.t & VT_BTYPE) == VT_STRUCT) {
                int v = btype.ref->v;
                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
                    tcc_warning("unnamed struct/union that defines no instances");
            }
            next();
            continue;
        }
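        /* Illustration (not part of the original source): the warning above
           fires for a tag-less, instance-less declaration such as

               struct { int x; };

           whereas 'struct s { int x; };' and 'struct { int x; } inst;' are
           accepted silently. */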
        while (1) { /* iterate thru each declaration */
            char *asm_label; // associated asm label
            type = btype;
            type_decl(&type, &ad, &v, TYPE_DIRECT);
#if 0
            {
                char buf[500];
                type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
                printf("type = '%s'\n", buf);
            }
#endif
            if ((type.t & VT_BTYPE) == VT_FUNC) {
                if ((type.t & VT_STATIC) && (l == VT_LOCAL)) {
                    tcc_error("function without file scope cannot be static");
                }
                /* if old style function prototype, we accept a
                   declaration list */
                sym = type.ref;
                if (sym->c == FUNC_OLD)
                    func_decl_list(sym);
            }

            asm_label = NULL;
            if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                CString astr;

                asm_label_instr(&astr);
                asm_label = tcc_strdup(astr.data);
                cstr_free(&astr);

                /* parse one last attribute list, after asm label */
                parse_attribute(&ad);
            }
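            /* Illustration (not part of the original source): with GNU
               extensions enabled, the branch above accepts an assembler name
               and trailing attributes on a declarator, e.g.

                   extern int my_read(int fd, void *buf, unsigned long n)
                       __asm__("__libc_read") __attribute__((weak));

               'asm_label' then carries the string "__libc_read" down to
               external_sym()/decl_initializer_alloc(). */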
            if (ad.a.weak)
                type.t |= VT_WEAK;
#ifdef TCC_TARGET_PE
            if (ad.a.func_import)
                type.t |= VT_IMPORT;
            if (ad.a.func_export)
                type.t |= VT_EXPORT;
#endif
            type.t |= ad.a.visibility << VT_VIS_SHIFT;

            if (tok == '{') {
                if (l == VT_LOCAL)
                    tcc_error("cannot use local functions");
                if ((type.t & VT_BTYPE) != VT_FUNC)
                    expect("function definition");

                /* reject abstract declarators in function definition */
                sym = type.ref;
                while ((sym = sym->next) != NULL)
                    if (!(sym->v & ~SYM_FIELD))
                        expect("identifier");

                /* XXX: cannot do better now: convert extern line to static inline */
                if ((type.t & (VT_EXTERN | VT_INLINE)) == (VT_EXTERN | VT_INLINE))
                    type.t = (type.t & ~VT_EXTERN) | VT_STATIC;
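                /* Illustration (not part of the original source): because of the
                   conversion above, a definition written as

                       extern inline int twice(int x) { return 2 * x; }

                   is handled exactly like 'static inline', i.e. recorded for
                   deferred emission rather than compiled immediately. */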
                sym = sym_find(v);
                if (sym) {
                    Sym *ref;
                    if ((sym->type.t & VT_BTYPE) != VT_FUNC)
                        goto func_error1;

                    ref = sym->type.ref;
                    if (0 == ref->a.func_proto)
                        tcc_error("redefinition of '%s'", get_tok_str(v, NULL));

                    /* use func_call from prototype if not defined */
                    if (ref->a.func_call != FUNC_CDECL
                     && type.ref->a.func_call == FUNC_CDECL)
                        type.ref->a.func_call = ref->a.func_call;

                    /* use export from prototype */
                    if (ref->a.func_export)
                        type.ref->a.func_export = 1;

                    /* use static from prototype */
                    if (sym->type.t & VT_STATIC)
                        type.t = (type.t & ~VT_EXTERN) | VT_STATIC;

                    /* If the definition has no visibility use the
                       one from prototype. */
                    if (! (type.t & VT_VIS_MASK))
                        type.t |= sym->type.t & VT_VIS_MASK;

                    if (!is_compatible_types(&sym->type, &type)) {
                    func_error1:
                        tcc_error("incompatible types for redefinition of '%s'",
                                  get_tok_str(v, NULL));
                    }
                    type.ref->a.func_proto = 0;
                    /* if symbol is already defined, then put complete type */
                    sym->type = type;
                } else {
                    /* put function symbol */
                    sym = global_identifier_push(v, type.t, 0);
                    sym->type.ref = type.ref;
                }
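                /* Illustration (not part of the original source): the merging
                   above lets a pair such as

                       static int helper(void);            -- prototype
                       int helper(void) { return 1; }      -- definition

                   keep internal linkage, and carries a calling convention or
                   export attribute that was given only on the prototype over
                   to the definition. */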
                /* static inline functions are just recorded as a kind
                   of macro. Their code will be emitted at the end of
                   the compilation unit only if they are used */
                if ((type.t & (VT_INLINE | VT_STATIC)) ==
                    (VT_INLINE | VT_STATIC)) {
                    TokenString func_str;
                    int block_level;
                    struct InlineFunc *fn;
                    const char *filename;

                    tok_str_new(&func_str);

                    block_level = 0;
                    for(;;) {
                        int t;
                        if (tok == TOK_EOF)
                            tcc_error("unexpected end of file");
                        tok_str_add_tok(&func_str);
                        t = tok;
                        next();
                        if (t == '{') {
                            block_level++;
                        } else if (t == '}') {
                            block_level--;
                            if (block_level == 0)
                                break;
                        }
                    }
                    tok_str_add(&func_str, -1);
                    tok_str_add(&func_str, 0);
                    filename = file ? file->filename : "";
                    fn = tcc_malloc(sizeof *fn + strlen(filename));
                    strcpy(fn->filename, filename);
                    fn->sym = sym;
                    fn->token_str = func_str.str;
                    dynarray_add((void ***)&tcc_state->inline_fns, &tcc_state->nb_inline_fns, fn);
                } else {
                    /* compute text section */
                    cur_text_section = ad.section;
                    if (!cur_text_section)
                        cur_text_section = text_section;
                    sym->r = VT_SYM | VT_CONST;
                    gen_function(sym);
                }
                break;
            } else {
                if (btype.t & VT_TYPEDEF) {
                    /* save typedefed type */
                    /* XXX: test storage specifiers ? */
                    sym = sym_push(v, &type, 0, 0);
                    sym->type.t |= VT_TYPEDEF;
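                    /* Illustration (not part of the original source): for

                           typedef unsigned long ulong_t;

                       the name is pushed as an ordinary symbol whose type
                       carries VT_TYPEDEF, so later lookups of 'ulong_t' in
                       parse_btype() resolve to 'unsigned long'. */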
                } else {
                    r = 0;
                    if ((type.t & VT_BTYPE) == VT_FUNC) {
                        /* external function definition */
                        /* specific case for func_call attribute */
                        ad.a.func_proto = 1;
                        type.ref->a = ad.a;
                    } else if (!(type.t & VT_ARRAY)) {
                        /* not lvalue if array */
                        r |= lvalue_type(type.t);
                    }
                    has_init = (tok == '=');
                    if (has_init && (type.t & VT_VLA))
                        tcc_error("Variable length array cannot be initialized");
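                    /* Illustration (not part of the original source): the check
                       above rejects

                           void f(int n) { int a[n] = {0}; }

                       while the same declaration without an initializer,
                       'int a[n];', is accepted. */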
                    if ((btype.t & VT_EXTERN) || ((type.t & VT_BTYPE) == VT_FUNC) ||
                        ((type.t & VT_ARRAY) && (type.t & VT_STATIC) &&
                         !has_init && l == VT_CONST && type.ref->c < 0)) {
                        /* external variable or function */
                        /* NOTE: as GCC, uninitialized global static
                           arrays of null size are considered as
                           extern */
                        sym = external_sym(v, &type, r, asm_label);

                        if (ad.alias_target) {
                            Section tsec;
                            Elf32_Sym *esym;
                            Sym *alias_target;

                            alias_target = sym_find(ad.alias_target);
                            if (!alias_target || !alias_target->c)
                                tcc_error("unsupported forward __alias__ attribute");
                            esym = &((Elf32_Sym *)symtab_section->data)[alias_target->c];
                            tsec.sh_num = esym->st_shndx;
                            put_extern_sym2(sym, &tsec, esym->st_value, esym->st_size, 0);
                        }
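                        /* Illustration (not part of the original source): this
                           resolves a GNU alias such as

                               int real_impl(void) { return 0; }
                               int alias_name(void) __attribute__((alias("real_impl")));

                           by copying the section, value and size of the already
                           defined target symbol; a forward alias whose target is
                           not yet defined is rejected above. */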
                    } else {
                        type.t |= (btype.t & VT_STATIC); /* Retain "static". */
                        if (type.t & VT_STATIC)
                            r |= VT_CONST;
                        else
                            r |= l;
                        if (has_init)
                            next();
                        decl_initializer_alloc(&type, &ad, r, has_init, v, asm_label, l);
                    }
                }
                if (tok != ',') {
                    if (is_for_loop_init)
                        return 1;
                    skip(';');
                    break;
                }
                next();
            }
        }
    }
    return 0;
}

ST_FUNC void decl(int l)
{
    decl0(l, 0);
}