/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   rsym: return symbol
   anon_sym: anonymous symbol index
*/
31 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Section
*text_section
, *data_section
, *bss_section
; /* predefined sections */
34 ST_DATA Section
*cur_text_section
; /* current section where function code is generated */
36 ST_DATA Section
*last_text_section
; /* to handle .previous asm directive */
#ifdef CONFIG_TCC_BCHECK
/* bound check related sections */
ST_DATA Section *bounds_section; /* contains global data bound description */
ST_DATA Section *lbounds_section; /* contains local data bound description */
#endif
44 ST_DATA Section
*symtab_section
, *strtab_section
;
46 ST_DATA Section
*stab_section
, *stabstr_section
;
47 ST_DATA Sym
*sym_free_first
;
48 ST_DATA
void **sym_pools
;
49 ST_DATA
int nb_sym_pools
;
51 ST_DATA Sym
*global_stack
;
52 ST_DATA Sym
*local_stack
;
53 ST_DATA Sym
*scope_stack_bottom
;
54 ST_DATA Sym
*define_stack
;
55 ST_DATA Sym
*global_label_stack
;
56 ST_DATA Sym
*local_label_stack
;
58 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
59 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
60 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
62 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
64 ST_DATA
int const_wanted
; /* true if constant wanted */
65 ST_DATA
int nocode_wanted
; /* true if no code generation wanted for an expression */
66 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
67 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
68 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
70 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
71 ST_DATA
const char *funcname
;
73 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
;
75 /* ------------------------------------------------------------------------- */
76 static void gen_cast(CType
*type
);
77 static inline CType
*pointed_type(CType
*type
);
78 static int is_compatible_types(CType
*type1
, CType
*type2
);
79 static int parse_btype(CType
*type
, AttributeDef
*ad
);
80 static void type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
81 static void parse_expr_type(CType
*type
);
82 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
83 static void block(int *bsym
, int *csym
, int *case_sym
, int *def_sym
, int case_reg
, int is_expr
);
84 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, char *asm_label
, int scope
);
85 static int decl0(int l
, int is_for_loop_init
);
86 static void expr_eq(void);
87 static void unary_type(CType
*type
);
88 static void vla_runtime_type_size(CType
*type
, int *a
);
89 static void vla_sp_restore(void);
90 static void vla_sp_restore_root(void);
91 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
);
92 static void expr_type(CType
*type
);
93 ST_FUNC
void vpush64(int ty
, unsigned long long v
);
94 ST_FUNC
void vpush(CType
*type
);
95 ST_FUNC
int gvtst(int inv
, int t
);
96 ST_FUNC
int is_btype_size(int bt
);
98 ST_INLN
int is_float(int t
)
102 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
105 /* we use our own 'finite' function to avoid potential problems with
106 non standard math libs */
107 /* XXX: endianness dependent */
108 ST_FUNC
int ieee_finite(double d
)
111 memcpy(p
, &d
, sizeof(double));
112 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
115 ST_FUNC
void test_lvalue(void)
117 if (!(vtop
->r
& VT_LVAL
))
121 ST_FUNC
void check_vstack(void)
124 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
127 /* ------------------------------------------------------------------------- */
128 /* symbol allocator */
129 static Sym
*__sym_malloc(void)
131 Sym
*sym_pool
, *sym
, *last_sym
;
134 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
135 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
137 last_sym
= sym_free_first
;
139 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
140 sym
->next
= last_sym
;
144 sym_free_first
= last_sym
;
148 static inline Sym
*sym_malloc(void)
151 sym
= sym_free_first
;
153 sym
= __sym_malloc();
154 sym_free_first
= sym
->next
;
158 ST_INLN
void sym_free(Sym
*sym
)
160 sym
->next
= sym_free_first
;
161 tcc_free(sym
->asm_label
);
162 sym_free_first
= sym
;
165 /* push, without hashing */
166 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, long c
)
169 if (ps
== &local_stack
) {
170 for (s
= *ps
; s
&& s
!= scope_stack_bottom
; s
= s
->prev
)
171 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
&& s
->v
== v
)
172 tcc_error("incompatible types for redefinition of '%s'",
173 get_tok_str(v
, NULL
));
191 /* find a symbol and return its associated structure. 's' is the top
192 of the symbol stack */
193 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
205 /* structure lookup */
206 ST_INLN Sym
*struct_find(int v
)
209 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
211 return table_ident
[v
]->sym_struct
;
214 /* find an identifier */
215 ST_INLN Sym
*sym_find(int v
)
218 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
220 return table_ident
[v
]->sym_identifier
;
223 /* push a given symbol on the symbol stack */
224 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
233 s
= sym_push2(ps
, v
, type
->t
, c
);
234 s
->type
.ref
= type
->ref
;
236 /* don't record fields or anonymous symbols */
238 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
239 /* record symbol in token array */
240 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
242 ps
= &ts
->sym_struct
;
244 ps
= &ts
->sym_identifier
;
251 /* push a global identifier */
252 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
255 s
= sym_push2(&global_stack
, v
, t
, c
);
256 /* don't record anonymous symbol */
257 if (v
< SYM_FIRST_ANOM
) {
258 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
259 /* modify the top most local identifier, so that
260 sym_identifier will point to 's' when popped */
262 ps
= &(*ps
)->prev_tok
;
269 /* pop symbols until top reaches 'b' */
270 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
)
280 /* remove symbol in token array */
282 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
283 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
285 ps
= &ts
->sym_struct
;
287 ps
= &ts
->sym_identifier
;
296 static void weaken_symbol(Sym
*sym
)
298 sym
->type
.t
|= VT_WEAK
;
303 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
304 esym_type
= ELFW(ST_TYPE
)(esym
->st_info
);
305 esym
->st_info
= ELFW(ST_INFO
)(STB_WEAK
, esym_type
);
309 static void apply_visibility(Sym
*sym
, CType
*type
)
311 int vis
= sym
->type
.t
& VT_VIS_MASK
;
312 int vis2
= type
->t
& VT_VIS_MASK
;
313 if (vis
== (STV_DEFAULT
<< VT_VIS_SHIFT
))
315 else if (vis2
== (STV_DEFAULT
<< VT_VIS_SHIFT
))
318 vis
= (vis
< vis2
) ? vis
: vis2
;
319 sym
->type
.t
&= ~VT_VIS_MASK
;
325 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
326 vis
>>= VT_VIS_SHIFT
;
327 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1)) | vis
;
331 /* ------------------------------------------------------------------------- */
333 ST_FUNC
void swap(int *p
, int *q
)
341 static void vsetc(CType
*type
, int r
, CValue
*vc
)
345 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
346 tcc_error("memory full (vstack)");
347 /* cannot let cpu flags if other instruction are generated. Also
348 avoid leaving VT_JMP anywhere except on the top of the stack
349 because it would complicate the code generator. */
350 if (vtop
>= vstack
) {
351 v
= vtop
->r
& VT_VALMASK
;
352 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
362 /* push constant of type "type" with useless value */
363 ST_FUNC
void vpush(CType
*type
)
366 vsetc(type
, VT_CONST
, &cval
);
369 /* push integer constant */
370 ST_FUNC
void vpushi(int v
)
374 vsetc(&int_type
, VT_CONST
, &cval
);
377 /* push a pointer sized constant */
378 static void vpushs(addr_t v
)
382 vsetc(&size_type
, VT_CONST
, &cval
);
385 /* push arbitrary 64bit constant */
386 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
393 vsetc(&ctype
, VT_CONST
, &cval
);
396 /* push long long constant */
397 static inline void vpushll(long long v
)
399 vpush64(VT_LLONG
, v
);
402 /* push a symbol value of TYPE */
403 static inline void vpushsym(CType
*type
, Sym
*sym
)
407 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
411 /* Return a static symbol pointing to a section */
412 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
418 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
419 sym
->type
.ref
= type
->ref
;
420 sym
->r
= VT_CONST
| VT_SYM
;
421 put_extern_sym(sym
, sec
, offset
, size
);
425 /* push a reference to a section offset by adding a dummy symbol */
426 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
428 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
431 /* define a new external reference to a symbol 'v' of type 'u' */
432 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
438 /* push forward reference */
439 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
440 s
->type
.ref
= type
->ref
;
441 s
->r
= r
| VT_CONST
| VT_SYM
;
446 /* define a new external reference to a symbol 'v' with alternate asm
447 name 'asm_label' of type 'u'. 'asm_label' is equal to NULL if there
448 is no alternate name (most cases) */
449 static Sym
*external_sym(int v
, CType
*type
, int r
, char *asm_label
)
455 /* push forward reference */
456 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
457 s
->asm_label
= asm_label
;
458 s
->type
.t
|= VT_EXTERN
;
459 } else if (s
->type
.ref
== func_old_type
.ref
) {
460 s
->type
.ref
= type
->ref
;
461 s
->r
= r
| VT_CONST
| VT_SYM
;
462 s
->type
.t
|= VT_EXTERN
;
463 } else if (!is_compatible_types(&s
->type
, type
)) {
464 tcc_error("incompatible types for redefinition of '%s'",
465 get_tok_str(v
, NULL
));
467 /* Merge some storage attributes. */
468 if (type
->t
& VT_WEAK
)
471 if (type
->t
& VT_VIS_MASK
)
472 apply_visibility(s
, type
);
477 /* push a reference to global symbol v */
478 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
480 vpushsym(type
, external_global_sym(v
, type
, 0));
483 ST_FUNC
void vset(CType
*type
, int r
, int v
)
488 vsetc(type
, r
, &cval
);
491 static void vseti(int r
, int v
)
499 ST_FUNC
void vswap(void)
502 /* cannot let cpu flags if other instruction are generated. Also
503 avoid leaving VT_JMP anywhere except on the top of the stack
504 because it would complicate the code generator. */
505 if (vtop
>= vstack
) {
506 int v
= vtop
->r
& VT_VALMASK
;
507 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
514 /* XXX: +2% overall speed possible with optimized memswap
516 * memswap(&vtop[0], &vtop[1], sizeof *vtop);
520 ST_FUNC
void vpushv(SValue
*v
)
522 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
523 tcc_error("memory full (vstack)");
528 ST_FUNC
void vdup(void)
533 /* save r to the memory stack, and mark it as being free */
534 ST_FUNC
void save_reg(int r
)
536 int l
, saved
, size
, align
;
540 /* modify all stack values */
543 for(p
=vstack
;p
<=vtop
;p
++) {
544 if ((p
->r
& VT_VALMASK
) == r
||
545 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
546 /* must save value on stack if not already done */
548 /* NOTE: must reload 'r' because r might be equal to r2 */
549 r
= p
->r
& VT_VALMASK
;
550 /* store register in the stack */
552 if ((p
->r
& VT_LVAL
) ||
553 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
554 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
555 type
= &char_pointer_type
;
559 size
= type_size(type
, &align
);
560 loc
= (loc
- size
) & -align
;
562 sv
.r
= VT_LOCAL
| VT_LVAL
;
565 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
566 /* x86 specific: need to pop fp register ST0 if saved */
568 o(0xd8dd); /* fstp %st(0) */
571 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
572 /* special long long case */
573 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
581 /* mark that stack entry as being saved on the stack */
582 if (p
->r
& VT_LVAL
) {
583 /* also clear the bounded flag because the
584 relocation address of the function was stored in
586 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
588 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
596 #ifdef TCC_TARGET_ARM
597 /* find a register of class 'rc2' with at most one reference on stack.
598 * If none, call get_reg(rc) */
599 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
604 for(r
=0;r
<NB_REGS
;r
++) {
605 if (reg_classes
[r
] & rc2
) {
608 for(p
= vstack
; p
<= vtop
; p
++) {
609 if ((p
->r
& VT_VALMASK
) == r
||
610 (p
->r2
& VT_VALMASK
) == r
)
621 /* find a free register of class 'rc'. If none, save one register */
622 ST_FUNC
int get_reg(int rc
)
627 /* find a free register */
628 for(r
=0;r
<NB_REGS
;r
++) {
629 if (reg_classes
[r
] & rc
) {
630 for(p
=vstack
;p
<=vtop
;p
++) {
631 if ((p
->r
& VT_VALMASK
) == r
||
632 (p
->r2
& VT_VALMASK
) == r
)
640 /* no register left : free the first one on the stack (VERY
641 IMPORTANT to start from the bottom to ensure that we don't
642 spill registers used in gen_opi()) */
643 for(p
=vstack
;p
<=vtop
;p
++) {
644 /* look at second register (if long long) */
645 r
= p
->r2
& VT_VALMASK
;
646 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
648 r
= p
->r
& VT_VALMASK
;
649 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
655 /* Should never comes here */
659 /* save registers up to (vtop - n) stack entry */
660 ST_FUNC
void save_regs(int n
)
665 for(p
= vstack
;p
<= p1
; p
++) {
666 r
= p
->r
& VT_VALMASK
;
673 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
675 static void move_reg(int r
, int s
, int t
)
689 /* get address of vtop (vtop MUST BE an lvalue) */
690 ST_FUNC
void gaddrof(void)
692 if (vtop
->r
& VT_REF
&& !nocode_wanted
)
695 /* tricky: if saved lvalue, then we can go back to lvalue */
696 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
697 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
702 #ifdef CONFIG_TCC_BCHECK
703 /* generate lvalue bound code */
704 static void gbound(void)
709 vtop
->r
&= ~VT_MUSTBOUND
;
710 /* if lvalue, then use checking code before dereferencing */
711 if (vtop
->r
& VT_LVAL
) {
712 /* if not VT_BOUNDED value, then make one */
713 if (!(vtop
->r
& VT_BOUNDED
)) {
714 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
715 /* must save type because we must set it to int to get pointer */
717 vtop
->type
.t
= VT_PTR
;
720 gen_bounded_ptr_add();
721 vtop
->r
|= lval_type
;
724 /* then check for dereferencing */
725 gen_bounded_ptr_deref();
730 /* store vtop a register belonging to class 'rc'. lvalues are
731 converted to values. Cannot be used if cannot be converted to
732 register value (such as structures). */
733 ST_FUNC
int gv(int rc
)
735 int r
, bit_pos
, bit_size
, size
, align
, i
;
738 /* NOTE: get_reg can modify vstack[] */
739 if (vtop
->type
.t
& VT_BITFIELD
) {
742 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
743 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
744 /* remove bit field info to avoid loops */
745 vtop
->type
.t
&= ~(VT_BITFIELD
| (-1 << VT_STRUCT_SHIFT
));
746 /* cast to int to propagate signedness in following ops */
747 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
752 if((vtop
->type
.t
& VT_UNSIGNED
) ||
753 (vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
754 type
.t
|= VT_UNSIGNED
;
756 /* generate shifts */
757 vpushi(bits
- (bit_pos
+ bit_size
));
759 vpushi(bits
- bit_size
);
760 /* NOTE: transformed to SHR if unsigned */
764 if (is_float(vtop
->type
.t
) &&
765 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
768 unsigned long offset
;
769 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
773 /* XXX: unify with initializers handling ? */
774 /* CPUs usually cannot use float constants, so we store them
775 generically in data segment */
776 size
= type_size(&vtop
->type
, &align
);
777 offset
= (data_section
->data_offset
+ align
- 1) & -align
;
778 data_section
->data_offset
= offset
;
779 /* XXX: not portable yet */
780 #if defined(__i386__) || defined(__x86_64__)
781 /* Zero pad x87 tenbyte long doubles */
782 if (size
== LDOUBLE_SIZE
) {
783 vtop
->c
.tab
[2] &= 0xffff;
784 #if LDOUBLE_SIZE == 16
789 ptr
= section_ptr_add(data_section
, size
);
791 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
795 ptr
[i
] = vtop
->c
.tab
[size
-1-i
];
799 ptr
[i
] = vtop
->c
.tab
[i
];
800 sym
= get_sym_ref(&vtop
->type
, data_section
, offset
, size
<< 2);
801 vtop
->r
|= VT_LVAL
| VT_SYM
;
803 vtop
->c
.ptr_offset
= 0;
805 #ifdef CONFIG_TCC_BCHECK
806 if (vtop
->r
& VT_MUSTBOUND
)
810 r
= vtop
->r
& VT_VALMASK
;
811 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
812 #ifndef TCC_TARGET_ARM64
815 #ifdef TCC_TARGET_X86_64
816 else if (rc
== RC_FRET
)
821 /* need to reload if:
823 - lvalue (need to dereference pointer)
824 - already a register, but not in the right class */
826 || (vtop
->r
& VT_LVAL
)
827 || !(reg_classes
[r
] & rc
)
828 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
829 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
830 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
832 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
837 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
838 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
839 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
841 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
842 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
843 unsigned long long ll
;
845 int r2
, original_type
;
846 original_type
= vtop
->type
.t
;
847 /* two register type load : expand to two words
849 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
850 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
853 vtop
->c
.ui
= ll
; /* first word */
855 vtop
->r
= r
; /* save register value */
856 vpushi(ll
>> 32); /* second word */
859 if (r
>= VT_CONST
|| /* XXX: test to VT_CONST incorrect ? */
860 (vtop
->r
& VT_LVAL
)) {
861 /* We do not want to modifier the long long
862 pointer here, so the safest (and less
863 efficient) is to save all the other registers
864 in the stack. XXX: totally inefficient. */
866 /* load from memory */
867 vtop
->type
.t
= load_type
;
870 vtop
[-1].r
= r
; /* save register value */
871 /* increment pointer to get second word */
872 vtop
->type
.t
= addr_type
;
877 vtop
->type
.t
= load_type
;
882 vtop
[-1].r
= r
; /* save register value */
883 vtop
->r
= vtop
[-1].r2
;
885 /* Allocate second register. Here we rely on the fact that
886 get_reg() tries first to free r2 of an SValue. */
890 /* write second register */
892 vtop
->type
.t
= original_type
;
893 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
895 /* lvalue of scalar type : need to use lvalue type
896 because of possible cast */
899 /* compute memory access type */
900 if (vtop
->r
& VT_REF
)
901 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
906 else if (vtop
->r
& VT_LVAL_BYTE
)
908 else if (vtop
->r
& VT_LVAL_SHORT
)
910 if (vtop
->r
& VT_LVAL_UNSIGNED
)
914 /* restore wanted type */
917 /* one register type load */
922 #ifdef TCC_TARGET_C67
923 /* uses register pairs for doubles */
924 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
931 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
932 ST_FUNC
void gv2(int rc1
, int rc2
)
936 /* generate more generic register first. But VT_JMP or VT_CMP
937 values must be generated first in all cases to avoid possible
939 v
= vtop
[0].r
& VT_VALMASK
;
940 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
945 /* test if reload is needed for first register */
946 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
956 /* test if reload is needed for first register */
957 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
963 #ifndef TCC_TARGET_ARM64
964 /* wrapper around RC_FRET to return a register by type */
965 static int rc_fret(int t
)
967 #ifdef TCC_TARGET_X86_64
968 if (t
== VT_LDOUBLE
) {
976 /* wrapper around REG_FRET to return a register by type */
977 static int reg_fret(int t
)
979 #ifdef TCC_TARGET_X86_64
980 if (t
== VT_LDOUBLE
) {
987 /* expand long long on stack in two int registers */
988 static void lexpand(void)
992 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
995 vtop
[0].r
= vtop
[-1].r2
;
996 vtop
[0].r2
= VT_CONST
;
997 vtop
[-1].r2
= VT_CONST
;
998 vtop
[0].type
.t
= VT_INT
| u
;
999 vtop
[-1].type
.t
= VT_INT
| u
;
1002 #ifdef TCC_TARGET_ARM
1003 /* expand long long on stack */
1004 ST_FUNC
void lexpand_nr(void)
1008 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1010 vtop
->r2
= VT_CONST
;
1011 vtop
->type
.t
= VT_INT
| u
;
1012 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1013 if (v
== VT_CONST
) {
1014 vtop
[-1].c
.ui
= vtop
->c
.ull
;
1015 vtop
->c
.ui
= vtop
->c
.ull
>> 32;
1017 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1019 vtop
->r
= vtop
[-1].r
;
1020 } else if (v
> VT_CONST
) {
1024 vtop
->r
= vtop
[-1].r2
;
1025 vtop
[-1].r2
= VT_CONST
;
1026 vtop
[-1].type
.t
= VT_INT
| u
;
1030 /* build a long long from two ints */
1031 static void lbuild(int t
)
1033 gv2(RC_INT
, RC_INT
);
1034 vtop
[-1].r2
= vtop
[0].r
;
1035 vtop
[-1].type
.t
= t
;
1039 /* rotate n first stack elements to the bottom
1040 I1 ... In -> I2 ... In I1 [top is right]
1042 ST_FUNC
void vrotb(int n
)
1048 for(i
=-n
+1;i
!=0;i
++)
1049 vtop
[i
] = vtop
[i
+1];
1053 /* rotate the n elements before entry e towards the top
1054 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1056 ST_FUNC
void vrote(SValue
*e
, int n
)
1062 for(i
= 0;i
< n
- 1; i
++)
1067 /* rotate n first stack elements to the top
1068 I1 ... In -> In I1 ... I(n-1) [top is right]
1070 ST_FUNC
void vrott(int n
)
1075 /* pop stack value */
1076 ST_FUNC
void vpop(void)
1079 v
= vtop
->r
& VT_VALMASK
;
1080 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1081 /* for x86, we need to pop the FP stack */
1082 if (v
== TREG_ST0
&& !nocode_wanted
) {
1083 o(0xd8dd); /* fstp %st(0) */
1086 if (v
== VT_JMP
|| v
== VT_JMPI
) {
1087 /* need to put correct jump if && or || without test */
1093 /* convert stack entry to register and duplicate its value in another
1095 static void gv_dup(void)
1101 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1108 /* stack: H L L1 H1 */
1116 /* duplicate value */
1121 #ifdef TCC_TARGET_X86_64
1122 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1132 load(r1
, &sv
); /* move r to r1 */
1134 /* duplicates value */
1140 /* Generate value test
1142 * Generate a test for any value (jump, comparison and integers) */
1143 ST_FUNC
int gvtst(int inv
, int t
)
1145 int v
= vtop
->r
& VT_VALMASK
;
1146 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1150 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1151 /* constant jmp optimization */
1152 if ((vtop
->c
.i
!= 0) != inv
)
1157 return gtst(inv
, t
);
1160 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
1161 /* generate CPU independent (unsigned) long long operations */
1162 static void gen_opl(int op
)
1164 int t
, a
, b
, op1
, c
, i
;
1166 unsigned short reg_iret
= REG_IRET
;
1167 unsigned short reg_lret
= REG_LRET
;
1173 func
= TOK___divdi3
;
1176 func
= TOK___udivdi3
;
1179 func
= TOK___moddi3
;
1182 func
= TOK___umoddi3
;
1189 /* call generic long long function */
1190 vpush_global_sym(&func_old_type
, func
);
1195 vtop
->r2
= reg_lret
;
1208 /* stack: L1 H1 L2 H2 */
1213 vtop
[-2] = vtop
[-3];
1216 /* stack: H1 H2 L1 L2 */
1222 /* stack: H1 H2 L1 L2 ML MH */
1225 /* stack: ML MH H1 H2 L1 L2 */
1229 /* stack: ML MH H1 L2 H2 L1 */
1234 /* stack: ML MH M1 M2 */
1237 } else if (op
== '+' || op
== '-') {
1238 /* XXX: add non carry method too (for MIPS or alpha) */
1244 /* stack: H1 H2 (L1 op L2) */
1247 gen_op(op1
+ 1); /* TOK_xxxC2 */
1250 /* stack: H1 H2 (L1 op L2) */
1253 /* stack: (L1 op L2) H1 H2 */
1255 /* stack: (L1 op L2) (H1 op H2) */
1263 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1264 t
= vtop
[-1].type
.t
;
1268 /* stack: L H shift */
1270 /* constant: simpler */
1271 /* NOTE: all comments are for SHL. the other cases are
1272 done by swaping words */
1283 if (op
!= TOK_SAR
) {
1316 /* XXX: should provide a faster fallback on x86 ? */
1319 func
= TOK___ashrdi3
;
1322 func
= TOK___lshrdi3
;
1325 func
= TOK___ashldi3
;
1331 /* compare operations */
1337 /* stack: L1 H1 L2 H2 */
1339 vtop
[-1] = vtop
[-2];
1341 /* stack: L1 L2 H1 H2 */
1344 /* when values are equal, we need to compare low words. since
1345 the jump is inverted, we invert the test too. */
1348 else if (op1
== TOK_GT
)
1350 else if (op1
== TOK_ULT
)
1352 else if (op1
== TOK_UGT
)
1357 if (op1
!= TOK_NE
) {
1361 /* generate non equal test */
1362 /* XXX: NOT PORTABLE yet */
1366 #if defined(TCC_TARGET_I386)
1367 b
= psym(0x850f, 0);
1368 #elif defined(TCC_TARGET_ARM)
1370 o(0x1A000000 | encbranch(ind
, 0, 1));
1371 #elif defined(TCC_TARGET_C67) || defined(TCC_TARGET_ARM64)
1372 tcc_error("not implemented");
1374 #error not supported
1378 /* compare low. Always unsigned */
1382 else if (op1
== TOK_LE
)
1384 else if (op1
== TOK_GT
)
1386 else if (op1
== TOK_GE
)
1397 /* handle integer constant optimizations and various machine
1399 static void gen_opic(int op
)
1401 int c1
, c2
, t1
, t2
, n
;
1404 typedef unsigned long long U
;
1408 t1
= v1
->type
.t
& VT_BTYPE
;
1409 t2
= v2
->type
.t
& VT_BTYPE
;
1413 else if (v1
->type
.t
& VT_UNSIGNED
)
1420 else if (v2
->type
.t
& VT_UNSIGNED
)
1425 /* currently, we cannot do computations with forward symbols */
1426 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1427 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1430 case '+': l1
+= l2
; break;
1431 case '-': l1
-= l2
; break;
1432 case '&': l1
&= l2
; break;
1433 case '^': l1
^= l2
; break;
1434 case '|': l1
|= l2
; break;
1435 case '*': l1
*= l2
; break;
1442 /* if division by zero, generate explicit division */
1445 tcc_error("division by zero in constant");
1449 default: l1
/= l2
; break;
1450 case '%': l1
%= l2
; break;
1451 case TOK_UDIV
: l1
= (U
)l1
/ l2
; break;
1452 case TOK_UMOD
: l1
= (U
)l1
% l2
; break;
1455 case TOK_SHL
: l1
<<= l2
; break;
1456 case TOK_SHR
: l1
= (U
)l1
>> l2
; break;
1457 case TOK_SAR
: l1
>>= l2
; break;
1459 case TOK_ULT
: l1
= (U
)l1
< (U
)l2
; break;
1460 case TOK_UGE
: l1
= (U
)l1
>= (U
)l2
; break;
1461 case TOK_EQ
: l1
= l1
== l2
; break;
1462 case TOK_NE
: l1
= l1
!= l2
; break;
1463 case TOK_ULE
: l1
= (U
)l1
<= (U
)l2
; break;
1464 case TOK_UGT
: l1
= (U
)l1
> (U
)l2
; break;
1465 case TOK_LT
: l1
= l1
< l2
; break;
1466 case TOK_GE
: l1
= l1
>= l2
; break;
1467 case TOK_LE
: l1
= l1
<= l2
; break;
1468 case TOK_GT
: l1
= l1
> l2
; break;
1470 case TOK_LAND
: l1
= l1
&& l2
; break;
1471 case TOK_LOR
: l1
= l1
|| l2
; break;
1478 /* if commutative ops, put c2 as constant */
1479 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1480 op
== '|' || op
== '*')) {
1482 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1483 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1485 if (!const_wanted
&&
1487 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1488 (l1
== -1 && op
== TOK_SAR
))) {
1489 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1491 } else if (!const_wanted
&&
1492 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1493 (l2
== -1 && op
== '|') ||
1494 (l2
== 0xffffffff && t2
!= VT_LLONG
&& op
== '|') ||
1495 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1496 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1501 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1504 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1505 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1509 /* filter out NOP operations like x*1, x-0, x&-1... */
1511 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1512 /* try to use shifts instead of muls or divs */
1513 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1522 else if (op
== TOK_PDIV
)
1528 } else if (c2
&& (op
== '+' || op
== '-') &&
1529 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1530 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1531 /* symbol + constant case */
1538 if (!nocode_wanted
) {
1539 /* call low level op generator */
1540 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
1541 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
1552 /* generate a floating point operation with constant propagation */
1553 static void gen_opif(int op
)
1561 /* currently, we cannot do computations with forward symbols */
1562 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1563 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1565 if (v1
->type
.t
== VT_FLOAT
) {
1568 } else if (v1
->type
.t
== VT_DOUBLE
) {
1576 /* NOTE: we only do constant propagation if finite number (not
1577 NaN or infinity) (ANSI spec) */
1578 if (!ieee_finite(f1
) || !ieee_finite(f2
))
1582 case '+': f1
+= f2
; break;
1583 case '-': f1
-= f2
; break;
1584 case '*': f1
*= f2
; break;
1588 tcc_error("division by zero in constant");
1593 /* XXX: also handles tests ? */
1597 /* XXX: overflow test ? */
1598 if (v1
->type
.t
== VT_FLOAT
) {
1600 } else if (v1
->type
.t
== VT_DOUBLE
) {
1608 if (!nocode_wanted
) {
1616 static int pointed_size(CType
*type
)
1619 return type_size(pointed_type(type
), &align
);
1622 static void vla_runtime_pointed_size(CType
*type
)
1625 vla_runtime_type_size(pointed_type(type
), &align
);
1628 static inline int is_null_pointer(SValue
*p
)
1630 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
1632 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& p
->c
.i
== 0) ||
1633 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.ll
== 0) ||
1634 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&& p
->c
.ptr_offset
== 0);
1637 static inline int is_integer_btype(int bt
)
1639 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
1640 bt
== VT_INT
|| bt
== VT_LLONG
);
1643 /* check types for comparison or subtraction of pointers */
1644 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
1646 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
1649 /* null pointers are accepted for all comparisons as gcc */
1650 if (is_null_pointer(p1
) || is_null_pointer(p2
))
1654 bt1
= type1
->t
& VT_BTYPE
;
1655 bt2
= type2
->t
& VT_BTYPE
;
1656 /* accept comparison between pointer and integer with a warning */
1657 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
1658 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
1659 tcc_warning("comparison between pointer and integer");
1663 /* both must be pointers or implicit function pointers */
1664 if (bt1
== VT_PTR
) {
1665 type1
= pointed_type(type1
);
1666 } else if (bt1
!= VT_FUNC
)
1667 goto invalid_operands
;
1669 if (bt2
== VT_PTR
) {
1670 type2
= pointed_type(type2
);
1671 } else if (bt2
!= VT_FUNC
) {
1673 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
1675 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
1676 (type2
->t
& VT_BTYPE
) == VT_VOID
)
1680 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1681 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1682 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
1683 /* gcc-like error if '-' is used */
1685 goto invalid_operands
;
1687 tcc_warning("comparison of distinct pointer types lacks a cast");
1691 /* generic gen_op: handles types problems */
1692 ST_FUNC
void gen_op(int op
)
1694 int u
, t1
, t2
, bt1
, bt2
, t
;
1697 t1
= vtop
[-1].type
.t
;
1698 t2
= vtop
[0].type
.t
;
1699 bt1
= t1
& VT_BTYPE
;
1700 bt2
= t2
& VT_BTYPE
;
1702 if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
1703 /* at least one operand is a pointer */
1704 /* relational op: must be both pointers */
1705 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
1706 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
1707 /* pointers are handled as unsigned */
1708 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1709 t
= VT_LLONG
| VT_UNSIGNED
;
1711 t
= VT_INT
| VT_UNSIGNED
;
1715 /* if both pointers, then it must be the '-' op */
1716 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
1718 tcc_error("cannot use pointers here");
1719 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
1720 /* XXX: check that types are compatible */
1721 if (vtop
[-1].type
.t
& VT_VLA
) {
1722 vla_runtime_pointed_size(&vtop
[-1].type
);
1724 vpushi(pointed_size(&vtop
[-1].type
));
1728 /* set to integer type */
1729 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1730 vtop
->type
.t
= VT_LLONG
;
1732 vtop
->type
.t
= VT_INT
;
1737 /* exactly one pointer : must be '+' or '-'. */
1738 if (op
!= '-' && op
!= '+')
1739 tcc_error("cannot use pointers here");
1740 /* Put pointer as first operand */
1741 if (bt2
== VT_PTR
) {
1745 type1
= vtop
[-1].type
;
1746 type1
.t
&= ~VT_ARRAY
;
1747 if (vtop
[-1].type
.t
& VT_VLA
)
1748 vla_runtime_pointed_size(&vtop
[-1].type
);
1750 u
= pointed_size(&vtop
[-1].type
);
1752 tcc_error("unknown array element size");
1753 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
1756 /* XXX: cast to int ? (long long case) */
1762 /* #ifdef CONFIG_TCC_BCHECK
1763 The main reason to removing this code:
1770 fprintf(stderr, "v+i-j = %p\n", v+i-j);
1771 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
1773 When this code is on. then the output looks like
1775 v+(i-j) = 0xbff84000
1777 /* if evaluating constant expression, no code should be
1778 generated, so no bound check */
1779 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
1780 /* if bounded pointers, we generate a special code to
1787 gen_bounded_ptr_add();
1793 /* put type back if gen_opic() swapped the operands */
1796 } else if (is_float(bt1
) || is_float(bt2
)) {
1797 /* compute bigger type and do implicit casts */
1798 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
1800 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
1805 /* floats can only be used for a few operations */
1806 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
1807 (op
< TOK_ULT
|| op
> TOK_GT
))
1808 tcc_error("invalid operands for binary operation");
1810 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
1811 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
1812 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (t
| VT_UNSIGNED
))
1815 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
1816 /* cast to biggest op */
1818 /* convert to unsigned if it does not fit in a long long */
1819 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
) ||
1820 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
))
1823 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
1824 tcc_error("comparison of struct");
1826 /* integer operations */
1828 /* convert to unsigned if it does not fit in an integer */
1829 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
) ||
1830 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
))
1833 /* XXX: currently, some unsigned operations are explicit, so
1834 we modify them here */
1835 if (t
& VT_UNSIGNED
) {
1842 else if (op
== TOK_LT
)
1844 else if (op
== TOK_GT
)
1846 else if (op
== TOK_LE
)
1848 else if (op
== TOK_GE
)
1855 /* special case for shifts and long long: we keep the shift as
1857 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
1864 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
1865 /* relational op: the result is an int */
1866 vtop
->type
.t
= VT_INT
;
1871 // Make sure that we have converted to an rvalue:
1872 if (vtop
->r
& VT_LVAL
&& !nocode_wanted
)
1873 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
1876 #ifndef TCC_TARGET_ARM
1877 /* generic itof for unsigned long long case */
1878 static void gen_cvt_itof1(int t
)
1880 #ifdef TCC_TARGET_ARM64
1883 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
1884 (VT_LLONG
| VT_UNSIGNED
)) {
1887 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
1888 #if LDOUBLE_SIZE != 8
1889 else if (t
== VT_LDOUBLE
)
1890 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
1893 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
1897 vtop
->r
= reg_fret(t
);
1905 /* generic ftoi for unsigned long long case */
1906 static void gen_cvt_ftoi1(int t
)
1908 #ifdef TCC_TARGET_ARM64
1913 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
1914 /* not handled natively */
1915 st
= vtop
->type
.t
& VT_BTYPE
;
1917 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
1918 #if LDOUBLE_SIZE != 8
1919 else if (st
== VT_LDOUBLE
)
1920 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
1923 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
1928 vtop
->r2
= REG_LRET
;
1935 /* force char or short cast */
1936 static void force_charshort_cast(int t
)
1940 /* XXX: add optimization if lvalue : just change type and offset */
1945 if (t
& VT_UNSIGNED
) {
1946 vpushi((1 << bits
) - 1);
1952 /* result must be signed or the SAR is converted to an SHL
1953 This was not the case when "t" was a signed short
1954 and the last value on the stack was an unsigned int */
1955 vtop
->type
.t
&= ~VT_UNSIGNED
;
1961 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
1962 static void gen_cast(CType
*type
)
1964 int sbt
, dbt
, sf
, df
, c
, p
;
1966 /* special delayed cast for char/short */
1967 /* XXX: in some cases (multiple cascaded casts), it may still
1969 if (vtop
->r
& VT_MUSTCAST
) {
1970 vtop
->r
&= ~VT_MUSTCAST
;
1971 force_charshort_cast(vtop
->type
.t
);
1974 /* bitfields first get cast to ints */
1975 if (vtop
->type
.t
& VT_BITFIELD
&& !nocode_wanted
) {
1979 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
1980 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
1985 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1986 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
1988 /* constant case: we can do it now */
1989 /* XXX: in ISOC, cannot do it if error in convert */
1990 if (sbt
== VT_FLOAT
)
1991 vtop
->c
.ld
= vtop
->c
.f
;
1992 else if (sbt
== VT_DOUBLE
)
1993 vtop
->c
.ld
= vtop
->c
.d
;
1996 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
1997 if (sbt
& VT_UNSIGNED
)
1998 vtop
->c
.ld
= vtop
->c
.ull
;
2000 vtop
->c
.ld
= vtop
->c
.ll
;
2002 if (sbt
& VT_UNSIGNED
)
2003 vtop
->c
.ld
= vtop
->c
.ui
;
2005 vtop
->c
.ld
= vtop
->c
.i
;
2008 if (dbt
== VT_FLOAT
)
2009 vtop
->c
.f
= (float)vtop
->c
.ld
;
2010 else if (dbt
== VT_DOUBLE
)
2011 vtop
->c
.d
= (double)vtop
->c
.ld
;
2012 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2013 vtop
->c
.ull
= (unsigned long long)vtop
->c
.ld
;
2014 } else if (sf
&& dbt
== VT_BOOL
) {
2015 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2018 vtop
->c
.ll
= (long long)vtop
->c
.ld
;
2019 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2020 vtop
->c
.ll
= vtop
->c
.ull
;
2021 else if (sbt
& VT_UNSIGNED
)
2022 vtop
->c
.ll
= vtop
->c
.ui
;
2023 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2024 else if (sbt
== VT_PTR
)
2027 else if (sbt
!= VT_LLONG
)
2028 vtop
->c
.ll
= vtop
->c
.i
;
2030 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2031 vtop
->c
.ull
= vtop
->c
.ll
;
2032 else if (dbt
== VT_BOOL
)
2033 vtop
->c
.i
= (vtop
->c
.ll
!= 0);
2034 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2035 else if (dbt
== VT_PTR
)
2038 else if (dbt
!= VT_LLONG
) {
2040 if ((dbt
& VT_BTYPE
) == VT_BYTE
)
2042 else if ((dbt
& VT_BTYPE
) == VT_SHORT
)
2044 if(dbt
& VT_UNSIGNED
)
2045 vtop
->c
.ui
= ((unsigned int)vtop
->c
.ll
<< s
) >> s
;
2047 vtop
->c
.i
= ((int)vtop
->c
.ll
<< s
) >> s
;
2050 } else if (p
&& dbt
== VT_BOOL
) {
2053 } else if (!nocode_wanted
) {
2054 /* non constant case: generate code */
2056 /* convert from fp to fp */
2059 /* convert int to fp */
2062 /* convert fp to int */
2063 if (dbt
== VT_BOOL
) {
2067 /* we handle char/short/etc... with generic code */
2068 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2069 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2073 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2074 /* additional cast for char/short... */
2079 #if !defined(TCC_TARGET_ARM64) && !defined(TCC_TARGET_X86_64)
2080 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2081 if ((sbt
& VT_BTYPE
) != VT_LLONG
&& !nocode_wanted
) {
2082 /* scalar to long long */
2083 /* machine independent conversion */
2085 /* generate high word */
2086 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2090 if (sbt
== VT_PTR
) {
2091 /* cast from pointer to int before we apply
2092 shift operation, which pointers don't support*/
2093 gen_cast(&int_type
);
2099 /* patch second register */
2100 vtop
[-1].r2
= vtop
->r
;
2104 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2105 (dbt
& VT_BTYPE
) == VT_PTR
||
2106 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2107 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2108 (sbt
& VT_BTYPE
) != VT_PTR
&&
2109 (sbt
& VT_BTYPE
) != VT_FUNC
&& !nocode_wanted
) {
2110 /* need to convert from 32bit to 64bit */
2112 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2113 #if defined(TCC_TARGET_ARM64)
2115 #elif defined(TCC_TARGET_X86_64)
2117 /* x86_64 specific: movslq */
2119 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2126 } else if (dbt
== VT_BOOL
) {
2127 /* scalar to bool */
2130 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2131 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2132 if (sbt
== VT_PTR
) {
2133 vtop
->type
.t
= VT_INT
;
2134 tcc_warning("nonportable conversion from pointer to char/short");
2136 force_charshort_cast(dbt
);
2137 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2139 if (sbt
== VT_LLONG
&& !nocode_wanted
) {
2140 /* from long long: just take low order word */
2144 /* if lvalue and single word type, nothing to do because
2145 the lvalue already contains the real type size (see
2146 VT_LVAL_xxx constants) */
2149 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2150 /* if we are casting between pointer types,
2151 we must update the VT_LVAL_xxx size */
2152 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2153 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2158 /* return type size as known at compile time. Put alignment at 'a' */
2159 ST_FUNC
int type_size(CType
*type
, int *a
)
2164 bt
= type
->t
& VT_BTYPE
;
2165 if (bt
== VT_STRUCT
) {
2170 } else if (bt
== VT_PTR
) {
2171 if (type
->t
& VT_ARRAY
) {
2175 ts
= type_size(&s
->type
, a
);
2177 if (ts
< 0 && s
->c
< 0)
2185 } else if (bt
== VT_LDOUBLE
) {
2187 return LDOUBLE_SIZE
;
2188 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2189 #ifdef TCC_TARGET_I386
2190 #ifdef TCC_TARGET_PE
2195 #elif defined(TCC_TARGET_ARM)
2205 } else if (bt
== VT_INT
|| bt
== VT_ENUM
|| bt
== VT_FLOAT
) {
2208 } else if (bt
== VT_SHORT
) {
2211 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2215 /* char, void, function, _Bool */
2221 /* push type size as known at runtime time on top of value stack. Put
2223 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2225 if (type
->t
& VT_VLA
) {
2226 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2228 vpushi(type_size(type
, a
));
2232 static void vla_sp_restore(void) {
2233 if (vlas_in_scope
) {
2234 gen_vla_sp_restore(vla_sp_loc
);
2238 static void vla_sp_restore_root(void) {
2239 if (vlas_in_scope
) {
2240 gen_vla_sp_restore(vla_sp_root_loc
);
2244 /* return the pointed type of t */
2245 static inline CType
*pointed_type(CType
*type
)
2247 return &type
->ref
->type
;
2250 /* modify type so that it is a pointer to type. */
2251 ST_FUNC
void mk_pointer(CType
*type
)
2254 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2255 type
->t
= VT_PTR
| (type
->t
& ~VT_TYPE
);
2259 /* compare function types. OLD functions match any new functions */
2260 static int is_compatible_func(CType
*type1
, CType
*type2
)
2266 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2268 /* check func_call */
2269 if (s1
->a
.func_call
!= s2
->a
.func_call
)
2271 /* XXX: not complete */
2272 if (s1
->c
== FUNC_OLD
|| s2
->c
== FUNC_OLD
)
2276 while (s1
!= NULL
) {
2279 if (!is_compatible_parameter_types(&s1
->type
, &s2
->type
))
2289 /* return true if type1 and type2 are the same. If unqualified is
2290 true, qualifiers on the types are ignored.
2292 - enums are not checked as gcc __builtin_types_compatible_p ()
2294 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2298 t1
= type1
->t
& VT_TYPE
;
2299 t2
= type2
->t
& VT_TYPE
;
2301 /* strip qualifiers before comparing */
2302 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2303 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2305 /* Default Vs explicit signedness only matters for char */
2306 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2310 /* XXX: bitfields ? */
2313 /* test more complicated cases */
2314 bt1
= t1
& VT_BTYPE
;
2315 if (bt1
== VT_PTR
) {
2316 type1
= pointed_type(type1
);
2317 type2
= pointed_type(type2
);
2318 return is_compatible_types(type1
, type2
);
2319 } else if (bt1
== VT_STRUCT
) {
2320 return (type1
->ref
== type2
->ref
);
2321 } else if (bt1
== VT_FUNC
) {
2322 return is_compatible_func(type1
, type2
);
2328 /* return true if type1 and type2 are exactly the same (including
2331 static int is_compatible_types(CType
*type1
, CType
*type2
)
2333 return compare_types(type1
,type2
,0);
2336 /* return true if type1 and type2 are the same (ignoring qualifiers).
2338 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
)
2340 return compare_types(type1
,type2
,1);
2343 /* print a type. If 'varstr' is not NULL, then the variable is also
2344 printed in the type */
2346 /* XXX: add array and function pointers */
2347 static void type_to_str(char *buf
, int buf_size
,
2348 CType
*type
, const char *varstr
)
2355 t
= type
->t
& VT_TYPE
;
2358 if (t
& VT_CONSTANT
)
2359 pstrcat(buf
, buf_size
, "const ");
2360 if (t
& VT_VOLATILE
)
2361 pstrcat(buf
, buf_size
, "volatile ");
2362 if ((t
& (VT_DEFSIGN
| VT_UNSIGNED
)) == (VT_DEFSIGN
| VT_UNSIGNED
))
2363 pstrcat(buf
, buf_size
, "unsigned ");
2364 else if (t
& VT_DEFSIGN
)
2365 pstrcat(buf
, buf_size
, "signed ");
2395 tstr
= "long double";
2397 pstrcat(buf
, buf_size
, tstr
);
2401 if (bt
== VT_STRUCT
)
2405 pstrcat(buf
, buf_size
, tstr
);
2406 v
= type
->ref
->v
& ~SYM_STRUCT
;
2407 if (v
>= SYM_FIRST_ANOM
)
2408 pstrcat(buf
, buf_size
, "<anonymous>");
2410 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2414 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2415 pstrcat(buf
, buf_size
, "(");
2417 while (sa
!= NULL
) {
2418 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2419 pstrcat(buf
, buf_size
, buf1
);
2422 pstrcat(buf
, buf_size
, ", ");
2424 pstrcat(buf
, buf_size
, ")");
2428 pstrcpy(buf1
, sizeof(buf1
), "*");
2430 pstrcat(buf1
, sizeof(buf1
), varstr
);
2431 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2435 pstrcat(buf
, buf_size
, " ");
2436 pstrcat(buf
, buf_size
, varstr
);
2441 /* verify type compatibility to store vtop in 'dt' type, and generate
2443 static void gen_assign_cast(CType
*dt
)
2445 CType
*st
, *type1
, *type2
, tmp_type1
, tmp_type2
;
2446 char buf1
[256], buf2
[256];
2449 st
= &vtop
->type
; /* source type */
2450 dbt
= dt
->t
& VT_BTYPE
;
2451 sbt
= st
->t
& VT_BTYPE
;
2452 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
2453 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
2455 It is Ok if both are void
2461 gcc accepts this program
2464 tcc_error("cannot cast from/to void");
2466 if (dt
->t
& VT_CONSTANT
)
2467 tcc_warning("assignment of read-only location");
2470 /* special cases for pointers */
2471 /* '0' can also be a pointer */
2472 if (is_null_pointer(vtop
))
2474 /* accept implicit pointer to integer cast with warning */
2475 if (is_integer_btype(sbt
)) {
2476 tcc_warning("assignment makes pointer from integer without a cast");
2479 type1
= pointed_type(dt
);
2480 /* a function is implicitly a function pointer */
2481 if (sbt
== VT_FUNC
) {
2482 if ((type1
->t
& VT_BTYPE
) != VT_VOID
&&
2483 !is_compatible_types(pointed_type(dt
), st
))
2484 tcc_warning("assignment from incompatible pointer type");
2489 type2
= pointed_type(st
);
2490 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2491 (type2
->t
& VT_BTYPE
) == VT_VOID
) {
2492 /* void * can match anything */
2494 /* exact type match, except for unsigned */
2497 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
|
2499 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
|
2501 if (!is_compatible_types(&tmp_type1
, &tmp_type2
))
2502 tcc_warning("assignment from incompatible pointer type");
2504 /* check const and volatile */
2505 if ((!(type1
->t
& VT_CONSTANT
) && (type2
->t
& VT_CONSTANT
)) ||
2506 (!(type1
->t
& VT_VOLATILE
) && (type2
->t
& VT_VOLATILE
)))
2507 tcc_warning("assignment discards qualifiers from pointer target type");
2513 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
2514 tcc_warning("assignment makes integer from pointer without a cast");
2516 /* XXX: more tests */
2521 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2522 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2523 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2525 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2526 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2527 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
2535 /* store vtop in lvalue pushed on stack */
2536 ST_FUNC
void vstore(void)
2538 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
2540 ft
= vtop
[-1].type
.t
;
2541 sbt
= vtop
->type
.t
& VT_BTYPE
;
2542 dbt
= ft
& VT_BTYPE
;
2543 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
2544 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
2545 && !(vtop
->type
.t
& VT_BITFIELD
)) {
2546 /* optimize char/short casts */
2547 delayed_cast
= VT_MUSTCAST
;
2548 vtop
->type
.t
= ft
& (VT_TYPE
& ~(VT_BITFIELD
| (-1 << VT_STRUCT_SHIFT
)));
2549 /* XXX: factorize */
2550 if (ft
& VT_CONSTANT
)
2551 tcc_warning("assignment of read-only location");
2554 if (!(ft
& VT_BITFIELD
))
2555 gen_assign_cast(&vtop
[-1].type
);
2558 if (sbt
== VT_STRUCT
) {
2559 /* if structure, only generate pointer */
2560 /* structure assignment : generate memcpy */
2561 /* XXX: optimize if small size */
2562 if (!nocode_wanted
) {
2563 size
= type_size(&vtop
->type
, &align
);
2567 vtop
->type
.t
= VT_PTR
;
2570 /* address of memcpy() */
2573 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
2574 else if(!(align
& 3))
2575 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
2578 vpush_global_sym(&func_old_type
, TOK_memcpy
);
2583 vtop
->type
.t
= VT_PTR
;
2592 /* leave source on stack */
2593 } else if (ft
& VT_BITFIELD
) {
2594 /* bitfield store handling */
2596 /* save lvalue as expression result (example: s.b = s.a = n;) */
2597 vdup(), vtop
[-1] = vtop
[-2];
2599 bit_pos
= (ft
>> VT_STRUCT_SHIFT
) & 0x3f;
2600 bit_size
= (ft
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
2601 /* remove bit field info to avoid loops */
2602 vtop
[-1].type
.t
= ft
& ~(VT_BITFIELD
| (-1 << VT_STRUCT_SHIFT
));
2604 if((ft
& VT_BTYPE
) == VT_BOOL
) {
2605 gen_cast(&vtop
[-1].type
);
2606 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
2609 /* duplicate destination */
2611 vtop
[-1] = vtop
[-2];
2613 /* mask and shift source */
2614 if((ft
& VT_BTYPE
) != VT_BOOL
) {
2615 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2616 vpushll((1ULL << bit_size
) - 1ULL);
2618 vpushi((1 << bit_size
) - 1);
2624 /* load destination, mask and or with source */
2626 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2627 vpushll(~(((1ULL << bit_size
) - 1ULL) << bit_pos
));
2629 vpushi(~(((1 << bit_size
) - 1) << bit_pos
));
2635 /* ... and discard */
2639 if (!nocode_wanted
) {
2640 #ifdef CONFIG_TCC_BCHECK
2641 /* bound check case */
2642 if (vtop
[-1].r
& VT_MUSTBOUND
) {
2651 #ifdef TCC_TARGET_X86_64
2652 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
2654 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
2659 r
= gv(rc
); /* generate value */
2660 /* if lvalue was saved on stack, must read it */
2661 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
2663 t
= get_reg(RC_INT
);
2664 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2669 sv
.r
= VT_LOCAL
| VT_LVAL
;
2670 sv
.c
.ul
= vtop
[-1].c
.ul
;
2672 vtop
[-1].r
= t
| VT_LVAL
;
2674 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
2675 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
2676 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
2677 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
2679 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
2680 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
2682 vtop
[-1].type
.t
= load_type
;
2685 /* convert to int to increment easily */
2686 vtop
->type
.t
= addr_type
;
2692 vtop
[-1].type
.t
= load_type
;
2693 /* XXX: it works because r2 is spilled last ! */
2694 store(vtop
->r2
, vtop
- 1);
2700 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
2701 vtop
->r
|= delayed_cast
;
2705 /* post defines POST/PRE add. c is the token ++ or -- */
2706 ST_FUNC
void inc(int post
, int c
)
2709 vdup(); /* save lvalue */
2712 gv_dup(); /* duplicate value */
2714 vdup(); /* duplicate value */
2719 vpushi(c
- TOK_MID
);
2721 vstore(); /* store value */
2723 vpop(); /* if post op, return saved value */
2726 /* Parse GNUC __attribute__ extension. Currently, the following
2727 extensions are recognized:
2728 - aligned(n) : set data/function alignment.
2729 - packed : force data alignment to 1
2730 - section(x) : generate data/code in this section.
2731 - unused : currently ignored, but may be used someday.
2732 - regparm(n) : pass function parameters in registers (i386 only)
2734 static void parse_attribute(AttributeDef
*ad
)
2738 while (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
) {
2742 while (tok
!= ')') {
2743 if (tok
< TOK_IDENT
)
2744 expect("attribute name");
2752 expect("section name");
2753 ad
->section
= find_section(tcc_state
, (char *)tokc
.cstr
->data
);
2761 expect("alias(\"target\")");
2762 ad
->alias_target
= /* save string as token, for later */
2763 tok_alloc((char*)tokc
.cstr
->data
, tokc
.cstr
->size
-1)->tok
;
2767 case TOK_VISIBILITY1
:
2768 case TOK_VISIBILITY2
:
2771 expect("visibility(\"default|hidden|internal|protected\")");
2772 if (!strcmp (tokc
.cstr
->data
, "default"))
2773 ad
->a
.visibility
= STV_DEFAULT
;
2774 else if (!strcmp (tokc
.cstr
->data
, "hidden"))
2775 ad
->a
.visibility
= STV_HIDDEN
;
2776 else if (!strcmp (tokc
.cstr
->data
, "internal"))
2777 ad
->a
.visibility
= STV_INTERNAL
;
2778 else if (!strcmp (tokc
.cstr
->data
, "protected"))
2779 ad
->a
.visibility
= STV_PROTECTED
;
2781 expect("visibility(\"default|hidden|internal|protected\")");
2790 if (n
<= 0 || (n
& (n
- 1)) != 0)
2791 tcc_error("alignment must be a positive power of two");
2808 /* currently, no need to handle it because tcc does not
2809 track unused objects */
2813 /* currently, no need to handle it because tcc does not
2814 track unused objects */
2819 ad
->a
.func_call
= FUNC_CDECL
;
2824 ad
->a
.func_call
= FUNC_STDCALL
;
2826 #ifdef TCC_TARGET_I386
2836 ad
->a
.func_call
= FUNC_FASTCALL1
+ n
- 1;
2842 ad
->a
.func_call
= FUNC_FASTCALLW
;
2849 ad
->a
.mode
= VT_LLONG
+ 1;
2852 ad
->a
.mode
= VT_SHORT
+ 1;
2855 ad
->a
.mode
= VT_INT
+ 1;
2858 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
2865 ad
->a
.func_export
= 1;
2868 ad
->a
.func_import
= 1;
2871 if (tcc_state
->warn_unsupported
)
2872 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
2873 /* skip parameters */
2875 int parenthesis
= 0;
2879 else if (tok
== ')')
2882 } while (parenthesis
&& tok
!= -1);
2895 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
2896 static void struct_decl(CType
*type
, int u
, int tdef
)
2898 int a
, v
, size
, align
, maxalign
, c
, offset
, flexible
;
2899 int bit_size
, bit_pos
, bsize
, bt
, lbit_pos
, prevbt
;
2900 Sym
*s
, *ss
, *ass
, **ps
;
2904 a
= tok
; /* save decl type */
2909 /* struct already defined ? return it */
2911 expect("struct/union/enum name");
2915 tcc_error("invalid type");
2917 } else if (tok
>= TOK_IDENT
&& !tdef
)
2918 tcc_error("unknown struct/union/enum");
2924 /* we put an undefined size for struct/union */
2925 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
2926 s
->r
= 0; /* default alignment is zero as gcc */
2927 /* put struct/union/enum name in type */
2935 tcc_error("struct/union/enum already defined");
2936 /* cannot be empty */
2938 /* non empty enums are not allowed */
2939 if (a
== TOK_ENUM
) {
2943 expect("identifier");
2945 if (ss
&& !local_stack
)
2946 tcc_error("redefinition of enumerator '%s'",
2947 get_tok_str(v
, NULL
));
2953 /* enum symbols have static storage */
2954 ss
= sym_push(v
, &int_type
, VT_CONST
, c
);
2955 ss
->type
.t
|= VT_STATIC
;
2960 /* NOTE: we accept a trailing comma */
2964 s
->c
= type_size(&int_type
, &align
);
2973 while (tok
!= '}') {
2974 parse_btype(&btype
, &ad
);
2977 tcc_error("flexible array member '%s' not at the end of struct",
2978 get_tok_str(v
, NULL
));
2983 type_decl(&type1
, &ad
, &v
, TYPE_DIRECT
| TYPE_ABSTRACT
);
2985 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
2986 expect("identifier");
2988 int v
= btype
.ref
->v
;
2989 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
2990 if (tcc_state
->ms_extensions
== 0)
2991 expect("identifier");
2995 if (type_size(&type1
, &align
) < 0) {
2996 if ((a
== TOK_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
2999 tcc_error("field '%s' has incomplete type",
3000 get_tok_str(v
, NULL
));
3002 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3003 (type1
.t
& (VT_TYPEDEF
| VT_STATIC
| VT_EXTERN
| VT_INLINE
)))
3004 tcc_error("invalid type for '%s'",
3005 get_tok_str(v
, NULL
));
3009 bit_size
= expr_const();
3010 /* XXX: handle v = 0 case for messages */
3012 tcc_error("negative width in bit-field '%s'",
3013 get_tok_str(v
, NULL
));
3014 if (v
&& bit_size
== 0)
3015 tcc_error("zero width for bit-field '%s'",
3016 get_tok_str(v
, NULL
));
3018 size
= type_size(&type1
, &align
);
3020 if (align
< ad
.a
.aligned
)
3021 align
= ad
.a
.aligned
;
3022 } else if (ad
.a
.packed
) {
3024 } else if (*tcc_state
->pack_stack_ptr
) {
3025 if (align
> *tcc_state
->pack_stack_ptr
)
3026 align
= *tcc_state
->pack_stack_ptr
;
3029 if (bit_size
>= 0) {
3030 bt
= type1
.t
& VT_BTYPE
;
3037 tcc_error("bitfields must have scalar type");
3039 if (bit_size
> bsize
) {
3040 tcc_error("width of '%s' exceeds its type",
3041 get_tok_str(v
, NULL
));
3042 } else if (bit_size
== bsize
) {
3043 /* no need for bit fields */
3045 } else if (bit_size
== 0) {
3046 /* XXX: what to do if only padding in a
3048 /* zero size: means to pad */
3051 /* we do not have enough room ?
3052 did the type change?
3054 if ((bit_pos
+ bit_size
) > bsize
||
3055 bt
!= prevbt
|| a
== TOK_UNION
)
3058 /* XXX: handle LSB first */
3059 type1
.t
|= VT_BITFIELD
|
3060 (bit_pos
<< VT_STRUCT_SHIFT
) |
3061 (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
3062 bit_pos
+= bit_size
;
3068 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3069 /* add new memory data only if starting
3071 if (lbit_pos
== 0) {
3072 if (a
== TOK_STRUCT
) {
3073 c
= (c
+ align
- 1) & -align
;
3082 if (align
> maxalign
)
3086 printf("add field %s offset=%d",
3087 get_tok_str(v
, NULL
), offset
);
3088 if (type1
.t
& VT_BITFIELD
) {
3089 printf(" pos=%d size=%d",
3090 (type1
.t
>> VT_STRUCT_SHIFT
) & 0x3f,
3091 (type1
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f);
3096 if (v
== 0 && (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3098 while ((ass
= ass
->next
) != NULL
) {
3099 ss
= sym_push(ass
->v
, &ass
->type
, 0, offset
+ ass
->c
);
3104 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, offset
);
3108 if (tok
== ';' || tok
== TOK_EOF
)
3115 /* store size and alignment */
3116 s
->c
= (c
+ maxalign
- 1) & -maxalign
;
3122 /* return 1 if basic type is a type size (short, long, long long) */
3123 ST_FUNC
int is_btype_size(int bt
)
3125 return bt
== VT_SHORT
|| bt
== VT_LONG
|| bt
== VT_LLONG
;
3128 /* return 0 if no type declaration. otherwise, return the basic type
3131 static int parse_btype(CType
*type
, AttributeDef
*ad
)
3133 int t
, u
, bt_size
, complete
, type_found
, typespec_found
;
3137 memset(ad
, 0, sizeof(AttributeDef
));
3145 /* currently, we really ignore extension */
3156 tcc_error("too many basic types");
3158 bt_size
= is_btype_size (u
& VT_BTYPE
);
3159 if (u
== VT_INT
|| (!bt_size
&& !(t
& VT_TYPEDEF
)))
3174 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
3175 #ifndef TCC_TARGET_PE
3176 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3178 } else if ((t
& VT_BTYPE
) == VT_LONG
) {
3179 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3185 #ifdef TCC_TARGET_ARM64
3187 /* GCC's __uint128_t appears in some Linux header files. Make it a
3188 synonym for long double to get the size and alignment right. */
3200 if ((t
& VT_BTYPE
) == VT_LONG
) {
3201 #ifdef TCC_TARGET_PE
3202 t
= (t
& ~VT_BTYPE
) | VT_DOUBLE
;
3204 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3212 struct_decl(&type1
, VT_ENUM
, t
& (VT_TYPEDEF
| VT_EXTERN
));
3215 type
->ref
= type1
.ref
;
3219 struct_decl(&type1
, VT_STRUCT
, t
& (VT_TYPEDEF
| VT_EXTERN
));
3222 /* type modifiers */
3238 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
3239 tcc_error("signed and unsigned modifier");
3252 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
3253 tcc_error("signed and unsigned modifier");
3254 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
3279 /* GNUC attribute */
3280 case TOK_ATTRIBUTE1
:
3281 case TOK_ATTRIBUTE2
:
3282 parse_attribute(ad
);
3285 t
= (t
& ~VT_BTYPE
) | u
;
3293 parse_expr_type(&type1
);
3294 /* remove all storage modifiers except typedef */
3295 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
3301 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
3303 t
|= (s
->type
.t
& ~VT_TYPEDEF
);
3304 type
->ref
= s
->type
.ref
;
3306 /* get attributes from typedef */
3307 if (0 == ad
->a
.aligned
)
3308 ad
->a
.aligned
= s
->a
.aligned
;
3309 if (0 == ad
->a
.func_call
)
3310 ad
->a
.func_call
= s
->a
.func_call
;
3311 ad
->a
.packed
|= s
->a
.packed
;
3320 if (tcc_state
->char_is_unsigned
) {
3321 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
3325 /* long is never used as type */
3326 if ((t
& VT_BTYPE
) == VT_LONG
)
3327 #if (!defined TCC_TARGET_X86_64 && !defined TCC_TARGET_ARM64) || \
3328 defined TCC_TARGET_PE
3329 t
= (t
& ~VT_BTYPE
) | VT_INT
;
3331 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3337 /* convert a function parameter type (array to pointer and function to
3338 function pointer) */
3339 static inline void convert_parameter_type(CType
*pt
)
3341 /* remove const and volatile qualifiers (XXX: const could be used
3342 to indicate a const function parameter */
3343 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3344 /* array must be transformed to pointer according to ANSI C */
3346 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
3351 ST_FUNC
void parse_asm_str(CString
*astr
)
3354 /* read the string */
3356 expect("string constant");
3358 while (tok
== TOK_STR
) {
3359 /* XXX: add \0 handling too ? */
3360 cstr_cat(astr
, tokc
.cstr
->data
);
3363 cstr_ccat(astr
, '\0');
3366 /* Parse an asm label and return the label
3367 * Don't forget to free the CString in the caller! */
3368 static void asm_label_instr(CString
*astr
)
3371 parse_asm_str(astr
);
3374 printf("asm_alias: \"%s\"\n", (char *)astr
->data
);
3378 static void post_type(CType
*type
, AttributeDef
*ad
)
3380 int n
, l
, t1
, arg_size
, align
;
3381 Sym
**plast
, *s
, *first
;
3386 /* function declaration */
3394 /* read param name and compute offset */
3395 if (l
!= FUNC_OLD
) {
3396 if (!parse_btype(&pt
, &ad1
)) {
3398 tcc_error("invalid type");
3405 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
3407 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
3408 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
3409 tcc_error("parameter declared as void");
3410 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
3415 expect("identifier");
3419 convert_parameter_type(&pt
);
3420 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
3426 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
3433 /* if no parameters, then old type prototype */
3437 /* NOTE: const is ignored in returned type as it has a special
3438 meaning in gcc / C++ */
3439 type
->t
&= ~VT_CONSTANT
;
3440 /* some ancient pre-K&R C allows a function to return an array
3441 and the array brackets to be put after the arguments, such
3442 that "int c()[]" means something like "int[] c()" */
3445 skip(']'); /* only handle simple "[]" */
3448 /* we push a anonymous symbol which will contain the function prototype */
3449 ad
->a
.func_args
= arg_size
;
3450 s
= sym_push(SYM_FIELD
, type
, 0, l
);
3455 } else if (tok
== '[') {
3456 /* array definition */
3458 if (tok
== TOK_RESTRICT1
)
3463 if (!local_stack
|| nocode_wanted
)
3464 vpushi(expr_const());
3466 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
3469 tcc_error("invalid array size");
3471 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
3472 tcc_error("size of variable length array should be an integer");
3477 /* parse next post type */
3478 post_type(type
, ad
);
3479 if (type
->t
== VT_FUNC
)
3480 tcc_error("declaration of an array of functions");
3481 t1
|= type
->t
& VT_VLA
;
3484 loc
-= type_size(&int_type
, &align
);
3488 vla_runtime_type_size(type
, &align
);
3490 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
3497 /* we push an anonymous symbol which will contain the array
3499 s
= sym_push(SYM_FIELD
, type
, 0, n
);
3500 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
3505 /* Parse a type declaration (except basic type), and return the type
3506 in 'type'. 'td' is a bitmask indicating which kind of type decl is
3507 expected. 'type' should contain the basic type. 'ad' is the
3508 attribute definition of the basic type. It can be modified by
3511 static void type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
3514 CType type1
, *type2
;
3515 int qualifiers
, storage
;
3517 while (tok
== '*') {
3525 qualifiers
|= VT_CONSTANT
;
3530 qualifiers
|= VT_VOLATILE
;
3538 type
->t
|= qualifiers
;
3541 /* XXX: clarify attribute handling */
3542 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3543 parse_attribute(ad
);
3545 /* recursive type */
3546 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
3547 type1
.t
= 0; /* XXX: same as int */
3550 /* XXX: this is not correct to modify 'ad' at this point, but
3551 the syntax is not clear */
3552 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3553 parse_attribute(ad
);
3554 type_decl(&type1
, ad
, v
, td
);
3557 /* type identifier */
3558 if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
3562 if (!(td
& TYPE_ABSTRACT
))
3563 expect("identifier");
3567 storage
= type
->t
& VT_STORAGE
;
3568 type
->t
&= ~VT_STORAGE
;
3569 if (storage
& VT_STATIC
) {
3570 int saved_nocode_wanted
= nocode_wanted
;
3572 post_type(type
, ad
);
3573 nocode_wanted
= saved_nocode_wanted
;
3575 post_type(type
, ad
);
3577 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3578 parse_attribute(ad
);
3582 /* append type at the end of type1 */
3595 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
3596 ST_FUNC
int lvalue_type(int t
)
3601 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
3603 else if (bt
== VT_SHORT
)
3607 if (t
& VT_UNSIGNED
)
3608 r
|= VT_LVAL_UNSIGNED
;
3612 /* indirection with full error checking and bound check */
3613 ST_FUNC
void indir(void)
3615 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
3616 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
3620 if ((vtop
->r
& VT_LVAL
) && !nocode_wanted
)
3622 vtop
->type
= *pointed_type(&vtop
->type
);
3623 /* Arrays and functions are never lvalues */
3624 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
3625 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
3626 vtop
->r
|= lvalue_type(vtop
->type
.t
);
3627 /* if bound checking, the referenced pointer must be checked */
3628 #ifdef CONFIG_TCC_BCHECK
3629 if (tcc_state
->do_bounds_check
)
3630 vtop
->r
|= VT_MUSTBOUND
;
3635 /* pass a parameter to a function and do type checking and casting */
3636 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
3641 func_type
= func
->c
;
3642 if (func_type
== FUNC_OLD
||
3643 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
3644 /* default casting : only need to convert float to double */
3645 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
3648 } else if (vtop
->type
.t
& VT_BITFIELD
) {
3649 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3652 } else if (arg
== NULL
) {
3653 tcc_error("too many arguments to function");
3656 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
3657 gen_assign_cast(&type
);
3661 /* parse an expression of the form '(type)' or '(expr)' and return its
3663 static void parse_expr_type(CType
*type
)
3669 if (parse_btype(type
, &ad
)) {
3670 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
3677 static void parse_type(CType
*type
)
3682 if (!parse_btype(type
, &ad
)) {
3685 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
3688 static void vpush_tokc(int t
)
3693 vsetc(&type
, VT_CONST
, &tokc
);
3696 ST_FUNC
void unary(void)
3698 int n
, t
, align
, size
, r
, sizeof_caller
;
3702 static int in_sizeof
= 0;
3704 sizeof_caller
= in_sizeof
;
3706 /* XXX: GCC 2.95.3 does not generate a table although it should be
3720 vpush_tokc(VT_INT
| VT_UNSIGNED
);
3724 vpush_tokc(VT_LLONG
);
3728 vpush_tokc(VT_LLONG
| VT_UNSIGNED
);
3732 vpush_tokc(VT_FLOAT
);
3736 vpush_tokc(VT_DOUBLE
);
3740 vpush_tokc(VT_LDOUBLE
);
3743 case TOK___FUNCTION__
:
3745 goto tok_identifier
;
3751 /* special function name identifier */
3752 len
= strlen(funcname
) + 1;
3753 /* generate char[len] type */
3758 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
3759 ptr
= section_ptr_add(data_section
, len
);
3760 memcpy(ptr
, funcname
, len
);
3765 #ifdef TCC_TARGET_PE
3766 t
= VT_SHORT
| VT_UNSIGNED
;
3772 /* string parsing */
3775 if (tcc_state
->warn_write_strings
)
3780 memset(&ad
, 0, sizeof(AttributeDef
));
3781 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, NULL
, 0);
3786 if (parse_btype(&type
, &ad
)) {
3787 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
3789 /* check ISOC99 compound literal */
3791 /* data is allocated locally by default */
3796 /* all except arrays are lvalues */
3797 if (!(type
.t
& VT_ARRAY
))
3798 r
|= lvalue_type(type
.t
);
3799 memset(&ad
, 0, sizeof(AttributeDef
));
3800 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, NULL
, 0);
3802 if (sizeof_caller
) {
3809 } else if (tok
== '{') {
3812 tcc_error("statement expression in global scope"); */
3813 /* this check breaks compilation of the linux 2.4.26 with the meesage:
3814 linux/include/net/tcp.h:945: error: statement expression in global scope */
3816 /* save all registers */
3818 /* statement expression : we do not accept break/continue
3819 inside as GCC does */
3820 block(NULL
, NULL
, NULL
, NULL
, 0, 1);
3835 /* functions names must be treated as function pointers,
3836 except for unary '&' and sizeof. Since we consider that
3837 functions are not lvalues, we only have to handle it
3838 there and in function calls. */
3839 /* arrays can also be used although they are not lvalues */
3840 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
3841 !(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_LLOCAL
))
3843 mk_pointer(&vtop
->type
);
3849 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
3851 boolean
.t
= VT_BOOL
;
3853 vtop
->c
.i
= !vtop
->c
.i
;
3854 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
3855 vtop
->c
.i
= vtop
->c
.i
^ 1;
3856 else if (!nocode_wanted
) {
3858 vseti(VT_JMP
, gvtst(1, 0));
3872 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
3873 tcc_error("pointer not accepted for unary plus");
3874 /* In order to force cast, we add zero, except for floating point
3875 where we really need an noop (otherwise -0.0 will be transformed
3877 if (!is_float(vtop
->type
.t
)) {
3888 unary_type(&type
); // Perform a in_sizeof = 0;
3889 size
= type_size(&type
, &align
);
3890 if (t
== TOK_SIZEOF
) {
3891 if (!(type
.t
& VT_VLA
)) {
3893 tcc_error("sizeof applied to an incomplete type");
3896 vla_runtime_type_size(&type
, &align
);
3901 vtop
->type
.t
|= VT_UNSIGNED
;
3904 case TOK_builtin_types_compatible_p
:
3913 type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3914 type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3915 vpushi(is_compatible_types(&type1
, &type2
));
3918 case TOK_builtin_constant_p
:
3920 int saved_nocode_wanted
, res
;
3923 saved_nocode_wanted
= nocode_wanted
;
3926 res
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3928 nocode_wanted
= saved_nocode_wanted
;
3933 case TOK_builtin_frame_address
:
3934 case TOK_builtin_return_address
:
3941 if (tok
!= TOK_CINT
|| tokc
.i
< 0) {
3942 tcc_error("%s only takes positive integers",
3943 tok1
== TOK_builtin_return_address
?
3944 "__builtin_return_address" :
3945 "__builtin_frame_address");
3952 vset(&type
, VT_LOCAL
, 0); /* local frame */
3954 mk_pointer(&vtop
->type
);
3955 indir(); /* -> parent frame */
3957 if (tok1
== TOK_builtin_return_address
) {
3958 // assume return address is just above frame pointer on stack
3961 mk_pointer(&vtop
->type
);
3966 #ifdef TCC_TARGET_X86_64
3967 #ifdef TCC_TARGET_PE
3968 case TOK_builtin_va_start
:
3976 if ((vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
3977 tcc_error("__builtin_va_start expects a local variable");
3978 vtop
->r
&= ~(VT_LVAL
| VT_REF
);
3979 vtop
->type
= char_pointer_type
;
3984 case TOK_builtin_va_arg_types
:
3991 vpushi(classify_x86_64_va_arg(&type
));
3997 #ifdef TCC_TARGET_ARM64
3998 case TOK___va_start
: {
4000 tcc_error("statement in global scope");
4010 vtop
->type
.t
= VT_VOID
;
4013 case TOK___va_arg
: {
4016 tcc_error("statement in global scope");
4028 case TOK___arm64_clear_cache
: {
4037 vtop
->type
.t
= VT_VOID
;
4041 /* pre operations */
4052 t
= vtop
->type
.t
& VT_BTYPE
;
4054 /* In IEEE negate(x) isn't subtract(0,x), but rather
4059 else if (t
== VT_DOUBLE
)
4070 goto tok_identifier
;
4072 /* allow to take the address of a label */
4073 if (tok
< TOK_UIDENT
)
4074 expect("label identifier");
4075 s
= label_find(tok
);
4077 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
4079 if (s
->r
== LABEL_DECLARED
)
4080 s
->r
= LABEL_FORWARD
;
4083 s
->type
.t
= VT_VOID
;
4084 mk_pointer(&s
->type
);
4085 s
->type
.t
|= VT_STATIC
;
4087 vpushsym(&s
->type
, s
);
4091 // special qnan , snan and infinity values
4093 vpush64(VT_DOUBLE
, 0x7ff8000000000000ULL
);
4097 vpush64(VT_DOUBLE
, 0x7ff0000000000001ULL
);
4101 vpush64(VT_DOUBLE
, 0x7ff0000000000000ULL
);
4110 expect("identifier");
4113 const char *name
= get_tok_str(t
, NULL
);
4115 tcc_error("'%s' undeclared", name
);
4116 /* for simple function calls, we tolerate undeclared
4117 external reference to int() function */
4118 if (tcc_state
->warn_implicit_function_declaration
4119 #ifdef TCC_TARGET_PE
4120 /* people must be warned about using undeclared WINAPI functions
4121 (which usually start with uppercase letter) */
4122 || (name
[0] >= 'A' && name
[0] <= 'Z')
4125 tcc_warning("implicit declaration of function '%s'", name
);
4126 s
= external_global_sym(t
, &func_old_type
, 0);
4128 if ((s
->type
.t
& (VT_STATIC
| VT_INLINE
| VT_BTYPE
)) ==
4129 (VT_STATIC
| VT_INLINE
| VT_FUNC
)) {
4130 /* if referencing an inline function, then we generate a
4131 symbol to it if not already done. It will have the
4132 effect to generate code for it at the end of the
4133 compilation unit. Inline function as always
4134 generated in the text section. */
4136 put_extern_sym(s
, text_section
, 0, 0);
4137 r
= VT_SYM
| VT_CONST
;
4141 vset(&s
->type
, r
, s
->c
);
4142 /* if forward reference, we must point to s */
4143 if (vtop
->r
& VT_SYM
) {
4145 vtop
->c
.ptr_offset
= 0;
4150 /* post operations */
4152 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
4155 } else if (tok
== '.' || tok
== TOK_ARROW
) {
4158 if (tok
== TOK_ARROW
)
4160 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
4164 /* expect pointer on structure */
4165 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
4166 expect("struct or union");
4170 while ((s
= s
->next
) != NULL
) {
4175 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, NULL
));
4176 /* add field offset to pointer */
4177 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
4180 /* change type to field type, and set to lvalue */
4181 vtop
->type
= s
->type
;
4182 vtop
->type
.t
|= qualifiers
;
4183 /* an array is never an lvalue */
4184 if (!(vtop
->type
.t
& VT_ARRAY
)) {
4185 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4186 #ifdef CONFIG_TCC_BCHECK
4187 /* if bound checking, the referenced pointer must be checked */
4188 if (tcc_state
->do_bounds_check
)
4189 vtop
->r
|= VT_MUSTBOUND
;
4193 } else if (tok
== '[') {
4199 } else if (tok
== '(') {
4202 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
4206 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4207 /* pointer test (no array accepted) */
4208 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
4209 vtop
->type
= *pointed_type(&vtop
->type
);
4210 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4214 expect("function pointer");
4217 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
4219 /* get return type */
4222 sa
= s
->next
; /* first parameter */
4225 /* compute first implicit argument if a structure is returned */
4226 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
4227 variadic
= (s
->c
== FUNC_ELLIPSIS
);
4228 gfunc_sret(&s
->type
, variadic
, &ret
.type
,
4229 &ret_align
, ®size
, &args
);
4230 ret_nregs
= regargs_nregs(&args
);
4233 /* get some space for the returned structure */
4234 size
= type_size(&s
->type
, &align
);
4235 #ifdef TCC_TARGET_ARM64
4236 /* On arm64, a small struct is return in registers.
4237 It is much easier to write it to memory if we know
4238 that we are allowed to write some extra bytes, so
4239 round the allocated space up to a power of 2: */
4241 while (size
& (size
- 1))
4242 size
= (size
| (size
- 1)) + 1;
4244 loc
= (loc
- size
) & -align
;
4246 ret
.r
= VT_LOCAL
| VT_LVAL
;
4247 /* pass it as 'int' to avoid structure arg passing
4249 vseti(VT_LOCAL
, loc
);
4259 /* return in register */
4260 if (is_float(ret
.type
.t
)) {
4261 ret
.r
= reg_fret(ret
.type
.t
);
4262 #ifdef TCC_TARGET_X86_64
4263 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
4267 #ifndef TCC_TARGET_ARM64
4268 #ifdef TCC_TARGET_X86_64
4269 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
4271 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
4282 gfunc_param_typed(s
, sa
);
4292 tcc_error("too few arguments to function");
4294 if (!nocode_wanted
) {
4295 gfunc_call(nb_args
);
4297 vtop
-= (nb_args
+ 1);
4301 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
4302 vsetc(&ret
.type
, r
, &ret
.c
);
4303 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
4306 /* handle packed struct return */
4307 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
4309 #if defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_PE)
4314 size
= type_size(&s
->type
, &align
);
4315 /* We're writing whole regs often, make sure there's enough
4316 space. Assume register size is power of 2. */
4317 if (regsize
> align
)
4319 loc
= (loc
- size
) & -align
;
4321 #if defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_PE)
4322 for (i
=0; i
<REG_ARGS_MAX
; i
++) {
4323 offset
= args
.ireg
[i
];
4328 ret
.type
.t
= VT_LLONG
;
4329 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
4330 vsetc(&ret
.type
, i
? REG_LRET
: REG_IRET
, &ret
.c
);
4335 for (i
=0; i
<REG_ARGS_MAX
; i
++) {
4336 offset
= args
.freg
[i
];
4341 ret
.type
.t
= VT_DOUBLE
;
4342 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
4343 vsetc(&ret
.type
, i
? REG_QRET
: REG_FRET
, &ret
.c
);
4351 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
4355 if (--ret_nregs
== 0)
4360 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
4368 ST_FUNC
void expr_prod(void)
4373 while (tok
== '*' || tok
== '/' || tok
== '%') {
4381 ST_FUNC
void expr_sum(void)
4386 while (tok
== '+' || tok
== '-') {
4394 static void expr_shift(void)
4399 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
4407 static void expr_cmp(void)
4412 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
4413 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
4421 static void expr_cmpeq(void)
4426 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
4434 static void expr_and(void)
4437 while (tok
== '&') {
4444 static void expr_xor(void)
4447 while (tok
== '^') {
4454 static void expr_or(void)
4457 while (tok
== '|') {
4464 /* XXX: fix this mess */
4465 static void expr_land_const(void)
4468 while (tok
== TOK_LAND
) {
4475 /* XXX: fix this mess */
4476 static void expr_lor_const(void)
4479 while (tok
== TOK_LOR
) {
4486 /* only used if non constant */
4487 static void expr_land(void)
4492 if (tok
== TOK_LAND
) {
4497 if (tok
!= TOK_LAND
) {
4507 static void expr_lor(void)
4512 if (tok
== TOK_LOR
) {
4517 if (tok
!= TOK_LOR
) {
4527 /* XXX: better constant handling */
4528 static void expr_cond(void)
4530 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
;
4532 CType type
, type1
, type2
;
4539 boolean
.t
= VT_BOOL
;
4545 if (tok
!= ':' || !gnu_ext
) {
4560 if (vtop
!= vstack
) {
4561 /* needed to avoid having different registers saved in
4563 if (is_float(vtop
->type
.t
)) {
4565 #ifdef TCC_TARGET_X86_64
4566 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
4576 if (tok
== ':' && gnu_ext
) {
4584 sv
= *vtop
; /* save value to handle it later */
4585 vtop
--; /* no vpop so that FP stack is not flushed */
4593 bt1
= t1
& VT_BTYPE
;
4595 bt2
= t2
& VT_BTYPE
;
4596 /* cast operands to correct type according to ISOC rules */
4597 if (is_float(bt1
) || is_float(bt2
)) {
4598 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
4599 type
.t
= VT_LDOUBLE
;
4600 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
4605 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
4606 /* cast to biggest op */
4608 /* convert to unsigned if it does not fit in a long long */
4609 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
) ||
4610 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
))
4611 type
.t
|= VT_UNSIGNED
;
4612 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
4613 /* If one is a null ptr constant the result type
4615 if (is_null_pointer (vtop
))
4617 else if (is_null_pointer (&sv
))
4619 /* XXX: test pointer compatibility, C99 has more elaborate
4623 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
4624 /* XXX: test function pointer compatibility */
4625 type
= bt1
== VT_FUNC
? type1
: type2
;
4626 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
4627 /* XXX: test structure compatibility */
4628 type
= bt1
== VT_STRUCT
? type1
: type2
;
4629 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
4630 /* NOTE: as an extension, we accept void on only one side */
4633 /* integer operations */
4635 /* convert to unsigned if it does not fit in an integer */
4636 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
) ||
4637 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
))
4638 type
.t
|= VT_UNSIGNED
;
4641 /* now we convert second operand */
4643 if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
4646 if (is_float(type
.t
)) {
4648 #ifdef TCC_TARGET_X86_64
4649 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
4653 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
4654 /* for long longs, we use fixed registers to avoid having
4655 to handle a complicated move */
4660 /* this is horrible, but we must also convert first
4664 /* put again first value and cast it */
4667 if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
4670 move_reg(r2
, r1
, type
.t
);
4677 static void expr_eq(void)
4683 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
4684 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
4685 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
4700 ST_FUNC
void gexpr(void)
4711 /* parse an expression and return its type without any side effect. */
4712 static void expr_type(CType
*type
)
4714 int saved_nocode_wanted
;
4716 saved_nocode_wanted
= nocode_wanted
;
4721 nocode_wanted
= saved_nocode_wanted
;
4724 /* parse a unary expression and return its type without any side
4726 static void unary_type(CType
*type
)
4738 /* parse a constant expression and return value in vtop. */
4739 static void expr_const1(void)
4748 /* parse an integer constant and return its value. */
4749 ST_FUNC
int expr_const(void)
4753 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
4754 expect("constant expression");
4760 /* return the label token if current token is a label, otherwise
4762 static int is_label(void)
4766 /* fast test first */
4767 if (tok
< TOK_UIDENT
)
4769 /* no need to save tokc because tok is an identifier */
4776 unget_tok(last_tok
);
4781 static void label_or_decl(int l
)
4785 /* fast test first */
4786 if (tok
>= TOK_UIDENT
)
4788 /* no need to save tokc because tok is an identifier */
4792 unget_tok(last_tok
);
4795 unget_tok(last_tok
);
4800 static void block(int *bsym
, int *csym
, int *case_sym
, int *def_sym
,
4801 int case_reg
, int is_expr
)
4804 Sym
*s
, *frame_bottom
;
4806 /* generate line number info */
4807 if (tcc_state
->do_debug
&&
4808 (last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
4809 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
4811 last_line_num
= file
->line_num
;
4815 /* default return value is (void) */
4817 vtop
->type
.t
= VT_VOID
;
4820 if (tok
== TOK_IF
) {
4827 block(bsym
, csym
, case_sym
, def_sym
, case_reg
, 0);
4829 if (c
== TOK_ELSE
) {
4833 block(bsym
, csym
, case_sym
, def_sym
, case_reg
, 0);
4834 gsym(d
); /* patch else jmp */
4837 } else if (tok
== TOK_WHILE
) {
4846 block(&a
, &b
, case_sym
, def_sym
, case_reg
, 0);
4850 } else if (tok
== '{') {
4852 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
4855 /* record local declaration stack position */
4857 frame_bottom
= sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
4858 frame_bottom
->next
= scope_stack_bottom
;
4859 scope_stack_bottom
= frame_bottom
;
4860 llabel
= local_label_stack
;
4862 /* handle local labels declarations */
4863 if (tok
== TOK_LABEL
) {
4866 if (tok
< TOK_UIDENT
)
4867 expect("label identifier");
4868 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
4878 while (tok
!= '}') {
4879 label_or_decl(VT_LOCAL
);
4883 block(bsym
, csym
, case_sym
, def_sym
, case_reg
, is_expr
);
4886 /* pop locally defined labels */
4887 label_pop(&local_label_stack
, llabel
);
4889 /* XXX: this solution makes only valgrind happy...
4890 triggered by gcc.c-torture/execute/20000917-1.c */
4892 switch(vtop
->type
.t
& VT_BTYPE
) {
4894 /* this breaks a compilation of the linux kernel v2.4.26 */
4895 /* pmd_t *new = ({ __asm__ __volatile__("ud2\n") ; ((pmd_t *)1); }); */
4896 /* Look a commit a80acab: Display error on statement expressions with complex return type */
4897 /* A pointer is not a complex return type */
4901 for(p
=vtop
->type
.ref
;p
;p
=p
->prev
)
4903 tcc_error("unsupported expression type");
4906 /* pop locally defined symbols */
4907 scope_stack_bottom
= scope_stack_bottom
->next
;
4908 sym_pop(&local_stack
, s
);
4910 /* Pop VLA frames and restore stack pointer if required */
4911 if (vlas_in_scope
> saved_vlas_in_scope
) {
4912 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
4915 vlas_in_scope
= saved_vlas_in_scope
;
4918 } else if (tok
== TOK_RETURN
) {
4922 gen_assign_cast(&func_vt
);
4923 #ifdef TCC_TARGET_ARM64
4924 // Perhaps it would be better to use this for all backends:
4927 if ((func_vt
.t
& VT_BTYPE
) == VT_STRUCT
) {
4928 CType type
, ret_type
;
4929 int ret_align
, ret_nregs
, regsize
;
4932 gfunc_sret(&func_vt
, func_var
, &ret_type
,
4933 &ret_align
, ®size
, &args
);
4934 ret_nregs
= regargs_nregs(&args
);
4935 if (0 == ret_nregs
) {
4936 /* if returning structure, must copy it to implicit
4937 first pointer arg location */
4940 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
4943 /* copy structure value to pointer */
4946 /* returning structure packed into registers */
4947 int r
, size
, addr
, align
;
4948 #if defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_PE)
4951 size
= type_size(&func_vt
,&align
);
4952 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) || (vtop
->c
.i
& (ret_align
-1)))
4953 && (align
& (ret_align
-1))) {
4954 loc
= (loc
- size
) & -ret_align
;
4957 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
4961 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
4963 vtop
->type
= ret_type
;
4964 #if defined(TCC_TARGET_X86_64) && !defined(TCC_TARGET_PE)
4965 for (i
=0; i
<REG_ARGS_MAX
; i
++) {
4966 int off
= args
.ireg
[i
];
4971 r
= i
? RC_LRET
: RC_IRET
;
4975 vtop
->type
.t
= VT_LLONG
;
4979 for (i
=0; i
<REG_ARGS_MAX
; i
++) {
4980 int off
= args
.freg
[i
];
4985 /* We assume that when a structure is returned in multiple
4986 registers, their classes are consecutive values of the
4988 r
= rc_fret(ret_type
.t
) << i
;
4992 vtop
->type
.t
= VT_DOUBLE
;
4997 if (is_float(ret_type
.t
))
4998 r
= rc_fret(ret_type
.t
);
5004 if (--ret_nregs
== 0)
5006 /* We assume that when a structure is returned in multiple
5007 registers, their classes are consecutive values of the
5010 vtop
->c
.i
+= regsize
;
5011 vtop
->r
= VT_LOCAL
| VT_LVAL
;
5015 } else if (is_float(func_vt
.t
)) {
5016 gv(rc_fret(func_vt
.t
));
5021 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5024 rsym
= gjmp(rsym
); /* jmp */
5025 } else if (tok
== TOK_BREAK
) {
5028 tcc_error("cannot break");
5029 *bsym
= gjmp(*bsym
);
5032 } else if (tok
== TOK_CONTINUE
) {
5035 tcc_error("cannot continue");
5036 vla_sp_restore_root();
5037 *csym
= gjmp(*csym
);
5040 } else if (tok
== TOK_FOR
) {
5045 frame_bottom
= sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
5046 frame_bottom
->next
= scope_stack_bottom
;
5047 scope_stack_bottom
= frame_bottom
;
5049 /* c99 for-loop init decl? */
5050 if (!decl0(VT_LOCAL
, 1)) {
5051 /* no, regular for-loop init expr */
5077 block(&a
, &b
, case_sym
, def_sym
, case_reg
, 0);
5081 scope_stack_bottom
= scope_stack_bottom
->next
;
5082 sym_pop(&local_stack
, s
);
5084 if (tok
== TOK_DO
) {
5090 block(&a
, &b
, case_sym
, def_sym
, case_reg
, 0);
5101 if (tok
== TOK_SWITCH
) {
5105 /* XXX: other types than integer */
5106 case_reg
= gv(RC_INT
);
5110 b
= gjmp(0); /* jump to first case */
5112 block(&a
, csym
, &b
, &c
, case_reg
, 0);
5113 /* if no default, jmp after switch */
5121 if (tok
== TOK_CASE
) {
5128 if (gnu_ext
&& tok
== TOK_DOTS
) {
5132 tcc_warning("empty case range");
5134 /* since a case is like a label, we must skip it with a jmp */
5142 *case_sym
= gtst(1, 0);
5145 *case_sym
= gtst(1, 0);
5149 *case_sym
= gtst(1, *case_sym
);
5151 case_reg
= gv(RC_INT
);
5156 goto block_after_label
;
5158 if (tok
== TOK_DEFAULT
) {
5164 tcc_error("too many 'default'");
5167 goto block_after_label
;
5169 if (tok
== TOK_GOTO
) {
5171 if (tok
== '*' && gnu_ext
) {
5175 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
5178 } else if (tok
>= TOK_UIDENT
) {
5179 s
= label_find(tok
);
5180 /* put forward definition if needed */
5182 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5184 if (s
->r
== LABEL_DECLARED
)
5185 s
->r
= LABEL_FORWARD
;
5187 vla_sp_restore_root();
5188 if (s
->r
& LABEL_FORWARD
)
5189 s
->jnext
= gjmp(s
->jnext
);
5191 gjmp_addr(s
->jnext
);
5194 expect("label identifier");
5197 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
5205 if (s
->r
== LABEL_DEFINED
)
5206 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
5208 s
->r
= LABEL_DEFINED
;
5210 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
5214 /* we accept this, but it is a mistake */
5217 tcc_warning("deprecated use of label at end of compound statement");
5221 block(bsym
, csym
, case_sym
, def_sym
, case_reg
, is_expr
);
5224 /* expression case */
5239 /* t is the array or struct type. c is the array or struct
5240 address. cur_index/cur_field is the pointer to the current
5241 value. 'size_only' is true if only size info is needed (only used
5243 static void decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
5244 int *cur_index
, Sym
**cur_field
,
5248 int notfirst
, index
, index_last
, align
, l
, nb_elems
, elem_size
;
5254 if (gnu_ext
&& (l
= is_label()) != 0)
5256 while (tok
== '[' || tok
== '.') {
5258 if (!(type
->t
& VT_ARRAY
))
5259 expect("array type");
5262 index
= expr_const();
5263 if (index
< 0 || (s
->c
>= 0 && index
>= s
->c
))
5264 expect("invalid index");
5265 if (tok
== TOK_DOTS
&& gnu_ext
) {
5267 index_last
= expr_const();
5268 if (index_last
< 0 ||
5269 (s
->c
>= 0 && index_last
>= s
->c
) ||
5271 expect("invalid index");
5277 *cur_index
= index_last
;
5278 type
= pointed_type(type
);
5279 elem_size
= type_size(type
, &align
);
5280 c
+= index
* elem_size
;
5281 /* NOTE: we only support ranges for last designator */
5282 nb_elems
= index_last
- index
+ 1;
5283 if (nb_elems
!= 1) {
5292 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
5293 expect("struct/union type");
5306 /* XXX: fix this mess by using explicit storage field */
5308 type1
.t
|= (type
->t
& ~VT_TYPE
);
5322 if (type
->t
& VT_ARRAY
) {
5324 type
= pointed_type(type
);
5325 c
+= index
* type_size(type
, &align
);
5329 tcc_error("too many field init");
5330 /* XXX: fix this mess by using explicit storage field */
5332 type1
.t
|= (type
->t
& ~VT_TYPE
);
5337 decl_initializer(type
, sec
, c
, 0, size_only
);
5339 /* XXX: make it more general */
5340 if (!size_only
&& nb_elems
> 1) {
5341 unsigned long c_end
;
5346 tcc_error("range init not supported yet for dynamic storage");
5347 c_end
= c
+ nb_elems
* elem_size
;
5348 if (c_end
> sec
->data_allocated
)
5349 section_realloc(sec
, c_end
);
5350 src
= sec
->data
+ c
;
5352 for(i
= 1; i
< nb_elems
; i
++) {
5354 memcpy(dst
, src
, elem_size
);
5360 #define EXPR_CONST 1
/* review: this extraction view is fragmentary — original lines are split across
   display lines and several original lines are missing (numbering gaps), so the
   comments below describe only what is visible. Do not treat this as the full
   function body. */
5363 /* store a value or an expression directly in global data or in local array */
5364 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
,
5365 int v
, int expr_type
)
5367 int saved_global_expr
, bt
, bit_pos
, bit_size
;
5369 unsigned long long bit_mask
;
5377 /* compound literals must be allocated globally in this case */
5378 saved_global_expr
= global_expr
;
5381 global_expr
= saved_global_expr
;
5382 /* NOTE: symbols are accepted */
5383 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
)
5384 tcc_error("initializer element is not constant");
5392 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5395 /* XXX: not portable */
5396 /* XXX: generate error if incorrect relocation */
5397 gen_assign_cast(&dtype
);
5398 bt
= type
->t
& VT_BTYPE
;
5399 /* we'll write at most 16 bytes */
5400 if (c
+ 16 > sec
->data_allocated
) {
5401 section_realloc(sec
, c
+ 16);
/* review: ptr is the raw destination inside the section's data buffer. */
5403 ptr
= sec
->data
+ c
;
5404 /* XXX: make code faster ? */
5405 if (!(type
->t
& VT_BITFIELD
)) {
/* review: bit-field case — position and width are packed into type.t above
   VT_STRUCT_SHIFT (6 bits each, hence the 0x3f masks). */
5410 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
5411 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
5412 bit_mask
= (1LL << bit_size
) - 1;
5414 if ((vtop
->r
& VT_SYM
) &&
5420 (bt
== VT_INT
&& bit_size
!= 32)))
5421 tcc_error("initializer element is not computable at load time");
5423 /* XXX: when cross-compiling we assume that each type has the
5424 same representation on host and target, which is likely to
5425 be wrong in the case of long double */
/* review: normalize to 0/1 (presumably the VT_BOOL arm — enclosing switch
   labels are not visible here; TODO confirm against full source). */
5427 vtop
->c
.i
= (vtop
->c
.i
!= 0);
5429 *(char *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
5432 *(short *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
5435 *(double *)ptr
= vtop
->c
.d
;
5438 *(long double *)ptr
= vtop
->c
.ld
;
5441 *(long long *)ptr
|= (vtop
->c
.ll
& bit_mask
) << bit_pos
;
/* review: pointer-sized arm — may need a relocation when the value refers
   to a symbol; 64-bit targets use greloca with an explicit addend. */
5444 addr_t val
= (vtop
->c
.ptr_offset
& bit_mask
) << bit_pos
;
5445 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
5446 if (vtop
->r
& VT_SYM
)
5447 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
5449 *(addr_t
*)ptr
|= val
;
5451 if (vtop
->r
& VT_SYM
)
5452 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
5453 *(addr_t
*)ptr
|= val
;
5458 int val
= (vtop
->c
.i
& bit_mask
) << bit_pos
;
5459 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_X86_64)
5460 if (vtop
->r
& VT_SYM
)
5461 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
5465 if (vtop
->r
& VT_SYM
)
5466 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
/* review: non-constant / local case — leave an lvalue at stack offset c on
   the value stack so the store is emitted as ordinary code. */
5474 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
/* review: fragmentary view — most of this function's body is missing from the
   extraction. Visible logic: globals need no explicit zeroing (BSS/data are
   zero-initialized); for locals a call to memset is prepared via the value
   stack. TODO confirm against full source. */
5481 /* put zeros for variable based init */
5482 static void init_putz(CType
*t
, Section
*sec
, unsigned long c
, int size
)
5485 /* nothing to do because globals are already set to zero */
5487 vpush_global_sym(&func_old_type
, TOK_memset
);
5489 #ifdef TCC_TARGET_ARM
/* review: fragmentary extraction — original lines are split and many are
   missing (numbering gaps). Interspersed comments mark the visible phases
   only; hedged where the missing lines prevent certainty. */
5500 /* 't' contains the type and storage info. 'c' is the offset of the
5501 object in section 'sec'. If 'sec' is NULL, it means stack based
5502 allocation. 'first' is true if array '{' must be read (multi
5503 dimension implicit array init handling). 'size_only' is true if
5504 size only evaluation is wanted (only for arrays). */
5505 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
5506 int first
, int size_only
)
5508 int index
, array_length
, n
, no_oblock
, nb
, parlevel
, parlevel1
, i
;
5509 int size1
, align1
, expr_type
;
/* review: case 1 — variable-length array: allocate at runtime, saving the
   stack pointer once per scope so it can be restored on scope exit. */
5513 if (type
->t
& VT_VLA
) {
5516 /* save current stack pointer */
5517 if (vlas_in_scope
== 0) {
5518 if (vla_sp_root_loc
== -1)
5519 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
5520 gen_vla_sp_save(vla_sp_root_loc
);
5523 vla_runtime_type_size(type
, &a
);
5524 gen_vla_alloc(type
, a
);
/* review: case 2 — array initializer; t1/size1 describe the element type. */
5528 } else if (type
->t
& VT_ARRAY
) {
5532 t1
= pointed_type(type
);
5533 size1
= type_size(t1
, &align1
);
5536 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
5539 tcc_error("character array initializer must be a literal,"
5540 " optionally enclosed in braces");
5545 /* only parse strings here if correct type (otherwise: handle
5546 them as ((w)char *) expressions */
5547 if ((tok
== TOK_LSTR
&&
5548 #ifdef TCC_TARGET_PE
5549 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
5551 (t1
->t
& VT_BTYPE
) == VT_INT
5553 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
/* review: string-literal initialization of a char/wchar array; consecutive
   literals are concatenated by looping over tokens. */
5554 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
5559 /* compute maximum number of chars wanted */
5561 cstr_len
= cstr
->size
;
5563 cstr_len
= cstr
->size
/ sizeof(nwchar_t
);
5566 if (n
>= 0 && nb
> (n
- array_length
))
5567 nb
= n
- array_length
;
5570 tcc_warning("initializer-string for array is too long");
5571 /* in order to go faster for common case (char
5572 string in global variable, we handle it
5574 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
5575 memcpy(sec
->data
+ c
+ array_length
, cstr
->data
, nb
);
5579 ch
= ((unsigned char *)cstr
->data
)[i
];
5581 ch
= ((nwchar_t
*)cstr
->data
)[i
];
5582 init_putv(t1
, sec
, c
+ (array_length
+ i
) * size1
,
5590 /* only add trailing zero if enough storage (no
5591 warning in this case since it is standard) */
5592 if (n
< 0 || array_length
< n
) {
5594 init_putv(t1
, sec
, c
+ (array_length
* size1
), 0, EXPR_VAL
);
/* review: brace-enclosed array initializer: parse each element (with
   optional designators) and zero-fill any holes the designators skip. */
5600 while (tok
!= '}') {
5601 decl_designator(type
, sec
, c
, &index
, NULL
, size_only
);
5602 if (n
>= 0 && index
>= n
)
5603 tcc_error("index too large");
5604 /* must put zero in holes (note that doing it that way
5605 ensures that it even works with designators) */
5606 if (!size_only
&& array_length
< index
) {
5607 init_putz(t1
, sec
, c
+ array_length
* size1
,
5608 (index
- array_length
) * size1
);
5611 if (index
> array_length
)
5612 array_length
= index
;
5613 /* special test for multi dimensional arrays (may not
5614 be strictly correct if designators are used at the
5616 if (index
>= n
&& no_oblock
)
5625 /* put zeros at the end */
5626 if (!size_only
&& n
>= 0 && array_length
< n
) {
5627 init_putz(t1
, sec
, c
+ array_length
* size1
,
5628 (n
- array_length
) * size1
);
5630 /* patch type size if needed */
5632 s
->c
= array_length
;
/* review: case 3 — struct/union initializer. */
5633 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
&&
5634 (sec
|| !first
|| tok
== '{')) {
5636 /* NOTE: the previous test is a specific case for automatic
5637 struct/union init */
5638 /* XXX: union needs only one init */
5645 if (tcc_state
->old_struct_init_code
) {
5646 /* an old version of struct initialization.
5647 It have a problems. But with a new version
5648 linux 2.4.26 can't load ramdisk.
5650 while (tok
== '(') {
5654 if (!parse_btype(&type1
, &ad1
))
5656 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
5658 if (!is_assignable_types(type
, &type1
))
5659 tcc_error("invalid type for cast");
5666 if (!parse_btype(&type1
, &ad1
))
5668 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
5670 if (!is_assignable_types(type
, &type1
))
5671 tcc_error("invalid type for cast");
5680 if (first
|| tok
== '{') {
/* review: walk the fields, zero-filling padding between initialized
   members; 'index' tracks the byte offset reached so far. */
5689 while (tok
!= '}') {
5690 decl_designator(type
, sec
, c
, NULL
, &f
, size_only
);
5692 if (!size_only
&& array_length
< index
) {
5693 init_putz(type
, sec
, c
+ array_length
,
5694 index
- array_length
);
5696 index
= index
+ type_size(&f
->type
, &align1
);
5697 if (index
> array_length
)
5698 array_length
= index
;
5700 /* gr: skip fields from same union - ugly. */
5703 int f_size
= type_size(&f
->type
, &align
);
5704 int f_type
= (f
->type
.t
& VT_BTYPE
);
5706 ///printf("index: %2d %08x -- %2d %08x\n", f->c, f->type.t, f->next->c, f->next->type.t);
5707 /* test for same offset */
5708 if (f
->next
->c
!= f
->c
)
5710 if ((f_type
== VT_STRUCT
) && (f_size
== 0)) {
5712 Lets assume a structure of size 0 can't be a member of the union.
5713 This allow to compile the following code from a linux kernel v2.4.26
5714 typedef struct { } rwlock_t;
5720 struct fs_struct init_fs = { { (1) }, (rwlock_t) {}, 0022, };
5721 tcc-0.9.23 can succesfully compile this version of the kernel.
5722 gcc don't have problems with this code too.
5726 /* if yes, test for bitfield shift */
5727 if ((f
->type
.t
& VT_BITFIELD
) && (f
->next
->type
.t
& VT_BITFIELD
)) {
5728 int bit_pos_1
= (f
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
5729 int bit_pos_2
= (f
->next
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
5730 //printf("bitfield %d %d\n", bit_pos_1, bit_pos_2);
5731 if (bit_pos_1
!= bit_pos_2
)
5738 if (no_oblock
&& f
== NULL
)
5744 /* put zeros at the end */
5745 if (!size_only
&& array_length
< n
) {
5746 init_putz(type
, sec
, c
+ array_length
,
/* review: case 4 — redundant braces around a scalar: recurse. */
5755 } else if (tok
== '{') {
5757 decl_initializer(type
, sec
, c
, first
, size_only
);
/* review: case 5 — size-only pass: skip the expression tokens, tracking
   paren/brace nesting with parlevel/parlevel1. */
5759 } else if (size_only
) {
5760 /* just skip expression */
5761 parlevel
= parlevel1
= 0;
5762 while ((parlevel
> 0 || parlevel1
> 0 ||
5763 (tok
!= '}' && tok
!= ',')) && tok
!= -1) {
5766 else if (tok
== ')') {
5767 if (parlevel
== 0 && parlevel1
== 0)
5771 else if (tok
== '{')
5773 else if (tok
== '}') {
5774 if (parlevel
== 0 && parlevel1
== 0)
/* review: default — single scalar expression; globals require a constant
   expression, locals may use any expression. */
5781 /* currently, we always use constant expression for globals
5782 (may change for scripting case) */
5783 expr_type
= EXPR_CONST
;
5785 expr_type
= EXPR_ANY
;
5786 init_putv(type
, sec
, c
, 0, expr_type
);
/* review: fragmentary extraction — lines are split and many originals are
   missing. Phase comments below cover only the visible code. */
5790 /* parse an initializer for type 't' if 'has_init' is non zero, and
5791 allocate space in local or global data space ('r' is either
5792 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
5793 variable 'v' with an associated name represented by 'asm_label' of
5794 scope 'scope' is declared before initializers are parsed. If 'v' is
5795 zero, then a reference to the new object is put in the value stack.
5796 If 'has_init' is 2, a special parsing is done to handle string
5798 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
5799 int has_init
, int v
, char *asm_label
,
5802 int size
, align
, addr
, data_offset
;
5804 ParseState saved_parse_state
= {0};
5805 TokenString init_str
;
5807 Sym
*flexible_array
;
/* review: detect a trailing flexible array member (size < 0 on the last
   struct field) — its size is folded in after the first parse. */
5809 flexible_array
= NULL
;
5810 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5811 Sym
*field
= type
->ref
->next
;
5814 field
= field
->next
;
5815 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
5816 flexible_array
= field
;
5820 size
= type_size(type
, &align
);
5821 /* If unknown size, we must evaluate it before
5822 evaluating initializers because
5823 initializers can generate global data too
5824 (e.g. string pointers or ISOC99 compound
5825 literals). It also simplifies local
5826 initializers handling */
/* review: unknown size — buffer the initializer tokens into init_str and do
   a size-only first pass (decl_initializer(..., 1, 1)) to fix the size. */
5827 tok_str_new(&init_str
);
5828 if (size
< 0 || (flexible_array
&& has_init
)) {
5830 tcc_error("unknown type size");
5831 /* get all init string */
5832 if (has_init
== 2) {
5833 /* only get strings */
5834 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
5835 tok_str_add_tok(&init_str
);
5840 while (level
> 0 || (tok
!= ',' && tok
!= ';')) {
5842 tcc_error("unexpected end of file in initializer");
5843 tok_str_add_tok(&init_str
);
5846 else if (tok
== '}') {
5856 tok_str_add(&init_str
, -1);
5857 tok_str_add(&init_str
, 0);
5860 save_parse_state(&saved_parse_state
);
5862 begin_macro(&init_str
, 0);
5864 decl_initializer(type
, NULL
, 0, 1, 1);
5865 /* prepare second initializer parsing */
5866 macro_ptr
= init_str
.str
;
5869 /* if still unknown size, error */
5870 size
= type_size(type
, &align
);
5872 tcc_error("unknown type size");
5875 size
+= flexible_array
->type
.ref
->c
* pointed_size(&flexible_array
->type
);
5876 /* take into account specified alignment if bigger */
5877 if (ad
->a
.aligned
) {
5878 if (ad
->a
.aligned
> align
)
5879 align
= ad
->a
.aligned
;
5880 } else if (ad
->a
.packed
) {
/* review: local (stack) allocation path — carve space from 'loc', with
   optional bounds-check bookkeeping in lbounds_section. */
5883 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
5885 #ifdef CONFIG_TCC_BCHECK
5886 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
5890 loc
= (loc
- size
) & -align
;
5892 #ifdef CONFIG_TCC_BCHECK
5893 /* handles bounds */
5894 /* XXX: currently, since we do only one pass, we cannot track
5895 '&' operators, so we add only arrays */
5896 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
5898 /* add padding between regions */
5900 /* then add local bound info */
5901 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
5902 bounds_ptr
[0] = addr
;
5903 bounds_ptr
[1] = size
;
5907 /* local variable */
5908 sym_push(v
, type
, r
, addr
);
5910 /* push local reference */
5911 vset(type
, r
, addr
);
/* review: global path — handle redefinition of an existing symbol,
   completing an extern declaration in place. */
5917 if (v
&& scope
== VT_CONST
) {
5918 /* see if the symbol was already defined */
5921 if (!is_compatible_types(&sym
->type
, type
))
5922 tcc_error("incompatible types for redefinition of '%s'",
5923 get_tok_str(v
, NULL
));
5924 if (sym
->type
.t
& VT_EXTERN
) {
5925 /* if the variable is extern, it was not allocated */
5926 sym
->type
.t
&= ~VT_EXTERN
;
5927 /* set array size if it was omitted in extern
5929 if ((sym
->type
.t
& VT_ARRAY
) &&
5930 sym
->type
.ref
->c
< 0 &&
5932 sym
->type
.ref
->c
= type
->ref
->c
;
5934 /* we accept several definitions of the same
5935 global variable. this is tricky, because we
5936 must play with the SHN_COMMON type of the symbol */
5937 /* XXX: should check if the variable was already
5938 initialized. It is incorrect to initialized it
5940 /* no init data, we won't add more to the symbol */
/* review: reserve (aligned) space in the target section; the offset must be
   bumped before parsing initializers, which can themselves emit data. */
5947 /* allocate symbol in corresponding section */
5952 else if (tcc_state
->nocommon
)
5956 data_offset
= sec
->data_offset
;
5957 data_offset
= (data_offset
+ align
- 1) & -align
;
5959 /* very important to increment global pointer at this time
5960 because initializers themselves can create new initializers */
5961 data_offset
+= size
;
5962 #ifdef CONFIG_TCC_BCHECK
5963 /* add padding if bound check */
5964 if (tcc_state
->do_bounds_check
)
5967 sec
->data_offset
= data_offset
;
5968 /* allocate section space to put the data */
5969 if (sec
->sh_type
!= SHT_NOBITS
&&
5970 data_offset
> sec
->data_allocated
)
5971 section_realloc(sec
, data_offset
);
5972 /* align section if needed */
5973 if (align
> sec
->sh_addralign
)
5974 sec
->sh_addralign
= align
;
5976 addr
= 0; /* avoid warning */
5980 if (scope
!= VT_CONST
|| !sym
) {
5981 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
5982 sym
->asm_label
= asm_label
;
5984 /* update symbol definition */
5986 put_extern_sym(sym
, sec
, addr
, size
);
/* review: tentative definition with no section — emit as SHN_COMMON so the
   linker can merge multiple definitions. */
5989 /* put a common area */
5990 put_extern_sym(sym
, NULL
, align
, size
);
5991 /* XXX: find a nicer way */
5992 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
5993 esym
->st_shndx
= SHN_COMMON
;
5996 /* push global reference */
5997 sym
= get_sym_ref(type
, sec
, addr
, size
);
5998 vpushsym(type
, sym
);
6000 /* patch symbol weakness */
6001 if (type
->t
& VT_WEAK
)
6003 apply_visibility(sym
, type
);
6004 #ifdef CONFIG_TCC_BCHECK
6005 /* handles bounds now because the symbol must be defined
6006 before for the relocation */
6007 if (tcc_state
->do_bounds_check
) {
6010 greloc(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
);
6011 /* then add global bound info */
6012 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
6013 bounds_ptr
[0] = 0; /* relocated */
6014 bounds_ptr
[1] = size
;
/* review: second pass — actually parse/emit the initializer, then restore
   the parse state saved before the buffered first pass. */
6018 if (has_init
|| (type
->t
& VT_VLA
)) {
6019 decl_initializer(type
, sec
, addr
, 1, 0);
6020 /* restore parse state if needed */
6023 restore_parse_state(&saved_parse_state
);
6025 /* patch flexible array member size back to -1, */
6026 /* for possible subsequent similar declarations */
6028 flexible_array
->type
.ref
->c
= -1;
/* review: emit stabs debug info for a function: an N_FUN entry naming the
   function ('F' global / 'f' static, with a dummy type "1"), plus an N_SLINE
   so gdb has a line at the function start. View is fragmentary (local
   declarations for 'buf' are not visible here). */
6033 static void put_func_debug(Sym
*sym
)
6038 /* XXX: we put here a dummy type */
6039 snprintf(buf
, sizeof(buf
), "%s:%c1",
6040 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
6041 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
6042 cur_text_section
, sym
->c
);
6043 /* //gr gdb wants a line at the function */
6044 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
/* review: fragmentary view — loop bodies and the parameter-list walk are
   partially missing; comments describe visible logic only. */
6049 /* parse an old style function declaration list */
6050 /* XXX: check multiple parameter */
6051 static void func_decl_list(Sym
*func_sym
)
6058 /* parse each declaration */
/* review: consume K&R-style declarations until the function body, a
   terminator, or an asm directive begins. */
6059 while (tok
!= '{' && tok
!= ';' && tok
!= ',' && tok
!= TOK_EOF
&&
6060 tok
!= TOK_ASM1
&& tok
!= TOK_ASM2
&& tok
!= TOK_ASM3
) {
6061 if (!parse_btype(&btype
, &ad
))
6062 expect("declaration list");
6063 if (((btype
.t
& VT_BTYPE
) == VT_ENUM
||
6064 (btype
.t
& VT_BTYPE
) == VT_STRUCT
) &&
6066 /* we accept no variable after */
6070 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
6071 /* find parameter in function parameter list */
6074 if ((s
->v
& ~SYM_FIELD
) == v
)
6078 tcc_error("declaration for parameter '%s' but no such parameter",
6079 get_tok_str(v
, NULL
));
6081 /* check that no storage specifier except 'register' was given */
6082 if (type
.t
& VT_STORAGE
)
6083 tcc_error("storage class specified for '%s'", get_tok_str(v
, NULL
));
6084 convert_parameter_type(&type
);
6085 /* we can add the type (NOTE: it could be local to the function) */
6087 /* accept other parameters */
/* review: fragmentary extraction; phase comments below mark the visible
   setup / body / teardown sequence only. */
6098 /* parse a function defined by symbol 'sym' and generate its code in
6099 'cur_text_section' */
6100 static void gen_function(Sym
*sym
)
6102 int saved_nocode_wanted
= nocode_wanted
;
/* review: setup — place the symbol at the current code offset; its size is
   patched after the body is generated. */
6105 ind
= cur_text_section
->data_offset
;
6106 /* NOTE: we patch the symbol size later */
6107 put_extern_sym(sym
, cur_text_section
, ind
, 0);
6108 funcname
= get_tok_str(sym
->v
, NULL
);
6110 /* Initialize VLA state */
6112 vla_sp_root_loc
= -1;
6113 /* put debug symbol */
6114 if (tcc_state
->do_debug
)
6115 put_func_debug(sym
);
6116 /* push a dummy symbol to enable local sym storage */
6117 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
6118 gfunc_prolog(&sym
->type
);
6119 #ifdef CONFIG_TCC_BCHECK
/* review: bounds-checked main(): register argv/environ regions via
   __bound_main_arg for the first two parameters on the local stack. */
6120 if (tcc_state
->do_bounds_check
&& !strcmp(funcname
, "main")) {
6123 for (i
= 0, sym
= local_stack
; i
< 2; i
++, sym
= sym
->prev
) {
6124 if (sym
->v
& SYM_FIELD
|| sym
->prev
->v
& SYM_FIELD
)
6126 vpush_global_sym(&func_old_type
, TOK___bound_main_arg
);
6127 vset(&sym
->type
, sym
->r
, sym
->c
);
/* review: compile the function body. */
6133 block(NULL
, NULL
, NULL
, NULL
, 0, 0);
/* review: teardown — record final code size, pop labels/locals, patch the
   ELF symbol size, and reset per-function globals defensively. */
6136 cur_text_section
->data_offset
= ind
;
6137 label_pop(&global_label_stack
, NULL
);
6138 /* reset local stack */
6139 scope_stack_bottom
= NULL
;
6140 sym_pop(&local_stack
, NULL
);
6141 /* end of function */
6142 /* patch symbol size */
6143 ((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
].st_size
=
6145 /* patch symbol weakness (this definition overrules any prototype) */
6146 if (sym
->type
.t
& VT_WEAK
)
6148 apply_visibility(sym
, &sym
->type
);
6149 if (tcc_state
->do_debug
) {
6150 put_stabn(N_FUN
, 0, 0, ind
- func_ind
);
6152 /* It's better to crash than to generate wrong code */
6153 cur_text_section
= NULL
;
6154 funcname
= ""; /* for safety */
6155 func_vt
.t
= VT_VOID
; /* for safety */
6156 func_var
= 0; /* for safety */
6157 ind
= 0; /* for safety */
6158 nocode_wanted
= saved_nocode_wanted
;
/* review: emit code for all referenced static-inline functions, repeating
   until a pass generates nothing new (generated code may reference further
   inlines), then free the token strings of the unused ones. View is
   fragmentary — the outer repeat loop's brace structure is not visible. */
6162 ST_FUNC
void gen_inline_functions(void)
6165 int inline_generated
, i
, ln
;
6166 struct InlineFunc
*fn
;
/* review: preserve the current line number; it is restored after replaying
   each function's saved token stream. */
6168 ln
= file
->line_num
;
6169 /* iterate while inline function are referenced */
6171 inline_generated
= 0;
6172 for (i
= 0; i
< tcc_state
->nb_inline_fns
; ++i
) {
6173 fn
= tcc_state
->inline_fns
[i
];
/* review: sym->c != 0 means the function was actually referenced. */
6175 if (sym
&& sym
->c
) {
6176 /* the function was used: generate its code and
6177 convert it to a normal function */
6180 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
6181 sym
->r
= VT_SYM
| VT_CONST
;
6182 sym
->type
.t
&= ~VT_INLINE
;
6184 begin_macro(&fn
->func_str
, 0);
6186 cur_text_section
= text_section
;
6190 inline_generated
= 1;
6193 if (!inline_generated
)
6196 file
->line_num
= ln
;
6197 /* free tokens of unused inline functions */
6198 for (i
= 0; i
< tcc_state
->nb_inline_fns
; ++i
) {
6199 fn
= tcc_state
->inline_fns
[i
];
6201 tok_str_free(fn
->func_str
.str
);
6203 dynarray_reset(&tcc_state
->inline_fns
, &tcc_state
->nb_inline_fns
);
/* review: fragmentary extraction of the top-level declaration parser — many
   original lines are missing (large numbering gaps), so the phase comments
   below are hedged and cover visible code only. */
6206 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
6207 static int decl0(int l
, int is_for_loop_init
)
6215 if (!parse_btype(&btype
, &ad
)) {
6216 if (is_for_loop_init
)
6218 /* skip redundant ';' */
6219 /* XXX: find more elegant solution */
/* review: global-scope asm blocks and K&R implicit-int handling. */
6224 if (l
== VT_CONST
&&
6225 (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
6226 /* global asm block */
6230 /* special test for old K&R protos without explicit int
6231 type. Only accepted when defining global data */
6232 if (l
== VT_LOCAL
|| tok
< TOK_DEFINE
)
6236 if (((btype
.t
& VT_BTYPE
) == VT_ENUM
||
6237 (btype
.t
& VT_BTYPE
) == VT_STRUCT
) &&
6239 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
6240 int v
= btype
.ref
->v
;
6241 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
6242 tcc_warning("unnamed struct/union that defines no instances");
/* review: iterate over the comma-separated declarators sharing one base
   type. */
6247 while (1) { /* iterate thru each declaration */
6248 char *asm_label
; // associated asm label
6250 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
6254 type_to_str(buf
, sizeof(buf
), t
, get_tok_str(v
, NULL
));
6255 printf("type = '%s'\n", buf
);
6258 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
6259 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
6260 tcc_error("function without file scope cannot be static");
6262 /* if old style function prototype, we accept a
6265 if (sym
->c
== FUNC_OLD
)
6266 func_decl_list(sym
);
/* review: optional GNU asm("label") rename after the declarator. */
6270 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
6273 asm_label_instr(&astr
);
6274 asm_label
= tcc_strdup(astr
.data
);
6277 /* parse one last attribute list, after asm label */
6278 parse_attribute(&ad
);
6283 #ifdef TCC_TARGET_PE
6284 if (ad
.a
.func_import
)
6285 type
.t
|= VT_IMPORT
;
6286 if (ad
.a
.func_export
)
6287 type
.t
|= VT_EXPORT
;
6289 type
.t
|= ad
.a
.visibility
<< VT_VIS_SHIFT
;
/* review: function-definition branch (presumably entered on '{' — the
   guard is among the missing lines; TODO confirm). */
6293 tcc_error("cannot use local functions");
6294 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
6295 expect("function definition");
6297 /* reject abstract declarators in function definition */
6299 while ((sym
= sym
->next
) != NULL
)
6300 if (!(sym
->v
& ~SYM_FIELD
))
6301 expect("identifier");
6303 /* XXX: cannot do better now: convert extern line to static inline */
6304 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
6305 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
/* review: merge the definition with any earlier prototype: calling
   convention, export flag, staticness and visibility are inherited. */
6310 if ((sym
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6313 ref
= sym
->type
.ref
;
6314 if (0 == ref
->a
.func_proto
)
6315 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6317 /* use func_call from prototype if not defined */
6318 if (ref
->a
.func_call
!= FUNC_CDECL
6319 && type
.ref
->a
.func_call
== FUNC_CDECL
)
6320 type
.ref
->a
.func_call
= ref
->a
.func_call
;
6322 /* use export from prototype */
6323 if (ref
->a
.func_export
)
6324 type
.ref
->a
.func_export
= 1;
6326 /* use static from prototype */
6327 if (sym
->type
.t
& VT_STATIC
)
6328 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
6330 /* If the definition has no visibility use the
6331 one from prototype. */
6332 if (! (type
.t
& VT_VIS_MASK
))
6333 type
.t
|= sym
->type
.t
& VT_VIS_MASK
;
6335 if (!is_compatible_types(&sym
->type
, &type
)) {
6337 tcc_error("incompatible types for redefinition of '%s'",
6338 get_tok_str(v
, NULL
));
6340 type
.ref
->a
.func_proto
= 0;
6341 /* if symbol is already defined, then put complete type */
6344 /* put function symbol */
6345 sym
= global_identifier_push(v
, type
.t
, 0);
6346 sym
->type
.ref
= type
.ref
;
/* review: static inline functions are buffered as token strings and only
   compiled later (gen_inline_functions) if referenced. */
6349 /* static inline functions are just recorded as a kind
6350 of macro. Their code will be emitted at the end of
6351 the compilation unit only if they are used */
6352 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
6353 (VT_INLINE
| VT_STATIC
)) {
6355 struct InlineFunc
*fn
;
6356 const char *filename
;
6358 filename
= file
? file
->filename
: "";
6359 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
6360 strcpy(fn
->filename
, filename
);
6362 tok_str_new(&fn
->func_str
);
6368 tcc_error("unexpected end of file");
6369 tok_str_add_tok(&fn
->func_str
);
6374 } else if (t
== '}') {
6376 if (block_level
== 0)
6380 tok_str_add(&fn
->func_str
, -1);
6381 tok_str_add(&fn
->func_str
, 0);
6382 dynarray_add((void ***)&tcc_state
->inline_fns
, &tcc_state
->nb_inline_fns
, fn
);
/* review: non-inline definition — compile immediately into ad.section or
   the default text section. */
6385 /* compute text section */
6386 cur_text_section
= ad
.section
;
6387 if (!cur_text_section
)
6388 cur_text_section
= text_section
;
6389 sym
->r
= VT_SYM
| VT_CONST
;
/* review: non-definition declarators: typedefs, extern declarations, and
   variables with optional initializers. */
6394 if (btype
.t
& VT_TYPEDEF
) {
6395 /* save typedefed type */
6396 /* XXX: test storage specifiers ? */
6397 sym
= sym_push(v
, &type
, 0, 0);
6399 sym
->type
.t
|= VT_TYPEDEF
;
6402 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
6403 /* external function definition */
6404 /* specific case for func_call attribute */
6405 ad
.a
.func_proto
= 1;
6407 } else if (!(type
.t
& VT_ARRAY
)) {
6408 /* not lvalue if array */
6409 r
|= lvalue_type(type
.t
);
6411 has_init
= (tok
== '=');
6412 if (has_init
&& (type
.t
& VT_VLA
))
6413 tcc_error("Variable length array cannot be initialized");
6414 if ((btype
.t
& VT_EXTERN
) || ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
6415 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
6416 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
6417 /* external variable or function */
6418 /* NOTE: as GCC, uninitialized global static
6419 arrays of null size are considered as
6421 sym
= external_sym(v
, &type
, r
, asm_label
);
/* review: __alias__ attribute — clone the target's ELF symbol location. */
6423 if (ad
.alias_target
) {
6428 alias_target
= sym_find(ad
.alias_target
);
6429 if (!alias_target
|| !alias_target
->c
)
6430 tcc_error("unsupported forward __alias__ attribute");
6431 esym
= &((Elf32_Sym
*)symtab_section
->data
)[alias_target
->c
];
6432 tsec
.sh_num
= esym
->st_shndx
;
6433 put_extern_sym2(sym
, &tsec
, esym
->st_value
, esym
->st_size
, 0);
6436 type
.t
|= (btype
.t
& VT_STATIC
); /* Retain "static". */
6437 if (type
.t
& VT_STATIC
)
/* review: ordinary variable — allocate storage and parse its initializer. */
6443 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, asm_label
, l
);
6447 if (is_for_loop_init
)
6460 ST_FUNC
void decl(int l
)