2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 /********************************************************/
24 /* global variables */
26 /* loc : local variable index
27 ind : output code index
29 anon_sym: anonymous symbol index
31 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Section
*text_section
, *data_section
, *bss_section
; /* predefined sections */
34 ST_DATA Section
*cur_text_section
; /* current section where function code is generated */
36 ST_DATA Section
*last_text_section
; /* to handle .previous asm directive */
38 #ifdef CONFIG_TCC_BCHECK
39 /* bound check related sections */
40 ST_DATA Section
*bounds_section
; /* contains global data bound description */
41 ST_DATA Section
*lbounds_section
; /* contains local data bound description */
44 ST_DATA Section
*symtab_section
, *strtab_section
;
46 ST_DATA Section
*stab_section
, *stabstr_section
;
47 ST_DATA Sym
*sym_free_first
;
48 ST_DATA
void **sym_pools
;
49 ST_DATA
int nb_sym_pools
;
51 ST_DATA Sym
*global_stack
;
52 ST_DATA Sym
*local_stack
;
53 ST_DATA Sym
*scope_stack_bottom
;
54 ST_DATA Sym
*define_stack
;
55 ST_DATA Sym
*global_label_stack
;
56 ST_DATA Sym
*local_label_stack
;
58 ST_DATA
int vla_sp_loc_tmp
; /* vla_sp_loc is set to this when the value won't be needed later */
59 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
60 ST_DATA
int *vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
61 ST_DATA
int vla_flags
; /* VLA_* flags */
63 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
;
65 ST_DATA
int const_wanted
; /* true if constant wanted */
66 ST_DATA
int nocode_wanted
; /* true if no code generation wanted for an expression */
67 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
68 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
69 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
71 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
72 ST_DATA
char *funcname
;
74 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
;
76 /* ------------------------------------------------------------------------- */
77 static void gen_cast(CType
*type
);
78 static inline CType
*pointed_type(CType
*type
);
79 static int is_compatible_types(CType
*type1
, CType
*type2
);
80 static int parse_btype(CType
*type
, AttributeDef
*ad
);
81 static void type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
82 static void parse_expr_type(CType
*type
);
83 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
84 static void block(int *bsym
, int *csym
, int *case_sym
, int *def_sym
, int case_reg
, int is_expr
);
85 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, char *asm_label
, int scope
);
86 static int decl0(int l
, int is_for_loop_init
);
87 static void expr_eq(void);
88 static void unary_type(CType
*type
);
89 static void vla_runtime_type_size(CType
*type
, int *a
);
90 static void vla_sp_save(void);
91 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
);
92 static void expr_type(CType
*type
);
93 ST_FUNC
void vpush64(int ty
, unsigned long long v
);
94 ST_FUNC
void vpush(CType
*type
);
95 ST_FUNC
int gvtst(int inv
, int t
);
96 ST_FUNC
int is_btype_size(int bt
);
98 ST_INLN
int is_float(int t
)
102 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
105 /* we use our own 'finite' function to avoid potential problems with
106 non standard math libs */
107 /* XXX: endianness dependent */
108 ST_FUNC
int ieee_finite(double d
)
111 memcpy(p
, &d
, sizeof(double));
112 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
115 ST_FUNC
void test_lvalue(void)
117 if (!(vtop
->r
& VT_LVAL
))
121 /* ------------------------------------------------------------------------- */
122 /* symbol allocator */
123 static Sym
*__sym_malloc(void)
125 Sym
*sym_pool
, *sym
, *last_sym
;
128 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
129 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
131 last_sym
= sym_free_first
;
133 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
134 sym
->next
= last_sym
;
138 sym_free_first
= last_sym
;
142 static inline Sym
*sym_malloc(void)
145 sym
= sym_free_first
;
147 sym
= __sym_malloc();
148 sym_free_first
= sym
->next
;
152 ST_INLN
void sym_free(Sym
*sym
)
154 sym
->next
= sym_free_first
;
155 tcc_free(sym
->asm_label
);
156 sym_free_first
= sym
;
159 /* push, without hashing */
160 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, long c
)
163 if (ps
== &local_stack
) {
164 for (s
= *ps
; s
&& s
!= scope_stack_bottom
; s
= s
->prev
)
165 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
&& s
->v
== v
)
166 tcc_error("incompatible types for redefinition of '%s'",
167 get_tok_str(v
, NULL
));
185 /* find a symbol and return its associated structure. 's' is the top
186 of the symbol stack */
187 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
199 /* structure lookup */
200 ST_INLN Sym
*struct_find(int v
)
203 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
205 return table_ident
[v
]->sym_struct
;
208 /* find an identifier */
209 ST_INLN Sym
*sym_find(int v
)
212 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
214 return table_ident
[v
]->sym_identifier
;
217 /* push a given symbol on the symbol stack */
218 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
227 s
= sym_push2(ps
, v
, type
->t
, c
);
228 s
->type
.ref
= type
->ref
;
230 /* don't record fields or anonymous symbols */
232 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
233 /* record symbol in token array */
234 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
236 ps
= &ts
->sym_struct
;
238 ps
= &ts
->sym_identifier
;
245 /* push a global identifier */
246 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
249 s
= sym_push2(&global_stack
, v
, t
, c
);
250 /* don't record anonymous symbol */
251 if (v
< SYM_FIRST_ANOM
) {
252 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
253 /* modify the top most local identifier, so that
254 sym_identifier will point to 's' when popped */
256 ps
= &(*ps
)->prev_tok
;
263 /* pop symbols until top reaches 'b' */
264 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
)
274 /* remove symbol in token array */
276 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
277 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
279 ps
= &ts
->sym_struct
;
281 ps
= &ts
->sym_identifier
;
290 static void weaken_symbol(Sym
*sym
)
292 sym
->type
.t
|= VT_WEAK
;
297 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
298 esym_type
= ELFW(ST_TYPE
)(esym
->st_info
);
299 esym
->st_info
= ELFW(ST_INFO
)(STB_WEAK
, esym_type
);
303 static void apply_visibility(Sym
*sym
, CType
*type
)
305 int vis
= sym
->type
.t
& VT_VIS_MASK
;
306 int vis2
= type
->t
& VT_VIS_MASK
;
307 if (vis
== (STV_DEFAULT
<< VT_VIS_SHIFT
))
309 else if (vis2
== (STV_DEFAULT
<< VT_VIS_SHIFT
))
312 vis
= (vis
< vis2
) ? vis
: vis2
;
313 sym
->type
.t
&= ~VT_VIS_MASK
;
319 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
320 vis
>>= VT_VIS_SHIFT
;
321 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1)) | vis
;
325 /* ------------------------------------------------------------------------- */
327 ST_FUNC
void swap(int *p
, int *q
)
335 static void vsetc(CType
*type
, int r
, CValue
*vc
)
339 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
340 tcc_error("memory full (vstack)");
341 /* cannot let cpu flags if other instruction are generated. Also
342 avoid leaving VT_JMP anywhere except on the top of the stack
343 because it would complicate the code generator. */
344 if (vtop
>= vstack
) {
345 v
= vtop
->r
& VT_VALMASK
;
346 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
356 /* push constant of type "type" with useless value */
357 ST_FUNC
void vpush(CType
*type
)
360 vsetc(type
, VT_CONST
, &cval
);
363 /* push integer constant */
364 ST_FUNC
void vpushi(int v
)
368 vsetc(&int_type
, VT_CONST
, &cval
);
371 /* push a pointer sized constant */
372 static void vpushs(addr_t v
)
376 vsetc(&size_type
, VT_CONST
, &cval
);
379 /* push arbitrary 64bit constant */
380 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
387 vsetc(&ctype
, VT_CONST
, &cval
);
390 /* push long long constant */
391 static inline void vpushll(long long v
)
393 vpush64(VT_LLONG
, v
);
396 /* push a symbol value of TYPE */
397 static inline void vpushsym(CType
*type
, Sym
*sym
)
401 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
405 /* Return a static symbol pointing to a section */
406 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
412 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
413 sym
->type
.ref
= type
->ref
;
414 sym
->r
= VT_CONST
| VT_SYM
;
415 put_extern_sym(sym
, sec
, offset
, size
);
419 /* push a reference to a section offset by adding a dummy symbol */
420 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
422 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
425 /* define a new external reference to a symbol 'v' of type 'u' */
426 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
432 /* push forward reference */
433 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
434 s
->type
.ref
= type
->ref
;
435 s
->r
= r
| VT_CONST
| VT_SYM
;
440 /* define a new external reference to a symbol 'v' with alternate asm
441 name 'asm_label' of type 'u'. 'asm_label' is equal to NULL if there
442 is no alternate name (most cases) */
443 static Sym
*external_sym(int v
, CType
*type
, int r
, char *asm_label
)
449 /* push forward reference */
450 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
451 s
->asm_label
= asm_label
;
452 s
->type
.t
|= VT_EXTERN
;
453 } else if (s
->type
.ref
== func_old_type
.ref
) {
454 s
->type
.ref
= type
->ref
;
455 s
->r
= r
| VT_CONST
| VT_SYM
;
456 s
->type
.t
|= VT_EXTERN
;
457 } else if (!is_compatible_types(&s
->type
, type
)) {
458 tcc_error("incompatible types for redefinition of '%s'",
459 get_tok_str(v
, NULL
));
461 /* Merge some storage attributes. */
462 if (type
->t
& VT_WEAK
)
465 if (type
->t
& VT_VIS_MASK
)
466 apply_visibility(s
, type
);
471 /* push a reference to global symbol v */
472 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
474 vpushsym(type
, external_global_sym(v
, type
, 0));
477 ST_FUNC
void vset(CType
*type
, int r
, int v
)
482 vsetc(type
, r
, &cval
);
485 static void vseti(int r
, int v
)
493 ST_FUNC
void vswap(void)
496 /* cannot let cpu flags if other instruction are generated. Also
497 avoid leaving VT_JMP anywhere except on the top of the stack
498 because it would complicate the code generator. */
499 if (vtop
>= vstack
) {
500 int v
= vtop
->r
& VT_VALMASK
;
501 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
508 /* XXX: +2% overall speed possible with optimized memswap
510 * memswap(&vtop[0], &vtop[1], sizeof *vtop);
514 ST_FUNC
void vpushv(SValue
*v
)
516 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
517 tcc_error("memory full (vstack)");
522 static void vdup(void)
527 /* save r to the memory stack, and mark it as being free */
528 ST_FUNC
void save_reg(int r
)
530 int l
, saved
, size
, align
;
534 /* modify all stack values */
537 for(p
=vstack
;p
<=vtop
;p
++) {
538 if ((p
->r
& VT_VALMASK
) == r
||
539 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
540 /* must save value on stack if not already done */
542 /* NOTE: must reload 'r' because r might be equal to r2 */
543 r
= p
->r
& VT_VALMASK
;
544 /* store register in the stack */
546 if ((p
->r
& VT_LVAL
) ||
547 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
548 #ifdef TCC_TARGET_X86_64
549 type
= &char_pointer_type
;
553 size
= type_size(type
, &align
);
554 loc
= (loc
- size
) & -align
;
556 sv
.r
= VT_LOCAL
| VT_LVAL
;
559 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
560 /* x86 specific: need to pop fp register ST0 if saved */
562 o(0xd8dd); /* fstp %st(0) */
565 #ifndef TCC_TARGET_X86_64
566 /* special long long case */
567 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
575 /* mark that stack entry as being saved on the stack */
576 if (p
->r
& VT_LVAL
) {
577 /* also clear the bounded flag because the
578 relocation address of the function was stored in
580 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
582 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
590 #ifdef TCC_TARGET_ARM
591 /* find a register of class 'rc2' with at most one reference on stack.
592 * If none, call get_reg(rc) */
593 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
598 for(r
=0;r
<NB_REGS
;r
++) {
599 if (reg_classes
[r
] & rc2
) {
602 for(p
= vstack
; p
<= vtop
; p
++) {
603 if ((p
->r
& VT_VALMASK
) == r
||
604 (p
->r2
& VT_VALMASK
) == r
)
615 /* find a free register of class 'rc'. If none, save one register */
616 ST_FUNC
int get_reg(int rc
)
621 /* find a free register */
622 for(r
=0;r
<NB_REGS
;r
++) {
623 if (reg_classes
[r
] & rc
) {
624 for(p
=vstack
;p
<=vtop
;p
++) {
625 if ((p
->r
& VT_VALMASK
) == r
||
626 (p
->r2
& VT_VALMASK
) == r
)
634 /* no register left : free the first one on the stack (VERY
635 IMPORTANT to start from the bottom to ensure that we don't
636 spill registers used in gen_opi()) */
637 for(p
=vstack
;p
<=vtop
;p
++) {
638 /* look at second register (if long long) */
639 r
= p
->r2
& VT_VALMASK
;
640 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
642 r
= p
->r
& VT_VALMASK
;
643 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
649 /* Should never comes here */
653 /* save registers up to (vtop - n) stack entry */
654 ST_FUNC
void save_regs(int n
)
659 for(p
= vstack
;p
<= p1
; p
++) {
660 r
= p
->r
& VT_VALMASK
;
667 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
669 static void move_reg(int r
, int s
, int t
)
683 /* get address of vtop (vtop MUST BE an lvalue) */
684 static void gaddrof(void)
686 if (vtop
->r
& VT_REF
)
689 /* tricky: if saved lvalue, then we can go back to lvalue */
690 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
691 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
696 #ifdef CONFIG_TCC_BCHECK
697 /* generate lvalue bound code */
698 static void gbound(void)
703 vtop
->r
&= ~VT_MUSTBOUND
;
704 /* if lvalue, then use checking code before dereferencing */
705 if (vtop
->r
& VT_LVAL
) {
706 /* if not VT_BOUNDED value, then make one */
707 if (!(vtop
->r
& VT_BOUNDED
)) {
708 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
709 /* must save type because we must set it to int to get pointer */
711 vtop
->type
.t
= VT_INT
;
714 gen_bounded_ptr_add();
715 vtop
->r
|= lval_type
;
718 /* then check for dereferencing */
719 gen_bounded_ptr_deref();
724 /* store vtop a register belonging to class 'rc'. lvalues are
725 converted to values. Cannot be used if cannot be converted to
726 register value (such as structures). */
727 ST_FUNC
int gv(int rc
)
729 int r
, bit_pos
, bit_size
, size
, align
, i
;
732 /* NOTE: get_reg can modify vstack[] */
733 if (vtop
->type
.t
& VT_BITFIELD
) {
736 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
737 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
738 /* remove bit field info to avoid loops */
739 vtop
->type
.t
&= ~(VT_BITFIELD
| (-1 << VT_STRUCT_SHIFT
));
740 /* cast to int to propagate signedness in following ops */
741 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
746 if((vtop
->type
.t
& VT_UNSIGNED
) ||
747 (vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
748 type
.t
|= VT_UNSIGNED
;
750 /* generate shifts */
751 vpushi(bits
- (bit_pos
+ bit_size
));
753 vpushi(bits
- bit_size
);
754 /* NOTE: transformed to SHR if unsigned */
758 if (is_float(vtop
->type
.t
) &&
759 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
762 unsigned long offset
;
763 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
767 /* XXX: unify with initializers handling ? */
768 /* CPUs usually cannot use float constants, so we store them
769 generically in data segment */
770 size
= type_size(&vtop
->type
, &align
);
771 offset
= (data_section
->data_offset
+ align
- 1) & -align
;
772 data_section
->data_offset
= offset
;
773 /* XXX: not portable yet */
774 #if defined(__i386__) || defined(__x86_64__)
775 /* Zero pad x87 tenbyte long doubles */
776 if (size
== LDOUBLE_SIZE
) {
777 vtop
->c
.tab
[2] &= 0xffff;
778 #if LDOUBLE_SIZE == 16
783 ptr
= section_ptr_add(data_section
, size
);
785 #if defined(TCC_TARGET_ARM) && !defined(TCC_ARM_VFP)
789 ptr
[i
] = vtop
->c
.tab
[size
-1-i
];
793 ptr
[i
] = vtop
->c
.tab
[i
];
794 sym
= get_sym_ref(&vtop
->type
, data_section
, offset
, size
<< 2);
795 vtop
->r
|= VT_LVAL
| VT_SYM
;
797 vtop
->c
.ptr_offset
= 0;
799 #ifdef CONFIG_TCC_BCHECK
800 if (vtop
->r
& VT_MUSTBOUND
)
804 r
= vtop
->r
& VT_VALMASK
;
805 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
808 #ifdef TCC_TARGET_X86_64
809 else if (rc
== RC_FRET
)
813 /* need to reload if:
815 - lvalue (need to dereference pointer)
816 - already a register, but not in the right class */
818 || (vtop
->r
& VT_LVAL
)
819 || !(reg_classes
[r
] & rc
)
820 #ifdef TCC_TARGET_X86_64
821 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
822 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
824 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
829 #ifdef TCC_TARGET_X86_64
830 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
831 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
833 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
834 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
835 unsigned long long ll
;
837 int r2
, original_type
;
838 original_type
= vtop
->type
.t
;
839 /* two register type load : expand to two words
841 #ifndef TCC_TARGET_X86_64
842 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
845 vtop
->c
.ui
= ll
; /* first word */
847 vtop
->r
= r
; /* save register value */
848 vpushi(ll
>> 32); /* second word */
851 if (r
>= VT_CONST
|| /* XXX: test to VT_CONST incorrect ? */
852 (vtop
->r
& VT_LVAL
)) {
853 /* We do not want to modifier the long long
854 pointer here, so the safest (and less
855 efficient) is to save all the other registers
856 in the stack. XXX: totally inefficient. */
858 /* load from memory */
859 vtop
->type
.t
= load_type
;
862 vtop
[-1].r
= r
; /* save register value */
863 /* increment pointer to get second word */
864 vtop
->type
.t
= addr_type
;
869 vtop
->type
.t
= load_type
;
874 vtop
[-1].r
= r
; /* save register value */
875 vtop
->r
= vtop
[-1].r2
;
877 /* Allocate second register. Here we rely on the fact that
878 get_reg() tries first to free r2 of an SValue. */
882 /* write second register */
884 vtop
->type
.t
= original_type
;
885 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
887 /* lvalue of scalar type : need to use lvalue type
888 because of possible cast */
891 /* compute memory access type */
892 if (vtop
->r
& VT_REF
)
893 #ifdef TCC_TARGET_X86_64
898 else if (vtop
->r
& VT_LVAL_BYTE
)
900 else if (vtop
->r
& VT_LVAL_SHORT
)
902 if (vtop
->r
& VT_LVAL_UNSIGNED
)
906 /* restore wanted type */
909 /* one register type load */
914 #ifdef TCC_TARGET_C67
915 /* uses register pairs for doubles */
916 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
923 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
924 ST_FUNC
void gv2(int rc1
, int rc2
)
928 /* generate more generic register first. But VT_JMP or VT_CMP
929 values must be generated first in all cases to avoid possible
931 v
= vtop
[0].r
& VT_VALMASK
;
932 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
937 /* test if reload is needed for first register */
938 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
948 /* test if reload is needed for first register */
949 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
955 /* wrapper around RC_FRET to return a register by type */
956 static int rc_fret(int t
)
958 #ifdef TCC_TARGET_X86_64
959 if (t
== VT_LDOUBLE
) {
966 /* wrapper around REG_FRET to return a register by type */
967 static int reg_fret(int t
)
969 #ifdef TCC_TARGET_X86_64
970 if (t
== VT_LDOUBLE
) {
977 /* expand long long on stack in two int registers */
978 static void lexpand(void)
982 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
985 vtop
[0].r
= vtop
[-1].r2
;
986 vtop
[0].r2
= VT_CONST
;
987 vtop
[-1].r2
= VT_CONST
;
988 vtop
[0].type
.t
= VT_INT
| u
;
989 vtop
[-1].type
.t
= VT_INT
| u
;
992 #ifdef TCC_TARGET_ARM
993 /* expand long long on stack */
994 ST_FUNC
void lexpand_nr(void)
998 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1000 vtop
->r2
= VT_CONST
;
1001 vtop
->type
.t
= VT_INT
| u
;
1002 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1003 if (v
== VT_CONST
) {
1004 vtop
[-1].c
.ui
= vtop
->c
.ull
;
1005 vtop
->c
.ui
= vtop
->c
.ull
>> 32;
1007 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1009 vtop
->r
= vtop
[-1].r
;
1010 } else if (v
> VT_CONST
) {
1014 vtop
->r
= vtop
[-1].r2
;
1015 vtop
[-1].r2
= VT_CONST
;
1016 vtop
[-1].type
.t
= VT_INT
| u
;
1020 /* build a long long from two ints */
1021 static void lbuild(int t
)
1023 gv2(RC_INT
, RC_INT
);
1024 vtop
[-1].r2
= vtop
[0].r
;
1025 vtop
[-1].type
.t
= t
;
1029 /* rotate n first stack elements to the bottom
1030 I1 ... In -> I2 ... In I1 [top is right]
1032 ST_FUNC
void vrotb(int n
)
1038 for(i
=-n
+1;i
!=0;i
++)
1039 vtop
[i
] = vtop
[i
+1];
1043 /* rotate the n elements before entry e towards the top
1044 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1046 ST_FUNC
void vrote(SValue
*e
, int n
)
1052 for(i
= 0;i
< n
- 1; i
++)
1057 /* rotate n first stack elements to the top
1058 I1 ... In -> In I1 ... I(n-1) [top is right]
1060 ST_FUNC
void vrott(int n
)
1065 /* pop stack value */
1066 ST_FUNC
void vpop(void)
1069 v
= vtop
->r
& VT_VALMASK
;
1070 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1071 /* for x86, we need to pop the FP stack */
1072 if (v
== TREG_ST0
&& !nocode_wanted
) {
1073 o(0xd8dd); /* fstp %st(0) */
1076 if (v
== VT_JMP
|| v
== VT_JMPI
) {
1077 /* need to put correct jump if && or || without test */
1083 /* convert stack entry to register and duplicate its value in another
1085 static void gv_dup(void)
1091 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1098 /* stack: H L L1 H1 */
1106 /* duplicate value */
1111 #ifdef TCC_TARGET_X86_64
1112 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1122 load(r1
, &sv
); /* move r to r1 */
1124 /* duplicates value */
1130 /* Generate value test
1132 * Generate a test for any value (jump, comparison and integers) */
1133 ST_FUNC
int gvtst(int inv
, int t
)
1135 int v
= vtop
->r
& VT_VALMASK
;
1136 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1140 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1141 /* constant jmp optimization */
1142 if ((vtop
->c
.i
!= 0) != inv
)
1147 return gtst(inv
, t
);
1150 #ifndef TCC_TARGET_X86_64
1151 /* generate CPU independent (unsigned) long long operations */
1152 static void gen_opl(int op
)
1154 int t
, a
, b
, op1
, c
, i
;
1156 unsigned short reg_iret
= REG_IRET
;
1157 unsigned short reg_lret
= REG_LRET
;
1163 func
= TOK___divdi3
;
1166 func
= TOK___udivdi3
;
1169 func
= TOK___moddi3
;
1172 func
= TOK___umoddi3
;
1179 /* call generic long long function */
1180 vpush_global_sym(&func_old_type
, func
);
1185 vtop
->r2
= reg_lret
;
1198 /* stack: L1 H1 L2 H2 */
1203 vtop
[-2] = vtop
[-3];
1206 /* stack: H1 H2 L1 L2 */
1212 /* stack: H1 H2 L1 L2 ML MH */
1215 /* stack: ML MH H1 H2 L1 L2 */
1219 /* stack: ML MH H1 L2 H2 L1 */
1224 /* stack: ML MH M1 M2 */
1227 } else if (op
== '+' || op
== '-') {
1228 /* XXX: add non carry method too (for MIPS or alpha) */
1234 /* stack: H1 H2 (L1 op L2) */
1237 gen_op(op1
+ 1); /* TOK_xxxC2 */
1240 /* stack: H1 H2 (L1 op L2) */
1243 /* stack: (L1 op L2) H1 H2 */
1245 /* stack: (L1 op L2) (H1 op H2) */
1253 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1254 t
= vtop
[-1].type
.t
;
1258 /* stack: L H shift */
1260 /* constant: simpler */
1261 /* NOTE: all comments are for SHL. the other cases are
1262 done by swaping words */
1273 if (op
!= TOK_SAR
) {
1306 /* XXX: should provide a faster fallback on x86 ? */
1309 func
= TOK___ashrdi3
;
1312 func
= TOK___lshrdi3
;
1315 func
= TOK___ashldi3
;
1321 /* compare operations */
1327 /* stack: L1 H1 L2 H2 */
1329 vtop
[-1] = vtop
[-2];
1331 /* stack: L1 L2 H1 H2 */
1334 /* when values are equal, we need to compare low words. since
1335 the jump is inverted, we invert the test too. */
1338 else if (op1
== TOK_GT
)
1340 else if (op1
== TOK_ULT
)
1342 else if (op1
== TOK_UGT
)
1347 if (op1
!= TOK_NE
) {
1351 /* generate non equal test */
1352 /* XXX: NOT PORTABLE yet */
1356 #if defined(TCC_TARGET_I386)
1357 b
= psym(0x850f, 0);
1358 #elif defined(TCC_TARGET_ARM)
1360 o(0x1A000000 | encbranch(ind
, 0, 1));
1361 #elif defined(TCC_TARGET_C67)
1362 tcc_error("not implemented");
1364 #error not supported
1368 /* compare low. Always unsigned */
1372 else if (op1
== TOK_LE
)
1374 else if (op1
== TOK_GT
)
1376 else if (op1
== TOK_GE
)
1387 /* handle integer constant optimizations and various machine
1389 static void gen_opic(int op
)
1391 int c1
, c2
, t1
, t2
, n
;
1394 typedef unsigned long long U
;
1398 t1
= v1
->type
.t
& VT_BTYPE
;
1399 t2
= v2
->type
.t
& VT_BTYPE
;
1403 else if (v1
->type
.t
& VT_UNSIGNED
)
1410 else if (v2
->type
.t
& VT_UNSIGNED
)
1415 /* currently, we cannot do computations with forward symbols */
1416 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1417 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1420 case '+': l1
+= l2
; break;
1421 case '-': l1
-= l2
; break;
1422 case '&': l1
&= l2
; break;
1423 case '^': l1
^= l2
; break;
1424 case '|': l1
|= l2
; break;
1425 case '*': l1
*= l2
; break;
1432 /* if division by zero, generate explicit division */
1435 tcc_error("division by zero in constant");
1439 default: l1
/= l2
; break;
1440 case '%': l1
%= l2
; break;
1441 case TOK_UDIV
: l1
= (U
)l1
/ l2
; break;
1442 case TOK_UMOD
: l1
= (U
)l1
% l2
; break;
1445 case TOK_SHL
: l1
<<= l2
; break;
1446 case TOK_SHR
: l1
= (U
)l1
>> l2
; break;
1447 case TOK_SAR
: l1
>>= l2
; break;
1449 case TOK_ULT
: l1
= (U
)l1
< (U
)l2
; break;
1450 case TOK_UGE
: l1
= (U
)l1
>= (U
)l2
; break;
1451 case TOK_EQ
: l1
= l1
== l2
; break;
1452 case TOK_NE
: l1
= l1
!= l2
; break;
1453 case TOK_ULE
: l1
= (U
)l1
<= (U
)l2
; break;
1454 case TOK_UGT
: l1
= (U
)l1
> (U
)l2
; break;
1455 case TOK_LT
: l1
= l1
< l2
; break;
1456 case TOK_GE
: l1
= l1
>= l2
; break;
1457 case TOK_LE
: l1
= l1
<= l2
; break;
1458 case TOK_GT
: l1
= l1
> l2
; break;
1460 case TOK_LAND
: l1
= l1
&& l2
; break;
1461 case TOK_LOR
: l1
= l1
|| l2
; break;
1468 /* if commutative ops, put c2 as constant */
1469 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1470 op
== '|' || op
== '*')) {
1472 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1473 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1475 /* Filter out NOP operations like x*1, x-0, x&-1... */
1476 if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1479 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1480 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1486 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1487 /* try to use shifts instead of muls or divs */
1488 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1497 else if (op
== TOK_PDIV
)
1503 } else if (c2
&& (op
== '+' || op
== '-') &&
1504 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1505 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1506 /* symbol + constant case */
1513 if (!nocode_wanted
) {
1514 /* call low level op generator */
1515 if (t1
== VT_LLONG
|| t2
== VT_LLONG
)
1526 /* generate a floating point operation with constant propagation */
1527 static void gen_opif(int op
)
1535 /* currently, we cannot do computations with forward symbols */
1536 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1537 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1539 if (v1
->type
.t
== VT_FLOAT
) {
1542 } else if (v1
->type
.t
== VT_DOUBLE
) {
1550 /* NOTE: we only do constant propagation if finite number (not
1551 NaN or infinity) (ANSI spec) */
1552 if (!ieee_finite(f1
) || !ieee_finite(f2
))
1556 case '+': f1
+= f2
; break;
1557 case '-': f1
-= f2
; break;
1558 case '*': f1
*= f2
; break;
1562 tcc_error("division by zero in constant");
1567 /* XXX: also handles tests ? */
1571 /* XXX: overflow test ? */
1572 if (v1
->type
.t
== VT_FLOAT
) {
1574 } else if (v1
->type
.t
== VT_DOUBLE
) {
1582 if (!nocode_wanted
) {
1590 static int pointed_size(CType
*type
)
1593 return type_size(pointed_type(type
), &align
);
1596 static void vla_runtime_pointed_size(CType
*type
)
1599 vla_runtime_type_size(pointed_type(type
), &align
);
1602 static inline int is_null_pointer(SValue
*p
)
1604 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
1606 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& p
->c
.i
== 0) ||
1607 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.ll
== 0) ||
1608 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&& p
->c
.ptr_offset
== 0);
1611 static inline int is_integer_btype(int bt
)
1613 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
1614 bt
== VT_INT
|| bt
== VT_LLONG
);
1617 /* check types for comparison or subtraction of pointers */
1618 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
1620 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
1623 /* null pointers are accepted for all comparisons as gcc */
1624 if (is_null_pointer(p1
) || is_null_pointer(p2
))
1628 bt1
= type1
->t
& VT_BTYPE
;
1629 bt2
= type2
->t
& VT_BTYPE
;
1630 /* accept comparison between pointer and integer with a warning */
1631 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
1632 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
1633 tcc_warning("comparison between pointer and integer");
1637 /* both must be pointers or implicit function pointers */
1638 if (bt1
== VT_PTR
) {
1639 type1
= pointed_type(type1
);
1640 } else if (bt1
!= VT_FUNC
)
1641 goto invalid_operands
;
1643 if (bt2
== VT_PTR
) {
1644 type2
= pointed_type(type2
);
1645 } else if (bt2
!= VT_FUNC
) {
1647 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
1649 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
1650 (type2
->t
& VT_BTYPE
) == VT_VOID
)
1654 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1655 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
1656 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
1657 /* gcc-like error if '-' is used */
1659 goto invalid_operands
;
1661 tcc_warning("comparison of distinct pointer types lacks a cast");
1665 /* generic gen_op: handles types problems */
1666 ST_FUNC
void gen_op(int op
)
1668 int u
, t1
, t2
, bt1
, bt2
, t
;
1671 t1
= vtop
[-1].type
.t
;
1672 t2
= vtop
[0].type
.t
;
1673 bt1
= t1
& VT_BTYPE
;
1674 bt2
= t2
& VT_BTYPE
;
1676 if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
1677 /* at least one operand is a pointer */
1678 /* relational op: must be both pointers */
1679 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
1680 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
1681 /* pointers are handled as unsigned */
1682 #ifdef TCC_TARGET_X86_64
1683 t
= VT_LLONG
| VT_UNSIGNED
;
1685 t
= VT_INT
| VT_UNSIGNED
;
1689 /* if both pointers, then it must be the '-' op */
1690 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
1692 tcc_error("cannot use pointers here");
1693 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
1694 /* XXX: check that types are compatible */
1695 if (vtop
[-1].type
.t
& VT_VLA
) {
1696 vla_runtime_pointed_size(&vtop
[-1].type
);
1698 vpushi(pointed_size(&vtop
[-1].type
));
1702 /* set to integer type */
1703 #ifdef TCC_TARGET_X86_64
1704 vtop
->type
.t
= VT_LLONG
;
1706 vtop
->type
.t
= VT_INT
;
1711 /* exactly one pointer : must be '+' or '-'. */
1712 if (op
!= '-' && op
!= '+')
1713 tcc_error("cannot use pointers here");
1714 /* Put pointer as first operand */
1715 if (bt2
== VT_PTR
) {
1719 type1
= vtop
[-1].type
;
1720 type1
.t
&= ~VT_ARRAY
;
1721 if (vtop
[-1].type
.t
& VT_VLA
)
1722 vla_runtime_pointed_size(&vtop
[-1].type
);
1724 u
= pointed_size(&vtop
[-1].type
);
1726 tcc_error("unknown array element size");
1727 #ifdef TCC_TARGET_X86_64
1730 /* XXX: cast to int ? (long long case) */
1735 #ifdef CONFIG_TCC_BCHECK
1736 /* if evaluating constant expression, no code should be
1737 generated, so no bound check */
1738 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
1739 /* if bounded pointers, we generate a special code to
1746 gen_bounded_ptr_add();
1752 /* put again type if gen_opic() swapped operands */
1755 } else if (is_float(bt1
) || is_float(bt2
)) {
1756 /* compute bigger type and do implicit casts */
1757 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
1759 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
1764 /* floats can only be used for a few operations */
1765 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
1766 (op
< TOK_ULT
|| op
> TOK_GT
))
1767 tcc_error("invalid operands for binary operation");
1769 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
1770 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
1771 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (t
| VT_UNSIGNED
))
1774 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
1775 /* cast to biggest op */
1777 /* convert to unsigned if it does not fit in a long long */
1778 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
) ||
1779 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
))
1782 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
1783 tcc_error("comparison of struct");
1785 /* integer operations */
1787 /* convert to unsigned if it does not fit in an integer */
1788 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
) ||
1789 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
))
1792 /* XXX: currently, some unsigned operations are explicit, so
1793 we modify them here */
1794 if (t
& VT_UNSIGNED
) {
1801 else if (op
== TOK_LT
)
1803 else if (op
== TOK_GT
)
1805 else if (op
== TOK_LE
)
1807 else if (op
== TOK_GE
)
1814 /* special case for shifts and long long: we keep the shift as
1816 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
1823 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
1824 /* relational op: the result is an int */
1825 vtop
->type
.t
= VT_INT
;
1832 #ifndef TCC_TARGET_ARM
1833 /* generic itof for unsigned long long case */
1834 static void gen_cvt_itof1(int t
)
1836 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
1837 (VT_LLONG
| VT_UNSIGNED
)) {
1840 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
1841 #if LDOUBLE_SIZE != 8
1842 else if (t
== VT_LDOUBLE
)
1843 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
1846 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
1850 vtop
->r
= reg_fret(t
);
1857 /* generic ftoi for unsigned long long case */
1858 static void gen_cvt_ftoi1(int t
)
1862 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
1863 /* not handled natively */
1864 st
= vtop
->type
.t
& VT_BTYPE
;
1866 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
1867 #if LDOUBLE_SIZE != 8
1868 else if (st
== VT_LDOUBLE
)
1869 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
1872 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
1877 vtop
->r2
= REG_LRET
;
1883 /* force char or short cast */
1884 static void force_charshort_cast(int t
)
1888 /* XXX: add optimization if lvalue : just change type and offset */
1893 if (t
& VT_UNSIGNED
) {
1894 vpushi((1 << bits
) - 1);
1900 /* result must be signed or the SAR is converted to an SHL
1901 This was not the case when "t" was a signed short
1902 and the last value on the stack was an unsigned int */
1903 vtop
->type
.t
&= ~VT_UNSIGNED
;
1909 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
1910 static void gen_cast(CType
*type
)
1912 int sbt
, dbt
, sf
, df
, c
, p
;
1914 /* special delayed cast for char/short */
1915 /* XXX: in some cases (multiple cascaded casts), it may still
1917 if (vtop
->r
& VT_MUSTCAST
) {
1918 vtop
->r
&= ~VT_MUSTCAST
;
1919 force_charshort_cast(vtop
->type
.t
);
1922 /* bitfields first get cast to ints */
1923 if (vtop
->type
.t
& VT_BITFIELD
) {
1927 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
1928 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
1933 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1934 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
1936 /* constant case: we can do it now */
1937 /* XXX: in ISOC, cannot do it if error in convert */
1938 if (sbt
== VT_FLOAT
)
1939 vtop
->c
.ld
= vtop
->c
.f
;
1940 else if (sbt
== VT_DOUBLE
)
1941 vtop
->c
.ld
= vtop
->c
.d
;
1944 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
1945 if (sbt
& VT_UNSIGNED
)
1946 vtop
->c
.ld
= vtop
->c
.ull
;
1948 vtop
->c
.ld
= vtop
->c
.ll
;
1950 if (sbt
& VT_UNSIGNED
)
1951 vtop
->c
.ld
= vtop
->c
.ui
;
1953 vtop
->c
.ld
= vtop
->c
.i
;
1956 if (dbt
== VT_FLOAT
)
1957 vtop
->c
.f
= (float)vtop
->c
.ld
;
1958 else if (dbt
== VT_DOUBLE
)
1959 vtop
->c
.d
= (double)vtop
->c
.ld
;
1960 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
1961 vtop
->c
.ull
= (unsigned long long)vtop
->c
.ld
;
1962 } else if (sf
&& dbt
== VT_BOOL
) {
1963 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
1966 vtop
->c
.ll
= (long long)vtop
->c
.ld
;
1967 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
1968 vtop
->c
.ll
= vtop
->c
.ull
;
1969 else if (sbt
& VT_UNSIGNED
)
1970 vtop
->c
.ll
= vtop
->c
.ui
;
1971 #ifdef TCC_TARGET_X86_64
1972 else if (sbt
== VT_PTR
)
1975 else if (sbt
!= VT_LLONG
)
1976 vtop
->c
.ll
= vtop
->c
.i
;
1978 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
1979 vtop
->c
.ull
= vtop
->c
.ll
;
1980 else if (dbt
== VT_BOOL
)
1981 vtop
->c
.i
= (vtop
->c
.ll
!= 0);
1982 #ifdef TCC_TARGET_X86_64
1983 else if (dbt
== VT_PTR
)
1986 else if (dbt
!= VT_LLONG
) {
1988 if ((dbt
& VT_BTYPE
) == VT_BYTE
)
1990 else if ((dbt
& VT_BTYPE
) == VT_SHORT
)
1992 if(dbt
& VT_UNSIGNED
)
1993 vtop
->c
.ui
= ((unsigned int)vtop
->c
.ll
<< s
) >> s
;
1995 vtop
->c
.i
= ((int)vtop
->c
.ll
<< s
) >> s
;
1998 } else if (p
&& dbt
== VT_BOOL
) {
2001 } else if (!nocode_wanted
) {
2002 /* non constant case: generate code */
2004 /* convert from fp to fp */
2007 /* convert int to fp */
2010 /* convert fp to int */
2011 if (dbt
== VT_BOOL
) {
2015 /* we handle char/short/etc... with generic code */
2016 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2017 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2021 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2022 /* additional cast for char/short... */
2027 #ifndef TCC_TARGET_X86_64
2028 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2029 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2030 /* scalar to long long */
2031 /* machine independent conversion */
2033 /* generate high word */
2034 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2038 if (sbt
== VT_PTR
) {
2039 /* cast from pointer to int before we apply
2040 shift operation, which pointers don't support*/
2041 gen_cast(&int_type
);
2047 /* patch second register */
2048 vtop
[-1].r2
= vtop
->r
;
2052 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2053 (dbt
& VT_BTYPE
) == VT_PTR
||
2054 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2055 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2056 (sbt
& VT_BTYPE
) != VT_PTR
&&
2057 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2058 /* need to convert from 32bit to 64bit */
2060 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2061 /* x86_64 specific: movslq */
2063 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2067 } else if (dbt
== VT_BOOL
) {
2068 /* scalar to bool */
2071 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2072 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2073 if (sbt
== VT_PTR
) {
2074 vtop
->type
.t
= VT_INT
;
2075 tcc_warning("nonportable conversion from pointer to char/short");
2077 force_charshort_cast(dbt
);
2078 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2080 if (sbt
== VT_LLONG
) {
2081 /* from long long: just take low order word */
2085 /* if lvalue and single word type, nothing to do because
2086 the lvalue already contains the real type size (see
2087 VT_LVAL_xxx constants) */
2090 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2091 /* if we are casting between pointer types,
2092 we must update the VT_LVAL_xxx size */
2093 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2094 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2099 /* return type size as known at compile time. Put alignment at 'a' */
2100 ST_FUNC
int type_size(CType
*type
, int *a
)
2105 bt
= type
->t
& VT_BTYPE
;
2106 if (bt
== VT_STRUCT
) {
2111 } else if (bt
== VT_PTR
) {
2112 if (type
->t
& VT_ARRAY
) {
2116 ts
= type_size(&s
->type
, a
);
2118 if (ts
< 0 && s
->c
< 0)
2126 } else if (bt
== VT_LDOUBLE
) {
2128 return LDOUBLE_SIZE
;
2129 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2130 #ifdef TCC_TARGET_I386
2131 #ifdef TCC_TARGET_PE
2136 #elif defined(TCC_TARGET_ARM)
2146 } else if (bt
== VT_INT
|| bt
== VT_ENUM
|| bt
== VT_FLOAT
) {
2149 } else if (bt
== VT_SHORT
) {
2152 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2156 /* char, void, function, _Bool */
2162 /* push type size as known at runtime time on top of value stack. Put
2164 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2166 if (type
->t
& VT_VLA
) {
2167 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2169 vpushi(type_size(type
, a
));
2173 static void vla_sp_save(void) {
2174 if (!(vla_flags
& VLA_SP_LOC_SET
)) {
2175 *vla_sp_loc
= (loc
-= PTR_SIZE
);
2176 vla_flags
|= VLA_SP_LOC_SET
;
2178 if (!(vla_flags
& VLA_SP_SAVED
)) {
2179 gen_vla_sp_save(*vla_sp_loc
);
2180 vla_flags
|= VLA_SP_SAVED
;
2184 /* return the pointed type of t */
2185 static inline CType
*pointed_type(CType
*type
)
2187 return &type
->ref
->type
;
2190 /* modify type so that it is a pointer to type. */
2191 ST_FUNC
void mk_pointer(CType
*type
)
2194 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2195 type
->t
= VT_PTR
| (type
->t
& ~VT_TYPE
);
2199 /* compare function types. OLD functions match any new functions */
2200 static int is_compatible_func(CType
*type1
, CType
*type2
)
2206 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2208 /* check func_call */
2209 if (s1
->a
.func_call
!= s2
->a
.func_call
)
2211 /* XXX: not complete */
2212 if (s1
->c
== FUNC_OLD
|| s2
->c
== FUNC_OLD
)
2216 while (s1
!= NULL
) {
2219 if (!is_compatible_parameter_types(&s1
->type
, &s2
->type
))
2229 /* return true if type1 and type2 are the same. If unqualified is
2230 true, qualifiers on the types are ignored.
2232 - enums are not checked as gcc __builtin_types_compatible_p ()
2234 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2238 t1
= type1
->t
& VT_TYPE
;
2239 t2
= type2
->t
& VT_TYPE
;
2241 /* strip qualifiers before comparing */
2242 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2243 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2245 /* Default Vs explicit signedness only matters for char */
2246 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2250 /* XXX: bitfields ? */
2253 /* test more complicated cases */
2254 bt1
= t1
& VT_BTYPE
;
2255 if (bt1
== VT_PTR
) {
2256 type1
= pointed_type(type1
);
2257 type2
= pointed_type(type2
);
2258 return is_compatible_types(type1
, type2
);
2259 } else if (bt1
== VT_STRUCT
) {
2260 return (type1
->ref
== type2
->ref
);
2261 } else if (bt1
== VT_FUNC
) {
2262 return is_compatible_func(type1
, type2
);
2268 /* return true if type1 and type2 are exactly the same (including
2271 static int is_compatible_types(CType
*type1
, CType
*type2
)
2273 return compare_types(type1
,type2
,0);
2276 /* return true if type1 and type2 are the same (ignoring qualifiers).
2278 static int is_compatible_parameter_types(CType
*type1
, CType
*type2
)
2280 return compare_types(type1
,type2
,1);
2283 /* print a type. If 'varstr' is not NULL, then the variable is also
2284 printed in the type */
2286 /* XXX: add array and function pointers */
2287 static void type_to_str(char *buf
, int buf_size
,
2288 CType
*type
, const char *varstr
)
2295 t
= type
->t
& VT_TYPE
;
2298 if (t
& VT_CONSTANT
)
2299 pstrcat(buf
, buf_size
, "const ");
2300 if (t
& VT_VOLATILE
)
2301 pstrcat(buf
, buf_size
, "volatile ");
2302 if ((t
& (VT_DEFSIGN
| VT_UNSIGNED
)) == (VT_DEFSIGN
| VT_UNSIGNED
))
2303 pstrcat(buf
, buf_size
, "unsigned ");
2304 else if (t
& VT_DEFSIGN
)
2305 pstrcat(buf
, buf_size
, "signed ");
2335 tstr
= "long double";
2337 pstrcat(buf
, buf_size
, tstr
);
2341 if (bt
== VT_STRUCT
)
2345 pstrcat(buf
, buf_size
, tstr
);
2346 v
= type
->ref
->v
& ~SYM_STRUCT
;
2347 if (v
>= SYM_FIRST_ANOM
)
2348 pstrcat(buf
, buf_size
, "<anonymous>");
2350 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2354 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2355 pstrcat(buf
, buf_size
, "(");
2357 while (sa
!= NULL
) {
2358 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2359 pstrcat(buf
, buf_size
, buf1
);
2362 pstrcat(buf
, buf_size
, ", ");
2364 pstrcat(buf
, buf_size
, ")");
2368 pstrcpy(buf1
, sizeof(buf1
), "*");
2370 pstrcat(buf1
, sizeof(buf1
), varstr
);
2371 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2375 pstrcat(buf
, buf_size
, " ");
2376 pstrcat(buf
, buf_size
, varstr
);
2381 /* verify type compatibility to store vtop in 'dt' type, and generate
2383 static void gen_assign_cast(CType
*dt
)
2385 CType
*st
, *type1
, *type2
, tmp_type1
, tmp_type2
;
2386 char buf1
[256], buf2
[256];
2389 st
= &vtop
->type
; /* source type */
2390 dbt
= dt
->t
& VT_BTYPE
;
2391 sbt
= st
->t
& VT_BTYPE
;
2392 if (sbt
== VT_VOID
|| dbt
== VT_VOID
)
2393 tcc_error("cannot cast from/to void");
2394 if (dt
->t
& VT_CONSTANT
)
2395 tcc_warning("assignment of read-only location");
2398 /* special cases for pointers */
2399 /* '0' can also be a pointer */
2400 if (is_null_pointer(vtop
))
2402 /* accept implicit pointer to integer cast with warning */
2403 if (is_integer_btype(sbt
)) {
2404 tcc_warning("assignment makes pointer from integer without a cast");
2407 type1
= pointed_type(dt
);
2408 /* a function is implicitly a function pointer */
2409 if (sbt
== VT_FUNC
) {
2410 if ((type1
->t
& VT_BTYPE
) != VT_VOID
&&
2411 !is_compatible_types(pointed_type(dt
), st
))
2412 tcc_warning("assignment from incompatible pointer type");
2417 type2
= pointed_type(st
);
2418 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2419 (type2
->t
& VT_BTYPE
) == VT_VOID
) {
2420 /* void * can match anything */
2422 /* exact type match, except for unsigned */
2425 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
|
2427 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
|
2429 if (!is_compatible_types(&tmp_type1
, &tmp_type2
))
2430 tcc_warning("assignment from incompatible pointer type");
2432 /* check const and volatile */
2433 if ((!(type1
->t
& VT_CONSTANT
) && (type2
->t
& VT_CONSTANT
)) ||
2434 (!(type1
->t
& VT_VOLATILE
) && (type2
->t
& VT_VOLATILE
)))
2435 tcc_warning("assignment discards qualifiers from pointer target type");
2441 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
2442 tcc_warning("assignment makes integer from pointer without a cast");
2444 /* XXX: more tests */
2449 tmp_type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2450 tmp_type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2451 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2453 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2454 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2455 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
2463 /* store vtop in lvalue pushed on stack */
2464 ST_FUNC
void vstore(void)
2466 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
2468 ft
= vtop
[-1].type
.t
;
2469 sbt
= vtop
->type
.t
& VT_BTYPE
;
2470 dbt
= ft
& VT_BTYPE
;
2471 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
2472 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
2473 && !(vtop
->type
.t
& VT_BITFIELD
)) {
2474 /* optimize char/short casts */
2475 delayed_cast
= VT_MUSTCAST
;
2476 vtop
->type
.t
= ft
& (VT_TYPE
& ~(VT_BITFIELD
| (-1 << VT_STRUCT_SHIFT
)));
2477 /* XXX: factorize */
2478 if (ft
& VT_CONSTANT
)
2479 tcc_warning("assignment of read-only location");
2482 if (!(ft
& VT_BITFIELD
))
2483 gen_assign_cast(&vtop
[-1].type
);
2486 if (sbt
== VT_STRUCT
) {
2487 /* if structure, only generate pointer */
2488 /* structure assignment : generate memcpy */
2489 /* XXX: optimize if small size */
2490 if (!nocode_wanted
) {
2491 size
= type_size(&vtop
->type
, &align
);
2495 vtop
->type
.t
= VT_PTR
;
2498 /* address of memcpy() */
2501 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
2502 else if(!(align
& 3))
2503 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
2506 vpush_global_sym(&func_old_type
, TOK_memcpy
);
2511 vtop
->type
.t
= VT_PTR
;
2520 /* leave source on stack */
2521 } else if (ft
& VT_BITFIELD
) {
2522 /* bitfield store handling */
2524 /* save lvalue as expression result (example: s.b = s.a = n;) */
2525 vdup(), vtop
[-1] = vtop
[-2];
2527 bit_pos
= (ft
>> VT_STRUCT_SHIFT
) & 0x3f;
2528 bit_size
= (ft
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
2529 /* remove bit field info to avoid loops */
2530 vtop
[-1].type
.t
= ft
& ~(VT_BITFIELD
| (-1 << VT_STRUCT_SHIFT
));
2532 if((ft
& VT_BTYPE
) == VT_BOOL
) {
2533 gen_cast(&vtop
[-1].type
);
2534 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
2537 /* duplicate destination */
2539 vtop
[-1] = vtop
[-2];
2541 /* mask and shift source */
2542 if((ft
& VT_BTYPE
) != VT_BOOL
) {
2543 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2544 vpushll((1ULL << bit_size
) - 1ULL);
2546 vpushi((1 << bit_size
) - 1);
2552 /* load destination, mask and or with source */
2554 if((ft
& VT_BTYPE
) == VT_LLONG
) {
2555 vpushll(~(((1ULL << bit_size
) - 1ULL) << bit_pos
));
2557 vpushi(~(((1 << bit_size
) - 1) << bit_pos
));
2563 /* ... and discard */
2567 #ifdef CONFIG_TCC_BCHECK
2568 /* bound check case */
2569 if (vtop
[-1].r
& VT_MUSTBOUND
) {
2575 if (!nocode_wanted
) {
2579 #ifdef TCC_TARGET_X86_64
2580 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
2582 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
2587 r
= gv(rc
); /* generate value */
2588 /* if lvalue was saved on stack, must read it */
2589 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
2591 t
= get_reg(RC_INT
);
2592 #ifdef TCC_TARGET_X86_64
2597 sv
.r
= VT_LOCAL
| VT_LVAL
;
2598 sv
.c
.ul
= vtop
[-1].c
.ul
;
2600 vtop
[-1].r
= t
| VT_LVAL
;
2602 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
2603 #ifdef TCC_TARGET_X86_64
2604 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
2605 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
2607 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
2608 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
2610 vtop
[-1].type
.t
= load_type
;
2613 /* convert to int to increment easily */
2614 vtop
->type
.t
= addr_type
;
2620 vtop
[-1].type
.t
= load_type
;
2621 /* XXX: it works because r2 is spilled last ! */
2622 store(vtop
->r2
, vtop
- 1);
2628 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
2629 vtop
->r
|= delayed_cast
;
2633 /* post defines POST/PRE add. c is the token ++ or -- */
2634 ST_FUNC
void inc(int post
, int c
)
2637 vdup(); /* save lvalue */
2639 gv_dup(); /* duplicate value */
2644 vpushi(c
- TOK_MID
);
2646 vstore(); /* store value */
2648 vpop(); /* if post op, return saved value */
2651 /* Parse GNUC __attribute__ extension. Currently, the following
2652 extensions are recognized:
2653 - aligned(n) : set data/function alignment.
2654 - packed : force data alignment to 1
2655 - section(x) : generate data/code in this section.
2656 - unused : currently ignored, but may be used someday.
2657 - regparm(n) : pass function parameters in registers (i386 only)
2659 static void parse_attribute(AttributeDef
*ad
)
2663 while (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
) {
2667 while (tok
!= ')') {
2668 if (tok
< TOK_IDENT
)
2669 expect("attribute name");
2677 expect("section name");
2678 ad
->section
= find_section(tcc_state
, (char *)tokc
.cstr
->data
);
2686 expect("alias(\"target\")");
2687 ad
->alias_target
= /* save string as token, for later */
2688 tok_alloc((char*)tokc
.cstr
->data
, tokc
.cstr
->size
-1)->tok
;
2692 case TOK_VISIBILITY1
:
2693 case TOK_VISIBILITY2
:
2696 expect("visibility(\"default|hidden|internal|protected\")");
2697 if (!strcmp (tokc
.cstr
->data
, "default"))
2698 ad
->a
.visibility
= STV_DEFAULT
;
2699 else if (!strcmp (tokc
.cstr
->data
, "hidden"))
2700 ad
->a
.visibility
= STV_HIDDEN
;
2701 else if (!strcmp (tokc
.cstr
->data
, "internal"))
2702 ad
->a
.visibility
= STV_INTERNAL
;
2703 else if (!strcmp (tokc
.cstr
->data
, "protected"))
2704 ad
->a
.visibility
= STV_PROTECTED
;
2706 expect("visibility(\"default|hidden|internal|protected\")");
2715 if (n
<= 0 || (n
& (n
- 1)) != 0)
2716 tcc_error("alignment must be a positive power of two");
2733 /* currently, no need to handle it because tcc does not
2734 track unused objects */
2738 /* currently, no need to handle it because tcc does not
2739 track unused objects */
2744 ad
->a
.func_call
= FUNC_CDECL
;
2749 ad
->a
.func_call
= FUNC_STDCALL
;
2751 #ifdef TCC_TARGET_I386
2761 ad
->a
.func_call
= FUNC_FASTCALL1
+ n
- 1;
2767 ad
->a
.func_call
= FUNC_FASTCALLW
;
2774 ad
->a
.mode
= VT_LLONG
+ 1;
2777 ad
->a
.mode
= VT_SHORT
+ 1;
2780 ad
->a
.mode
= VT_INT
+ 1;
2783 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
2790 ad
->a
.func_export
= 1;
2793 ad
->a
.func_import
= 1;
2796 if (tcc_state
->warn_unsupported
)
2797 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
2798 /* skip parameters */
2800 int parenthesis
= 0;
2804 else if (tok
== ')')
2807 } while (parenthesis
&& tok
!= -1);
2820 /* enum/struct/union declaration. u is either VT_ENUM or VT_STRUCT */
2821 static void struct_decl(CType
*type
, int u
, int tdef
)
2823 int a
, v
, size
, align
, maxalign
, c
, offset
, flexible
;
2824 int bit_size
, bit_pos
, bsize
, bt
, lbit_pos
, prevbt
;
2825 Sym
*s
, *ss
, *ass
, **ps
;
2829 a
= tok
; /* save decl type */
2834 /* struct already defined ? return it */
2836 expect("struct/union/enum name");
2840 tcc_error("invalid type");
2842 } else if (tok
>= TOK_IDENT
&& !tdef
)
2843 tcc_error("unknown struct/union/enum");
2849 /* we put an undefined size for struct/union */
2850 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
2851 s
->r
= 0; /* default alignment is zero as gcc */
2852 /* put struct/union/enum name in type */
2860 tcc_error("struct/union/enum already defined");
2861 /* cannot be empty */
2863 /* non empty enums are not allowed */
2864 if (a
== TOK_ENUM
) {
2868 expect("identifier");
2870 if (ss
&& !local_stack
)
2871 tcc_error("redefinition of enumerator '%s'",
2872 get_tok_str(v
, NULL
));
2878 /* enum symbols have static storage */
2879 ss
= sym_push(v
, &int_type
, VT_CONST
, c
);
2880 ss
->type
.t
|= VT_STATIC
;
2885 /* NOTE: we accept a trailing comma */
2889 s
->c
= type_size(&int_type
, &align
);
2898 while (tok
!= '}') {
2899 parse_btype(&btype
, &ad
);
2902 tcc_error("flexible array member '%s' not at the end of struct",
2903 get_tok_str(v
, NULL
));
2908 type_decl(&type1
, &ad
, &v
, TYPE_DIRECT
| TYPE_ABSTRACT
);
2909 if (v
== 0 && (type1
.t
& VT_BTYPE
) != VT_STRUCT
)
2910 expect("identifier");
2911 if (type_size(&type1
, &align
) < 0) {
2912 if ((a
== TOK_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
2915 tcc_error("field '%s' has incomplete type",
2916 get_tok_str(v
, NULL
));
2918 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
2919 (type1
.t
& (VT_TYPEDEF
| VT_STATIC
| VT_EXTERN
| VT_INLINE
)))
2920 tcc_error("invalid type for '%s'",
2921 get_tok_str(v
, NULL
));
2925 bit_size
= expr_const();
2926 /* XXX: handle v = 0 case for messages */
2928 tcc_error("negative width in bit-field '%s'",
2929 get_tok_str(v
, NULL
));
2930 if (v
&& bit_size
== 0)
2931 tcc_error("zero width for bit-field '%s'",
2932 get_tok_str(v
, NULL
));
2934 size
= type_size(&type1
, &align
);
2936 if (align
< ad
.a
.aligned
)
2937 align
= ad
.a
.aligned
;
2938 } else if (ad
.a
.packed
) {
2940 } else if (*tcc_state
->pack_stack_ptr
) {
2941 if (align
> *tcc_state
->pack_stack_ptr
)
2942 align
= *tcc_state
->pack_stack_ptr
;
2945 if (bit_size
>= 0) {
2946 bt
= type1
.t
& VT_BTYPE
;
2953 tcc_error("bitfields must have scalar type");
2955 if (bit_size
> bsize
) {
2956 tcc_error("width of '%s' exceeds its type",
2957 get_tok_str(v
, NULL
));
2958 } else if (bit_size
== bsize
) {
2959 /* no need for bit fields */
2961 } else if (bit_size
== 0) {
2962 /* XXX: what to do if only padding in a
2964 /* zero size: means to pad */
2967 /* we do not have enough room ?
2968 did the type change?
2970 if ((bit_pos
+ bit_size
) > bsize
||
2971 bt
!= prevbt
|| a
== TOK_UNION
)
2974 /* XXX: handle LSB first */
2975 type1
.t
|= VT_BITFIELD
|
2976 (bit_pos
<< VT_STRUCT_SHIFT
) |
2977 (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
2978 bit_pos
+= bit_size
;
2984 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
2985 /* add new memory data only if starting
2987 if (lbit_pos
== 0) {
2988 if (a
== TOK_STRUCT
) {
2989 c
= (c
+ align
- 1) & -align
;
2998 if (align
> maxalign
)
3002 printf("add field %s offset=%d",
3003 get_tok_str(v
, NULL
), offset
);
3004 if (type1
.t
& VT_BITFIELD
) {
3005 printf(" pos=%d size=%d",
3006 (type1
.t
>> VT_STRUCT_SHIFT
) & 0x3f,
3007 (type1
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f);
3012 if (v
== 0 && (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3014 while ((ass
= ass
->next
) != NULL
) {
3015 ss
= sym_push(ass
->v
, &ass
->type
, 0, offset
+ ass
->c
);
3020 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, offset
);
3024 if (tok
== ';' || tok
== TOK_EOF
)
3031 /* store size and alignment */
3032 s
->c
= (c
+ maxalign
- 1) & -maxalign
;
3038 /* return 1 if basic type is a type size (short, long, long long) */
3039 ST_FUNC
int is_btype_size(int bt
)
3041 return bt
== VT_SHORT
|| bt
== VT_LONG
|| bt
== VT_LLONG
;
3044 /* return 0 if no type declaration. otherwise, return the basic type
3047 static int parse_btype(CType
*type
, AttributeDef
*ad
)
3049 int t
, u
, bt_size
, complete
, type_found
, typespec_found
;
3053 memset(ad
, 0, sizeof(AttributeDef
));
3061 /* currently, we really ignore extension */
3072 tcc_error("too many basic types");
3074 bt_size
= is_btype_size (u
& VT_BTYPE
);
3075 if (u
== VT_INT
|| (!bt_size
&& !(t
& VT_TYPEDEF
)))
3090 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
3091 #ifndef TCC_TARGET_PE
3092 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3094 } else if ((t
& VT_BTYPE
) == VT_LONG
) {
3095 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3109 if ((t
& VT_BTYPE
) == VT_LONG
) {
3110 #ifdef TCC_TARGET_PE
3111 t
= (t
& ~VT_BTYPE
) | VT_DOUBLE
;
3113 t
= (t
& ~VT_BTYPE
) | VT_LDOUBLE
;
3121 struct_decl(&type1
, VT_ENUM
, t
& VT_TYPEDEF
);
3124 type
->ref
= type1
.ref
;
3128 struct_decl(&type1
, VT_STRUCT
, t
& VT_TYPEDEF
);
3131 /* type modifiers */
3147 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
3148 tcc_error("signed and unsigned modifier");
3161 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
3162 tcc_error("signed and unsigned modifier");
3163 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
3188 /* GNUC attribute */
3189 case TOK_ATTRIBUTE1
:
3190 case TOK_ATTRIBUTE2
:
3191 parse_attribute(ad
);
3194 t
= (t
& ~VT_BTYPE
) | u
;
3202 parse_expr_type(&type1
);
3203 /* remove all storage modifiers except typedef */
3204 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
3210 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
3212 t
|= (s
->type
.t
& ~VT_TYPEDEF
);
3213 type
->ref
= s
->type
.ref
;
3215 /* get attributes from typedef */
3216 if (0 == ad
->a
.aligned
)
3217 ad
->a
.aligned
= s
->a
.aligned
;
3218 if (0 == ad
->a
.func_call
)
3219 ad
->a
.func_call
= s
->a
.func_call
;
3220 ad
->a
.packed
|= s
->a
.packed
;
3229 if (tcc_state
->char_is_unsigned
) {
3230 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
3234 /* long is never used as type */
3235 if ((t
& VT_BTYPE
) == VT_LONG
)
3236 #if !defined TCC_TARGET_X86_64 || defined TCC_TARGET_PE
3237 t
= (t
& ~VT_BTYPE
) | VT_INT
;
3239 t
= (t
& ~VT_BTYPE
) | VT_LLONG
;
3245 /* convert a function parameter type (array to pointer and function to
3246 function pointer) */
3247 static inline void convert_parameter_type(CType
*pt
)
3249 /* remove const and volatile qualifiers (XXX: const could be used
3250 to indicate a const function parameter */
3251 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3252 /* array must be transformed to pointer according to ANSI C */
3254 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
3259 ST_FUNC
void parse_asm_str(CString
*astr
)
3262 /* read the string */
3264 expect("string constant");
3266 while (tok
== TOK_STR
) {
3267 /* XXX: add \0 handling too ? */
3268 cstr_cat(astr
, tokc
.cstr
->data
);
3271 cstr_ccat(astr
, '\0');
3274 /* Parse an asm label and return the label
3275 * Don't forget to free the CString in the caller! */
3276 static void asm_label_instr(CString
*astr
)
3279 parse_asm_str(astr
);
3282 printf("asm_alias: \"%s\"\n", (char *)astr
->data
);
3286 static void post_type(CType
*type
, AttributeDef
*ad
)
3288 int n
, l
, t1
, arg_size
, align
;
3289 Sym
**plast
, *s
, *first
;
3294 /* function declaration */
3302 /* read param name and compute offset */
3303 if (l
!= FUNC_OLD
) {
3304 if (!parse_btype(&pt
, &ad1
)) {
3306 tcc_error("invalid type");
3313 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
3315 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
3316 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
3317 tcc_error("parameter declared as void");
3318 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
3323 expect("identifier");
3327 convert_parameter_type(&pt
);
3328 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
3334 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
3341 /* if no parameters, then old type prototype */
3345 /* NOTE: const is ignored in returned type as it has a special
3346 meaning in gcc / C++ */
3347 type
->t
&= ~VT_CONSTANT
;
3348 /* some ancient pre-K&R C allows a function to return an array
3349 and the array brackets to be put after the arguments, such
3350 that "int c()[]" means something like "int[] c()" */
3353 skip(']'); /* only handle simple "[]" */
3356 /* we push a anonymous symbol which will contain the function prototype */
3357 ad
->a
.func_args
= arg_size
;
3358 s
= sym_push(SYM_FIELD
, type
, 0, l
);
3363 } else if (tok
== '[') {
3364 /* array definition */
3366 if (tok
== TOK_RESTRICT1
)
3371 if (!local_stack
|| nocode_wanted
)
3372 vpushi(expr_const());
3374 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
3377 tcc_error("invalid array size");
3379 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
3380 tcc_error("size of variable length array should be an integer");
3385 /* parse next post type */
3386 post_type(type
, ad
);
3387 if (type
->t
== VT_FUNC
)
3388 tcc_error("declaration of an array of functions");
3389 t1
|= type
->t
& VT_VLA
;
3392 loc
-= type_size(&int_type
, &align
);
3396 vla_runtime_type_size(type
, &align
);
3398 vset(&int_type
, VT_LOCAL
|VT_LVAL
, loc
);
3405 /* we push an anonymous symbol which will contain the array
3407 s
= sym_push(SYM_FIELD
, type
, 0, n
);
3408 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
3413 /* Parse a type declaration (except basic type), and return the type
3414 in 'type'. 'td' is a bitmask indicating which kind of type decl is
3415 expected. 'type' should contain the basic type. 'ad' is the
3416 attribute definition of the basic type. It can be modified by
3419 static void type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
3422 CType type1
, *type2
;
3423 int qualifiers
, storage
;
3425 while (tok
== '*') {
3433 qualifiers
|= VT_CONSTANT
;
3438 qualifiers
|= VT_VOLATILE
;
3446 type
->t
|= qualifiers
;
3449 /* XXX: clarify attribute handling */
3450 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3451 parse_attribute(ad
);
3453 /* recursive type */
3454 /* XXX: incorrect if abstract type for functions (e.g. 'int ()') */
3455 type1
.t
= 0; /* XXX: same as int */
3458 /* XXX: this is not correct to modify 'ad' at this point, but
3459 the syntax is not clear */
3460 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3461 parse_attribute(ad
);
3462 type_decl(&type1
, ad
, v
, td
);
3465 /* type identifier */
3466 if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
3470 if (!(td
& TYPE_ABSTRACT
))
3471 expect("identifier");
3475 storage
= type
->t
& VT_STORAGE
;
3476 type
->t
&= ~VT_STORAGE
;
3477 if (storage
& VT_STATIC
) {
3478 int saved_nocode_wanted
= nocode_wanted
;
3480 post_type(type
, ad
);
3481 nocode_wanted
= saved_nocode_wanted
;
3483 post_type(type
, ad
);
3485 if (tok
== TOK_ATTRIBUTE1
|| tok
== TOK_ATTRIBUTE2
)
3486 parse_attribute(ad
);
3490 /* append type at the end of type1 */
3503 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
3504 ST_FUNC
int lvalue_type(int t
)
3509 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
3511 else if (bt
== VT_SHORT
)
3515 if (t
& VT_UNSIGNED
)
3516 r
|= VT_LVAL_UNSIGNED
;
3520 /* indirection with full error checking and bound check */
3521 ST_FUNC
void indir(void)
3523 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
3524 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
3528 if ((vtop
->r
& VT_LVAL
) && !nocode_wanted
)
3530 vtop
->type
= *pointed_type(&vtop
->type
);
3531 /* Arrays and functions are never lvalues */
3532 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
3533 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
3534 vtop
->r
|= lvalue_type(vtop
->type
.t
);
3535 /* if bound checking, the referenced pointer must be checked */
3536 #ifdef CONFIG_TCC_BCHECK
3537 if (tcc_state
->do_bounds_check
)
3538 vtop
->r
|= VT_MUSTBOUND
;
3543 /* pass a parameter to a function and do type checking and casting */
3544 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
3549 func_type
= func
->c
;
3550 if (func_type
== FUNC_OLD
||
3551 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
3552 /* default casting : only need to convert float to double */
3553 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
3556 } else if (vtop
->type
.t
& VT_BITFIELD
) {
3557 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3560 } else if (arg
== NULL
) {
3561 tcc_error("too many arguments to function");
3564 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
3565 gen_assign_cast(&type
);
3569 /* parse an expression of the form '(type)' or '(expr)' and return its
3571 static void parse_expr_type(CType
*type
)
3577 if (parse_btype(type
, &ad
)) {
3578 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
3585 static void parse_type(CType
*type
)
3590 if (!parse_btype(type
, &ad
)) {
3593 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
3596 static void vpush_tokc(int t
)
3601 vsetc(&type
, VT_CONST
, &tokc
);
3604 ST_FUNC
void unary(void)
3606 int n
, t
, align
, size
, r
, sizeof_caller
;
3610 static int in_sizeof
= 0;
3612 sizeof_caller
= in_sizeof
;
3614 /* XXX: GCC 2.95.3 does not generate a table although it should be
3628 vpush_tokc(VT_INT
| VT_UNSIGNED
);
3632 vpush_tokc(VT_LLONG
);
3636 vpush_tokc(VT_LLONG
| VT_UNSIGNED
);
3640 vpush_tokc(VT_FLOAT
);
3644 vpush_tokc(VT_DOUBLE
);
3648 vpush_tokc(VT_LDOUBLE
);
3651 case TOK___FUNCTION__
:
3653 goto tok_identifier
;
3659 /* special function name identifier */
3660 len
= strlen(funcname
) + 1;
3661 /* generate char[len] type */
3666 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
3667 ptr
= section_ptr_add(data_section
, len
);
3668 memcpy(ptr
, funcname
, len
);
3673 #ifdef TCC_TARGET_PE
3674 t
= VT_SHORT
| VT_UNSIGNED
;
3680 /* string parsing */
3683 if (tcc_state
->warn_write_strings
)
3688 memset(&ad
, 0, sizeof(AttributeDef
));
3689 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, NULL
, 0);
3694 if (parse_btype(&type
, &ad
)) {
3695 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
3697 /* check ISOC99 compound literal */
3699 /* data is allocated locally by default */
3704 /* all except arrays are lvalues */
3705 if (!(type
.t
& VT_ARRAY
))
3706 r
|= lvalue_type(type
.t
);
3707 memset(&ad
, 0, sizeof(AttributeDef
));
3708 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, NULL
, 0);
3710 if (sizeof_caller
) {
3717 } else if (tok
== '{') {
3718 /* save all registers */
3720 /* statement expression : we do not accept break/continue
3721 inside as GCC does */
3722 block(NULL
, NULL
, NULL
, NULL
, 0, 1);
3737 /* functions names must be treated as function pointers,
3738 except for unary '&' and sizeof. Since we consider that
3739 functions are not lvalues, we only have to handle it
3740 there and in function calls. */
3741 /* arrays can also be used although they are not lvalues */
3742 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
3743 !(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_LLOCAL
))
3745 mk_pointer(&vtop
->type
);
3751 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
3753 boolean
.t
= VT_BOOL
;
3755 vtop
->c
.i
= !vtop
->c
.i
;
3756 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
3757 vtop
->c
.i
= vtop
->c
.i
^ 1;
3760 vseti(VT_JMP
, gvtst(1, 0));
3772 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
3773 tcc_error("pointer not accepted for unary plus");
3774 /* In order to force cast, we add zero, except for floating point
3775 where we really need an noop (otherwise -0.0 will be transformed
3777 if (!is_float(vtop
->type
.t
)) {
3788 unary_type(&type
); // Perform a in_sizeof = 0;
3789 size
= type_size(&type
, &align
);
3790 if (t
== TOK_SIZEOF
) {
3791 if (!(type
.t
& VT_VLA
)) {
3793 tcc_error("sizeof applied to an incomplete type");
3796 vla_runtime_type_size(&type
, &align
);
3801 vtop
->type
.t
|= VT_UNSIGNED
;
3804 case TOK_builtin_types_compatible_p
:
3813 type1
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3814 type2
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3815 vpushi(is_compatible_types(&type1
, &type2
));
3818 case TOK_builtin_constant_p
:
3820 int saved_nocode_wanted
, res
;
3823 saved_nocode_wanted
= nocode_wanted
;
3826 res
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3828 nocode_wanted
= saved_nocode_wanted
;
3833 case TOK_builtin_frame_address
:
3839 if (tok
!= TOK_CINT
|| tokc
.i
< 0) {
3840 tcc_error("__builtin_frame_address only takes positive integers");
3847 vset(&type
, VT_LOCAL
, 0); /* local frame */
3849 mk_pointer(&vtop
->type
);
3850 indir(); /* -> parent frame */
3854 #ifdef TCC_TARGET_X86_64
3855 #ifdef TCC_TARGET_PE
3856 case TOK_builtin_va_start
:
3864 if ((vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
3865 tcc_error("__builtin_va_start expects a local variable");
3866 vtop
->r
&= ~(VT_LVAL
| VT_REF
);
3867 vtop
->type
= char_pointer_type
;
3872 case TOK_builtin_va_arg_types
:
3879 vpushi(classify_x86_64_va_arg(&type
));
3894 t
= vtop
->type
.t
& VT_BTYPE
;
3896 /* In IEEE negate(x) isn't subtract(0,x), but rather
3901 else if (t
== VT_DOUBLE
)
3912 goto tok_identifier
;
3914 /* allow to take the address of a label */
3915 if (tok
< TOK_UIDENT
)
3916 expect("label identifier");
3917 s
= label_find(tok
);
3919 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
3921 if (s
->r
== LABEL_DECLARED
)
3922 s
->r
= LABEL_FORWARD
;
3925 s
->type
.t
= VT_VOID
;
3926 mk_pointer(&s
->type
);
3927 s
->type
.t
|= VT_STATIC
;
3929 vpushsym(&s
->type
, s
);
3933 // special qnan , snan and infinity values
3935 vpush64(VT_DOUBLE
, 0x7ff8000000000000ULL
);
3939 vpush64(VT_DOUBLE
, 0x7ff0000000000001ULL
);
3943 vpush64(VT_DOUBLE
, 0x7ff0000000000000ULL
);
3952 expect("identifier");
3955 const char *name
= get_tok_str(t
, NULL
);
3957 tcc_error("'%s' undeclared", name
);
3958 /* for simple function calls, we tolerate undeclared
3959 external reference to int() function */
3960 if (tcc_state
->warn_implicit_function_declaration
3961 #ifdef TCC_TARGET_PE
3962 /* people must be warned about using undeclared WINAPI functions
3963 (which usually start with uppercase letter) */
3964 || (name
[0] >= 'A' && name
[0] <= 'Z')
3967 tcc_warning("implicit declaration of function '%s'", name
);
3968 s
= external_global_sym(t
, &func_old_type
, 0);
3970 if ((s
->type
.t
& (VT_STATIC
| VT_INLINE
| VT_BTYPE
)) ==
3971 (VT_STATIC
| VT_INLINE
| VT_FUNC
)) {
3972 /* if referencing an inline function, then we generate a
3973 symbol to it if not already done. It will have the
3974 effect to generate code for it at the end of the
3975 compilation unit. Inline function as always
3976 generated in the text section. */
3978 put_extern_sym(s
, text_section
, 0, 0);
3979 r
= VT_SYM
| VT_CONST
;
3983 vset(&s
->type
, r
, s
->c
);
3984 /* if forward reference, we must point to s */
3985 if (vtop
->r
& VT_SYM
) {
3987 vtop
->c
.ptr_offset
= 0;
3992 /* post operations */
3994 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
3997 } else if (tok
== '.' || tok
== TOK_ARROW
) {
4000 if (tok
== TOK_ARROW
)
4002 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
4006 /* expect pointer on structure */
4007 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
4008 expect("struct or union");
4012 while ((s
= s
->next
) != NULL
) {
4017 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, NULL
));
4018 /* add field offset to pointer */
4019 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
4022 /* change type to field type, and set to lvalue */
4023 vtop
->type
= s
->type
;
4024 vtop
->type
.t
|= qualifiers
;
4025 /* an array is never an lvalue */
4026 if (!(vtop
->type
.t
& VT_ARRAY
)) {
4027 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4028 #ifdef CONFIG_TCC_BCHECK
4029 /* if bound checking, the referenced pointer must be checked */
4030 if (tcc_state
->do_bounds_check
)
4031 vtop
->r
|= VT_MUSTBOUND
;
4035 } else if (tok
== '[') {
4041 } else if (tok
== '(') {
4044 int nb_args
, ret_nregs
, ret_align
, variadic
;
4047 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4048 /* pointer test (no array accepted) */
4049 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
4050 vtop
->type
= *pointed_type(&vtop
->type
);
4051 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4055 expect("function pointer");
4058 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
4060 /* get return type */
4063 sa
= s
->next
; /* first parameter */
4066 /* compute first implicit argument if a structure is returned */
4067 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
4068 variadic
= (s
->c
== FUNC_ELLIPSIS
);
4069 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
4072 /* get some space for the returned structure */
4073 size
= type_size(&s
->type
, &align
);
4074 loc
= (loc
- size
) & -align
;
4076 ret
.r
= VT_LOCAL
| VT_LVAL
;
4077 /* pass it as 'int' to avoid structure arg passing
4079 vseti(VT_LOCAL
, loc
);
4089 /* return in register */
4090 if (is_float(ret
.type
.t
)) {
4091 ret
.r
= reg_fret(ret
.type
.t
);
4092 #ifdef TCC_TARGET_X86_64
4093 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
4097 #ifdef TCC_TARGET_X86_64
4098 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
4100 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
4110 gfunc_param_typed(s
, sa
);
4120 tcc_error("too few arguments to function");
4122 if (!nocode_wanted
) {
4123 gfunc_call(nb_args
);
4125 vtop
-= (nb_args
+ 1);
4129 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
4130 vsetc(&ret
.type
, r
, &ret
.c
);
4131 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
4134 /* handle packed struct return */
4135 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
4138 size
= type_size(&s
->type
, &align
);
4139 loc
= (loc
- size
) & -align
;
4143 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
4147 if (--ret_nregs
== 0)
4149 /* XXX: compatible with arm only: ret_align == register_size */
4150 offset
+= ret_align
;
4152 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
4160 ST_FUNC
void expr_prod(void)
4165 while (tok
== '*' || tok
== '/' || tok
== '%') {
4173 ST_FUNC
void expr_sum(void)
4178 while (tok
== '+' || tok
== '-') {
4186 static void expr_shift(void)
4191 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
4199 static void expr_cmp(void)
4204 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
4205 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
4213 static void expr_cmpeq(void)
4218 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
4226 static void expr_and(void)
4229 while (tok
== '&') {
4236 static void expr_xor(void)
4239 while (tok
== '^') {
4246 static void expr_or(void)
4249 while (tok
== '|') {
4256 /* XXX: fix this mess */
4257 static void expr_land_const(void)
4260 while (tok
== TOK_LAND
) {
4267 /* XXX: fix this mess */
4268 static void expr_lor_const(void)
4271 while (tok
== TOK_LOR
) {
4278 /* only used if non constant */
4279 static void expr_land(void)
4284 if (tok
== TOK_LAND
) {
4289 if (tok
!= TOK_LAND
) {
4299 static void expr_lor(void)
4304 if (tok
== TOK_LOR
) {
4309 if (tok
!= TOK_LOR
) {
4319 /* XXX: better constant handling */
4320 static void expr_cond(void)
4322 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
;
4324 CType type
, type1
, type2
;
4331 boolean
.t
= VT_BOOL
;
4337 if (tok
!= ':' || !gnu_ext
) {
4352 if (vtop
!= vstack
) {
4353 /* needed to avoid having different registers saved in
4355 if (is_float(vtop
->type
.t
)) {
4357 #ifdef TCC_TARGET_X86_64
4358 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
4368 if (tok
== ':' && gnu_ext
) {
4376 sv
= *vtop
; /* save value to handle it later */
4377 vtop
--; /* no vpop so that FP stack is not flushed */
4385 bt1
= t1
& VT_BTYPE
;
4387 bt2
= t2
& VT_BTYPE
;
4388 /* cast operands to correct type according to ISOC rules */
4389 if (is_float(bt1
) || is_float(bt2
)) {
4390 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
4391 type
.t
= VT_LDOUBLE
;
4392 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
4397 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
4398 /* cast to biggest op */
4400 /* convert to unsigned if it does not fit in a long long */
4401 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
) ||
4402 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_LLONG
| VT_UNSIGNED
))
4403 type
.t
|= VT_UNSIGNED
;
4404 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
4405 /* If one is a null ptr constant the result type
4407 if (is_null_pointer (vtop
))
4409 else if (is_null_pointer (&sv
))
4411 /* XXX: test pointer compatibility, C99 has more elaborate
4415 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
4416 /* XXX: test function pointer compatibility */
4417 type
= bt1
== VT_FUNC
? type1
: type2
;
4418 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
4419 /* XXX: test structure compatibility */
4420 type
= bt1
== VT_STRUCT
? type1
: type2
;
4421 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
4422 /* NOTE: as an extension, we accept void on only one side */
4425 /* integer operations */
4427 /* convert to unsigned if it does not fit in an integer */
4428 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
) ||
4429 (t2
& (VT_BTYPE
| VT_UNSIGNED
)) == (VT_INT
| VT_UNSIGNED
))
4430 type
.t
|= VT_UNSIGNED
;
4433 /* now we convert second operand */
4435 if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
4438 if (is_float(type
.t
)) {
4440 #ifdef TCC_TARGET_X86_64
4441 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
4445 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
4446 /* for long longs, we use fixed registers to avoid having
4447 to handle a complicated move */
4452 /* this is horrible, but we must also convert first
4456 /* put again first value and cast it */
4459 if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
4462 move_reg(r2
, r1
, type
.t
);
4469 static void expr_eq(void)
4475 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
4476 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
4477 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
4492 ST_FUNC
void gexpr(void)
4503 /* parse an expression and return its type without any side effect. */
4504 static void expr_type(CType
*type
)
4506 int saved_nocode_wanted
;
4508 saved_nocode_wanted
= nocode_wanted
;
4513 nocode_wanted
= saved_nocode_wanted
;
4516 /* parse a unary expression and return its type without any side
4518 static void unary_type(CType
*type
)
4530 /* parse a constant expression and return value in vtop. */
4531 static void expr_const1(void)
4540 /* parse an integer constant and return its value. */
4541 ST_FUNC
int expr_const(void)
4545 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
4546 expect("constant expression");
4552 /* return the label token if current token is a label, otherwise
4554 static int is_label(void)
4558 /* fast test first */
4559 if (tok
< TOK_UIDENT
)
4561 /* no need to save tokc because tok is an identifier */
4568 unget_tok(last_tok
);
4573 static void label_or_decl(int l
)
4577 /* fast test first */
4578 if (tok
>= TOK_UIDENT
)
4580 /* no need to save tokc because tok is an identifier */
4584 unget_tok(last_tok
);
4587 unget_tok(last_tok
);
4592 static void block(int *bsym
, int *csym
, int *case_sym
, int *def_sym
,
4593 int case_reg
, int is_expr
)
4596 Sym
*s
, *frame_bottom
;
4598 /* generate line number info */
4599 if (tcc_state
->do_debug
&&
4600 (last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
4601 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
4603 last_line_num
= file
->line_num
;
4607 /* default return value is (void) */
4609 vtop
->type
.t
= VT_VOID
;
4612 if (tok
== TOK_IF
) {
4619 block(bsym
, csym
, case_sym
, def_sym
, case_reg
, 0);
4621 if (c
== TOK_ELSE
) {
4625 block(bsym
, csym
, case_sym
, def_sym
, case_reg
, 0);
4626 gsym(d
); /* patch else jmp */
4629 } else if (tok
== TOK_WHILE
) {
4637 block(&a
, &b
, case_sym
, def_sym
, case_reg
, 0);
4641 } else if (tok
== '{') {
4643 int block_vla_sp_loc
, *saved_vla_sp_loc
, saved_vla_flags
;
4646 /* record local declaration stack position */
4648 frame_bottom
= sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
4649 frame_bottom
->next
= scope_stack_bottom
;
4650 scope_stack_bottom
= frame_bottom
;
4651 llabel
= local_label_stack
;
4653 /* save VLA state */
4654 block_vla_sp_loc
= *(saved_vla_sp_loc
= vla_sp_loc
);
4655 if (saved_vla_sp_loc
!= &vla_sp_root_loc
)
4656 vla_sp_loc
= &block_vla_sp_loc
;
4658 saved_vla_flags
= vla_flags
;
4659 vla_flags
|= VLA_NEED_NEW_FRAME
;
4661 /* handle local labels declarations */
4662 if (tok
== TOK_LABEL
) {
4665 if (tok
< TOK_UIDENT
)
4666 expect("label identifier");
4667 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
4677 while (tok
!= '}') {
4678 label_or_decl(VT_LOCAL
);
4682 block(bsym
, csym
, case_sym
, def_sym
, case_reg
, is_expr
);
4685 /* pop locally defined labels */
4686 label_pop(&local_label_stack
, llabel
);
4688 /* XXX: this solution makes only valgrind happy...
4689 triggered by gcc.c-torture/execute/20000917-1.c */
4691 switch(vtop
->type
.t
& VT_BTYPE
) {
4696 for(p
=vtop
->type
.ref
;p
;p
=p
->prev
)
4698 tcc_error("unsupported expression type");
4701 /* pop locally defined symbols */
4702 scope_stack_bottom
= scope_stack_bottom
->next
;
4703 sym_pop(&local_stack
, s
);
4705 /* Pop VLA frames and restore stack pointer if required */
4706 if (saved_vla_sp_loc
!= &vla_sp_root_loc
)
4707 *saved_vla_sp_loc
= block_vla_sp_loc
;
4708 if (vla_sp_loc
!= (saved_vla_sp_loc
== &vla_sp_root_loc
? &vla_sp_root_loc
: &block_vla_sp_loc
)) {
4709 vla_sp_loc
= saved_vla_sp_loc
;
4710 gen_vla_sp_restore(*vla_sp_loc
);
4712 vla_flags
= (vla_flags
& ~VLA_SCOPE_FLAGS
) | (saved_vla_flags
& VLA_SCOPE_FLAGS
);
4715 } else if (tok
== TOK_RETURN
) {
4719 gen_assign_cast(&func_vt
);
4720 if ((func_vt
.t
& VT_BTYPE
) == VT_STRUCT
) {
4721 CType type
, ret_type
;
4722 int ret_align
, ret_nregs
;
4723 ret_nregs
= gfunc_sret(&func_vt
, func_var
, &ret_type
,
4725 if (0 == ret_nregs
) {
4726 /* if returning structure, must copy it to implicit
4727 first pointer arg location */
4730 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
4733 /* copy structure value to pointer */
4736 /* returning structure packed into registers */
4737 int r
, size
, addr
, align
;
4738 size
= type_size(&func_vt
,&align
);
4739 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) || (vtop
->c
.i
& (ret_align
-1)))
4740 && (align
& (ret_align
-1))) {
4741 loc
= (loc
- size
) & -align
;
4744 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
4747 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
4749 vtop
->type
= ret_type
;
4750 if (is_float(ret_type
.t
))
4751 r
= rc_fret(ret_type
.t
);
4757 if (--ret_nregs
== 0)
4759 /* We assume that when a structure is returned in multiple
4760 registers, their classes are consecutive values of the
4763 /* XXX: compatible with arm only: ret_align == register_size */
4764 vtop
->c
.i
+= ret_align
;
4765 vtop
->r
= VT_LOCAL
| VT_LVAL
;
4768 } else if (is_float(func_vt
.t
)) {
4769 gv(rc_fret(func_vt
.t
));
4773 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
4776 rsym
= gjmp(rsym
); /* jmp */
4777 } else if (tok
== TOK_BREAK
) {
4780 tcc_error("cannot break");
4781 *bsym
= gjmp(*bsym
);
4784 } else if (tok
== TOK_CONTINUE
) {
4787 tcc_error("cannot continue");
4788 *csym
= gjmp(*csym
);
4791 } else if (tok
== TOK_FOR
) {
4796 frame_bottom
= sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
4797 frame_bottom
->next
= scope_stack_bottom
;
4798 scope_stack_bottom
= frame_bottom
;
4800 /* c99 for-loop init decl? */
4801 if (!decl0(VT_LOCAL
, 1)) {
4802 /* no, regular for-loop init expr */
4826 block(&a
, &b
, case_sym
, def_sym
, case_reg
, 0);
4830 scope_stack_bottom
= scope_stack_bottom
->next
;
4831 sym_pop(&local_stack
, s
);
4833 if (tok
== TOK_DO
) {
4838 block(&a
, &b
, case_sym
, def_sym
, case_reg
, 0);
4849 if (tok
== TOK_SWITCH
) {
4853 /* XXX: other types than integer */
4854 case_reg
= gv(RC_INT
);
4858 b
= gjmp(0); /* jump to first case */
4860 block(&a
, csym
, &b
, &c
, case_reg
, 0);
4861 /* if no default, jmp after switch */
4869 if (tok
== TOK_CASE
) {
4876 if (gnu_ext
&& tok
== TOK_DOTS
) {
4880 tcc_warning("empty case range");
4882 /* since a case is like a label, we must skip it with a jmp */
4889 *case_sym
= gtst(1, 0);
4892 *case_sym
= gtst(1, 0);
4896 *case_sym
= gtst(1, *case_sym
);
4901 goto block_after_label
;
4903 if (tok
== TOK_DEFAULT
) {
4909 tcc_error("too many 'default'");
4912 goto block_after_label
;
4914 if (tok
== TOK_GOTO
) {
4916 if (tok
== '*' && gnu_ext
) {
4920 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
4923 } else if (tok
>= TOK_UIDENT
) {
4924 s
= label_find(tok
);
4925 /* put forward definition if needed */
4927 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
4929 if (s
->r
== LABEL_DECLARED
)
4930 s
->r
= LABEL_FORWARD
;
4932 /* label already defined */
4933 if (vla_flags
& VLA_IN_SCOPE
) {
4934 /* If VLAs are in use, save the current stack pointer and
4935 reset the stack pointer to what it was at function entry
4936 (label will restore stack pointer in inner scopes) */
4938 gen_vla_sp_restore(vla_sp_root_loc
);
4940 if (s
->r
& LABEL_FORWARD
)
4941 s
->jnext
= gjmp(s
->jnext
);
4943 gjmp_addr(s
->jnext
);
4946 expect("label identifier");
4949 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
4955 if (vla_flags
& VLA_IN_SCOPE
) {
4956 /* save/restore stack pointer across label
4957 this is a no-op when combined with the load immediately
4958 after the label unless we arrive via goto */
4963 if (s
->r
== LABEL_DEFINED
)
4964 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
4966 s
->r
= LABEL_DEFINED
;
4968 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
4971 if (vla_flags
& VLA_IN_SCOPE
) {
4972 gen_vla_sp_restore(*vla_sp_loc
);
4973 vla_flags
|= VLA_NEED_NEW_FRAME
;
4975 /* we accept this, but it is a mistake */
4978 tcc_warning("deprecated use of label at end of compound statement");
4982 block(bsym
, csym
, case_sym
, def_sym
, case_reg
, is_expr
);
4985 /* expression case */
5000 /* t is the array or struct type. c is the array or struct
5001 address. cur_index/cur_field is the pointer to the current
5002 value. 'size_only' is true if only size info is needed (only used
5004 static void decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
5005 int *cur_index
, Sym
**cur_field
,
5009 int notfirst
, index
, index_last
, align
, l
, nb_elems
, elem_size
;
5015 if (gnu_ext
&& (l
= is_label()) != 0)
5017 while (tok
== '[' || tok
== '.') {
5019 if (!(type
->t
& VT_ARRAY
))
5020 expect("array type");
5023 index
= expr_const();
5024 if (index
< 0 || (s
->c
>= 0 && index
>= s
->c
))
5025 expect("invalid index");
5026 if (tok
== TOK_DOTS
&& gnu_ext
) {
5028 index_last
= expr_const();
5029 if (index_last
< 0 ||
5030 (s
->c
>= 0 && index_last
>= s
->c
) ||
5032 expect("invalid index");
5038 *cur_index
= index_last
;
5039 type
= pointed_type(type
);
5040 elem_size
= type_size(type
, &align
);
5041 c
+= index
* elem_size
;
5042 /* NOTE: we only support ranges for last designator */
5043 nb_elems
= index_last
- index
+ 1;
5044 if (nb_elems
!= 1) {
5053 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
5054 expect("struct/union type");
5067 /* XXX: fix this mess by using explicit storage field */
5069 type1
.t
|= (type
->t
& ~VT_TYPE
);
5083 if (type
->t
& VT_ARRAY
) {
5085 type
= pointed_type(type
);
5086 c
+= index
* type_size(type
, &align
);
5090 tcc_error("too many field init");
5091 /* XXX: fix this mess by using explicit storage field */
5093 type1
.t
|= (type
->t
& ~VT_TYPE
);
5098 decl_initializer(type
, sec
, c
, 0, size_only
);
5100 /* XXX: make it more general */
5101 if (!size_only
&& nb_elems
> 1) {
5102 unsigned long c_end
;
5107 tcc_error("range init not supported yet for dynamic storage");
5108 c_end
= c
+ nb_elems
* elem_size
;
5109 if (c_end
> sec
->data_allocated
)
5110 section_realloc(sec
, c_end
);
5111 src
= sec
->data
+ c
;
5113 for(i
= 1; i
< nb_elems
; i
++) {
5115 memcpy(dst
, src
, elem_size
);
5121 #define EXPR_CONST 1
5124 /* store a value or an expression directly in global data or in local array */
5125 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
,
5126 int v
, int expr_type
)
5128 int saved_global_expr
, bt
, bit_pos
, bit_size
;
5130 unsigned long long bit_mask
;
5138 /* compound literals must be allocated globally in this case */
5139 saved_global_expr
= global_expr
;
5142 global_expr
= saved_global_expr
;
5143 /* NOTE: symbols are accepted */
5144 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
)
5145 tcc_error("initializer element is not constant");
5153 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5156 /* XXX: not portable */
5157 /* XXX: generate error if incorrect relocation */
5158 gen_assign_cast(&dtype
);
5159 bt
= type
->t
& VT_BTYPE
;
5160 /* we'll write at most 12 bytes */
5161 if (c
+ 12 > sec
->data_allocated
) {
5162 section_realloc(sec
, c
+ 12);
5164 ptr
= sec
->data
+ c
;
5165 /* XXX: make code faster ? */
5166 if (!(type
->t
& VT_BITFIELD
)) {
5171 bit_pos
= (vtop
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
5172 bit_size
= (vtop
->type
.t
>> (VT_STRUCT_SHIFT
+ 6)) & 0x3f;
5173 bit_mask
= (1LL << bit_size
) - 1;
5175 if ((vtop
->r
& VT_SYM
) &&
5181 (bt
== VT_INT
&& bit_size
!= 32)))
5182 tcc_error("initializer element is not computable at load time");
5185 vtop
->c
.i
= (vtop
->c
.i
!= 0);
5187 *(char *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
5190 *(short *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
5193 *(double *)ptr
= vtop
->c
.d
;
5196 *(long double *)ptr
= vtop
->c
.ld
;
5199 *(long long *)ptr
|= (vtop
->c
.ll
& bit_mask
) << bit_pos
;
5202 if (vtop
->r
& VT_SYM
) {
5203 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
5205 *(addr_t
*)ptr
|= (vtop
->c
.ptr_offset
& bit_mask
) << bit_pos
;
5208 if (vtop
->r
& VT_SYM
) {
5209 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
5211 *(int *)ptr
|= (vtop
->c
.i
& bit_mask
) << bit_pos
;
5216 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
5223 /* put zeros for variable based init */
5224 static void init_putz(CType
*t
, Section
*sec
, unsigned long c
, int size
)
5227 /* nothing to do because globals are already set to zero */
5229 vpush_global_sym(&func_old_type
, TOK_memset
);
5231 #ifdef TCC_TARGET_ARM
5242 /* 't' contains the type and storage info. 'c' is the offset of the
5243 object in section 'sec'. If 'sec' is NULL, it means stack based
5244 allocation. 'first' is true if array '{' must be read (multi
5245 dimension implicit array init handling). 'size_only' is true if
5246 size only evaluation is wanted (only for arrays). */
5247 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
5248 int first
, int size_only
)
5250 int index
, array_length
, n
, no_oblock
, nb
, parlevel
, parlevel1
, i
;
5251 int size1
, align1
, expr_type
;
5255 if (type
->t
& VT_VLA
) {
5258 /* save current stack pointer */
5259 if (vla_flags
& VLA_NEED_NEW_FRAME
) {
5261 vla_flags
= VLA_IN_SCOPE
;
5262 vla_sp_loc
= &vla_sp_loc_tmp
;
5265 vla_runtime_type_size(type
, &a
);
5266 gen_vla_alloc(type
, a
);
5267 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
5271 } else if (type
->t
& VT_ARRAY
) {
5275 t1
= pointed_type(type
);
5276 size1
= type_size(t1
, &align1
);
5279 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
5282 tcc_error("character array initializer must be a literal,"
5283 " optionally enclosed in braces");
5288 /* only parse strings here if correct type (otherwise: handle
5289 them as ((w)char *) expressions */
5290 if ((tok
== TOK_LSTR
&&
5291 #ifdef TCC_TARGET_PE
5292 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
5294 (t1
->t
& VT_BTYPE
) == VT_INT
5296 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
5297 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
5302 /* compute maximum number of chars wanted */
5304 cstr_len
= cstr
->size
;
5306 cstr_len
= cstr
->size
/ sizeof(nwchar_t
);
5309 if (n
>= 0 && nb
> (n
- array_length
))
5310 nb
= n
- array_length
;
5313 tcc_warning("initializer-string for array is too long");
5314 /* in order to go faster for common case (char
5315 string in global variable, we handle it
5317 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
5318 memcpy(sec
->data
+ c
+ array_length
, cstr
->data
, nb
);
5322 ch
= ((unsigned char *)cstr
->data
)[i
];
5324 ch
= ((nwchar_t
*)cstr
->data
)[i
];
5325 init_putv(t1
, sec
, c
+ (array_length
+ i
) * size1
,
5333 /* only add trailing zero if enough storage (no
5334 warning in this case since it is standard) */
5335 if (n
< 0 || array_length
< n
) {
5337 init_putv(t1
, sec
, c
+ (array_length
* size1
), 0, EXPR_VAL
);
5343 while (tok
!= '}') {
5344 decl_designator(type
, sec
, c
, &index
, NULL
, size_only
);
5345 if (n
>= 0 && index
>= n
)
5346 tcc_error("index too large");
5347 /* must put zero in holes (note that doing it that way
5348 ensures that it even works with designators) */
5349 if (!size_only
&& array_length
< index
) {
5350 init_putz(t1
, sec
, c
+ array_length
* size1
,
5351 (index
- array_length
) * size1
);
5354 if (index
> array_length
)
5355 array_length
= index
;
5356 /* special test for multi dimensional arrays (may not
5357 be strictly correct if designators are used at the
5359 if (index
>= n
&& no_oblock
)
5368 /* put zeros at the end */
5369 if (!size_only
&& n
>= 0 && array_length
< n
) {
5370 init_putz(t1
, sec
, c
+ array_length
* size1
,
5371 (n
- array_length
) * size1
);
5373 /* patch type size if needed */
5375 s
->c
= array_length
;
5376 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
&&
5377 (sec
|| !first
|| tok
== '{')) {
5380 /* NOTE: the previous test is a specific case for automatic
5381 struct/union init */
5382 /* XXX: union needs only one init */
5384 /* XXX: this test is incorrect for local initializers
5385 beginning with ( without {. It would be much more difficult
5386 to do it correctly (ideally, the expression parser should
5387 be used in all cases) */
5393 while (tok
== '(') {
5397 if (!parse_btype(&type1
, &ad1
))
5399 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
5401 if (!is_assignable_types(type
, &type1
))
5402 tcc_error("invalid type for cast");
5407 if (first
|| tok
== '{') {
5416 while (tok
!= '}') {
5417 decl_designator(type
, sec
, c
, NULL
, &f
, size_only
);
5419 if (!size_only
&& array_length
< index
) {
5420 init_putz(type
, sec
, c
+ array_length
,
5421 index
- array_length
);
5423 index
= index
+ type_size(&f
->type
, &align1
);
5424 if (index
> array_length
)
5425 array_length
= index
;
5427 /* gr: skip fields from same union - ugly. */
5429 ///printf("index: %2d %08x -- %2d %08x\n", f->c, f->type.t, f->next->c, f->next->type.t);
5430 /* test for same offset */
5431 if (f
->next
->c
!= f
->c
)
5433 /* if yes, test for bitfield shift */
5434 if ((f
->type
.t
& VT_BITFIELD
) && (f
->next
->type
.t
& VT_BITFIELD
)) {
5435 int bit_pos_1
= (f
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
5436 int bit_pos_2
= (f
->next
->type
.t
>> VT_STRUCT_SHIFT
) & 0x3f;
5437 //printf("bitfield %d %d\n", bit_pos_1, bit_pos_2);
5438 if (bit_pos_1
!= bit_pos_2
)
5445 if (no_oblock
&& f
== NULL
)
5451 /* put zeros at the end */
5452 if (!size_only
&& array_length
< n
) {
5453 init_putz(type
, sec
, c
+ array_length
,
5462 } else if (tok
== '{') {
5464 decl_initializer(type
, sec
, c
, first
, size_only
);
5466 } else if (size_only
) {
5467 /* just skip expression */
5468 parlevel
= parlevel1
= 0;
5469 while ((parlevel
> 0 || parlevel1
> 0 ||
5470 (tok
!= '}' && tok
!= ',')) && tok
!= -1) {
5473 else if (tok
== ')')
5475 else if (tok
== '{')
5477 else if (tok
== '}')
5482 /* currently, we always use constant expression for globals
5483 (may change for scripting case) */
5484 expr_type
= EXPR_CONST
;
5486 expr_type
= EXPR_ANY
;
5487 init_putv(type
, sec
, c
, 0, expr_type
);
/* NOTE(review): this region is a corrupted extraction of tccgen.c — the
   original file's line numbers (e.g. "5499") are fused into the text,
   statements are split across physical lines, and many original lines are
   missing (numbering jumps, e.g. 5500 -> 5503). The code below is preserved
   byte-for-byte; only comments were added. Restore from a pristine copy of
   tccgen.c before attempting any functional change. */
5491 /* parse an initializer for type 't' if 'has_init' is non zero, and
5492 allocate space in local or global data space ('r' is either
5493 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
5494 variable 'v' with an associated name represented by 'asm_label' of
5495 scope 'scope' is declared before initializers are parsed. If 'v' is
5496 zero, then a reference to the new object is put in the value stack.
5497 If 'has_init' is 2, a special parsing is done to handle string
5499 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
5500 int has_init
, int v
, char *asm_label
,
5503 int size
, align
, addr
, data_offset
;
5505 ParseState saved_parse_state
= {0};
5506 TokenString init_str
;
5508 Sym
*flexible_array
;
/* Detect a C99 flexible array member: for a struct, walk to the last
   field and check for an array whose element count is still < 0. */
5510 flexible_array
= NULL
;
5511 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5512 Sym
*field
= type
->ref
->next
;
5515 field
= field
->next
;
5516 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
5517 flexible_array
= field
;
5521 size
= type_size(type
, &align
);
5522 /* If unknown size, we must evaluate it before
5523 evaluating initializers because
5524 initializers can generate global data too
5525 (e.g. string pointers or ISOC99 compound
5526 literals). It also simplifies local
5527 initializers handling */
/* First pass: when the size is unknown (unsized array / flexible array
   member), buffer the whole initializer token stream into init_str so it
   can be parsed once for sizing and once for real emission. */
5528 tok_str_new(&init_str
);
5529 if ((size
< 0 || flexible_array
) && has_init
) {
5530 /* get all init string */
5531 if (has_init
== 2) {
5532 /* only get strings */
5533 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
5534 tok_str_add_tok(&init_str
);
5539 while (level
> 0 || (tok
!= ',' && tok
!= ';')) {
5541 tcc_error("unexpected end of file in initializer");
5542 tok_str_add_tok(&init_str
);
5545 else if (tok
== '}') {
5555 tok_str_add(&init_str
, -1);
5556 tok_str_add(&init_str
, 0);
/* Sizing pass: replay the saved tokens with size_only=1, then re-arm
   macro_ptr so the second (emitting) pass re-reads the same stream. */
5559 save_parse_state(&saved_parse_state
);
5561 macro_ptr
= init_str
.str
;
5563 decl_initializer(type
, NULL
, 0, 1, 1);
5564 /* prepare second initializer parsing */
5565 macro_ptr
= init_str
.str
;
5568 size
= type_size(type
, &align
);
5571 /* if still unknown size, error */
5573 tcc_error("unknown type size");
5575 if (nocode_wanted
) {
5576 //tcc_warning("nocode_wanted set for decl_initializer_alloc");
/* Account for the flexible-array tail in the allocated size.
   NOTE(review): lines 5577-5581 are missing from this extraction. */
5582 size
+= flexible_array
->type
.ref
->c
* pointed_size(&flexible_array
->type
);
5583 /* take into account specified alignment if bigger */
5584 if (ad
->a
.aligned
) {
5585 if (ad
->a
.aligned
> align
)
5586 align
= ad
->a
.aligned
;
5587 } else if (ad
->a
.packed
) {
/* Local (stack) object: carve space out of 'loc', optionally recording
   bounds-checker metadata for arrays. */
5590 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
5592 #ifdef CONFIG_TCC_BCHECK
5593 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
5597 loc
= (loc
- size
) & -align
;
5599 #ifdef CONFIG_TCC_BCHECK
5600 /* handles bounds */
5601 /* XXX: currently, since we do only one pass, we cannot track
5602 '&' operators, so we add only arrays */
5603 if (tcc_state
->do_bounds_check
&& (type
->t
& VT_ARRAY
)) {
5604 unsigned long *bounds_ptr
;
5605 /* add padding between regions */
5607 /* then add local bound info */
5608 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(unsigned long));
5609 bounds_ptr
[0] = addr
;
5610 bounds_ptr
[1] = size
;
5614 /* local variable */
5615 sym_push(v
, type
, r
, addr
);
5617 /* push local reference */
5618 vset(type
, r
, addr
);
/* Global / static object: resolve redefinitions, then reserve space in
   the target section (or emit an SHN_COMMON symbol). */
5624 if (v
&& scope
== VT_CONST
) {
5625 /* see if the symbol was already defined */
5628 if (!is_compatible_types(&sym
->type
, type
))
5629 tcc_error("incompatible types for redefinition of '%s'",
5630 get_tok_str(v
, NULL
));
5631 if (sym
->type
.t
& VT_EXTERN
) {
5632 /* if the variable is extern, it was not allocated */
5633 sym
->type
.t
&= ~VT_EXTERN
;
5634 /* set array size if it was omitted in extern
5636 if ((sym
->type
.t
& VT_ARRAY
) &&
5637 sym
->type
.ref
->c
< 0 &&
5639 sym
->type
.ref
->c
= type
->ref
->c
;
5641 /* we accept several definitions of the same
5642 global variable. this is tricky, because we
5643 must play with the SHN_COMMON type of the symbol */
5644 /* XXX: should check if the variable was already
5645 initialized. It is incorrect to initialized it
5647 /* no init data, we won't add more to the symbol */
5654 /* allocate symbol in corresponding section */
5659 else if (tcc_state
->nocommon
)
/* Reserve aligned space in 'sec' BEFORE parsing initializers — they may
   themselves allocate more global data (see comment at 5666). */
5663 data_offset
= sec
->data_offset
;
5664 data_offset
= (data_offset
+ align
- 1) & -align
;
5666 /* very important to increment global pointer at this time
5667 because initializers themselves can create new initializers */
5668 data_offset
+= size
;
5669 #ifdef CONFIG_TCC_BCHECK
5670 /* add padding if bound check */
5671 if (tcc_state
->do_bounds_check
)
5674 sec
->data_offset
= data_offset
;
5675 /* allocate section space to put the data */
5676 if (sec
->sh_type
!= SHT_NOBITS
&&
5677 data_offset
> sec
->data_allocated
)
5678 section_realloc(sec
, data_offset
);
5679 /* align section if needed */
5680 if (align
> sec
->sh_addralign
)
5681 sec
->sh_addralign
= align
;
5683 addr
= 0; /* avoid warning */
5687 if (scope
!= VT_CONST
|| !sym
) {
5688 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
5689 sym
->asm_label
= asm_label
;
5691 /* update symbol definition */
5693 put_extern_sym(sym
, sec
, addr
, size
);
5696 /* put a common area */
5697 put_extern_sym(sym
, NULL
, align
, size
);
5698 /* XXX: find a nicer way */
5699 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
5700 esym
->st_shndx
= SHN_COMMON
;
5703 /* push global reference */
5704 sym
= get_sym_ref(type
, sec
, addr
, size
);
5705 vpushsym(type
, sym
);
5707 /* patch symbol weakness */
5708 if (type
->t
& VT_WEAK
)
5710 apply_visibility(sym
, type
);
5711 #ifdef CONFIG_TCC_BCHECK
5712 /* handles bounds now because the symbol must be defined
5713 before for the relocation */
5714 if (tcc_state
->do_bounds_check
) {
5715 unsigned long *bounds_ptr
;
5717 greloc(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
);
5718 /* then add global bound info */
5719 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(long));
5720 bounds_ptr
[0] = 0; /* relocated */
5721 bounds_ptr
[1] = size
;
/* Second (emitting) pass of the initializer; VLAs always take this path
   so their runtime size computation is generated. */
5725 if (has_init
|| (type
->t
& VT_VLA
)) {
5726 decl_initializer(type
, sec
, addr
, 1, 0);
5727 /* patch flexible array member size back to -1, */
5728 /* for possible subsequent similar declarations */
5730 flexible_array
->type
.ref
->c
= -1;
5733 /* restore parse state if needed */
5735 tok_str_free(init_str
.str
);
5736 restore_parse_state(&saved_parse_state
);
/* NOTE(review): corrupted extraction — original line numbers are fused into
   the text and lines 5741-5744 and 5752+ (including the declaration of 'buf'
   and the function's closing lines) are missing. Code preserved byte-for-byte;
   comments only added. */
/* Emit a stabs debug entry (N_FUN) for the function symbol 'sym', plus an
   initial N_SLINE so the debugger has a line at the function start. */
5740 static void put_func_debug(Sym
*sym
)
5745 /* XXX: we put here a dummy type */
/* 'f' = static (file-local) function, 'F' = global function, per the
   stabs N_FUN string convention "name:Ftype". */
5746 snprintf(buf
, sizeof(buf
), "%s:%c1",
5747 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
5748 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
5749 cur_text_section
, sym
->c
);
5750 /* //gr gdb wants a line at the function */
5751 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
/* NOTE(review): corrupted extraction — original line numbers fused into the
   text, statements split across lines, and several original lines missing
   (e.g. 5759-5764, 5772-5776). Code preserved byte-for-byte; comments only
   added. */
5756 /* parse an old style function declaration list */
5757 /* XXX: check multiple parameter */
5758 static void func_decl_list(Sym
*func_sym
)
5765 /* parse each declaration */
/* Loop until something that terminates a K&R parameter declaration list:
   function body '{', ';', ',', EOF, or an asm keyword. */
5766 while (tok
!= '{' && tok
!= ';' && tok
!= ',' && tok
!= TOK_EOF
&&
5767 tok
!= TOK_ASM1
&& tok
!= TOK_ASM2
&& tok
!= TOK_ASM3
) {
5768 if (!parse_btype(&btype
, &ad
))
5769 expect("declaration list");
5770 if (((btype
.t
& VT_BTYPE
) == VT_ENUM
||
5771 (btype
.t
& VT_BTYPE
) == VT_STRUCT
) &&
5773 /* we accept no variable after */
5777 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
5778 /* find parameter in function parameter list */
/* Match the declared name 'v' against the function's parameter symbols
   (SYM_FIELD flag masked off for the comparison). */
5781 if ((s
->v
& ~SYM_FIELD
) == v
)
5785 tcc_error("declaration for parameter '%s' but no such parameter",
5786 get_tok_str(v
, NULL
));
5788 /* check that no storage specifier except 'register' was given */
5789 if (type
.t
& VT_STORAGE
)
5790 tcc_error("storage class specified for '%s'", get_tok_str(v
, NULL
));
5791 convert_parameter_type(&type
);
5792 /* we can add the type (NOTE: it could be local to the function) */
5794 /* accept other parameters */
/* NOTE(review): corrupted extraction — original line numbers fused into the
   text and multiple original lines missing (e.g. 5808, 5813, 5826-5828,
   5834-5838, 5840-5841, 5850). Code preserved byte-for-byte; comments only
   added. */
5805 /* parse a function defined by symbol 'sym' and generate its code in
5806 'cur_text_section' */
5807 static void gen_function(Sym
*sym
)
5809 ind
= cur_text_section
->data_offset
;
5810 /* NOTE: we patch the symbol size later */
5811 put_extern_sym(sym
, cur_text_section
, ind
, 0);
5812 funcname
= get_tok_str(sym
->v
, NULL
);
5814 /* Initialize VLA state */
5815 vla_sp_loc
= &vla_sp_root_loc
;
5816 vla_flags
= VLA_NEED_NEW_FRAME
;
5817 /* put debug symbol */
5818 if (tcc_state
->do_debug
)
5819 put_func_debug(sym
);
5820 /* push a dummy symbol to enable local sym storage */
5821 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
5822 gfunc_prolog(&sym
->type
);
/* Bounds-check instrumentation for main(): record argc/argv via
   __bound_main_arg. Note 'sym' is reused as the loop cursor here, so it
   no longer refers to the function symbol inside this block. */
5823 #ifdef CONFIG_TCC_BCHECK
5824 if (tcc_state
->do_bounds_check
5825 && !strcmp(get_tok_str(sym
->v
, NULL
), "main")) {
5829 for (i
= 0, sym
= local_stack
; i
< 2; i
++, sym
= sym
->prev
) {
5830 if (sym
->v
& SYM_FIELD
|| sym
->prev
->v
& SYM_FIELD
)
5832 vpush_global_sym(&func_old_type
, TOK___bound_main_arg
);
5833 vset(&sym
->type
, sym
->r
, sym
->c
);
/* Compile the function body, then tear down the per-function state. */
5839 block(NULL
, NULL
, NULL
, NULL
, 0, 0);
5842 cur_text_section
->data_offset
= ind
;
5843 label_pop(&global_label_stack
, NULL
);
5844 /* reset local stack */
5845 scope_stack_bottom
= NULL
;
5846 sym_pop(&local_stack
, NULL
);
5847 /* end of function */
5848 /* patch symbol size */
5849 ((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
].st_size
=
5851 /* patch symbol weakness (this definition overrules any prototype) */
5852 if (sym
->type
.t
& VT_WEAK
)
5854 apply_visibility(sym
, &sym
->type
);
5855 if (tcc_state
->do_debug
) {
5856 put_stabn(N_FUN
, 0, 0, ind
- func_ind
);
5858 /* It's better to crash than to generate wrong code */
5859 cur_text_section
= NULL
;
5860 funcname
= ""; /* for safety */
5861 func_vt
.t
= VT_VOID
; /* for safety */
5862 func_var
= 0; /* for safety */
5863 ind
= 0; /* for safety */
/* NOTE(review): corrupted extraction — original line numbers fused into the
   text and several original lines missing (e.g. 5867-5868, 5871, 5873, 5877,
   5882-5883, 5887-5889, 5891, 5893, 5895-5896, 5898-5899, 5903-5904). Code
   preserved byte-for-byte; comments only added. */
/* Emit code for every 'static inline' function that was actually referenced
   (sym->c != 0). Repeats until a fixed point, since generating one inline
   function can reference another; then frees all recorded token strings. */
5866 ST_FUNC
void gen_inline_functions(void)
5869 int *str
, inline_generated
, i
;
5870 struct InlineFunc
*fn
;
5872 /* iterate while inline function are referenced */
5874 inline_generated
= 0;
5875 for (i
= 0; i
< tcc_state
->nb_inline_fns
; ++i
) {
5876 fn
= tcc_state
->inline_fns
[i
];
5878 if (sym
&& sym
->c
) {
5879 /* the function was used: generate its code and
5880 convert it to a normal function */
5881 str
= fn
->token_str
;
5884 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
5885 sym
->r
= VT_SYM
| VT_CONST
;
5886 sym
->type
.t
&= ~VT_INLINE
;
5890 cur_text_section
= text_section
;
5892 macro_ptr
= NULL
; /* fail safe */
5894 inline_generated
= 1;
5897 if (!inline_generated
)
/* Cleanup: release every saved token stream and the dynarray itself. */
5900 for (i
= 0; i
< tcc_state
->nb_inline_fns
; ++i
) {
5901 fn
= tcc_state
->inline_fns
[i
];
5902 str
= fn
->token_str
;
5905 dynarray_reset(&tcc_state
->inline_fns
, &tcc_state
->nb_inline_fns
);
/* NOTE(review): corrupted extraction — original line numbers fused into the
   text, statements split across lines, and many original lines missing
   (numbering jumps such as 5909 -> 5917, 6057 -> 6063, 6143 -> 6147). Code
   preserved byte-for-byte; comments only added. */
5908 /* 'l' is VT_LOCAL or VT_CONST to define default storage type */
5909 static int decl0(int l
, int is_for_loop_init
)
5917 if (!parse_btype(&btype
, &ad
)) {
5918 if (is_for_loop_init
)
5920 /* skip redundant ';' */
5921 /* XXX: find more elegant solution */
5926 if (l
== VT_CONST
&&
5927 (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
5928 /* global asm block */
5932 /* special test for old K&R protos without explicit int
5933 type. Only accepted when defining global data */
5934 if (l
== VT_LOCAL
|| tok
< TOK_DEFINE
)
5938 if (((btype
.t
& VT_BTYPE
) == VT_ENUM
||
5939 (btype
.t
& VT_BTYPE
) == VT_STRUCT
) &&
5941 /* we accept no variable after */
5945 while (1) { /* iterate thru each declaration */
5946 char *asm_label
; // associated asm label
5948 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
5952 type_to_str(buf
, sizeof(buf
), t
, get_tok_str(v
, NULL
));
5953 printf("type = '%s'\n", buf
);
5956 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
5957 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
5958 tcc_error("function without file scope cannot be static");
5960 /* if old style function prototype, we accept a
5963 if (sym
->c
== FUNC_OLD
)
5964 func_decl_list(sym
);
/* GNU extension: asm("name") label after the declarator. */
5968 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
5971 asm_label_instr(&astr
);
5972 asm_label
= tcc_strdup(astr
.data
);
5975 /* parse one last attribute list, after asm label */
5976 parse_attribute(&ad
);
5981 #ifdef TCC_TARGET_PE
5982 if (ad
.a
.func_import
)
5983 type
.t
|= VT_IMPORT
;
5984 if (ad
.a
.func_export
)
5985 type
.t
|= VT_EXPORT
;
5987 type
.t
|= ad
.a
.visibility
<< VT_VIS_SHIFT
;
/* Function definition path (body follows the declarator). */
5991 tcc_error("cannot use local functions");
5992 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
5993 expect("function definition");
5995 /* reject abstract declarators in function definition */
5997 while ((sym
= sym
->next
) != NULL
)
5998 if (!(sym
->v
& ~SYM_FIELD
))
5999 expect("identifier");
6001 /* XXX: cannot do better now: convert extern line to static inline */
6002 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
6003 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
/* Merge attributes from a prior prototype into this definition:
   calling convention, export, static, visibility. */
6008 if ((sym
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6011 ref
= sym
->type
.ref
;
6012 if (0 == ref
->a
.func_proto
)
6013 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6015 /* use func_call from prototype if not defined */
6016 if (ref
->a
.func_call
!= FUNC_CDECL
6017 && type
.ref
->a
.func_call
== FUNC_CDECL
)
6018 type
.ref
->a
.func_call
= ref
->a
.func_call
;
6020 /* use export from prototype */
6021 if (ref
->a
.func_export
)
6022 type
.ref
->a
.func_export
= 1;
6024 /* use static from prototype */
6025 if (sym
->type
.t
& VT_STATIC
)
6026 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
6028 /* If the definition has no visibility use the
6029 one from prototype. */
6030 if (! (type
.t
& VT_VIS_MASK
))
6031 type
.t
|= sym
->type
.t
& VT_VIS_MASK
;
6033 if (!is_compatible_types(&sym
->type
, &type
)) {
6035 tcc_error("incompatible types for redefinition of '%s'",
6036 get_tok_str(v
, NULL
));
6038 type
.ref
->a
.func_proto
= 0;
6039 /* if symbol is already defined, then put complete type */
6042 /* put function symbol */
6043 sym
= global_identifier_push(v
, type
.t
, 0);
6044 sym
->type
.ref
= type
.ref
;
6047 /* static inline functions are just recorded as a kind
6048 of macro. Their code will be emitted at the end of
6049 the compilation unit only if they are used */
6050 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
6051 (VT_INLINE
| VT_STATIC
)) {
6052 TokenString func_str
;
6054 struct InlineFunc
*fn
;
6055 const char *filename
;
/* Save the whole function body as a token string; it is replayed by
   gen_inline_functions() only if the function gets referenced. */
6057 tok_str_new(&func_str
);
6063 tcc_error("unexpected end of file");
6064 tok_str_add_tok(&func_str
);
6069 } else if (t
== '}') {
6071 if (block_level
== 0)
6075 tok_str_add(&func_str
, -1);
6076 tok_str_add(&func_str
, 0);
6077 filename
= file
? file
->filename
: "";
6078 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
6079 strcpy(fn
->filename
, filename
);
6081 fn
->token_str
= func_str
.str
;
6082 dynarray_add((void ***)&tcc_state
->inline_fns
, &tcc_state
->nb_inline_fns
, fn
);
6085 /* compute text section */
6086 cur_text_section
= ad
.section
;
6087 if (!cur_text_section
)
6088 cur_text_section
= text_section
;
6089 sym
->r
= VT_SYM
| VT_CONST
;
/* Non-function-definition path: typedefs, prototypes, and variables. */
6094 if (btype
.t
& VT_TYPEDEF
) {
6095 /* save typedefed type */
6096 /* XXX: test storage specifiers ? */
6097 sym
= sym_push(v
, &type
, 0, 0);
6099 sym
->type
.t
|= VT_TYPEDEF
;
6102 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
6103 /* external function definition */
6104 /* specific case for func_call attribute */
6105 ad
.a
.func_proto
= 1;
6107 } else if (!(type
.t
& VT_ARRAY
)) {
6108 /* not lvalue if array */
6109 r
|= lvalue_type(type
.t
);
6111 has_init
= (tok
== '=');
6112 if (has_init
&& (type
.t
& VT_VLA
))
6113 tcc_error("Variable length array cannot be initialized");
6114 if ((btype
.t
& VT_EXTERN
) || ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
6115 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
6116 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
6117 /* external variable or function */
6118 /* NOTE: as GCC, uninitialized global static
6119 arrays of null size are considered as
6121 sym
= external_sym(v
, &type
, r
, asm_label
);
/* __attribute__((alias("target"))): point this symbol at the target
   symbol's section/value/size instead of allocating storage. */
6123 if (ad
.alias_target
) {
6128 alias_target
= sym_find(ad
.alias_target
);
6129 if (!alias_target
|| !alias_target
->c
)
6130 tcc_error("unsupported forward __alias__ attribute");
6131 esym
= &((Elf32_Sym
*)symtab_section
->data
)[alias_target
->c
];
6132 tsec
.sh_num
= esym
->st_shndx
;
6133 put_extern_sym2(sym
, &tsec
, esym
->st_value
, esym
->st_size
, 0);
6136 type
.t
|= (btype
.t
& VT_STATIC
); /* Retain "static". */
6137 if (type
.t
& VT_STATIC
)
6143 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, asm_label
, l
);
6147 if (is_for_loop_init
)
6160 ST_FUNC
void decl(int l
)