2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
28 anon_sym: anonymous symbol index
30 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
32 ST_DATA Sym
*sym_free_first
;
33 ST_DATA
void **sym_pools
;
34 ST_DATA
int nb_sym_pools
;
36 ST_DATA Sym
*global_stack
;
37 ST_DATA Sym
*local_stack
;
38 ST_DATA Sym
*define_stack
;
39 ST_DATA Sym
*global_label_stack
;
40 ST_DATA Sym
*local_label_stack
;
42 static Sym
*all_cleanups
, *current_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int section_sym
;
49 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
50 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
51 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
53 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
55 ST_DATA
int const_wanted
; /* true if constant wanted */
56 ST_DATA
int nocode_wanted
; /* no code generation wanted */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
60 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
61 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
63 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
64 ST_DATA
const char *funcname
;
67 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
69 ST_DATA
struct switch_t
{
73 } **p
; int n
; /* list of case ranges */
74 int def_sym
; /* default symbol */
75 } *cur_switch
; /* current switch */
77 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
78 /*list of temporary local variables on the stack in current function. */
79 ST_DATA
struct temp_local_variable
{
80 int location
; //offset on stack. Svalue.c.i
83 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
84 short nb_temp_local_vars
;
86 /* ------------------------------------------------------------------------- */
88 static void gen_cast(CType
*type
);
89 static void gen_cast_s(int t
);
90 static inline CType
*pointed_type(CType
*type
);
91 static int is_compatible_types(CType
*type1
, CType
*type2
);
92 static int parse_btype(CType
*type
, AttributeDef
*ad
);
93 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
94 static void parse_expr_type(CType
*type
);
95 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
96 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
97 static void block(int *bsym
, int *csym
, int is_expr
);
98 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
99 static void decl(int l
);
100 static int decl0(int l
, int is_for_loop_init
, Sym
*);
101 static void expr_eq(void);
102 static void vla_runtime_type_size(CType
*type
, int *a
);
103 static void vla_sp_restore(void);
104 static void vla_sp_restore_root(void);
105 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
106 static inline int64_t expr_const64(void);
107 static void vpush64(int ty
, unsigned long long v
);
108 static void vpush(CType
*type
);
109 static int gvtst(int inv
, int t
);
110 static void gen_inline_functions(TCCState
*s
);
111 static void skip_or_save_block(TokenString
**str
);
112 static void gv_dup(void);
113 static int get_temp_local_var(int size
,int align
);
114 static void clear_temp_local_var_list();
117 static void reset_local_scope(void)
119 if (current_cleanups
)
120 tcc_error("ICE current_cleanups");
121 sym_pop(&all_cleanups
, NULL
, 0);
125 ST_INLN
int is_float(int t
)
129 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
132 /* we use our own 'finite' function to avoid potential problems with
133 non standard math libs */
134 /* XXX: endianness dependent */
135 ST_FUNC
int ieee_finite(double d
)
138 memcpy(p
, &d
, sizeof(double));
139 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
142 /* compiling intel long double natively */
143 #if (defined __i386__ || defined __x86_64__) \
144 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
145 # define TCC_IS_NATIVE_387
148 ST_FUNC
void test_lvalue(void)
150 if (!(vtop
->r
& VT_LVAL
))
154 ST_FUNC
void check_vstack(void)
157 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
160 /* ------------------------------------------------------------------------- */
161 /* vstack debugging aid */
164 void pv (const char *lbl
, int a
, int b
)
167 for (i
= a
; i
< a
+ b
; ++i
) {
168 SValue
*p
= &vtop
[-i
];
169 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
170 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
175 /* ------------------------------------------------------------------------- */
176 /* start of translation unit info */
177 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
182 /* file info: full path + filename */
183 section_sym
= put_elf_sym(symtab_section
, 0, 0,
184 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
185 text_section
->sh_num
, NULL
);
186 getcwd(buf
, sizeof(buf
));
188 normalize_slashes(buf
);
190 pstrcat(buf
, sizeof(buf
), "/");
191 put_stabs_r(buf
, N_SO
, 0, 0,
192 text_section
->data_offset
, text_section
, section_sym
);
193 put_stabs_r(file
->filename
, N_SO
, 0, 0,
194 text_section
->data_offset
, text_section
, section_sym
);
199 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
200 symbols can be safely used */
201 put_elf_sym(symtab_section
, 0, 0,
202 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
203 SHN_ABS
, file
->filename
);
206 /* put end of translation unit info */
207 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
211 put_stabs_r(NULL
, N_SO
, 0, 0,
212 text_section
->data_offset
, text_section
, section_sym
);
216 /* generate line number info */
217 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
221 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
222 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
224 last_line_num
= file
->line_num
;
228 /* put function symbol */
229 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
237 /* XXX: we put here a dummy type */
238 snprintf(buf
, sizeof(buf
), "%s:%c1",
239 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
240 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
241 cur_text_section
, sym
->c
);
242 /* //gr gdb wants a line at the function */
243 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
249 /* put function size */
250 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
254 put_stabn(N_FUN
, 0, 0, size
);
257 /* ------------------------------------------------------------------------- */
258 ST_FUNC
int tccgen_compile(TCCState
*s1
)
260 cur_text_section
= NULL
;
262 anon_sym
= SYM_FIRST_ANOM
;
265 nocode_wanted
= 0x80000000;
267 /* define some often used types */
269 char_pointer_type
.t
= VT_BYTE
;
270 mk_pointer(&char_pointer_type
);
272 size_type
.t
= VT_INT
| VT_UNSIGNED
;
273 ptrdiff_type
.t
= VT_INT
;
275 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
276 ptrdiff_type
.t
= VT_LLONG
;
278 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
279 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
281 func_old_type
.t
= VT_FUNC
;
282 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
283 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
284 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
288 #ifdef TCC_TARGET_ARM
293 printf("%s: **** new file\n", file
->filename
);
296 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
299 gen_inline_functions(s1
);
301 /* end of translation unit info */
306 /* ------------------------------------------------------------------------- */
307 ST_FUNC ElfSym
*elfsym(Sym
*s
)
311 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
314 /* apply storage attributes to Elf symbol */
315 ST_FUNC
void update_storage(Sym
*sym
)
318 int sym_bind
, old_sym_bind
;
324 if (sym
->a
.visibility
)
325 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
328 if (sym
->type
.t
& VT_STATIC
)
329 sym_bind
= STB_LOCAL
;
330 else if (sym
->a
.weak
)
333 sym_bind
= STB_GLOBAL
;
334 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
335 if (sym_bind
!= old_sym_bind
) {
336 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
340 if (sym
->a
.dllimport
)
341 esym
->st_other
|= ST_PE_IMPORT
;
342 if (sym
->a
.dllexport
)
343 esym
->st_other
|= ST_PE_EXPORT
;
347 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
348 get_tok_str(sym
->v
, NULL
),
349 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
357 /* ------------------------------------------------------------------------- */
358 /* update sym->c so that it points to an external symbol in section
359 'section' with value 'value' */
361 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
362 addr_t value
, unsigned long size
,
363 int can_add_underscore
)
365 int sym_type
, sym_bind
, info
, other
, t
;
369 #ifdef CONFIG_TCC_BCHECK
374 name
= get_tok_str(sym
->v
, NULL
);
375 #ifdef CONFIG_TCC_BCHECK
376 if (tcc_state
->do_bounds_check
) {
377 /* XXX: avoid doing that for statics ? */
378 /* if bound checking is activated, we change some function
379 names by adding the "__bound" prefix */
382 /* XXX: we rely only on malloc hooks */
395 strcpy(buf
, "__bound_");
403 if ((t
& VT_BTYPE
) == VT_FUNC
) {
405 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
406 sym_type
= STT_NOTYPE
;
408 sym_type
= STT_OBJECT
;
411 sym_bind
= STB_LOCAL
;
413 sym_bind
= STB_GLOBAL
;
416 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
417 Sym
*ref
= sym
->type
.ref
;
418 if (ref
->a
.nodecorate
) {
419 can_add_underscore
= 0;
421 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
422 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
424 other
|= ST_PE_STDCALL
;
425 can_add_underscore
= 0;
429 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
431 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
435 name
= get_tok_str(sym
->asm_label
, NULL
);
436 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
437 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
440 esym
->st_value
= value
;
441 esym
->st_size
= size
;
442 esym
->st_shndx
= sh_num
;
447 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
448 addr_t value
, unsigned long size
)
450 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
451 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
454 /* add a new relocation entry to symbol 'sym' in section 's' */
455 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
460 if (nocode_wanted
&& s
== cur_text_section
)
465 put_extern_sym(sym
, NULL
, 0, 0);
469 /* now we can add ELF relocation info */
470 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
474 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
476 greloca(s
, sym
, offset
, type
, 0);
480 /* ------------------------------------------------------------------------- */
481 /* symbol allocator */
482 static Sym
*__sym_malloc(void)
484 Sym
*sym_pool
, *sym
, *last_sym
;
487 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
488 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
490 last_sym
= sym_free_first
;
492 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
493 sym
->next
= last_sym
;
497 sym_free_first
= last_sym
;
501 static inline Sym
*sym_malloc(void)
505 sym
= sym_free_first
;
507 sym
= __sym_malloc();
508 sym_free_first
= sym
->next
;
511 sym
= tcc_malloc(sizeof(Sym
));
516 ST_INLN
void sym_free(Sym
*sym
)
519 sym
->next
= sym_free_first
;
520 sym_free_first
= sym
;
526 /* push, without hashing */
527 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
532 memset(s
, 0, sizeof *s
);
542 /* find a symbol and return its associated structure. 's' is the top
543 of the symbol stack */
544 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
556 /* structure lookup */
557 ST_INLN Sym
*struct_find(int v
)
560 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
562 return table_ident
[v
]->sym_struct
;
565 /* find an identifier */
566 ST_INLN Sym
*sym_find(int v
)
569 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
571 return table_ident
[v
]->sym_identifier
;
574 static int sym_scope(Sym
*s
)
576 if (IS_ENUM_VAL (s
->type
.t
))
577 return s
->type
.ref
->sym_scope
;
582 /* push a given symbol on the symbol stack */
583 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
592 s
= sym_push2(ps
, v
, type
->t
, c
);
593 s
->type
.ref
= type
->ref
;
595 /* don't record fields or anonymous symbols */
597 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
598 /* record symbol in token array */
599 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
601 ps
= &ts
->sym_struct
;
603 ps
= &ts
->sym_identifier
;
606 s
->sym_scope
= local_scope
;
607 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
608 tcc_error("redeclaration of '%s'",
609 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
614 /* push a global identifier */
615 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
618 s
= sym_push2(&global_stack
, v
, t
, c
);
619 /* don't record anonymous symbol */
620 if (v
< SYM_FIRST_ANOM
) {
621 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
622 /* modify the top most local identifier, so that
623 sym_identifier will point to 's' when popped */
624 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
625 ps
= &(*ps
)->prev_tok
;
632 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
633 pop them yet from the list, but do remove them from the token array. */
634 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
644 /* remove symbol in token array */
646 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
647 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
649 ps
= &ts
->sym_struct
;
651 ps
= &ts
->sym_identifier
;
662 /* ------------------------------------------------------------------------- */
664 static void vsetc(CType
*type
, int r
, CValue
*vc
)
668 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
669 tcc_error("memory full (vstack)");
670 /* cannot let cpu flags if other instruction are generated. Also
671 avoid leaving VT_JMP anywhere except on the top of the stack
672 because it would complicate the code generator.
674 Don't do this when nocode_wanted. vtop might come from
675 !nocode_wanted regions (see 88_codeopt.c) and transforming
676 it to a register without actually generating code is wrong
677 as their value might still be used for real. All values
678 we push under nocode_wanted will eventually be popped
679 again, so that the VT_CMP/VT_JMP value will be in vtop
680 when code is unsuppressed again.
682 Same logic below in vswap(); */
683 if (vtop
>= vstack
&& !nocode_wanted
) {
684 v
= vtop
->r
& VT_VALMASK
;
685 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
697 ST_FUNC
void vswap(void)
700 /* cannot vswap cpu flags. See comment at vsetc() above */
701 if (vtop
>= vstack
&& !nocode_wanted
) {
702 int v
= vtop
->r
& VT_VALMASK
;
703 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
711 /* pop stack value */
712 ST_FUNC
void vpop(void)
715 v
= vtop
->r
& VT_VALMASK
;
716 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
717 /* for x86, we need to pop the FP stack */
719 o(0xd8dd); /* fstp %st(0) */
722 if (v
== VT_JMP
|| v
== VT_JMPI
) {
723 /* need to put correct jump if && or || without test */
729 /* push constant of type "type" with useless value */
730 ST_FUNC
void vpush(CType
*type
)
732 vset(type
, VT_CONST
, 0);
735 /* push integer constant */
736 ST_FUNC
void vpushi(int v
)
740 vsetc(&int_type
, VT_CONST
, &cval
);
743 /* push a pointer sized constant */
744 static void vpushs(addr_t v
)
748 vsetc(&size_type
, VT_CONST
, &cval
);
751 /* push arbitrary 64bit constant */
752 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
759 vsetc(&ctype
, VT_CONST
, &cval
);
762 /* push long long constant */
763 static inline void vpushll(long long v
)
765 vpush64(VT_LLONG
, v
);
768 ST_FUNC
void vset(CType
*type
, int r
, int v
)
773 vsetc(type
, r
, &cval
);
776 static void vseti(int r
, int v
)
784 ST_FUNC
void vpushv(SValue
*v
)
786 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
787 tcc_error("memory full (vstack)");
792 static void vdup(void)
797 /* rotate n first stack elements to the bottom
798 I1 ... In -> I2 ... In I1 [top is right]
800 ST_FUNC
void vrotb(int n
)
811 /* rotate the n elements before entry e towards the top
812 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
814 ST_FUNC
void vrote(SValue
*e
, int n
)
820 for(i
= 0;i
< n
- 1; i
++)
825 /* rotate n first stack elements to the top
826 I1 ... In -> In I1 ... I(n-1) [top is right]
828 ST_FUNC
void vrott(int n
)
833 /* push a symbol value of TYPE */
834 static inline void vpushsym(CType
*type
, Sym
*sym
)
838 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
842 /* Return a static symbol pointing to a section */
843 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
849 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
850 sym
->type
.ref
= type
->ref
;
851 sym
->r
= VT_CONST
| VT_SYM
;
852 put_extern_sym(sym
, sec
, offset
, size
);
856 /* push a reference to a section offset by adding a dummy symbol */
857 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
859 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
862 /* define a new external reference to a symbol 'v' of type 'u' */
863 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
869 /* push forward reference */
870 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
871 s
->type
.ref
= type
->ref
;
872 s
->r
= r
| VT_CONST
| VT_SYM
;
873 } else if (IS_ASM_SYM(s
)) {
874 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
875 s
->type
.ref
= type
->ref
;
881 /* Merge symbol attributes. */
882 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
884 if (sa1
->aligned
&& !sa
->aligned
)
885 sa
->aligned
= sa1
->aligned
;
886 sa
->packed
|= sa1
->packed
;
887 sa
->weak
|= sa1
->weak
;
888 if (sa1
->visibility
!= STV_DEFAULT
) {
889 int vis
= sa
->visibility
;
890 if (vis
== STV_DEFAULT
891 || vis
> sa1
->visibility
)
892 vis
= sa1
->visibility
;
893 sa
->visibility
= vis
;
895 sa
->dllexport
|= sa1
->dllexport
;
896 sa
->nodecorate
|= sa1
->nodecorate
;
897 sa
->dllimport
|= sa1
->dllimport
;
900 /* Merge function attributes. */
901 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
903 if (fa1
->func_call
&& !fa
->func_call
)
904 fa
->func_call
= fa1
->func_call
;
905 if (fa1
->func_type
&& !fa
->func_type
)
906 fa
->func_type
= fa1
->func_type
;
907 if (fa1
->func_args
&& !fa
->func_args
)
908 fa
->func_args
= fa1
->func_args
;
911 /* Merge attributes. */
912 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
914 merge_symattr(&ad
->a
, &ad1
->a
);
915 merge_funcattr(&ad
->f
, &ad1
->f
);
918 ad
->section
= ad1
->section
;
919 if (ad1
->alias_target
)
920 ad
->alias_target
= ad1
->alias_target
;
922 ad
->asm_label
= ad1
->asm_label
;
924 ad
->attr_mode
= ad1
->attr_mode
;
927 /* Merge some type attributes. */
928 static void patch_type(Sym
*sym
, CType
*type
)
930 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
931 if (!(sym
->type
.t
& VT_EXTERN
))
932 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
933 sym
->type
.t
&= ~VT_EXTERN
;
936 if (IS_ASM_SYM(sym
)) {
937 /* stay static if both are static */
938 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
939 sym
->type
.ref
= type
->ref
;
942 if (!is_compatible_types(&sym
->type
, type
)) {
943 tcc_error("incompatible types for redefinition of '%s'",
944 get_tok_str(sym
->v
, NULL
));
946 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
947 int static_proto
= sym
->type
.t
& VT_STATIC
;
948 /* warn if static follows non-static function declaration */
949 if ((type
->t
& VT_STATIC
) && !static_proto
&& !(type
->t
& VT_INLINE
))
950 tcc_warning("static storage ignored for redefinition of '%s'",
951 get_tok_str(sym
->v
, NULL
));
953 if (0 == (type
->t
& VT_EXTERN
)) {
954 /* put complete type, use static from prototype */
955 sym
->type
.t
= (type
->t
& ~VT_STATIC
) | static_proto
;
956 if (type
->t
& VT_INLINE
)
957 sym
->type
.t
= type
->t
;
958 sym
->type
.ref
= type
->ref
;
962 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
963 /* set array size if it was omitted in extern declaration */
964 if (sym
->type
.ref
->c
< 0)
965 sym
->type
.ref
->c
= type
->ref
->c
;
966 else if (sym
->type
.ref
->c
!= type
->ref
->c
)
967 tcc_error("conflicting type for '%s'", get_tok_str(sym
->v
, NULL
));
969 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
970 tcc_warning("storage mismatch for redefinition of '%s'",
971 get_tok_str(sym
->v
, NULL
));
976 /* Merge some storage attributes. */
977 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
980 patch_type(sym
, type
);
983 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
984 tcc_error("incompatible dll linkage for redefinition of '%s'",
985 get_tok_str(sym
->v
, NULL
));
987 merge_symattr(&sym
->a
, &ad
->a
);
989 sym
->asm_label
= ad
->asm_label
;
993 /* define a new external reference to a symbol 'v' */
994 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
998 if (!s
|| (!IS_ASM_SYM(s
) && !(s
->type
.t
& VT_EXTERN
)
999 && (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)) {
1000 if (s
&& !is_compatible_types(&s
->type
, type
))
1001 tcc_error("conflicting types for '%s'", get_tok_str(s
->v
, NULL
));
1002 /* push forward reference */
1003 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
1004 s
->type
.t
|= VT_EXTERN
;
1008 if (s
->type
.ref
== func_old_type
.ref
) {
1009 s
->type
.ref
= type
->ref
;
1010 s
->r
= r
| VT_CONST
| VT_SYM
;
1011 s
->type
.t
|= VT_EXTERN
;
1013 patch_storage(s
, ad
, type
);
1018 /* push a reference to global symbol v */
1019 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1021 vpushsym(type
, external_global_sym(v
, type
, 0));
1024 /* save registers up to (vtop - n) stack entry */
1025 ST_FUNC
void save_regs(int n
)
1028 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1032 /* save r to the memory stack, and mark it as being free */
1033 ST_FUNC
void save_reg(int r
)
1035 save_reg_upstack(r
, 0);
1038 /* save r to the memory stack, and mark it as being free,
1039 if seen up to (vtop - n) stack entry */
1040 ST_FUNC
void save_reg_upstack(int r
, int n
)
1042 int l
, saved
, size
, align
;
1046 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1051 /* modify all stack values */
1054 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1055 if ((p
->r
& VT_VALMASK
) == r
||
1056 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
1057 /* must save value on stack if not already done */
1059 /* NOTE: must reload 'r' because r might be equal to r2 */
1060 r
= p
->r
& VT_VALMASK
;
1061 /* store register in the stack */
1063 if ((p
->r
& VT_LVAL
) ||
1064 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
1066 type
= &char_pointer_type
;
1070 size
= type_size(type
, &align
);
1071 l
=get_temp_local_var(size
,align
);
1072 sv
.type
.t
= type
->t
;
1073 sv
.r
= VT_LOCAL
| VT_LVAL
;
1076 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1077 /* x86 specific: need to pop fp register ST0 if saved */
1078 if (r
== TREG_ST0
) {
1079 o(0xd8dd); /* fstp %st(0) */
1083 /* special long long case */
1084 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
1091 /* mark that stack entry as being saved on the stack */
1092 if (p
->r
& VT_LVAL
) {
1093 /* also clear the bounded flag because the
1094 relocation address of the function was stored in
1096 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1098 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1106 #ifdef TCC_TARGET_ARM
1107 /* find a register of class 'rc2' with at most one reference on stack.
1108 * If none, call get_reg(rc) */
1109 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1114 for(r
=0;r
<NB_REGS
;r
++) {
1115 if (reg_classes
[r
] & rc2
) {
1118 for(p
= vstack
; p
<= vtop
; p
++) {
1119 if ((p
->r
& VT_VALMASK
) == r
||
1120 (p
->r2
& VT_VALMASK
) == r
)
1131 /* find a free register of class 'rc'. If none, save one register */
1132 ST_FUNC
int get_reg(int rc
)
1137 /* find a free register */
1138 for(r
=0;r
<NB_REGS
;r
++) {
1139 if (reg_classes
[r
] & rc
) {
1142 for(p
=vstack
;p
<=vtop
;p
++) {
1143 if ((p
->r
& VT_VALMASK
) == r
||
1144 (p
->r2
& VT_VALMASK
) == r
)
1152 /* no register left : free the first one on the stack (VERY
1153 IMPORTANT to start from the bottom to ensure that we don't
1154 spill registers used in gen_opi()) */
1155 for(p
=vstack
;p
<=vtop
;p
++) {
1156 /* look at second register (if long long) */
1157 r
= p
->r2
& VT_VALMASK
;
1158 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1160 r
= p
->r
& VT_VALMASK
;
1161 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1167 /* Should never comes here */
1171 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1172 static int get_temp_local_var(int size
,int align
){
1174 struct temp_local_variable
*temp_var
;
1181 for(i
=0;i
<nb_temp_local_vars
;i
++){
1182 temp_var
=&arr_temp_local_vars
[i
];
1183 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1186 /*check if temp_var is free*/
1188 for(p
=vstack
;p
<=vtop
;p
++) {
1190 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1191 if(p
->c
.i
==temp_var
->location
){
1198 found_var
=temp_var
->location
;
1204 loc
= (loc
- size
) & -align
;
1205 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1206 temp_var
=&arr_temp_local_vars
[i
];
1207 temp_var
->location
=loc
;
1208 temp_var
->size
=size
;
1209 temp_var
->align
=align
;
1210 nb_temp_local_vars
++;
1217 static void clear_temp_local_var_list(){
1218 nb_temp_local_vars
=0;
1221 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1223 static void move_reg(int r
, int s
, int t
)
1237 /* get address of vtop (vtop MUST BE an lvalue) */
1238 ST_FUNC
void gaddrof(void)
1240 vtop
->r
&= ~VT_LVAL
;
1241 /* tricky: if saved lvalue, then we can go back to lvalue */
1242 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1243 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1248 #ifdef CONFIG_TCC_BCHECK
1249 /* generate lvalue bound code */
1250 static void gbound(void)
1255 vtop
->r
&= ~VT_MUSTBOUND
;
1256 /* if lvalue, then use checking code before dereferencing */
1257 if (vtop
->r
& VT_LVAL
) {
1258 /* if not VT_BOUNDED value, then make one */
1259 if (!(vtop
->r
& VT_BOUNDED
)) {
1260 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1261 /* must save type because we must set it to int to get pointer */
1263 vtop
->type
.t
= VT_PTR
;
1266 gen_bounded_ptr_add();
1267 vtop
->r
|= lval_type
;
1270 /* then check for dereferencing */
1271 gen_bounded_ptr_deref();
1276 static void incr_bf_adr(int o
)
1278 vtop
->type
= char_pointer_type
;
1282 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1283 | (VT_BYTE
|VT_UNSIGNED
);
1284 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1285 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1288 /* single-byte load mode for packed or otherwise unaligned bitfields */
1289 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1292 save_reg_upstack(vtop
->r
, 1);
1293 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1294 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1303 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1305 vpushi((1 << n
) - 1), gen_op('&');
1308 vpushi(bits
), gen_op(TOK_SHL
);
1311 bits
+= n
, bit_size
-= n
, o
= 1;
1314 if (!(type
->t
& VT_UNSIGNED
)) {
1315 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1316 vpushi(n
), gen_op(TOK_SHL
);
1317 vpushi(n
), gen_op(TOK_SAR
);
1321 /* single-byte store mode for packed or otherwise unaligned bitfields */
1322 static void store_packed_bf(int bit_pos
, int bit_size
)
1324 int bits
, n
, o
, m
, c
;
1326 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1328 save_reg_upstack(vtop
->r
, 1);
1329 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1331 incr_bf_adr(o
); // X B
1333 c
? vdup() : gv_dup(); // B V X
1336 vpushi(bits
), gen_op(TOK_SHR
);
1338 vpushi(bit_pos
), gen_op(TOK_SHL
);
1343 m
= ((1 << n
) - 1) << bit_pos
;
1344 vpushi(m
), gen_op('&'); // X B V1
1345 vpushv(vtop
-1); // X B V1 B
1346 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1347 gen_op('&'); // X B V1 B1
1348 gen_op('|'); // X B V2
1350 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1351 vstore(), vpop(); // X B
1352 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1357 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1360 if (0 == sv
->type
.ref
)
1362 t
= sv
->type
.ref
->auxtype
;
1363 if (t
!= -1 && t
!= VT_STRUCT
) {
1364 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1365 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1370 /* store vtop a register belonging to class 'rc'. lvalues are
1371 converted to values. Cannot be used if cannot be converted to
1372 register value (such as structures). */
1373 ST_FUNC
int gv(int rc
)
1375 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1377 /* NOTE: get_reg can modify vstack[] */
1378 if (vtop
->type
.t
& VT_BITFIELD
) {
1381 bit_pos
= BIT_POS(vtop
->type
.t
);
1382 bit_size
= BIT_SIZE(vtop
->type
.t
);
1383 /* remove bit field info to avoid loops */
1384 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1387 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1388 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1389 type
.t
|= VT_UNSIGNED
;
1391 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1393 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1398 if (r
== VT_STRUCT
) {
1399 load_packed_bf(&type
, bit_pos
, bit_size
);
1401 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1402 /* cast to int to propagate signedness in following ops */
1404 /* generate shifts */
1405 vpushi(bits
- (bit_pos
+ bit_size
));
1407 vpushi(bits
- bit_size
);
1408 /* NOTE: transformed to SHR if unsigned */
1413 if (is_float(vtop
->type
.t
) &&
1414 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1415 unsigned long offset
;
1416 /* CPUs usually cannot use float constants, so we store them
1417 generically in data segment */
1418 size
= type_size(&vtop
->type
, &align
);
1420 size
= 0, align
= 1;
1421 offset
= section_add(data_section
, size
, align
);
1422 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1424 init_putv(&vtop
->type
, data_section
, offset
);
1427 #ifdef CONFIG_TCC_BCHECK
1428 if (vtop
->r
& VT_MUSTBOUND
)
1432 r
= vtop
->r
& VT_VALMASK
;
1433 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1434 #ifndef TCC_TARGET_ARM64
1437 #ifdef TCC_TARGET_X86_64
1438 else if (rc
== RC_FRET
)
1442 /* need to reload if:
1444 - lvalue (need to dereference pointer)
1445 - already a register, but not in the right class */
1447 || (vtop
->r
& VT_LVAL
)
1448 || !(reg_classes
[r
] & rc
)
1450 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1451 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1453 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1459 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1460 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1462 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1463 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1464 unsigned long long ll
;
1466 int r2
, original_type
;
1467 original_type
= vtop
->type
.t
;
1468 /* two register type load : expand to two words
1471 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1474 vtop
->c
.i
= ll
; /* first word */
1476 vtop
->r
= r
; /* save register value */
1477 vpushi(ll
>> 32); /* second word */
1480 if (vtop
->r
& VT_LVAL
) {
1481 /* We do not want to modifier the long long
1482 pointer here, so the safest (and less
1483 efficient) is to save all the other registers
1484 in the stack. XXX: totally inefficient. */
1488 /* lvalue_save: save only if used further down the stack */
1489 save_reg_upstack(vtop
->r
, 1);
1491 /* load from memory */
1492 vtop
->type
.t
= load_type
;
1495 vtop
[-1].r
= r
; /* save register value */
1496 /* increment pointer to get second word */
1497 vtop
->type
.t
= addr_type
;
1502 vtop
->type
.t
= load_type
;
1504 /* move registers */
1507 vtop
[-1].r
= r
; /* save register value */
1508 vtop
->r
= vtop
[-1].r2
;
1510 /* Allocate second register. Here we rely on the fact that
1511 get_reg() tries first to free r2 of an SValue. */
1515 /* write second register */
1517 vtop
->type
.t
= original_type
;
1518 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1520 /* lvalue of scalar type : need to use lvalue type
1521 because of possible cast */
1524 /* compute memory access type */
1525 if (vtop
->r
& VT_LVAL_BYTE
)
1527 else if (vtop
->r
& VT_LVAL_SHORT
)
1529 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1533 /* restore wanted type */
1536 /* one register type load */
1541 #ifdef TCC_TARGET_C67
1542 /* uses register pairs for doubles */
1543 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1550 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1551 ST_FUNC
void gv2(int rc1
, int rc2
)
1555 /* generate more generic register first. But VT_JMP or VT_CMP
1556 values must be generated first in all cases to avoid possible
1558 v
= vtop
[0].r
& VT_VALMASK
;
1559 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1564 /* test if reload is needed for first register */
1565 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1575 /* test if reload is needed for first register */
1576 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1582 #ifndef TCC_TARGET_ARM64
1583 /* wrapper around RC_FRET to return a register by type */
1584 static int rc_fret(int t
)
1586 #ifdef TCC_TARGET_X86_64
1587 if (t
== VT_LDOUBLE
) {
1595 /* wrapper around REG_FRET to return a register by type */
1596 static int reg_fret(int t
)
1598 #ifdef TCC_TARGET_X86_64
1599 if (t
== VT_LDOUBLE
) {
1607 /* expand 64bit on stack in two ints */
1608 ST_FUNC
void lexpand(void)
1611 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1612 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1613 if (v
== VT_CONST
) {
1616 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1622 vtop
[0].r
= vtop
[-1].r2
;
1623 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1625 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1630 /* build a long long from two ints */
1631 static void lbuild(int t
)
1633 gv2(RC_INT
, RC_INT
);
1634 vtop
[-1].r2
= vtop
[0].r
;
1635 vtop
[-1].type
.t
= t
;
1640 /* convert stack entry to register and duplicate its value in another
1642 static void gv_dup(void)
1649 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1650 if (t
& VT_BITFIELD
) {
1660 /* stack: H L L1 H1 */
1670 /* duplicate value */
1675 #ifdef TCC_TARGET_X86_64
1676 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1686 load(r1
, &sv
); /* move r to r1 */
1688 /* duplicates value */
1694 /* Generate value test
1696 * Generate a test for any value (jump, comparison and integers) */
1697 ST_FUNC
int gvtst(int inv
, int t
)
1699 int v
= vtop
->r
& VT_VALMASK
;
1700 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1704 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1705 /* constant jmp optimization */
1706 if ((vtop
->c
.i
!= 0) != inv
)
1711 return gtst(inv
, t
);
1715 /* generate CPU independent (unsigned) long long operations */
1716 static void gen_opl(int op
)
1718 int t
, a
, b
, op1
, c
, i
;
1720 unsigned short reg_iret
= REG_IRET
;
1721 unsigned short reg_lret
= REG_LRET
;
1727 func
= TOK___divdi3
;
1730 func
= TOK___udivdi3
;
1733 func
= TOK___moddi3
;
1736 func
= TOK___umoddi3
;
1743 /* call generic long long function */
1744 vpush_global_sym(&func_old_type
, func
);
1749 vtop
->r2
= reg_lret
;
1757 //pv("gen_opl A",0,2);
1763 /* stack: L1 H1 L2 H2 */
1768 vtop
[-2] = vtop
[-3];
1771 /* stack: H1 H2 L1 L2 */
1772 //pv("gen_opl B",0,4);
1778 /* stack: H1 H2 L1 L2 ML MH */
1781 /* stack: ML MH H1 H2 L1 L2 */
1785 /* stack: ML MH H1 L2 H2 L1 */
1790 /* stack: ML MH M1 M2 */
1793 } else if (op
== '+' || op
== '-') {
1794 /* XXX: add non carry method too (for MIPS or alpha) */
1800 /* stack: H1 H2 (L1 op L2) */
1803 gen_op(op1
+ 1); /* TOK_xxxC2 */
1806 /* stack: H1 H2 (L1 op L2) */
1809 /* stack: (L1 op L2) H1 H2 */
1811 /* stack: (L1 op L2) (H1 op H2) */
1819 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1820 t
= vtop
[-1].type
.t
;
1824 /* stack: L H shift */
1826 /* constant: simpler */
1827 /* NOTE: all comments are for SHL. the other cases are
1828 done by swapping words */
1839 if (op
!= TOK_SAR
) {
1872 /* XXX: should provide a faster fallback on x86 ? */
1875 func
= TOK___ashrdi3
;
1878 func
= TOK___lshrdi3
;
1881 func
= TOK___ashldi3
;
1887 /* compare operations */
1893 /* stack: L1 H1 L2 H2 */
1895 vtop
[-1] = vtop
[-2];
1897 /* stack: L1 L2 H1 H2 */
1900 /* when values are equal, we need to compare low words. since
1901 the jump is inverted, we invert the test too. */
1904 else if (op1
== TOK_GT
)
1906 else if (op1
== TOK_ULT
)
1908 else if (op1
== TOK_UGT
)
1918 /* generate non equal test */
1924 /* compare low. Always unsigned */
1928 else if (op1
== TOK_LE
)
1930 else if (op1
== TOK_GT
)
1932 else if (op1
== TOK_GE
)
/* Emulate signed 64-bit division on unsigned operands: divide the
   magnitudes, then restore the sign of the quotient (truncates toward
   zero, matching C semantics). */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? -a : a;   /* |a| */
    uint64_t ub = (b >> 63) ? -b : b;   /* |b| */
    uint64_t q = ua / ub;
    /* quotient is negative iff operand signs differ */
    return ((a ^ b) >> 63) ? -q : q;
}
/* Signed 64-bit '<' implemented with unsigned compares: XOR-ing the
   sign bit maps signed order onto unsigned order. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign = (uint64_t)1 << 63;
    return (a ^ sign) < (b ^ sign);
}
1954 /* handle integer constant optimizations and various machine
1956 static void gen_opic(int op
)
1958 SValue
*v1
= vtop
- 1;
1960 int t1
= v1
->type
.t
& VT_BTYPE
;
1961 int t2
= v2
->type
.t
& VT_BTYPE
;
1962 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1963 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1964 uint64_t l1
= c1
? v1
->c
.i
: 0;
1965 uint64_t l2
= c2
? v2
->c
.i
: 0;
1966 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1968 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1969 l1
= ((uint32_t)l1
|
1970 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1971 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1972 l2
= ((uint32_t)l2
|
1973 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1977 case '+': l1
+= l2
; break;
1978 case '-': l1
-= l2
; break;
1979 case '&': l1
&= l2
; break;
1980 case '^': l1
^= l2
; break;
1981 case '|': l1
|= l2
; break;
1982 case '*': l1
*= l2
; break;
1989 /* if division by zero, generate explicit division */
1992 tcc_error("division by zero in constant");
1996 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1997 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1998 case TOK_UDIV
: l1
= l1
/ l2
; break;
1999 case TOK_UMOD
: l1
= l1
% l2
; break;
2002 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2003 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2005 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2008 case TOK_ULT
: l1
= l1
< l2
; break;
2009 case TOK_UGE
: l1
= l1
>= l2
; break;
2010 case TOK_EQ
: l1
= l1
== l2
; break;
2011 case TOK_NE
: l1
= l1
!= l2
; break;
2012 case TOK_ULE
: l1
= l1
<= l2
; break;
2013 case TOK_UGT
: l1
= l1
> l2
; break;
2014 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2015 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2016 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2017 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2019 case TOK_LAND
: l1
= l1
&& l2
; break;
2020 case TOK_LOR
: l1
= l1
|| l2
; break;
2024 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2025 l1
= ((uint32_t)l1
|
2026 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2030 /* if commutative ops, put c2 as constant */
2031 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2032 op
== '|' || op
== '*')) {
2034 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2035 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2037 if (!const_wanted
&&
2039 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2040 (l1
== -1 && op
== TOK_SAR
))) {
2041 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2043 } else if (!const_wanted
&&
2044 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2046 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2047 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2048 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2053 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2056 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2057 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2060 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2061 /* filter out NOP operations like x*1, x-0, x&-1... */
2063 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2064 /* try to use shifts instead of muls or divs */
2065 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2074 else if (op
== TOK_PDIV
)
2080 } else if (c2
&& (op
== '+' || op
== '-') &&
2081 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2082 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2083 /* symbol + constant case */
2087 /* The backends can't always deal with addends to symbols
2088 larger than +-1<<31. Don't construct such. */
2095 /* call low level op generator */
2096 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2097 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2105 /* generate a floating point operation with constant propagation */
2106 static void gen_opif(int op
)
2110 #if defined _MSC_VER && defined _AMD64_
2111 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2118 /* currently, we cannot do computations with forward symbols */
2119 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2120 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2122 if (v1
->type
.t
== VT_FLOAT
) {
2125 } else if (v1
->type
.t
== VT_DOUBLE
) {
2133 /* NOTE: we only do constant propagation if finite number (not
2134 NaN or infinity) (ANSI spec) */
2135 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2139 case '+': f1
+= f2
; break;
2140 case '-': f1
-= f2
; break;
2141 case '*': f1
*= f2
; break;
2144 /* If not in initializer we need to potentially generate
2145 FP exceptions at runtime, otherwise we want to fold. */
2151 /* XXX: also handles tests ? */
2155 /* XXX: overflow test ? */
2156 if (v1
->type
.t
== VT_FLOAT
) {
2158 } else if (v1
->type
.t
== VT_DOUBLE
) {
2170 static int pointed_size(CType
*type
)
2173 return type_size(pointed_type(type
), &align
);
2176 static void vla_runtime_pointed_size(CType
*type
)
2179 vla_runtime_type_size(pointed_type(type
), &align
);
2182 static inline int is_null_pointer(SValue
*p
)
2184 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2186 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2187 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2188 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2189 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2190 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2191 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2194 static inline int is_integer_btype(int bt
)
2196 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2197 bt
== VT_INT
|| bt
== VT_LLONG
);
2200 /* check types for comparison or subtraction of pointers */
2201 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2203 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2206 /* null pointers are accepted for all comparisons as gcc */
2207 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2211 bt1
= type1
->t
& VT_BTYPE
;
2212 bt2
= type2
->t
& VT_BTYPE
;
2213 /* accept comparison between pointer and integer with a warning */
2214 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2215 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2216 tcc_warning("comparison between pointer and integer");
2220 /* both must be pointers or implicit function pointers */
2221 if (bt1
== VT_PTR
) {
2222 type1
= pointed_type(type1
);
2223 } else if (bt1
!= VT_FUNC
)
2224 goto invalid_operands
;
2226 if (bt2
== VT_PTR
) {
2227 type2
= pointed_type(type2
);
2228 } else if (bt2
!= VT_FUNC
) {
2230 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2232 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2233 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2237 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2238 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2239 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2240 /* gcc-like error if '-' is used */
2242 goto invalid_operands
;
2244 tcc_warning("comparison of distinct pointer types lacks a cast");
2248 /* generic gen_op: handles types problems */
2249 ST_FUNC
void gen_op(int op
)
2251 int u
, t1
, t2
, bt1
, bt2
, t
;
2255 t1
= vtop
[-1].type
.t
;
2256 t2
= vtop
[0].type
.t
;
2257 bt1
= t1
& VT_BTYPE
;
2258 bt2
= t2
& VT_BTYPE
;
2260 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2261 tcc_error("operation on a struct");
2262 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2263 if (bt2
== VT_FUNC
) {
2264 mk_pointer(&vtop
->type
);
2267 if (bt1
== VT_FUNC
) {
2269 mk_pointer(&vtop
->type
);
2274 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2275 /* at least one operand is a pointer */
2276 /* relational op: must be both pointers */
2277 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2278 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2279 /* pointers are handled are unsigned */
2281 t
= VT_LLONG
| VT_UNSIGNED
;
2283 t
= VT_INT
| VT_UNSIGNED
;
2287 /* if both pointers, then it must be the '-' op */
2288 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2290 tcc_error("cannot use pointers here");
2291 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2292 /* XXX: check that types are compatible */
2293 if (vtop
[-1].type
.t
& VT_VLA
) {
2294 vla_runtime_pointed_size(&vtop
[-1].type
);
2296 vpushi(pointed_size(&vtop
[-1].type
));
2300 vtop
->type
.t
= ptrdiff_type
.t
;
2304 /* exactly one pointer : must be '+' or '-'. */
2305 if (op
!= '-' && op
!= '+')
2306 tcc_error("cannot use pointers here");
2307 /* Put pointer as first operand */
2308 if (bt2
== VT_PTR
) {
2310 t
= t1
, t1
= t2
, t2
= t
;
2313 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2314 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2317 type1
= vtop
[-1].type
;
2318 type1
.t
&= ~VT_ARRAY
;
2319 if (vtop
[-1].type
.t
& VT_VLA
)
2320 vla_runtime_pointed_size(&vtop
[-1].type
);
2322 u
= pointed_size(&vtop
[-1].type
);
2324 tcc_error("unknown array element size");
2328 /* XXX: cast to int ? (long long case) */
2334 /* #ifdef CONFIG_TCC_BCHECK
2335 The main reason to removing this code:
2342 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2343 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2345 When this code is on. then the output looks like
2347 v+(i-j) = 0xbff84000
2349 /* if evaluating constant expression, no code should be
2350 generated, so no bound check */
2351 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2352 /* if bounded pointers, we generate a special code to
2359 gen_bounded_ptr_add();
2365 /* put again type if gen_opic() swaped operands */
2368 } else if (is_float(bt1
) || is_float(bt2
)) {
2369 /* compute bigger type and do implicit casts */
2370 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2372 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2377 /* floats can only be used for a few operations */
2378 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2379 (op
< TOK_ULT
|| op
> TOK_GT
))
2380 tcc_error("invalid operands for binary operation");
2382 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2383 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2384 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2386 t
|= (VT_LONG
& t1
);
2388 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2389 /* cast to biggest op */
2390 t
= VT_LLONG
| VT_LONG
;
2391 if (bt1
== VT_LLONG
)
2393 if (bt2
== VT_LLONG
)
2395 /* convert to unsigned if it does not fit in a long long */
2396 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2397 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2401 /* integer operations */
2402 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2403 /* convert to unsigned if it does not fit in an integer */
2404 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2405 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2408 /* XXX: currently, some unsigned operations are explicit, so
2409 we modify them here */
2410 if (t
& VT_UNSIGNED
) {
2417 else if (op
== TOK_LT
)
2419 else if (op
== TOK_GT
)
2421 else if (op
== TOK_LE
)
2423 else if (op
== TOK_GE
)
2431 /* special case for shifts and long long: we keep the shift as
2433 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2440 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2441 /* relational op: the result is an int */
2442 vtop
->type
.t
= VT_INT
;
2447 // Make sure that we have converted to an rvalue:
2448 if (vtop
->r
& VT_LVAL
)
2449 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2452 #ifndef TCC_TARGET_ARM
2453 /* generic itof for unsigned long long case */
2454 static void gen_cvt_itof1(int t
)
2456 #ifdef TCC_TARGET_ARM64
2459 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2460 (VT_LLONG
| VT_UNSIGNED
)) {
2463 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2464 #if LDOUBLE_SIZE != 8
2465 else if (t
== VT_LDOUBLE
)
2466 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2469 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2473 vtop
->r
= reg_fret(t
);
2481 /* generic ftoi for unsigned long long case */
2482 static void gen_cvt_ftoi1(int t
)
2484 #ifdef TCC_TARGET_ARM64
2489 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2490 /* not handled natively */
2491 st
= vtop
->type
.t
& VT_BTYPE
;
2493 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2494 #if LDOUBLE_SIZE != 8
2495 else if (st
== VT_LDOUBLE
)
2496 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2499 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2504 vtop
->r2
= REG_LRET
;
2511 /* force char or short cast */
2512 static void force_charshort_cast(int t
)
2516 /* cannot cast static initializers */
2517 if (STATIC_DATA_WANTED
)
2521 /* XXX: add optimization if lvalue : just change type and offset */
2526 if (t
& VT_UNSIGNED
) {
2527 vpushi((1 << bits
) - 1);
2530 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2536 /* result must be signed or the SAR is converted to an SHL
2537 This was not the case when "t" was a signed short
2538 and the last value on the stack was an unsigned int */
2539 vtop
->type
.t
&= ~VT_UNSIGNED
;
2545 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2546 static void gen_cast_s(int t
)
2554 static void gen_cast(CType
*type
)
2556 int sbt
, dbt
, sf
, df
, c
, p
;
2558 /* special delayed cast for char/short */
2559 /* XXX: in some cases (multiple cascaded casts), it may still
2561 if (vtop
->r
& VT_MUSTCAST
) {
2562 vtop
->r
&= ~VT_MUSTCAST
;
2563 force_charshort_cast(vtop
->type
.t
);
2566 /* bitfields first get cast to ints */
2567 if (vtop
->type
.t
& VT_BITFIELD
) {
2571 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2572 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2577 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2578 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2579 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2580 c
&= dbt
!= VT_LDOUBLE
;
2583 /* constant case: we can do it now */
2584 /* XXX: in ISOC, cannot do it if error in convert */
2585 if (sbt
== VT_FLOAT
)
2586 vtop
->c
.ld
= vtop
->c
.f
;
2587 else if (sbt
== VT_DOUBLE
)
2588 vtop
->c
.ld
= vtop
->c
.d
;
2591 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2592 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2593 vtop
->c
.ld
= vtop
->c
.i
;
2595 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2597 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2598 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2600 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2603 if (dbt
== VT_FLOAT
)
2604 vtop
->c
.f
= (float)vtop
->c
.ld
;
2605 else if (dbt
== VT_DOUBLE
)
2606 vtop
->c
.d
= (double)vtop
->c
.ld
;
2607 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2608 vtop
->c
.i
= vtop
->c
.ld
;
2609 } else if (sf
&& dbt
== VT_BOOL
) {
2610 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2613 vtop
->c
.i
= vtop
->c
.ld
;
2614 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2616 else if (sbt
& VT_UNSIGNED
)
2617 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2619 else if (sbt
== VT_PTR
)
2622 else if (sbt
!= VT_LLONG
)
2623 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2624 -(vtop
->c
.i
& 0x80000000));
2626 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2628 else if (dbt
== VT_BOOL
)
2629 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2631 else if (dbt
== VT_PTR
)
2634 else if (dbt
!= VT_LLONG
) {
2635 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2636 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2639 if (!(dbt
& VT_UNSIGNED
))
2640 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2643 } else if (p
&& dbt
== VT_BOOL
) {
2647 /* non constant case: generate code */
2649 /* convert from fp to fp */
2652 /* convert int to fp */
2655 /* convert fp to int */
2656 if (dbt
== VT_BOOL
) {
2660 /* we handle char/short/etc... with generic code */
2661 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2662 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2666 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2667 /* additional cast for char/short... */
2673 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2674 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2675 /* scalar to long long */
2676 /* machine independent conversion */
2678 /* generate high word */
2679 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2683 if (sbt
== VT_PTR
) {
2684 /* cast from pointer to int before we apply
2685 shift operation, which pointers don't support*/
2692 /* patch second register */
2693 vtop
[-1].r2
= vtop
->r
;
2697 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2698 (dbt
& VT_BTYPE
) == VT_PTR
||
2699 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2700 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2701 (sbt
& VT_BTYPE
) != VT_PTR
&&
2702 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2703 /* need to convert from 32bit to 64bit */
2705 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2706 #if defined(TCC_TARGET_ARM64)
2708 #elif defined(TCC_TARGET_X86_64)
2710 /* x86_64 specific: movslq */
2712 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2719 } else if (dbt
== VT_BOOL
) {
2720 /* scalar to bool */
2723 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2724 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2725 if (sbt
== VT_PTR
) {
2726 vtop
->type
.t
= VT_INT
;
2727 tcc_warning("nonportable conversion from pointer to char/short");
2729 force_charshort_cast(dbt
);
2730 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2732 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2734 /* from long long: just take low order word */
2739 vtop
->type
.t
|= VT_UNSIGNED
;
2743 /* if lvalue and single word type, nothing to do because
2744 the lvalue already contains the real type size (see
2745 VT_LVAL_xxx constants) */
2748 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2749 /* if we are casting between pointer types,
2750 we must update the VT_LVAL_xxx size */
2751 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2752 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2755 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
2758 /* return type size as known at compile time. Put alignment at 'a' */
2759 ST_FUNC
int type_size(CType
*type
, int *a
)
2764 bt
= type
->t
& VT_BTYPE
;
2765 if (bt
== VT_STRUCT
) {
2770 } else if (bt
== VT_PTR
) {
2771 if (type
->t
& VT_ARRAY
) {
2775 ts
= type_size(&s
->type
, a
);
2777 if (ts
< 0 && s
->c
< 0)
2785 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
2786 return -1; /* incomplete enum */
2787 } else if (bt
== VT_LDOUBLE
) {
2789 return LDOUBLE_SIZE
;
2790 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2791 #ifdef TCC_TARGET_I386
2792 #ifdef TCC_TARGET_PE
2797 #elif defined(TCC_TARGET_ARM)
2807 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2810 } else if (bt
== VT_SHORT
) {
2813 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2817 /* char, void, function, _Bool */
2823 /* push type size as known at runtime time on top of value stack. Put
2825 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2827 if (type
->t
& VT_VLA
) {
2828 type_size(&type
->ref
->type
, a
);
2829 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2831 vpushi(type_size(type
, a
));
2835 static void vla_sp_restore(void) {
2836 if (vlas_in_scope
) {
2837 gen_vla_sp_restore(vla_sp_loc
);
2841 static void vla_sp_restore_root(void) {
2842 if (vlas_in_scope
) {
2843 gen_vla_sp_restore(vla_sp_root_loc
);
2847 /* return the pointed type of t */
2848 static inline CType
*pointed_type(CType
*type
)
2850 return &type
->ref
->type
;
2853 /* modify type so that its it is a pointer to type. */
2854 ST_FUNC
void mk_pointer(CType
*type
)
2857 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2858 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2862 /* compare function types. OLD functions match any new functions */
2863 static int is_compatible_func(CType
*type1
, CType
*type2
)
2869 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2871 /* check func_call */
2872 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2874 /* XXX: not complete */
2875 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2877 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2879 while (s1
!= NULL
) {
2882 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2892 /* return true if type1 and type2 are the same. If unqualified is
2893 true, qualifiers on the types are ignored.
2895 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2899 t1
= type1
->t
& VT_TYPE
;
2900 t2
= type2
->t
& VT_TYPE
;
2902 /* strip qualifiers before comparing */
2903 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2904 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2907 /* Default Vs explicit signedness only matters for char */
2908 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2912 /* XXX: bitfields ? */
2915 /* test more complicated cases */
2916 bt1
= t1
& (VT_BTYPE
| VT_ARRAY
);
2917 if (bt1
== VT_PTR
) {
2918 type1
= pointed_type(type1
);
2919 type2
= pointed_type(type2
);
2920 return is_compatible_types(type1
, type2
);
2921 } else if (bt1
& VT_ARRAY
) {
2922 return type1
->ref
->c
< 0 || type2
->ref
->c
< 0
2923 || type1
->ref
->c
== type2
->ref
->c
;
2924 } else if (bt1
== VT_STRUCT
) {
2925 return (type1
->ref
== type2
->ref
);
2926 } else if (bt1
== VT_FUNC
) {
2927 return is_compatible_func(type1
, type2
);
2928 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
2929 return type1
->ref
== type2
->ref
;
2935 /* return true if type1 and type2 are exactly the same (including
2938 static int is_compatible_types(CType
*type1
, CType
*type2
)
2940 return compare_types(type1
,type2
,0);
2943 /* return true if type1 and type2 are the same (ignoring qualifiers).
2945 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2947 return compare_types(type1
,type2
,1);
2950 /* print a type. If 'varstr' is not NULL, then the variable is also
2951 printed in the type */
2953 /* XXX: add array and function pointers */
2954 static void type_to_str(char *buf
, int buf_size
,
2955 CType
*type
, const char *varstr
)
2967 pstrcat(buf
, buf_size
, "extern ");
2969 pstrcat(buf
, buf_size
, "static ");
2971 pstrcat(buf
, buf_size
, "typedef ");
2973 pstrcat(buf
, buf_size
, "inline ");
2974 if (t
& VT_VOLATILE
)
2975 pstrcat(buf
, buf_size
, "volatile ");
2976 if (t
& VT_CONSTANT
)
2977 pstrcat(buf
, buf_size
, "const ");
2979 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2980 || ((t
& VT_UNSIGNED
)
2981 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2984 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2986 buf_size
-= strlen(buf
);
3021 tstr
= "long double";
3023 pstrcat(buf
, buf_size
, tstr
);
3030 pstrcat(buf
, buf_size
, tstr
);
3031 v
= type
->ref
->v
& ~SYM_STRUCT
;
3032 if (v
>= SYM_FIRST_ANOM
)
3033 pstrcat(buf
, buf_size
, "<anonymous>");
3035 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3040 if (varstr
&& '*' == *varstr
) {
3041 pstrcat(buf1
, sizeof(buf1
), "(");
3042 pstrcat(buf1
, sizeof(buf1
), varstr
);
3043 pstrcat(buf1
, sizeof(buf1
), ")");
3045 pstrcat(buf1
, buf_size
, "(");
3047 while (sa
!= NULL
) {
3049 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3050 pstrcat(buf1
, sizeof(buf1
), buf2
);
3053 pstrcat(buf1
, sizeof(buf1
), ", ");
3055 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3056 pstrcat(buf1
, sizeof(buf1
), ", ...");
3057 pstrcat(buf1
, sizeof(buf1
), ")");
3058 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3063 if (varstr
&& '*' == *varstr
)
3064 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3066 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3067 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3070 pstrcpy(buf1
, sizeof(buf1
), "*");
3071 if (t
& VT_CONSTANT
)
3072 pstrcat(buf1
, buf_size
, "const ");
3073 if (t
& VT_VOLATILE
)
3074 pstrcat(buf1
, buf_size
, "volatile ");
3076 pstrcat(buf1
, sizeof(buf1
), varstr
);
3077 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3081 pstrcat(buf
, buf_size
, " ");
3082 pstrcat(buf
, buf_size
, varstr
);
3087 /* verify type compatibility to store vtop in 'dt' type, and generate
3089 static void gen_assign_cast(CType
*dt
)
3091 CType
*st
, *type1
, *type2
;
3092 char buf1
[256], buf2
[256];
3093 int dbt
, sbt
, qualwarn
, lvl
;
3095 st
= &vtop
->type
; /* source type */
3096 dbt
= dt
->t
& VT_BTYPE
;
3097 sbt
= st
->t
& VT_BTYPE
;
3098 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3099 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3100 ; /* It is Ok if both are void */
3102 tcc_error("cannot cast from/to void");
3104 if (dt
->t
& VT_CONSTANT
)
3105 tcc_warning("assignment of read-only location");
3108 /* special cases for pointers */
3109 /* '0' can also be a pointer */
3110 if (is_null_pointer(vtop
))
3112 /* accept implicit pointer to integer cast with warning */
3113 if (is_integer_btype(sbt
)) {
3114 tcc_warning("assignment makes pointer from integer without a cast");
3117 type1
= pointed_type(dt
);
3119 type2
= pointed_type(st
);
3120 else if (sbt
== VT_FUNC
)
3121 type2
= st
; /* a function is implicitly a function pointer */
3124 if (is_compatible_types(type1
, type2
))
3126 for (qualwarn
= lvl
= 0;; ++lvl
) {
3127 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3128 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3130 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3131 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3132 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3134 type1
= pointed_type(type1
);
3135 type2
= pointed_type(type2
);
3137 if (!is_compatible_unqualified_types(type1
, type2
)) {
3138 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3139 /* void * can match anything */
3140 } else if (dbt
== sbt
3141 && is_integer_btype(sbt
& VT_BTYPE
)
3142 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3143 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3144 /* Like GCC don't warn by default for merely changes
3145 in pointer target signedness. Do warn for different
3146 base types, though, in particular for unsigned enums
3147 and signed int targets. */
3149 tcc_warning("assignment from incompatible pointer type");
3154 tcc_warning("assignment discards qualifiers from pointer target type");
3160 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3161 tcc_warning("assignment makes integer from pointer without a cast");
3162 } else if (sbt
== VT_STRUCT
) {
3163 goto case_VT_STRUCT
;
3165 /* XXX: more tests */
3169 if (!is_compatible_unqualified_types(dt
, st
)) {
3171 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3172 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3173 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3180 /* store vtop in lvalue pushed on stack */
3181 ST_FUNC
void vstore(void)
3183 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3185 ft
= vtop
[-1].type
.t
;
3186 sbt
= vtop
->type
.t
& VT_BTYPE
;
3187 dbt
= ft
& VT_BTYPE
;
3188 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3189 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3190 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3191 /* optimize char/short casts */
3192 delayed_cast
= VT_MUSTCAST
;
3193 vtop
->type
.t
= ft
& VT_TYPE
;
3194 /* XXX: factorize */
3195 if (ft
& VT_CONSTANT
)
3196 tcc_warning("assignment of read-only location");
3199 if (!(ft
& VT_BITFIELD
))
3200 gen_assign_cast(&vtop
[-1].type
);
3203 if (sbt
== VT_STRUCT
) {
3204 /* if structure, only generate pointer */
3205 /* structure assignment : generate memcpy */
3206 /* XXX: optimize if small size */
3207 size
= type_size(&vtop
->type
, &align
);
3211 vtop
->type
.t
= VT_PTR
;
3214 /* address of memcpy() */
3217 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3218 else if(!(align
& 3))
3219 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3222 /* Use memmove, rather than memcpy, as dest and src may be same: */
3223 vpush_global_sym(&func_old_type
, TOK_memmove
);
3228 vtop
->type
.t
= VT_PTR
;
3234 /* leave source on stack */
3235 } else if (ft
& VT_BITFIELD
) {
3236 /* bitfield store handling */
3238 /* save lvalue as expression result (example: s.b = s.a = n;) */
3239 vdup(), vtop
[-1] = vtop
[-2];
3241 bit_pos
= BIT_POS(ft
);
3242 bit_size
= BIT_SIZE(ft
);
3243 /* remove bit field info to avoid loops */
3244 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3246 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3247 gen_cast(&vtop
[-1].type
);
3248 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3251 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3252 if (r
== VT_STRUCT
) {
3253 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3254 store_packed_bf(bit_pos
, bit_size
);
3256 unsigned long long mask
= (1ULL << bit_size
) - 1;
3257 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3259 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3262 vpushi((unsigned)mask
);
3269 /* duplicate destination */
3272 /* load destination, mask and or with source */
3273 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3274 vpushll(~(mask
<< bit_pos
));
3276 vpushi(~((unsigned)mask
<< bit_pos
));
3281 /* ... and discard */
3284 } else if (dbt
== VT_VOID
) {
3287 #ifdef CONFIG_TCC_BCHECK
3288 /* bound check case */
3289 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3298 #ifdef TCC_TARGET_X86_64
3299 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3301 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3306 r
= gv(rc
); /* generate value */
3307 /* if lvalue was saved on stack, must read it */
3308 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3310 t
= get_reg(RC_INT
);
3316 sv
.r
= VT_LOCAL
| VT_LVAL
;
3317 sv
.c
.i
= vtop
[-1].c
.i
;
3319 vtop
[-1].r
= t
| VT_LVAL
;
3321 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3323 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3324 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3326 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3327 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3329 vtop
[-1].type
.t
= load_type
;
3332 /* convert to int to increment easily */
3333 vtop
->type
.t
= addr_type
;
3339 vtop
[-1].type
.t
= load_type
;
3340 /* XXX: it works because r2 is spilled last ! */
3341 store(vtop
->r2
, vtop
- 1);
3347 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3348 vtop
->r
|= delayed_cast
;
3352 /* post defines POST/PRE add. c is the token ++ or -- */
/* NOTE(review): lossy extract — intermediate lines are elided; only the
   visible skeleton (dup lvalue, dup value, push +/-1, store, pop) is shown. */
3353 ST_FUNC
void inc(int post
, int c
)
3356 vdup(); /* save lvalue */
3358 gv_dup(); /* duplicate value */
3363 vpushi(c
- TOK_MID
);
3365 vstore(); /* store value */
3367 vpop(); /* if post op, return saved value */
/* Concatenate adjacent string-literal tokens into *astr and NUL-terminate.
   NOTE(review): lossy extract — the error path using 'msg' is elided here. */
3370 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3372 /* read the string */
3376 while (tok
== TOK_STR
) {
3377 /* XXX: add \0 handling too ? */
3378 cstr_cat(astr
, tokc
.str
.data
, -1);
3381 cstr_ccat(astr
, '\0');
3384 /* If I is >= 1 and a power of two, returns log2(i)+1.
3385 If I is 0 returns 0. */
/* NOTE(review): lossy extract — only the first reduction step (by 8 bits)
   is visible; the remaining steps and return are elided. */
3386 static int exact_log2p1(int i
)
3391 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3402 /* Parse __attribute__((...)) GNUC extension. */
/* NOTE(review): lossy extract — the switch head, many case labels and the
   surrounding braces are elided; each fragment below handles one attribute. */
3403 static void parse_attribute(AttributeDef
*ad
)
3409 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3414 while (tok
!= ')') {
3415 if (tok
< TOK_IDENT
)
3416 expect("attribute name");
/* cleanup(fn): resolve the cleanup function, declaring it implicitly
   (old-style prototype) if unknown */
3428 tcc_warning("implicit declaration of function '%s'",
3429 get_tok_str(tok
, &tokc
));
3430 s
= external_global_sym(tok
, &func_old_type
, 0);
3432 ad
->cleanup_func
= s
;
/* section("name") */
3440 parse_mult_str(&astr
, "section name");
3441 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
/* alias("target") */
3448 parse_mult_str(&astr
, "alias(\"target\")");
3449 ad
->alias_target
= /* save string as token, for later */
3450 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3454 case TOK_VISIBILITY1
:
3455 case TOK_VISIBILITY2
:
3457 parse_mult_str(&astr
,
3458 "visibility(\"default|hidden|internal|protected\")");
3459 if (!strcmp (astr
.data
, "default"))
3460 ad
->a
.visibility
= STV_DEFAULT
;
3461 else if (!strcmp (astr
.data
, "hidden"))
3462 ad
->a
.visibility
= STV_HIDDEN
;
3463 else if (!strcmp (astr
.data
, "internal"))
3464 ad
->a
.visibility
= STV_INTERNAL
;
3465 else if (!strcmp (astr
.data
, "protected"))
3466 ad
->a
.visibility
= STV_PROTECTED
;
3468 expect("visibility(\"default|hidden|internal|protected\")");
/* aligned(n): n must be a positive power of two; stored as log2(n)+1 */
3477 if (n
<= 0 || (n
& (n
- 1)) != 0)
3478 tcc_error("alignment must be a positive power of two");
3483 ad
->a
.aligned
= exact_log2p1(n
);
3484 if (n
!= 1 << (ad
->a
.aligned
- 1))
3485 tcc_error("alignment of %d is larger than implemented", n
);
3497 /* currently, no need to handle it because tcc does not
3498 track unused objects */
3502 /* currently, no need to handle it because tcc does not
3503 track unused objects */
/* calling-convention attributes */
3508 ad
->f
.func_call
= FUNC_CDECL
;
3513 ad
->f
.func_call
= FUNC_STDCALL
;
3515 #ifdef TCC_TARGET_I386
3525 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3531 ad
->f
.func_call
= FUNC_FASTCALLW
;
/* mode(DI/QI/HI/SI...): stored as basic type + 1 so 0 means "unset" */
3538 ad
->attr_mode
= VT_LLONG
+ 1;
3541 ad
->attr_mode
= VT_BYTE
+ 1;
3544 ad
->attr_mode
= VT_SHORT
+ 1;
3548 ad
->attr_mode
= VT_INT
+ 1;
3551 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3558 ad
->a
.dllexport
= 1;
3560 case TOK_NODECORATE
:
3561 ad
->a
.nodecorate
= 1;
3564 ad
->a
.dllimport
= 1;
/* unknown attribute: warn (if enabled) and skip a balanced (...) group */
3567 if (tcc_state
->warn_unsupported
)
3568 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3569 /* skip parameters */
3571 int parenthesis
= 0;
3575 else if (tok
== ')')
3578 } while (parenthesis
&& tok
!= -1);
/* Look up field 'v' in struct/union 'type', recursing into anonymous
   struct/union members and accumulating their offsets into *cumofs.
   NOTE(review): lossy extract — the direct-match path and returns are elided;
   only the recursion into anonymous members is visible. */
3591 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3595 while ((s
= s
->next
) != NULL
) {
3596 if ((s
->v
& SYM_FIELD
) &&
3597 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3598 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3599 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
/* Compute field offsets, size and alignment of a struct/union, honoring
   PCC(GCC)-style vs MS-style bit-field layout, #pragma pack and
   attribute((packed|aligned)). A second pass re-types bit-fields that
   cannot be accessed through their declared type.
   NOTE(review): lossy extract — initialization of c/offset/maxalign, several
   branches and all closing braces are elided between the fragments below. */
3611 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3613 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3614 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3615 int pcc
= !tcc_state
->ms_bitfields
;
3616 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3623 prevbt
= VT_STRUCT
; /* make it never match */
/* first pass: place each field f of the struct/union */
3628 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3629 if (f
->type
.t
& VT_BITFIELD
)
3630 bit_size
= BIT_SIZE(f
->type
.t
);
3633 size
= type_size(&f
->type
, &align
);
3634 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3637 if (pcc
&& bit_size
== 0) {
3638 /* in pcc mode, packing does not affect zero-width bitfields */
3641 /* in pcc mode, attribute packed overrides if set. */
3642 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3645 /* pragma pack overrides align if lesser and packs bitfields always */
3648 if (pragma_pack
< align
)
3649 align
= pragma_pack
;
3650 /* in pcc mode pragma pack also overrides individual align */
3651 if (pcc
&& pragma_pack
< a
)
3655 /* some individual align was specified */
3659 if (type
->ref
->type
.t
== VT_UNION
) {
3660 if (pcc
&& bit_size
>= 0)
3661 size
= (bit_size
+ 7) >> 3;
3666 } else if (bit_size
< 0) {
/* plain (non-bit-field) member: close any pending bit run, align */
3668 c
+= (bit_pos
+ 7) >> 3;
3669 c
= (c
+ align
- 1) & -align
;
3678 /* A bit-field. Layout is more complicated. There are two
3679 options: PCC (GCC) compatible and MS compatible */
3681 /* In PCC layout a bit-field is placed adjacent to the
3682 preceding bit-fields, except if:
3684 - an individual alignment was given
3685 - it would overflow its base type container and
3686 there is no packing */
3687 if (bit_size
== 0) {
3689 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3691 } else if (f
->a
.aligned
) {
3693 } else if (!packed
) {
3695 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3696 if (ofs
> size
/ align
)
3700 /* in pcc mode, long long bitfields have type int if they fit */
3701 if (size
== 8 && bit_size
<= 32)
3702 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3704 while (bit_pos
>= align
* 8)
3705 c
+= align
, bit_pos
-= align
* 8;
3708 /* In PCC layout named bit-fields influence the alignment
3709 of the containing struct using the base types alignment,
3710 except for packed fields (which here have correct align). */
3711 if (f
->v
& SYM_FIRST_ANOM
3712 // && bit_size // ??? gcc on ARM/rpi does that
3717 bt
= f
->type
.t
& VT_BTYPE
;
3718 if ((bit_pos
+ bit_size
> size
* 8)
3719 || (bit_size
> 0) == (bt
!= prevbt
)
3721 c
= (c
+ align
- 1) & -align
;
3724 /* In MS bitfield mode a bit-field run always uses
3725 at least as many bits as the underlying type.
3726 To start a new run it's also required that this
3727 or the last bit-field had non-zero width. */
3728 if (bit_size
|| prev_bit_size
)
3731 /* In MS layout the records alignment is normally
3732 influenced by the field, except for a zero-width
3733 field at the start of a run (but by further zero-width
3734 fields it is again). */
3735 if (bit_size
== 0 && prevbt
!= bt
)
3738 prev_bit_size
= bit_size
;
/* record the (possibly adjusted) bit position in the field's type word */
3741 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3742 | (bit_pos
<< VT_STRUCT_SHIFT
);
3743 bit_pos
+= bit_size
;
3745 if (align
> maxalign
)
3749 printf("set field %s offset %-2d size %-2d align %-2d",
3750 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3751 if (f
->type
.t
& VT_BITFIELD
) {
3752 printf(" pos %-2d bits %-2d",
3765 c
+= (bit_pos
+ 7) >> 3;
3767 /* store size and alignment */
3768 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3772 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3773 /* can happen if individual align for some member was given. In
3774 this case MSVC ignores maxalign when aligning the size */
3779 c
= (c
+ a
- 1) & -a
;
3783 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3786 /* check whether we can access bitfields by their type */
3787 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3791 if (0 == (f
->type
.t
& VT_BITFIELD
))
3795 bit_size
= BIT_SIZE(f
->type
.t
);
3798 bit_pos
= BIT_POS(f
->type
.t
);
3799 size
= type_size(&f
->type
, &align
);
3800 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3803 /* try to access the field using a different type */
3804 c0
= -1, s
= align
= 1;
3806 px
= f
->c
* 8 + bit_pos
;
3807 cx
= (px
>> 3) & -align
;
3808 px
= px
- (cx
<< 3);
3811 s
= (px
+ bit_size
+ 7) >> 3;
3821 s
= type_size(&t
, &align
);
3825 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3826 /* update offset and bit position */
3829 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3830 | (bit_pos
<< VT_STRUCT_SHIFT
);
3834 printf("FIX field %s offset %-2d size %-2d align %-2d "
3835 "pos %-2d bits %-2d\n",
3836 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3837 cx
, s
, align
, px
, bit_size
);
3840 /* fall back to load/store single-byte wise */
3841 f
->auxtype
= VT_STRUCT
;
3843 printf("FIX field %s : load byte-wise\n",
3844 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3850 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
/* NOTE(review): lossy extract — the tag-lookup, loop heads, many branches
   and all closing braces are elided between the numbered fragments below. */
3851 static void struct_decl(CType
*type
, int u
)
3853 int v
, c
, size
, align
, flexible
;
3854 int bit_size
, bsize
, bt
;
3856 AttributeDef ad
, ad1
;
3859 memset(&ad
, 0, sizeof ad
);
3861 parse_attribute(&ad
);
3865 /* struct already defined ? return it */
3867 expect("struct/union/enum name");
3869 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3872 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3874 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3879 /* Record the original enum/struct/union token. */
3880 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3882 /* we put an undefined size for struct/union */
3883 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3884 s
->r
= 0; /* default alignment is zero as gcc */
3886 type
->t
= s
->type
.t
;
3892 tcc_error("struct/union/enum already defined");
3894 /* cannot be empty */
3895 /* non empty enums are not allowed */
/* ll = current enumerator value; pl/nl track max positive / min negative
   values seen, to pick the enum's integral type afterwards */
3898 long long ll
= 0, pl
= 0, nl
= 0;
3901 /* enum symbols have static storage */
3902 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3906 expect("identifier");
3908 if (ss
&& !local_stack
)
3909 tcc_error("redefinition of enumerator '%s'",
3910 get_tok_str(v
, NULL
));
3914 ll
= expr_const64();
3916 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3918 *ps
= ss
, ps
= &ss
->next
;
3927 /* NOTE: we accept a trailing comma */
3932 /* set integral type of the enum */
3935 if (pl
!= (unsigned)pl
)
3936 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3938 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
3939 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3940 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
3942 /* set type for enum members */
3943 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
3945 if (ll
== (int)ll
) /* default is int if it fits */
3947 if (t
.t
& VT_UNSIGNED
) {
3948 ss
->type
.t
|= VT_UNSIGNED
;
3949 if (ll
== (unsigned)ll
)
3952 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
3953 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
/* struct/union member list parsing */
3958 while (tok
!= '}') {
3959 if (!parse_btype(&btype
, &ad1
)) {
3965 tcc_error("flexible array member '%s' not at the end of struct",
3966 get_tok_str(v
, NULL
));
3972 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
3974 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3975 expect("identifier");
/* anonymous struct/union member: only allowed with -fms-extensions
   unless it is itself anonymous */
3977 int v
= btype
.ref
->v
;
3978 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3979 if (tcc_state
->ms_extensions
== 0)
3980 expect("identifier");
3984 if (type_size(&type1
, &align
) < 0) {
3985 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3988 tcc_error("field '%s' has incomplete type",
3989 get_tok_str(v
, NULL
));
3991 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3992 (type1
.t
& VT_BTYPE
) == VT_VOID
||
3993 (type1
.t
& VT_STORAGE
))
3994 tcc_error("invalid type for '%s'",
3995 get_tok_str(v
, NULL
));
/* optional ':' bit-field width */
3999 bit_size
= expr_const();
4000 /* XXX: handle v = 0 case for messages */
4002 tcc_error("negative width in bit-field '%s'",
4003 get_tok_str(v
, NULL
));
4004 if (v
&& bit_size
== 0)
4005 tcc_error("zero width for bit-field '%s'",
4006 get_tok_str(v
, NULL
));
4007 parse_attribute(&ad1
);
4009 size
= type_size(&type1
, &align
);
4010 if (bit_size
>= 0) {
4011 bt
= type1
.t
& VT_BTYPE
;
4017 tcc_error("bitfields must have scalar type");
4019 if (bit_size
> bsize
) {
4020 tcc_error("width of '%s' exceeds its type",
4021 get_tok_str(v
, NULL
));
4022 } else if (bit_size
== bsize
4023 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4024 /* no need for bit fields */
4026 } else if (bit_size
== 64) {
4027 tcc_error("field width 64 not implemented");
4029 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4031 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4034 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4035 /* Remember we've seen a real field to check
4036 for placement of flexible array member. */
4039 /* If member is a struct or bit-field, enforce
4040 placing into the struct (as anonymous). */
4042 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4047 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4052 if (tok
== ';' || tok
== TOK_EOF
)
/* trailing attributes, then compute the final layout */
4059 parse_attribute(&ad
);
4060 struct_layout(type
, &ad
);
/* Merge a symbol's stored symbol- and function-attributes into 'ad'.
   NOTE(review): lossy extract — the surrounding braces are elided. */
4065 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4067 merge_symattr(&ad
->a
, &s
->a
);
4068 merge_funcattr(&ad
->f
, &s
->f
);
4071 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4072 are added to the element type, copied because it could be a typedef. */
/* NOTE(review): lossy extract — braces are elided. Each array level's ref
   is re-pushed (copied) so a shared typedef'd element type is not mutated. */
4073 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4075 while (type
->t
& VT_ARRAY
) {
4076 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4077 type
= &type
->ref
->type
;
4079 type
->t
|= qualifiers
;
4082 /* return 0 if no type declaration. otherwise, return the basic type
/* NOTE(review): lossy extract — the main token switch, most case labels and
   all closing braces are elided; fragments below each handle one specifier. */
4085 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4087 int t
, u
, bt
, st
, type_found
, typespec_found
, g
;
4091 memset(ad
, 0, sizeof(AttributeDef
));
4101 /* currently, we really ignore extension */
/* short/long combination checks: st tracks short/long, bt the basic type */
4111 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4112 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4113 tmbt
: tcc_error("too many basic types");
4116 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4121 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
/* _Alignas(type-or-expr) */
4138 memset(&ad1
, 0, sizeof(AttributeDef
));
4139 if (parse_btype(&type1
, &ad1
)) {
4140 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4142 n
= 1 << (ad1
.a
.aligned
- 1);
4144 type_size(&type1
, &n
);
4147 if (n
<= 0 || (n
& (n
- 1)) != 0)
4148 tcc_error("alignment must be a positive power of two");
4151 ad
->a
.aligned
= exact_log2p1(n
);
/* 'long double' / 'long long' promotion when a second 'long'/'double' seen */
4155 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4156 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4157 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4158 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4165 #ifdef TCC_TARGET_ARM64
4167 /* GCC's __uint128_t appears in some Linux header files. Make it a
4168 synonym for long double to get the size and alignment right. */
4179 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4180 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
/* enum / struct / union specifiers */
4188 struct_decl(&type1
, VT_ENUM
);
4191 type
->ref
= type1
.ref
;
4194 struct_decl(&type1
, VT_STRUCT
);
4197 struct_decl(&type1
, VT_UNION
);
4200 /* type modifiers */
4205 parse_btype_qualify(type
, VT_CONSTANT
);
4213 parse_btype_qualify(type
, VT_VOLATILE
);
4220 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4221 tcc_error("signed and unsigned modifier");
4234 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4235 tcc_error("signed and unsigned modifier");
4236 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
/* storage classes: only one of extern/static/typedef allowed */
4252 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4253 tcc_error("multiple storage classes");
4264 /* currently, no need to handle it because tcc does not
4265 track unused objects */
4268 /* GNUC attribute */
4269 case TOK_ATTRIBUTE1
:
4270 case TOK_ATTRIBUTE2
:
4271 parse_attribute(ad
);
4272 if (ad
->attr_mode
) {
4273 u
= ad
->attr_mode
-1;
4274 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
/* typeof(expr) */
4282 parse_expr_type(&type1
);
4283 /* remove all storage modifiers except typedef */
4284 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4286 sym_to_attr(ad
, type1
.ref
);
/* typedef name lookup */
4292 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4294 t
&= ~(VT_BTYPE
|VT_LONG
);
4295 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4296 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4297 type
->ref
= s
->type
.ref
;
4299 parse_btype_qualify(type
, t
);
4301 /* get attributes from typedef */
4311 if (tcc_state
->char_is_unsigned
) {
4312 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4315 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4316 bt
= t
& (VT_BTYPE
|VT_LONG
);
4318 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4319 #ifdef TCC_TARGET_PE
4320 if (bt
== VT_LDOUBLE
)
4321 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4327 /* convert a function parameter type (array to pointer and function to
4328 function pointer) */
/* NOTE(review): lossy extract — the array-to-pointer statement and the
   function-pointer body/braces are elided. */
4329 static inline void convert_parameter_type(CType
*pt
)
4331 /* remove const and volatile qualifiers (XXX: const could be used
4332 to indicate a const function parameter */
4333 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4334 /* array must be transformed to pointer according to ANSI C */
4336 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
/* Parse an asm string: delegates to the multi-string-literal reader. */
4341 ST_FUNC
void parse_asm_str(CString
*astr
)
4344 parse_mult_str(astr
, "string constant");
4347 /* Parse an asm label and return the token */
/* NOTE(review): lossy extract — braces, PARSE_FLAGS handling and the
   return/cleanup are elided; the alias string becomes token 'v'. */
4348 static int asm_label_instr(void)
4354 parse_asm_str(&astr
);
4357 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4359 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
/* Parse the '(params)' / '[size]' suffixes of a declarator and rewrite
   'type' into a function or array/VLA type accordingly.
   NOTE(review): lossy extract — the '(' dispatch, loop heads and closing
   braces are elided between the fragments below. */
4364 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4366 int n
, l
, t1
, arg_size
, align
;
4367 Sym
**plast
, *s
, *first
;
4372 /* function type, or recursive declarator (return if so) */
4374 if (td
&& !(td
& TYPE_ABSTRACT
))
4378 else if (parse_btype(&pt
, &ad1
))
4381 merge_attr (ad
, &ad1
);
4390 /* read param name and compute offset */
4391 if (l
!= FUNC_OLD
) {
4392 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4394 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4395 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4396 tcc_error("parameter declared as void");
4400 expect("identifier");
4401 pt
.t
= VT_VOID
; /* invalid type */
4404 convert_parameter_type(&pt
);
4405 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4406 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4412 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4417 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4418 tcc_error("invalid type");
4421 /* if no parameters, then old type prototype */
4424 /* NOTE: const is ignored in returned type as it has a special
4425 meaning in gcc / C++ */
4426 type
->t
&= ~VT_CONSTANT
;
4427 /* some ancient pre-K&R C allows a function to return an array
4428 and the array brackets to be put after the arguments, such
4429 that "int c()[]" means something like "int[] c()" */
4432 skip(']'); /* only handle simple "[]" */
4435 /* we push a anonymous symbol which will contain the function prototype */
4436 ad
->f
.func_args
= arg_size
;
4437 ad
->f
.func_type
= l
;
4438 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4444 } else if (tok
== '[') {
4445 int saved_nocode_wanted
= nocode_wanted
;
4446 /* array definition */
4449 /* XXX The optional type-quals and static should only be accepted
4450 in parameter decls. The '*' as well, and then even only
4451 in prototypes (not function defs). */
4453 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
/* constant size when at file scope or static; else possibly a VLA */
4468 if (!local_stack
|| (storage
& VT_STATIC
))
4469 vpushi(expr_const());
4471 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4472 length must always be evaluated, even under nocode_wanted,
4473 so that its size slot is initialized (e.g. under sizeof
4478 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4481 tcc_error("invalid array size");
4483 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4484 tcc_error("size of variable length array should be an integer");
4490 /* parse next post type */
4491 post_type(type
, ad
, storage
, 0);
4492 if (type
->t
== VT_FUNC
)
4493 tcc_error("declaration of an array of functions");
4494 t1
|= type
->t
& VT_VLA
;
4498 tcc_error("need explicit inner array size in VLAs");
4499 loc
-= type_size(&int_type
, &align
);
4503 vla_runtime_type_size(type
, &align
);
4505 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4511 nocode_wanted
= saved_nocode_wanted
;
4513 /* we push an anonymous symbol which will contain the array
4515 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4516 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4522 /* Parse a type declarator (except basic type), and return the type
4523 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4524 expected. 'type' should contain the basic type. 'ad' is the
4525 attribute definition of the basic type. It can be modified by
4526 type_decl(). If this (possibly abstract) declarator is a pointer chain
4527 it returns the innermost pointed to type (equals *type, but is a different
4528 pointer), otherwise returns type itself, that's used for recursive calls. */
/* NOTE(review): lossy extract — the pointer-derivation call, the '('
   dispatch and closing braces are elided between the fragments below. */
4529 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4532 int qualifiers
, storage
;
4534 /* recursive type, remove storage bits first, apply them later again */
4535 storage
= type
->t
& VT_STORAGE
;
4536 type
->t
&= ~VT_STORAGE
;
4539 while (tok
== '*') {
4547 qualifiers
|= VT_CONSTANT
;
4552 qualifiers
|= VT_VOLATILE
;
4558 /* XXX: clarify attribute handling */
4559 case TOK_ATTRIBUTE1
:
4560 case TOK_ATTRIBUTE2
:
4561 parse_attribute(ad
);
4565 type
->t
|= qualifiers
;
4567 /* innermost pointed to type is the one for the first derivation */
4568 ret
= pointed_type(type
);
4572 /* This is possibly a parameter type list for abstract declarators
4573 ('int ()'), use post_type for testing this. */
4574 if (!post_type(type
, ad
, 0, td
)) {
4575 /* It's not, so it's a nested declarator, and the post operations
4576 apply to the innermost pointed to type (if any). */
4577 /* XXX: this is not correct to modify 'ad' at this point, but
4578 the syntax is not clear */
4579 parse_attribute(ad
);
4580 post
= type_decl(type
, ad
, v
, td
);
4584 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4585 /* type identifier */
4590 if (!(td
& TYPE_ABSTRACT
))
4591 expect("identifier");
4594 post_type(post
, ad
, storage
, 0);
4595 parse_attribute(ad
);
4600 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
/* NOTE(review): lossy extract — the bt/r assignments and return are elided;
   only the width and signedness selection is visible. */
4601 ST_FUNC
int lvalue_type(int t
)
4606 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4608 else if (bt
== VT_SHORT
)
4612 if (t
& VT_UNSIGNED
)
4613 r
|= VT_LVAL_UNSIGNED
;
4617 /* indirection with full error checking and bound check */
/* NOTE(review): lossy extract — the error call for non-pointer operands and
   some braces are elided between the fragments below. */
4618 ST_FUNC
void indir(void)
4620 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4621 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4625 if (vtop
->r
& VT_LVAL
)
4627 vtop
->type
= *pointed_type(&vtop
->type
);
4628 /* Arrays and functions are never lvalues */
4629 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4630 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4631 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4632 /* if bound checking, the referenced pointer must be checked */
4633 #ifdef CONFIG_TCC_BCHECK
4634 if (tcc_state
->do_bounds_check
)
4635 vtop
->r
|= VT_MUSTBOUND
;
4640 /* pass a parameter to a function and do type checking and casting */
/* NOTE(review): lossy extract — braces and the prototyped-argument path
   leading into the final gen_assign_cast are partially elided. */
4641 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4646 func_type
= func
->f
.func_type
;
4647 if (func_type
== FUNC_OLD
||
4648 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4649 /* default casting : only need to convert float to double */
4650 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4651 gen_cast_s(VT_DOUBLE
);
4652 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4653 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4654 type
.ref
= vtop
->type
.ref
;
4657 } else if (arg
== NULL
) {
4658 tcc_error("too many arguments to function");
4661 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4662 gen_assign_cast(&type
);
4666 /* parse an expression and return its type without any side effect. */
/* NOTE(review): lossy extract — the entire body (nocode_wanted bracketing
   around expr_fn) is elided; only the signature is visible. */
4667 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4676 /* parse an expression of the form '(type)' or '(expr)' and return its
/* NOTE(review): lossy extract — skip('(' ... ')') and braces are elided. */
4678 static void parse_expr_type(CType
*type
)
4684 if (parse_btype(type
, &ad
)) {
4685 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4687 expr_type(type
, gexpr
);
/* Parse a type-name (basic type + abstract declarator) into 'type'.
   NOTE(review): lossy extract — the error path when no type is found and
   the braces are elided. */
4692 static void parse_type(CType
*type
)
4697 if (!parse_btype(type
, &ad
)) {
4700 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
/* Parse builtin-call arguments per the format string 'args':
   'e' = expression, 't' = type-name (pushed). 'nc' presumably gates
   code generation — TODO confirm against elided body.
   NOTE(review): lossy extract — separator skipping and braces are elided. */
4703 static void parse_builtin_params(int nc
, const char *args
)
4710 while ((c
= *args
++)) {
4714 case 'e': expr_eq(); continue;
4715 case 't': parse_type(&t
); vpush(&t
); continue;
4716 default: tcc_error("internal error"); break;
/* Emit calls to the registered __attribute__((cleanup)) functions from
   current_cleanups down to (not including) 'stop'. For each entry:
   fs = the cleanup function symbol, vs = the guarded variable.
   NOTE(review): lossy extract — the gaddrof/gfunc_call sequence and braces
   are elided between the fragments below. */
4724 static void try_call_scope_cleanup(Sym
*stop
)
4726 Sym
*cls
= current_cleanups
;
4728 for (; cls
!= stop
; cls
= cls
->ncl
) {
4729 Sym
*fs
= cls
->next
;
4730 Sym
*vs
= cls
->prev_tok
;
4732 vpushsym(&fs
->type
, fs
);
4733 vset(&vs
->type
, vs
->r
, vs
->c
);
4735 mk_pointer(&vtop
->type
);
/* On a goto, run cleanups for all scopes left behind: walk both cleanup
   chains (the goto target's saved state and the current one) to their
   nearest common ancestor, then call cleanups down to it.
   NOTE(review): lossy extract — variable declarations and braces are elided. */
4741 static void try_call_cleanup_goto(Sym
*cleanupstate
)
4746 if (!current_cleanups
)
4749 /* search NCA of both cleanup chains given parents and initial depth */
4750 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
4751 for (ccd
= ncleanups
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
4753 for (cc
= current_cleanups
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
4755 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
4758 try_call_scope_cleanup(cc
);
4761 ST_FUNC
void unary(void)
4763 int n
, t
, align
, size
, r
, sizeof_caller
;
4768 sizeof_caller
= in_sizeof
;
4771 /* XXX: GCC 2.95.3 does not generate a table although it should be
4779 #ifdef TCC_TARGET_PE
4780 t
= VT_SHORT
|VT_UNSIGNED
;
4788 vsetc(&type
, VT_CONST
, &tokc
);
4792 t
= VT_INT
| VT_UNSIGNED
;
4798 t
= VT_LLONG
| VT_UNSIGNED
;
4810 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4813 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4815 case TOK___FUNCTION__
:
4817 goto tok_identifier
;
4823 /* special function name identifier */
4824 len
= strlen(funcname
) + 1;
4825 /* generate char[len] type */
4830 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4831 if (!NODATA_WANTED
) {
4832 ptr
= section_ptr_add(data_section
, len
);
4833 memcpy(ptr
, funcname
, len
);
4839 #ifdef TCC_TARGET_PE
4840 t
= VT_SHORT
| VT_UNSIGNED
;
4846 /* string parsing */
4848 if (tcc_state
->char_is_unsigned
)
4849 t
= VT_BYTE
| VT_UNSIGNED
;
4851 if (tcc_state
->warn_write_strings
)
4856 memset(&ad
, 0, sizeof(AttributeDef
));
4857 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4862 if (parse_btype(&type
, &ad
)) {
4863 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4865 /* check ISOC99 compound literal */
4867 /* data is allocated locally by default */
4872 /* all except arrays are lvalues */
4873 if (!(type
.t
& VT_ARRAY
))
4874 r
|= lvalue_type(type
.t
);
4875 memset(&ad
, 0, sizeof(AttributeDef
));
4876 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4878 if (sizeof_caller
) {
4885 } else if (tok
== '{') {
4886 int saved_nocode_wanted
= nocode_wanted
;
4888 tcc_error("expected constant");
4889 /* save all registers */
4891 /* statement expression : we do not accept break/continue
4892 inside as GCC does. We do retain the nocode_wanted state,
4893 as statement expressions can't ever be entered from the
4894 outside, so any reactivation of code emission (from labels
4895 or loop heads) can be disabled again after the end of it. */
4896 block(NULL
, NULL
, 1);
4897 nocode_wanted
= saved_nocode_wanted
;
4912 /* functions names must be treated as function pointers,
4913 except for unary '&' and sizeof. Since we consider that
4914 functions are not lvalues, we only have to handle it
4915 there and in function calls. */
4916 /* arrays can also be used although they are not lvalues */
4917 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4918 !(vtop
->type
.t
& VT_ARRAY
))
4920 mk_pointer(&vtop
->type
);
4926 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4927 gen_cast_s(VT_BOOL
);
4928 vtop
->c
.i
= !vtop
->c
.i
;
4929 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4933 vseti(VT_JMP
, gvtst(1, 0));
4945 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4946 tcc_error("pointer not accepted for unary plus");
4947 /* In order to force cast, we add zero, except for floating point
4948 where we really need an noop (otherwise -0.0 will be transformed
4950 if (!is_float(vtop
->type
.t
)) {
4962 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
4963 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
4964 size
= type_size(&type
, &align
);
4965 if (s
&& s
->a
.aligned
)
4966 align
= 1 << (s
->a
.aligned
- 1);
4967 if (t
== TOK_SIZEOF
) {
4968 if (!(type
.t
& VT_VLA
)) {
4970 tcc_error("sizeof applied to an incomplete type");
4973 vla_runtime_type_size(&type
, &align
);
4978 vtop
->type
.t
|= VT_UNSIGNED
;
4981 case TOK_builtin_expect
:
4982 /* __builtin_expect is a no-op for now */
4983 parse_builtin_params(0, "ee");
4986 case TOK_builtin_types_compatible_p
:
4987 parse_builtin_params(0, "tt");
4988 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4989 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4990 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4994 case TOK_builtin_choose_expr
:
5021 case TOK_builtin_constant_p
:
5022 parse_builtin_params(1, "e");
5023 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5027 case TOK_builtin_frame_address
:
5028 case TOK_builtin_return_address
:
5034 if (tok
!= TOK_CINT
) {
5035 tcc_error("%s only takes positive integers",
5036 tok1
== TOK_builtin_return_address
?
5037 "__builtin_return_address" :
5038 "__builtin_frame_address");
5040 level
= (uint32_t)tokc
.i
;
5045 vset(&type
, VT_LOCAL
, 0); /* local frame */
5047 mk_pointer(&vtop
->type
);
5048 indir(); /* -> parent frame */
5050 if (tok1
== TOK_builtin_return_address
) {
5051 // assume return address is just above frame pointer on stack
5054 mk_pointer(&vtop
->type
);
5059 #ifdef TCC_TARGET_X86_64
5060 #ifdef TCC_TARGET_PE
5061 case TOK_builtin_va_start
:
5062 parse_builtin_params(0, "ee");
5063 r
= vtop
->r
& VT_VALMASK
;
5067 tcc_error("__builtin_va_start expects a local variable");
5069 vtop
->type
= char_pointer_type
;
5074 case TOK_builtin_va_arg_types
:
5075 parse_builtin_params(0, "t");
5076 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5083 #ifdef TCC_TARGET_ARM64
5084 case TOK___va_start
: {
5085 parse_builtin_params(0, "ee");
5089 vtop
->type
.t
= VT_VOID
;
5092 case TOK___va_arg
: {
5093 parse_builtin_params(0, "et");
5101 case TOK___arm64_clear_cache
: {
5102 parse_builtin_params(0, "ee");
5105 vtop
->type
.t
= VT_VOID
;
5109 /* pre operations */
5120 t
= vtop
->type
.t
& VT_BTYPE
;
5122 /* In IEEE negate(x) isn't subtract(0,x), but rather
5126 vtop
->c
.f
= -1.0 * 0.0;
5127 else if (t
== VT_DOUBLE
)
5128 vtop
->c
.d
= -1.0 * 0.0;
5130 vtop
->c
.ld
= -1.0 * 0.0;
5138 goto tok_identifier
;
5140 /* allow to take the address of a label */
5141 if (tok
< TOK_UIDENT
)
5142 expect("label identifier");
5143 s
= label_find(tok
);
5145 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5147 if (s
->r
== LABEL_DECLARED
)
5148 s
->r
= LABEL_FORWARD
;
5151 s
->type
.t
= VT_VOID
;
5152 mk_pointer(&s
->type
);
5153 s
->type
.t
|= VT_STATIC
;
5155 vpushsym(&s
->type
, s
);
5161 CType controlling_type
;
5162 int has_default
= 0;
5165 TokenString
*str
= NULL
;
5166 int saved_const_wanted
= const_wanted
;
5171 expr_type(&controlling_type
, expr_eq
);
5172 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5173 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5174 mk_pointer(&controlling_type
);
5175 const_wanted
= saved_const_wanted
;
5179 if (tok
== TOK_DEFAULT
) {
5181 tcc_error("too many 'default'");
5187 AttributeDef ad_tmp
;
5190 parse_btype(&cur_type
, &ad_tmp
);
5191 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5192 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5194 tcc_error("type match twice");
5204 skip_or_save_block(&str
);
5206 skip_or_save_block(NULL
);
5213 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5214 tcc_error("type '%s' does not match any association", buf
);
5216 begin_macro(str
, 1);
5225 // special qnan , snan and infinity values
5230 vtop
->type
.t
= VT_FLOAT
;
5235 goto special_math_val
;
5238 goto special_math_val
;
5245 expect("identifier");
5247 if (!s
|| IS_ASM_SYM(s
)) {
5248 const char *name
= get_tok_str(t
, NULL
);
5250 tcc_error("'%s' undeclared", name
);
5251 /* for simple function calls, we tolerate undeclared
5252 external reference to int() function */
5253 if (tcc_state
->warn_implicit_function_declaration
5254 #ifdef TCC_TARGET_PE
5255 /* people must be warned about using undeclared WINAPI functions
5256 (which usually start with uppercase letter) */
5257 || (name
[0] >= 'A' && name
[0] <= 'Z')
5260 tcc_warning("implicit declaration of function '%s'", name
);
5261 s
= external_global_sym(t
, &func_old_type
, 0);
5265 /* A symbol that has a register is a local register variable,
5266 which starts out as VT_LOCAL value. */
5267 if ((r
& VT_VALMASK
) < VT_CONST
)
5268 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5270 vset(&s
->type
, r
, s
->c
);
5271 /* Point to s as backpointer (even without r&VT_SYM).
5272 Will be used by at least the x86 inline asm parser for
5278 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5279 vtop
->c
.i
= s
->enum_val
;
5284 /* post operations */
5286 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5289 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5290 int qualifiers
, cumofs
= 0;
5292 if (tok
== TOK_ARROW
)
5294 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5297 /* expect pointer on structure */
5298 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5299 expect("struct or union");
5300 if (tok
== TOK_CDOUBLE
)
5301 expect("field name");
5303 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5304 expect("field name");
5305 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5307 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5308 /* add field offset to pointer */
5309 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5310 vpushi(cumofs
+ s
->c
);
5312 /* change type to field type, and set to lvalue */
5313 vtop
->type
= s
->type
;
5314 vtop
->type
.t
|= qualifiers
;
5315 /* an array is never an lvalue */
5316 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5317 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5318 #ifdef CONFIG_TCC_BCHECK
5319 /* if bound checking, the referenced pointer must be checked */
5320 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5321 vtop
->r
|= VT_MUSTBOUND
;
5325 } else if (tok
== '[') {
5331 } else if (tok
== '(') {
5334 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5337 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5338 /* pointer test (no array accepted) */
5339 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5340 vtop
->type
= *pointed_type(&vtop
->type
);
5341 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5345 expect("function pointer");
5348 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5350 /* get return type */
5353 sa
= s
->next
; /* first parameter */
5354 nb_args
= regsize
= 0;
5356 /* compute first implicit argument if a structure is returned */
5357 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5358 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5359 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5360 &ret_align
, ®size
);
5362 /* get some space for the returned structure */
5363 size
= type_size(&s
->type
, &align
);
5364 #ifdef TCC_TARGET_ARM64
5365 /* On arm64, a small struct is return in registers.
5366 It is much easier to write it to memory if we know
5367 that we are allowed to write some extra bytes, so
5368 round the allocated space up to a power of 2: */
5370 while (size
& (size
- 1))
5371 size
= (size
| (size
- 1)) + 1;
5373 loc
= (loc
- size
) & -align
;
5375 ret
.r
= VT_LOCAL
| VT_LVAL
;
5376 /* pass it as 'int' to avoid structure arg passing
5378 vseti(VT_LOCAL
, loc
);
5388 /* return in register */
5389 if (is_float(ret
.type
.t
)) {
5390 ret
.r
= reg_fret(ret
.type
.t
);
5391 #ifdef TCC_TARGET_X86_64
5392 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5396 #ifndef TCC_TARGET_ARM64
5397 #ifdef TCC_TARGET_X86_64
5398 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5400 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5411 gfunc_param_typed(s
, sa
);
5421 tcc_error("too few arguments to function");
5423 gfunc_call(nb_args
);
5426 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5427 vsetc(&ret
.type
, r
, &ret
.c
);
5428 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5431 /* handle packed struct return */
5432 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5435 size
= type_size(&s
->type
, &align
);
5436 /* We're writing whole regs often, make sure there's enough
5437 space. Assume register size is power of 2. */
5438 if (regsize
> align
)
5440 loc
= (loc
- size
) & -align
;
5444 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5448 if (--ret_nregs
== 0)
5452 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5460 ST_FUNC
void expr_prod(void)
5465 while (tok
== '*' || tok
== '/' || tok
== '%') {
5473 ST_FUNC
void expr_sum(void)
5478 while (tok
== '+' || tok
== '-') {
5486 static void expr_shift(void)
5491 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5499 static void expr_cmp(void)
5504 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5505 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5513 static void expr_cmpeq(void)
5518 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5526 static void expr_and(void)
5529 while (tok
== '&') {
5536 static void expr_xor(void)
5539 while (tok
== '^') {
5546 static void expr_or(void)
5549 while (tok
== '|') {
5556 static void expr_land(void)
5559 if (tok
== TOK_LAND
) {
5562 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5563 gen_cast_s(VT_BOOL
);
5568 while (tok
== TOK_LAND
) {
5584 if (tok
!= TOK_LAND
) {
5597 static void expr_lor(void)
5600 if (tok
== TOK_LOR
) {
5603 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5604 gen_cast_s(VT_BOOL
);
5609 while (tok
== TOK_LOR
) {
5625 if (tok
!= TOK_LOR
) {
5638 /* Assuming vtop is a value used in a conditional context
5639 (i.e. compared with zero) return 0 if it's false, 1 if
5640 true and -1 if it can't be statically determined. */
5641 static int condition_3way(void)
5644 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5645 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5647 gen_cast_s(VT_BOOL
);
5654 static void expr_cond(void)
5656 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5658 CType type
, type1
, type2
;
5663 c
= condition_3way();
5664 g
= (tok
== ':' && gnu_ext
);
5666 /* needed to avoid having different registers saved in
5668 if (is_float(vtop
->type
.t
)) {
5670 #ifdef TCC_TARGET_X86_64
5671 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5695 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5696 mk_pointer(&vtop
->type
);
5698 sv
= *vtop
; /* save value to handle it later */
5699 vtop
--; /* no vpop so that FP stack is not flushed */
5715 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5716 mk_pointer(&vtop
->type
);
5719 bt1
= t1
& VT_BTYPE
;
5721 bt2
= t2
& VT_BTYPE
;
5725 /* cast operands to correct type according to ISOC rules */
5726 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5727 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5728 } else if (is_float(bt1
) || is_float(bt2
)) {
5729 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5730 type
.t
= VT_LDOUBLE
;
5732 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5737 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5738 /* cast to biggest op */
5739 type
.t
= VT_LLONG
| VT_LONG
;
5740 if (bt1
== VT_LLONG
)
5742 if (bt2
== VT_LLONG
)
5744 /* convert to unsigned if it does not fit in a long long */
5745 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5746 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5747 type
.t
|= VT_UNSIGNED
;
5748 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5749 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5750 /* If one is a null ptr constant the result type
5752 if (is_null_pointer (vtop
)) type
= type1
;
5753 else if (is_null_pointer (&sv
)) type
= type2
;
5754 else if (bt1
!= bt2
)
5755 tcc_error("incompatible types in conditional expressions");
5757 CType
*pt1
= pointed_type(&type1
);
5758 CType
*pt2
= pointed_type(&type2
);
5759 int pbt1
= pt1
->t
& VT_BTYPE
;
5760 int pbt2
= pt2
->t
& VT_BTYPE
;
5761 int newquals
, copied
= 0;
5762 /* pointers to void get preferred, otherwise the
5763 pointed to types minus qualifs should be compatible */
5764 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5765 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5766 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5767 tcc_warning("pointer type mismatch in conditional expression\n");
5769 /* combine qualifs */
5770 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
5771 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
5774 /* copy the pointer target symbol */
5775 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5778 pointed_type(&type
)->t
|= newquals
;
5780 /* pointers to incomplete arrays get converted to
5781 pointers to completed ones if possible */
5782 if (pt1
->t
& VT_ARRAY
5783 && pt2
->t
& VT_ARRAY
5784 && pointed_type(&type
)->ref
->c
< 0
5785 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
5788 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5790 pointed_type(&type
)->ref
=
5791 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
5792 0, pointed_type(&type
)->ref
->c
);
5793 pointed_type(&type
)->ref
->c
=
5794 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
5797 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5798 /* XXX: test structure compatibility */
5799 type
= bt1
== VT_STRUCT
? type1
: type2
;
5801 /* integer operations */
5802 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5803 /* convert to unsigned if it does not fit in an integer */
5804 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5805 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5806 type
.t
|= VT_UNSIGNED
;
5808 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5809 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5810 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5812 /* now we convert second operand */
5816 mk_pointer(&vtop
->type
);
5818 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5823 if (is_float(type
.t
)) {
5825 #ifdef TCC_TARGET_X86_64
5826 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5830 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5831 /* for long longs, we use fixed registers to avoid having
5832 to handle a complicated move */
5843 /* this is horrible, but we must also convert first
5849 mk_pointer(&vtop
->type
);
5851 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5855 if (c
< 0 || islv
) {
5857 move_reg(r2
, r1
, type
.t
);
5867 static void expr_eq(void)
5873 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5874 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5875 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5890 ST_FUNC
void gexpr(void)
5901 /* parse a constant expression and return value in vtop. */
5902 static void expr_const1(void)
5911 /* parse an integer constant and return its value. */
5912 static inline int64_t expr_const64(void)
5916 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5917 expect("constant expression");
5923 /* parse an integer constant and return its value.
5924 Complain if it doesn't fit 32bit (signed or unsigned). */
5925 ST_FUNC
int expr_const(void)
5928 int64_t wc
= expr_const64();
5930 if (c
!= wc
&& (unsigned)c
!= wc
)
5931 tcc_error("constant exceeds 32 bit");
5935 /* return the label token if current token is a label, otherwise
5937 static int is_label(void)
5941 /* fast test first */
5942 if (tok
< TOK_UIDENT
)
5944 /* no need to save tokc because tok is an identifier */
5950 unget_tok(last_tok
);
5955 #ifndef TCC_TARGET_ARM64
5956 static void gfunc_return(CType
*func_type
)
5958 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5959 CType type
, ret_type
;
5960 int ret_align
, ret_nregs
, regsize
;
5961 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5962 &ret_align
, ®size
);
5963 if (0 == ret_nregs
) {
5964 /* if returning structure, must copy it to implicit
5965 first pointer arg location */
5968 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5971 /* copy structure value to pointer */
5974 /* returning structure packed into registers */
5975 int r
, size
, addr
, align
;
5976 size
= type_size(func_type
,&align
);
5977 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5978 (vtop
->c
.i
& (ret_align
-1)))
5979 && (align
& (ret_align
-1))) {
5980 loc
= (loc
- size
) & -ret_align
;
5983 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5987 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5989 vtop
->type
= ret_type
;
5990 if (is_float(ret_type
.t
))
5991 r
= rc_fret(ret_type
.t
);
6002 if (--ret_nregs
== 0)
6004 /* We assume that when a structure is returned in multiple
6005 registers, their classes are consecutive values of the
6008 vtop
->c
.i
+= regsize
;
6012 } else if (is_float(func_type
->t
)) {
6013 gv(rc_fret(func_type
->t
));
6017 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6021 static int case_cmp(const void *pa
, const void *pb
)
6023 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6024 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6025 return a
< b
? -1 : a
> b
;
6028 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6032 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6050 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6052 gcase(base
, len
/2, bsym
);
6053 if (cur_switch
->def_sym
)
6054 gjmp_addr(cur_switch
->def_sym
);
6056 *bsym
= gjmp(*bsym
);
6060 base
+= e
; len
-= e
;
6070 if (p
->v1
== p
->v2
) {
6072 gtst_addr(0, p
->sym
);
6082 gtst_addr(0, p
->sym
);
6088 static void block(int *bsym
, int *csym
, int is_expr
)
6090 int a
, b
, c
, d
, cond
;
6093 /* generate line number info */
6094 if (tcc_state
->do_debug
)
6095 tcc_debug_line(tcc_state
);
6098 /* default return value is (void) */
6100 vtop
->type
.t
= VT_VOID
;
6103 if (tok
== TOK_IF
) {
6105 int saved_nocode_wanted
= nocode_wanted
;
6110 cond
= condition_3way();
6116 nocode_wanted
|= 0x20000000;
6117 block(bsym
, csym
, 0);
6119 nocode_wanted
= saved_nocode_wanted
;
6120 if (tok
== TOK_ELSE
) {
6125 nocode_wanted
|= 0x20000000;
6126 block(bsym
, csym
, 0);
6127 gsym(d
); /* patch else jmp */
6129 nocode_wanted
= saved_nocode_wanted
;
6132 } else if (tok
== TOK_WHILE
) {
6133 int saved_nocode_wanted
;
6134 nocode_wanted
&= ~0x20000000;
6144 saved_nocode_wanted
= nocode_wanted
;
6146 nocode_wanted
= saved_nocode_wanted
;
6151 } else if (tok
== '{') {
6152 Sym
*llabel
, *lcleanup
;
6153 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
6154 int lncleanups
= ncleanups
;
6157 /* record local declaration stack position */
6159 llabel
= local_label_stack
;
6160 lcleanup
= current_cleanups
;
6163 /* handle local labels declarations */
6164 while (tok
== TOK_LABEL
) {
6167 if (tok
< TOK_UIDENT
)
6168 expect("label identifier");
6169 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6179 while (tok
!= '}') {
6180 if ((a
= is_label()))
6187 block(bsym
, csym
, is_expr
);
6191 if (current_cleanups
!= lcleanup
) {
6195 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> lncleanups
;)
6196 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6201 try_call_scope_cleanup(lcleanup
);
6202 pcl
->jnext
= gjmp(0);
6204 goto remove_pending
;
6213 if (!nocode_wanted
) {
6214 try_call_scope_cleanup(lcleanup
);
6218 current_cleanups
= lcleanup
;
6219 ncleanups
= lncleanups
;
6220 /* pop locally defined labels */
6221 label_pop(&local_label_stack
, llabel
, is_expr
);
6222 /* pop locally defined symbols */
6224 /* In the is_expr case (a statement expression is finished here),
6225 vtop might refer to symbols on the local_stack. Either via the
6226 type or via vtop->sym. We can't pop those nor any that in turn
6227 might be referred to. To make it easier we don't roll back
6228 any symbols in that case; some upper level call to block() will
6229 do that. We do have to remove such symbols from the lookup
6230 tables, though. sym_pop will do that. */
6231 sym_pop(&local_stack
, s
, is_expr
);
6233 /* Pop VLA frames and restore stack pointer if required */
6234 if (vlas_in_scope
> saved_vlas_in_scope
) {
6235 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
6238 vlas_in_scope
= saved_vlas_in_scope
;
6241 } else if (tok
== TOK_RETURN
) {
6245 gen_assign_cast(&func_vt
);
6246 try_call_scope_cleanup(NULL
);
6247 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6250 gfunc_return(&func_vt
);
6252 try_call_scope_cleanup(NULL
);
6255 /* jump unless last stmt in top-level block */
6256 if (tok
!= '}' || local_scope
!= 1)
6258 nocode_wanted
|= 0x20000000;
6259 } else if (tok
== TOK_BREAK
) {
6262 tcc_error("cannot break");
6263 *bsym
= gjmp(*bsym
);
6266 nocode_wanted
|= 0x20000000;
6267 } else if (tok
== TOK_CONTINUE
) {
6270 tcc_error("cannot continue");
6271 vla_sp_restore_root();
6272 *csym
= gjmp(*csym
);
6275 nocode_wanted
|= 0x20000000;
6276 } else if (tok
== TOK_FOR
) {
6278 int saved_nocode_wanted
;
6279 nocode_wanted
&= ~0x20000000;
6285 /* c99 for-loop init decl? */
6286 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6287 /* no, regular for-loop init expr */
6313 saved_nocode_wanted
= nocode_wanted
;
6315 nocode_wanted
= saved_nocode_wanted
;
6320 sym_pop(&local_stack
, s
, 0);
6323 if (tok
== TOK_DO
) {
6324 int saved_nocode_wanted
;
6325 nocode_wanted
&= ~0x20000000;
6331 saved_nocode_wanted
= nocode_wanted
;
6337 nocode_wanted
= saved_nocode_wanted
;
6341 nocode_wanted
= saved_nocode_wanted
;
6346 if (tok
== TOK_SWITCH
) {
6347 struct switch_t
*saved
, sw
;
6348 int saved_nocode_wanted
= nocode_wanted
;
6354 switchval
= *vtop
--;
6356 b
= gjmp(0); /* jump to first case */
6357 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6361 nocode_wanted
= saved_nocode_wanted
;
6362 a
= gjmp(a
); /* add implicit break */
6365 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6366 for (b
= 1; b
< sw
.n
; b
++)
6367 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6368 tcc_error("duplicate case value");
6369 /* Our switch table sorting is signed, so the compared
6370 value needs to be as well when it's 64bit. */
6371 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6372 switchval
.type
.t
&= ~VT_UNSIGNED
;
6374 gcase(sw
.p
, sw
.n
, &a
);
6377 gjmp_addr(sw
.def_sym
);
6378 dynarray_reset(&sw
.p
, &sw
.n
);
6383 if (tok
== TOK_CASE
) {
6384 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6387 nocode_wanted
&= ~0x20000000;
6389 cr
->v1
= cr
->v2
= expr_const64();
6390 if (gnu_ext
&& tok
== TOK_DOTS
) {
6392 cr
->v2
= expr_const64();
6393 if (cr
->v2
< cr
->v1
)
6394 tcc_warning("empty case range");
6397 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6400 goto block_after_label
;
6402 if (tok
== TOK_DEFAULT
) {
6407 if (cur_switch
->def_sym
)
6408 tcc_error("too many 'default'");
6409 cur_switch
->def_sym
= ind
;
6411 goto block_after_label
;
6413 if (tok
== TOK_GOTO
) {
6415 if (tok
== '*' && gnu_ext
) {
6419 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6422 } else if (tok
>= TOK_UIDENT
) {
6423 s
= label_find(tok
);
6424 /* put forward definition if needed */
6426 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6427 else if (s
->r
== LABEL_DECLARED
)
6428 s
->r
= LABEL_FORWARD
;
6430 vla_sp_restore_root();
6431 if (s
->r
& LABEL_FORWARD
) {
6432 /* start new goto chain for cleanups, linked via label->next */
6433 if (current_cleanups
) {
6434 sym_push2(&pending_gotos
, SYM_FIELD
, 0, ncleanups
);
6435 pending_gotos
->prev_tok
= s
;
6436 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
6437 pending_gotos
->next
= s
;
6439 s
->jnext
= gjmp(s
->jnext
);
6441 try_call_cleanup_goto(s
->cleanupstate
);
6442 gjmp_addr(s
->jnext
);
6446 expect("label identifier");
6449 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
6458 if (s
->r
== LABEL_DEFINED
)
6459 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6460 s
->r
= LABEL_DEFINED
;
6462 Sym
*pcl
; /* pending cleanup goto */
6463 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
6465 sym_pop(&s
->next
, NULL
, 0);
6469 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
6472 s
->cleanupstate
= current_cleanups
;
6474 /* we accept this, but it is a mistake */
6476 nocode_wanted
&= ~0x20000000;
6478 tcc_warning("deprecated use of label at end of compound statement");
6482 block(bsym
, csym
, is_expr
);
6485 /* expression case */
6500 /* This skips over a stream of tokens containing balanced {} and ()
6501 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6502 with a '{'). If STR then allocates and stores the skipped tokens
6503 in *STR. This doesn't check if () and {} are nested correctly,
6504 i.e. "({)}" is accepted. */
6505 static void skip_or_save_block(TokenString
**str
)
6507 int braces
= tok
== '{';
6510 *str
= tok_str_alloc();
6512 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6514 if (tok
== TOK_EOF
) {
6515 if (str
|| level
> 0)
6516 tcc_error("unexpected end of file");
6521 tok_str_add_tok(*str
);
6524 if (t
== '{' || t
== '(') {
6526 } else if (t
== '}' || t
== ')') {
6528 if (level
== 0 && braces
&& t
== '}')
6533 tok_str_add(*str
, -1);
6534 tok_str_add(*str
, 0);
6538 #define EXPR_CONST 1
6541 static void parse_init_elem(int expr_type
)
6543 int saved_global_expr
;
6546 /* compound literals must be allocated globally in this case */
6547 saved_global_expr
= global_expr
;
6550 global_expr
= saved_global_expr
;
6551 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6552 (compound literals). */
6553 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6554 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6555 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6556 #ifdef TCC_TARGET_PE
6557 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6560 tcc_error("initializer element is not constant");
6568 /* put zeros for variable based init */
6569 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6572 /* nothing to do because globals are already set to zero */
6574 vpush_global_sym(&func_old_type
, TOK_memset
);
6576 #ifdef TCC_TARGET_ARM
6588 #define DIF_SIZE_ONLY 2
6589 #define DIF_HAVE_ELEM 4
6591 /* t is the array or struct type. c is the array or struct
6592 address. cur_field is the pointer to the current
6593 field, for arrays the 'c' member contains the current start
6594 index. 'flags' is as in decl_initializer.
6595 'al' contains the already initialized length of the
6596 current container (starting at c). This returns the new length of that. */
6597 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6598 Sym
**cur_field
, int flags
, int al
)
6601 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6602 unsigned long corig
= c
;
6606 if (flags
& DIF_HAVE_ELEM
)
6608 if (gnu_ext
&& (l
= is_label()) != 0)
6610 /* NOTE: we only support ranges for last designator */
6611 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6613 if (!(type
->t
& VT_ARRAY
))
6614 expect("array type");
6616 index
= index_last
= expr_const();
6617 if (tok
== TOK_DOTS
&& gnu_ext
) {
6619 index_last
= expr_const();
6623 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6625 tcc_error("invalid index");
6627 (*cur_field
)->c
= index_last
;
6628 type
= pointed_type(type
);
6629 elem_size
= type_size(type
, &align
);
6630 c
+= index
* elem_size
;
6631 nb_elems
= index_last
- index
+ 1;
6638 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6639 expect("struct/union type");
6640 f
= find_field(type
, l
, &cumofs
);
6653 } else if (!gnu_ext
) {
6658 if (type
->t
& VT_ARRAY
) {
6659 index
= (*cur_field
)->c
;
6660 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6661 tcc_error("index too large");
6662 type
= pointed_type(type
);
6663 c
+= index
* type_size(type
, &align
);
6666 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6667 *cur_field
= f
= f
->next
;
6669 tcc_error("too many field init");
6674 /* must put zero in holes (note that doing it that way
6675 ensures that it even works with designators) */
6676 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
6677 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6678 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
6680 /* XXX: make it more general */
6681 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
6682 unsigned long c_end
;
6687 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6688 for (i
= 1; i
< nb_elems
; i
++) {
6689 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6694 } else if (!NODATA_WANTED
) {
6695 c_end
= c
+ nb_elems
* elem_size
;
6696 if (c_end
> sec
->data_allocated
)
6697 section_realloc(sec
, c_end
);
6698 src
= sec
->data
+ c
;
6700 for(i
= 1; i
< nb_elems
; i
++) {
6702 memcpy(dst
, src
, elem_size
);
6706 c
+= nb_elems
* type_size(type
, &align
);
6712 /* store a value or an expression directly in global data or in local array */
/*
 * NOTE(review): this region of the file is a lossy text extraction of C
 * source: identifiers are split across physical lines, the original line
 * numbers (e.g. "6713") are fused into the text, and gaps in that
 * numbering show that whole statements (case labels, braces, returns)
 * were dropped by the extraction.  Only comments are added below; the
 * extracted text itself is left byte-for-byte untouched.  The function
 * bodies must be restored from the upstream source before compiling.
 *
 * init_putv(type, sec, c): store the constant value currently on top of
 * the value stack (vtop) into the object of type 'type' at byte offset
 * 'c' of section 'sec', emitting relocations (greloc/greloca) when the
 * value refers to a symbol.  The trailing vset(...) presumably belongs
 * to the non-constant / stack-based (sec == NULL) path -- TODO confirm
 * against the upstream tccgen.c.
 */
6713 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
/* Strip VT_CONSTANT from the destination type copy so the assignment
   cast below does not warn about writing to a const object. */
6720 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6724 /* XXX: not portable */
6725 /* XXX: generate error if incorrect relocation */
6726 gen_assign_cast(&dtype
);
6727 bt
= type
->t
& VT_BTYPE
;
/* A symbol-relative value can only be stored into a pointer-sized
   scalar (and never into a bit-field); anything else cannot be
   resolved by the loader at load time. */
6729 if ((vtop
->r
& VT_SYM
)
6732 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6733 || (type
->t
& VT_BITFIELD
))
6734 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6736 tcc_error("initializer element is not computable at load time");
/* NODATA_WANTED ((nocode_wanted > 0), see top of file): static data
   output is suppressed; the extraction dropped whatever early exit
   presumably follows here -- TODO confirm. */
6738 if (NODATA_WANTED
) {
6743 size
= type_size(type
, &align
);
6744 section_reserve(sec
, c
+ size
);
6745 ptr
= sec
->data
+ c
;
6747 /* XXX: make code faster ? */
/* Compound-literal initializer whose bytes live in another section:
   memmove its contents over and duplicate its relocations into 'sec'. */
6748 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6749 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6750 /* XXX This rejects compound literals like
6751 '(void *){ptr}'. The problem is that '&sym' is
6752 represented the same way, which would be ruled out
6753 by the SYM_FIRST_ANOM check above, but also '"string"'
6754 in 'char *p = "string"' is represented the same
6755 with the type being VT_PTR and the symbol being an
6756 anonymous one. That is, there's no difference in vtop
6757 between '(void *){x}' and '&(void *){x}'. Ignore
6758 pointer typed entities here. Hopefully no real code
6759 will every use compound literals with scalar type. */
6760 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6761 /* These come from compound literals, memcpy stuff over. */
6765 esym
= elfsym(vtop
->sym
);
6766 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6767 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6769 /* We need to copy over all memory contents, and that
6770 includes relocations. Use the fact that relocs are
6771 created it order, so look from the end of relocs
6772 until we hit one before the copied region. */
6773 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6774 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
/* Walk the source section's relocations backwards; relocs falling
   inside [st_value, st_value+size) are re-emitted rebased onto 'c'.
   The loop-control statements between the two range tests were lost
   in extraction (presumably continue/break). */
6775 while (num_relocs
--) {
6777 if (rel
->r_offset
>= esym
->st_value
+ size
)
6779 if (rel
->r_offset
< esym
->st_value
)
6781 /* Note: if the same fields are initialized multiple
6782 times (possible with designators) then we possibly
6783 add multiple relocations for the same offset here.
6784 That would lead to wrong code, the last reloc needs
6785 to win. We clean this up later after the whole
6786 initializer is parsed. */
6787 put_elf_reloca(symtab_section
, sec
,
6788 c
+ rel
->r_offset
- esym
->st_value
,
6789 ELFW(R_TYPE
)(rel
->r_info
),
6790 ELFW(R_SYM
)(rel
->r_info
),
/* Bit-field store: merge the value into the existing bytes under a
   computed mask instead of overwriting whole storage units. */
6800 if (type
->t
& VT_BITFIELD
) {
6801 int bit_pos
, bit_size
, bits
, n
;
6802 unsigned char *p
, v
, m
;
6803 bit_pos
= BIT_POS(vtop
->type
.t
);
6804 bit_size
= BIT_SIZE(vtop
->type
.t
);
6805 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6806 bit_pos
&= 7, bits
= 0;
/* Per-byte masked write loop; the loop header and the computation of
   'n' (bits handled this iteration) were dropped by the extraction. */
6811 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6812 m
= ((1 << n
) - 1) << bit_pos
;
6813 *p
= (*p
& ~m
) | (v
& m
);
6814 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6818 /* XXX: when cross-compiling we assume that each type has the
6819 same representation on host and target, which is likely to
6820 be wrong in the case of long double */
/* Per-type stores follow; the switch statement and its case labels
   selecting them were lost in extraction (note the numbering gaps).
   Bool normalizes to 0/1 first, then each size is OR-ed/assigned. */
6822 vtop
->c
.i
= vtop
->c
.i
!= 0;
6824 *(char *)ptr
|= vtop
->c
.i
;
6827 *(short *)ptr
|= vtop
->c
.i
;
6830 *(float*)ptr
= vtop
->c
.f
;
6833 *(double *)ptr
= vtop
->c
.d
;
6836 #if defined TCC_IS_NATIVE_387
6837 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6838 memcpy(ptr
, &vtop
->c
.ld
, 10);
6840 else if (sizeof (long double) == sizeof (double))
6841 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
6843 else if (vtop
->c
.ld
== 0.0)
6847 if (sizeof(long double) == LDOUBLE_SIZE
)
6848 *(long double*)ptr
= vtop
->c
.ld
;
6849 else if (sizeof(double) == LDOUBLE_SIZE
)
6850 *(double *)ptr
= (double)vtop
->c
.ld
;
6852 tcc_error("can't cross compile long double constants");
6856 *(long long *)ptr
|= vtop
->c
.i
;
/* Pointer-sized case: emit a relocation when the value is symbolic,
   then OR in the constant addend.  Two variants are visible -- with
   explicit addend (greloca) and without (greloc); the preprocessor
   conditionals choosing between them were lost in extraction. */
6863 addr_t val
= vtop
->c
.i
;
6865 if (vtop
->r
& VT_SYM
)
6866 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6868 *(addr_t
*)ptr
|= val
;
6870 if (vtop
->r
& VT_SYM
)
6871 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6872 *(addr_t
*)ptr
|= val
;
6878 int val
= vtop
->c
.i
;
6880 if (vtop
->r
& VT_SYM
)
6881 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6885 if (vtop
->r
& VT_SYM
)
6886 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
/* Non-section target: push an lvalue at (VT_LOCAL, c) so a normal
   store instruction can be generated -- surrounding context lost. */
6895 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6902 /* 't' contains the type and storage info. 'c' is the offset of the
6903 object in section 'sec'. If 'sec' is NULL, it means stack based
6904 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
6905 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
6906 size only evaluation is wanted (only for arrays). */
/*
 * NOTE(review): lossy extraction -- identifiers are split across lines,
 * original line numbers are fused into the text, and gaps in the
 * numbering mean statements/braces are missing.  Only comments added;
 * restore the body from the upstream source before compiling.
 *
 * decl_initializer: recursively parse one initializer for 'type',
 * dispatching on scalar / array / struct-union / brace-list cases and
 * writing values via init_putv() (or only measuring the size when
 * DIF_SIZE_ONLY is set).
 */
6907 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6910 int len
, n
, no_oblock
, nb
, i
;
/* Eagerly parse one scalar initializer element, except for string
   tokens (arrays handle those specially below) and size-only passes. */
6916 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
6917 /* In case of strings we have special handling for arrays, so
6918 don't consume them as initializer value (which would commit them
6919 to some anonymous symbol). */
6920 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6921 !(flags
& DIF_SIZE_ONLY
)) {
6922 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6923 flags
|= DIF_HAVE_ELEM
;
/* Scalar destination with a compatible already-parsed element:
   store it directly and we are done with this level. */
6926 if ((flags
& DIF_HAVE_ELEM
) &&
6927 !(type
->t
& VT_ARRAY
) &&
6928 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6929 The source type might have VT_CONSTANT set, which is
6930 of course assignable to non-const elements. */
6931 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6932 init_putv(type
, sec
, c
);
/* Array destination: t1/size1 describe one element; string-literal
   initializers for (w)char arrays get the fast paths below. */
6933 } else if (type
->t
& VT_ARRAY
) {
6936 t1
= pointed_type(type
);
6937 size1
= type_size(t1
, &align1
);
6940 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6943 tcc_error("character array initializer must be a literal,"
6944 " optionally enclosed in braces");
6949 /* only parse strings here if correct type (otherwise: handle
6950 them as ((w)char *) expressions */
6951 if ((tok
== TOK_LSTR
&&
6952 #ifdef TCC_TARGET_PE
6953 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6955 (t1
->t
& VT_BTYPE
) == VT_INT
6957 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
/* Concatenate adjacent string tokens; 'nb' is clamped to the array
   bound 'n' (warning if the literal is too long). */
6959 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6962 /* compute maximum number of chars wanted */
6964 cstr_len
= tokc
.str
.size
;
6966 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6969 if (n
>= 0 && nb
> (n
- len
))
6971 if (!(flags
& DIF_SIZE_ONLY
)) {
6973 tcc_warning("initializer-string for array is too long");
6974 /* in order to go faster for common case (char
6975 string in global variable, we handle it
6977 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6979 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
/* Slow path: store characters one by one via init_putv (narrow vs
   wide variants; the loop header was lost in extraction). */
6983 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6985 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6987 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
6994 /* only add trailing zero if enough storage (no
6995 warning in this case since it is standard) */
6996 if (n
< 0 || len
< n
) {
6997 if (!(flags
& DIF_SIZE_ONLY
)) {
6999 init_putv(t1
, sec
, c
+ (len
* size1
));
/* Brace-enclosed list: repeatedly parse designators/elements until
   the closing '}' (or until an implicit inner array/struct ends). */
7010 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7011 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7012 flags
&= ~DIF_HAVE_ELEM
;
7013 if (type
->t
& VT_ARRAY
) {
7015 /* special test for multi dimensional arrays (may not
7016 be strictly correct if designators are used at the
7018 if (no_oblock
&& len
>= n
*size1
)
7021 if (s
->type
.t
== VT_UNION
)
7025 if (no_oblock
&& f
== NULL
)
7034 /* put zeros at the end */
7035 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7036 init_putz(sec
, c
+ len
, n
*size1
- len
);
7039 /* patch type size if needed, which happens only for array types */
7041 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
/* Struct/union destination: recurse over members (body largely lost
   in extraction). */
7042 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7045 if ((flags
& DIF_FIRST
) || tok
== '{') {
7053 } else if (tok
== '{') {
7054 if (flags
& DIF_HAVE_ELEM
)
7057 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7059 } else if ((flags
& DIF_SIZE_ONLY
)) {
7060 /* If we supported only ISO C we wouldn't have to accept calling
7061 this on anything than an array if DIF_SIZE_ONLY (and even then
7062 only on the outermost level, so no recursion would be needed),
7063 because initializing a flex array member isn't supported.
7064 But GNU C supports it, so we need to recurse even into
7065 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7066 /* just skip expression */
7067 skip_or_save_block(NULL
);
/* Fallback scalar path: parse the deferred element (strings only
   reach here) and store it. */
7069 if (!(flags
& DIF_HAVE_ELEM
)) {
7070 /* This should happen only when we haven't parsed
7071 the init element above for fear of committing a
7072 string constant to memory too early. */
7073 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7074 expect("string constant");
7075 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7077 init_putv(type
, sec
, c
);
7081 /* parse an initializer for type 't' if 'has_init' is non zero, and
7082 allocate space in local or global data space ('r' is either
7083 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7084 variable 'v' of scope 'scope' is declared before initializers
7085 are parsed. If 'v' is zero, then a reference to the new object
7086 is put in the value stack. If 'has_init' is 2, a special parsing
7087 is done to handle string constants. */
/*
 * NOTE(review): lossy extraction -- identifiers split across lines,
 * original line numbers fused into the text, statements missing where
 * the embedded numbering jumps.  Only comments added; restore from the
 * upstream source before compiling.
 */
7088 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7089 int has_init
, int v
, int scope
)
7091 int size
, align
, addr
;
7092 TokenString
*init_str
= NULL
;
7095 Sym
*flexible_array
;
7097 int saved_nocode_wanted
= nocode_wanted
;
7098 #ifdef CONFIG_TCC_BCHECK
7102 /* Always allocate static or global variables */
/* 0x80000000 sets the STATIC_DATA_WANTED bit (mask 0xC0000000, see the
   #define at the top of the file): data is still emitted even when
   code generation is otherwise suppressed. */
7103 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7104 nocode_wanted
|= 0x80000000;
7106 #ifdef CONFIG_TCC_BCHECK
7107 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
/* Detect a trailing flexible array member (negative element count on
   the last struct field); its real size is discovered below. */
7110 flexible_array
= NULL
;
7111 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7112 Sym
*field
= type
->ref
->next
;
7115 field
= field
->next
;
7116 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7117 flexible_array
= field
;
7121 size
= type_size(type
, &align
);
7122 /* If unknown size, we must evaluate it before
7123 evaluating initializers because
7124 initializers can generate global data too
7125 (e.g. string pointers or ISOC99 compound
7126 literals). It also simplifies local
7127 initializers handling */
/* Unknown size: save the initializer tokens, run decl_initializer in
   DIF_SIZE_ONLY mode over them to compute the size, then rewind the
   token stream for the real second parse. */
7128 if (size
< 0 || (flexible_array
&& has_init
)) {
7130 tcc_error("unknown type size");
7131 /* get all init string */
7132 if (has_init
== 2) {
7133 init_str
= tok_str_alloc();
7134 /* only get strings */
7135 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7136 tok_str_add_tok(init_str
);
7139 tok_str_add(init_str
, -1);
7140 tok_str_add(init_str
, 0);
7142 skip_or_save_block(&init_str
);
7147 begin_macro(init_str
, 1);
7149 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7150 /* prepare second initializer parsing */
7151 macro_ptr
= init_str
->str
;
7154 /* if still unknown size, error */
7155 size
= type_size(type
, &align
);
7157 tcc_error("unknown type size");
7159 /* If there's a flex member and it was used in the initializer
7161 if (flexible_array
&&
7162 flexible_array
->type
.ref
->c
> 0)
7163 size
+= flexible_array
->type
.ref
->c
7164 * pointed_size(&flexible_array
->type
);
7165 /* take into account specified alignment if bigger */
7166 if (ad
->a
.aligned
) {
7167 int speca
= 1 << (ad
->a
.aligned
- 1);
7170 } else if (ad
->a
.packed
) {
7174 if (!v
&& NODATA_WANTED
)
7175 size
= 0, align
= 1;
/* Local (stack) allocation path: carve space out of 'loc', register
   bound-check info, honor asm register variables, and record any
   __attribute__((cleanup)) handler for scope exit. */
7177 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7179 #ifdef CONFIG_TCC_BCHECK
7180 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7184 loc
= (loc
- size
) & -align
;
7186 #ifdef CONFIG_TCC_BCHECK
7187 /* handles bounds */
7188 /* XXX: currently, since we do only one pass, we cannot track
7189 '&' operators, so we add only arrays */
7190 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7192 /* add padding between regions */
7194 /* then add local bound info */
7195 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7196 bounds_ptr
[0] = addr
;
7197 bounds_ptr
[1] = size
;
7201 /* local variable */
7202 #ifdef CONFIG_TCC_ASM
7203 if (ad
->asm_label
) {
7204 int reg
= asm_parse_regvar(ad
->asm_label
);
7206 r
= (r
& ~VT_VALMASK
) | reg
;
7209 sym
= sym_push(v
, type
, r
, addr
);
7210 if (ad
->cleanup_func
) {
7211 Sym
*cls
= sym_push2(&all_cleanups
, SYM_FIELD
| ++ncleanups
, 0, 0);
7212 cls
->prev_tok
= sym
;
7213 cls
->next
= ad
->cleanup_func
;
7214 cls
->ncl
= current_cleanups
;
7215 current_cleanups
= cls
;
7220 /* push local reference */
7221 vset(type
, r
, addr
);
/* Global / static path: reuse or create the symbol, pick a section
   (or SHN_COMMON), reserve space, then define the extern symbol. */
7224 if (v
&& scope
== VT_CONST
) {
7225 /* see if the symbol was already defined */
7228 patch_storage(sym
, ad
, type
);
7229 /* we accept several definitions of the same global variable. */
7230 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7235 /* allocate symbol in corresponding section */
7240 else if (tcc_state
->nocommon
)
7245 addr
= section_add(sec
, size
, align
);
7246 #ifdef CONFIG_TCC_BCHECK
7247 /* add padding if bound check */
7249 section_add(sec
, 1, 1);
7252 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7253 sec
= common_section
;
7258 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7259 patch_storage(sym
, ad
, NULL
);
7261 /* Local statics have a scope until now (for
7262 warnings), remove it here. */
7264 /* update symbol definition */
7265 put_extern_sym(sym
, sec
, addr
, size
);
7267 /* push global reference */
7268 sym
= get_sym_ref(type
, sec
, addr
, size
);
7269 vpushsym(type
, sym
);
7273 #ifdef CONFIG_TCC_BCHECK
7274 /* handles bounds now because the symbol must be defined
7275 before for the relocation */
7279 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7280 /* then add global bound info */
7281 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7282 bounds_ptr
[0] = 0; /* relocated */
7283 bounds_ptr
[1] = size
;
/* VLA path: save the stack pointer on first VLA in scope, then emit a
   runtime alloca-style allocation for the computed size. */
7288 if (type
->t
& VT_VLA
) {
7294 /* save current stack pointer */
7295 if (vlas_in_scope
== 0) {
7296 if (vla_sp_root_loc
== -1)
7297 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
7298 gen_vla_sp_save(vla_sp_root_loc
);
7301 vla_runtime_type_size(type
, &a
);
7302 gen_vla_alloc(type
, a
);
7303 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7304 /* on _WIN64, because of the function args scratch area, the
7305 result of alloca differs from RSP and is returned in RAX. */
7306 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7308 gen_vla_sp_save(addr
);
/* Real initializer parse; afterwards collapse duplicate relocations
   produced by repeated designated initialization of the same offset. */
7312 } else if (has_init
) {
7313 size_t oldreloc_offset
= 0;
7314 if (sec
&& sec
->reloc
)
7315 oldreloc_offset
= sec
->reloc
->data_offset
;
7316 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
7317 if (sec
&& sec
->reloc
)
7318 squeeze_multi_relocs(sec
, oldreloc_offset
);
7319 /* patch flexible array member size back to -1, */
7320 /* for possible subsequent similar declarations */
7322 flexible_array
->type
.ref
->c
= -1;
7326 /* restore parse state if needed */
7332 nocode_wanted
= saved_nocode_wanted
;
7335 /* parse a function defined by symbol 'sym' and generate its code in
7336 'cur_text_section' */
/*
 * NOTE(review): lossy extraction -- identifiers split across lines,
 * original line numbers fused into the text, some statements missing.
 * Only comments added; restore from the upstream source.
 *
 * Emits the function body: aligns/records the symbol in the text
 * section, sets up local-symbol and VLA state, generates prolog, body
 * (block(...)) and return, then patches the symbol size and resets all
 * per-function globals to poison values "for safety".
 */
7337 static void gen_function(Sym
*sym
)
7340 ind
= cur_text_section
->data_offset
;
/* Honor __attribute__((aligned)): pad with NOPs up to the boundary. */
7341 if (sym
->a
.aligned
) {
7342 size_t newoff
= section_add(cur_text_section
, 0,
7343 1 << (sym
->a
.aligned
- 1));
7344 gen_fill_nops(newoff
- ind
);
7346 /* NOTE: we patch the symbol size later */
7347 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7348 funcname
= get_tok_str(sym
->v
, NULL
);
7350 /* Initialize VLA state */
7352 vla_sp_root_loc
= -1;
7353 /* put debug symbol */
7354 tcc_debug_funcstart(tcc_state
, sym
);
7355 /* push a dummy symbol to enable local sym storage */
7356 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7357 local_scope
= 1; /* for function parameters */
7358 gfunc_prolog(&sym
->type
);
7359 reset_local_scope();
7361 clear_temp_local_var_list();
7362 block(NULL
, NULL
, 0);
/* C99: main() returning int gets an implicit 'return 0' when control
   can reach the end of the body (the generated-return statements that
   follow were partially lost in extraction). */
7363 if (!(nocode_wanted
& 0x20000000)
7364 && ((func_vt
.t
& VT_BTYPE
) == VT_INT
)
7365 && !strcmp (funcname
, "main"))
7369 gen_assign_cast(&func_vt
);
7370 gfunc_return(&func_vt
);
7375 cur_text_section
->data_offset
= ind
;
7376 label_pop(&global_label_stack
, NULL
, 0);
7377 /* reset local stack */
7378 reset_local_scope();
7379 sym_pop(&local_stack
, NULL
, 0);
7380 /* end of function */
7381 /* patch symbol size */
7382 elfsym(sym
)->st_size
= ind
- func_ind
;
7383 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7384 /* It's better to crash than to generate wrong code */
7385 cur_text_section
= NULL
;
7386 funcname
= ""; /* for safety */
7387 func_vt
.t
= VT_VOID
; /* for safety */
7388 func_var
= 0; /* for safety */
7389 ind
= 0; /* for safety */
/* Re-arm the STATIC_DATA_WANTED bit (mask 0xC0000000 at top of file):
   outside a function only static data may be emitted, not code. */
7390 nocode_wanted
= 0x80000000;
/*
 * NOTE(review): lossy extraction -- only comments added below.
 *
 * Generate code for every 'static inline' function that was actually
 * referenced.  Each recorded InlineFunc holds the saved token stream of
 * its body; a used one is replayed through the parser via begin_macro()
 * and compiled like a normal function.  Generating one inline function
 * can reference another, hence the outer do/while fixpoint loop (the
 * 'do {' itself was lost in extraction; only the '} while' survives).
 */
7394 static void gen_inline_functions(TCCState
*s
)
7397 int inline_generated
, i
, ln
;
7398 struct InlineFunc
*fn
;
/* Save/restore the current line number around the token replays. */
7400 ln
= file
->line_num
;
7401 /* iterate while inline function are referenced */
7403 inline_generated
= 0;
7404 for (i
= 0; i
= 0; i
/*
 * NOTE(review): lossy extraction -- only comments added below.
 *
 * Release the saved token streams of inline functions that were never
 * generated, then drop the whole inline_fns dynarray.  The condition
 * line guarding tok_str_free (original line 7434) was lost in the
 * extraction -- presumably it tests whether the function is still
 * pending (unused); TODO confirm against the upstream source.
 */
7428 ST_FUNC
void free_inline_functions(TCCState
*s
)
7431 /* free tokens of unused inline functions */
7432 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7433 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7435 tok_str_free(fn
->func_str
);
7437 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7440 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7441 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/*
 * NOTE(review): lossy extraction -- identifiers split across lines,
 * original line numbers fused into the text, statements missing at the
 * numbering gaps.  Only comments added; restore from upstream source.
 *
 * decl0: the top-level declaration parser.  For each declaration it
 * parses the base type, then loops over declarators, dispatching to:
 * function definitions (including static-inline deferral), old-style
 * K&R parameter declarations, typedefs, and object declarations that
 * end in decl_initializer_alloc().
 */
7442 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7447 AttributeDef ad
, adbase
;
/* No base type parsed: handle for-init termination, stray ';',
   global asm blocks, and the K&R implicit-int fallback. */
7450 if (!parse_btype(&btype
, &adbase
)) {
7451 if (is_for_loop_init
)
7453 /* skip redundant ';' if not in old parameter decl scope */
7454 if (tok
== ';' && l
!= VT_CMP
) {
7460 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7461 /* global asm block */
7465 if (tok
>= TOK_UIDENT
) {
7466 /* special test for old K&R protos without explicit int
7467 type. Only accepted when defining global data */
7471 expect("declaration");
/* Bare 'struct {...};' with an anonymous tag declares nothing. */
7476 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7477 int v
= btype
.ref
->v
;
7478 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7479 tcc_warning("unnamed struct/union that defines no instances");
7483 if (IS_ENUM(btype
.t
)) {
7488 while (1) { /* iterate thru each declaration */
7490 /* If the base type itself was an array type of unspecified
7491 size (like in 'typedef int arr[]; arr x = {1};') then
7492 we will overwrite the unknown size by the real one for
7493 this decl. We need to unshare the ref symbol holding
7495 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7496 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7499 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* Debug dump of the parsed declarator (normally compiled out --
   the surrounding #if was lost in extraction). */
7503 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7504 printf("type = '%s'\n", buf
);
7507 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7508 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
7509 tcc_error("function without file scope cannot be static");
7511 /* if old style function prototype, we accept a
7514 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7515 decl0(VT_CMP
, 0, sym
);
7518 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7519 ad
.asm_label
= asm_label_instr();
7520 /* parse one last attribute list, after asm label */
7521 parse_attribute(&ad
);
7526 #ifdef TCC_TARGET_PE
7527 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7528 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7529 tcc_error("cannot have dll linkage with static or typedef");
7530 if (ad
.a
.dllimport
) {
7531 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7534 type
.t
|= VT_EXTERN
;
/* Function definition path (the '{'-detection lines preceding this
   were lost in extraction). */
7540 tcc_error("cannot use local functions");
7541 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7542 expect("function definition");
7544 /* reject abstract declarators in function definition
7545 make old style params without decl have int type */
7547 while ((sym
= sym
->next
) != NULL
) {
7548 if (!(sym
->v
& ~SYM_FIELD
))
7549 expect("identifier");
7550 if (sym
->type
.t
== VT_VOID
)
7551 sym
->type
= int_type
;
7554 /* XXX: cannot do better now: convert extern line to static inline */
7555 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7556 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7558 /* put function symbol */
7559 sym
= external_global_sym(v
, &type
, 0);
7560 type
.t
&= ~VT_EXTERN
;
7561 patch_storage(sym
, &ad
, &type
);
7563 /* static inline functions are just recorded as a kind
7564 of macro. Their code will be emitted at the end of
7565 the compilation unit only if they are used */
7566 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7567 (VT_INLINE
| VT_STATIC
)) {
7568 struct InlineFunc
*fn
;
7569 const char *filename
;
7571 filename
= file
? file
->filename
: "";
7572 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7573 strcpy(fn
->filename
, filename
);
7575 skip_or_save_block(&fn
->func_str
);
7576 dynarray_add(&tcc_state
->inline_fns
,
7577 &tcc_state
->nb_inline_fns
, fn
);
/* Non-inline definition: pick the output section and generate the
   code now (the gen_function call was lost in extraction). */
7579 /* compute text section */
7580 cur_text_section
= ad
.section
;
7581 if (!cur_text_section
)
7582 cur_text_section
= text_section
;
/* Old-style (K&R) parameter declaration: bind this declarator to the
   matching parameter of func_sym. */
7588 /* find parameter in function parameter list */
7589 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7590 if ((sym
->v
& ~SYM_FIELD
) == v
)
7592 tcc_error("declaration for parameter '%s' but no such parameter",
7593 get_tok_str(v
, NULL
));
7595 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7596 tcc_error("storage class specified for '%s'",
7597 get_tok_str(v
, NULL
));
7598 if (sym
->type
.t
!= VT_VOID
)
7599 tcc_error("redefinition of parameter '%s'",
7600 get_tok_str(v
, NULL
));
7601 convert_parameter_type(&type
);
7603 } else if (type
.t
& VT_TYPEDEF
) {
7604 /* save typedefed type */
7605 /* XXX: test storage specifiers ? */
7607 if (sym
&& sym
->sym_scope
== local_scope
) {
7608 if (!is_compatible_types(&sym
->type
, &type
)
7609 || !(sym
->type
.t
& VT_TYPEDEF
))
7610 tcc_error("incompatible redefinition of '%s'",
7611 get_tok_str(v
, NULL
));
7614 sym
= sym_push(v
, &type
, 0, 0);
7618 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7619 && !(type
.t
& VT_EXTERN
)) {
7620 tcc_error("declaration of void object");
/* Ordinary object/function declaration (no body). */
7623 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7624 /* external function definition */
7625 /* specific case for func_call attribute */
7627 } else if (!(type
.t
& VT_ARRAY
)) {
7628 /* not lvalue if array */
7629 r
|= lvalue_type(type
.t
);
7631 has_init
= (tok
== '=');
7632 if (has_init
&& (type
.t
& VT_VLA
))
7633 tcc_error("variable length array cannot be initialized");
7634 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
7635 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7636 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7637 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7638 /* external variable or function */
7639 /* NOTE: as GCC, uninitialized global static
7640 arrays of null size are considered as
7642 type
.t
|= VT_EXTERN
;
7643 sym
= external_sym(v
, &type
, r
, &ad
);
7644 if (ad
.alias_target
) {
7647 alias_target
= sym_find(ad
.alias_target
);
7648 esym
= elfsym(alias_target
);
7650 tcc_error("unsupported forward __alias__ attribute");
7651 /* Local statics have a scope until now (for
7652 warnings), remove it here. */
7654 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
/* Defined object: compute its storage class and allocate + parse
   the initializer (some branch arms lost in extraction). */
7657 if (type
.t
& VT_STATIC
)
7663 else if (l
== VT_CONST
)
7664 /* uninitialized global variables may be overridden */
7665 type
.t
|= VT_EXTERN
;
7666 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7670 if (is_for_loop_init
)
/*
 * NOTE(review): only the signature of decl() survived the extraction;
 * its body (original lines 7683-7686) is missing entirely.  Presumably
 * it is a thin wrapper delegating to decl0() -- TODO restore from the
 * upstream source; do not reconstruct by guesswork.
 */
7682 static void decl(int l
)
7687 /* ------------------------------------------------------------------------- */