2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
28 anon_sym: anonymous symbol index
30 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
32 ST_DATA Sym
*sym_free_first
;
33 ST_DATA
void **sym_pools
;
34 ST_DATA
int nb_sym_pools
;
36 ST_DATA Sym
*global_stack
;
37 ST_DATA Sym
*local_stack
;
38 ST_DATA Sym
*define_stack
;
39 ST_DATA Sym
*global_label_stack
;
40 ST_DATA Sym
*local_label_stack
;
42 static Sym
*all_cleanups
, *current_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int section_sym
;
49 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
50 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
51 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
53 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
55 ST_DATA
int const_wanted
; /* true if constant wanted */
56 ST_DATA
int nocode_wanted
; /* no code generation wanted */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
60 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
61 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
63 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
64 ST_DATA
const char *funcname
;
67 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
69 ST_DATA
struct switch_t
{
73 } **p
; int n
; /* list of case ranges */
74 int def_sym
; /* default symbol */
75 } *cur_switch
; /* current switch */
77 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
78 /*list of temporary local variables on the stack in current function. */
79 ST_DATA
struct temp_local_variable
{
80 int location
; //offset on stack. Svalue.c.i
83 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
84 short nb_temp_local_vars
;
86 /* ------------------------------------------------------------------------- */
88 static void gen_cast(CType
*type
);
89 static void gen_cast_s(int t
);
90 static inline CType
*pointed_type(CType
*type
);
91 static int is_compatible_types(CType
*type1
, CType
*type2
);
92 static int parse_btype(CType
*type
, AttributeDef
*ad
);
93 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
94 static void parse_expr_type(CType
*type
);
95 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
96 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
97 static void block(int *bsym
, int *csym
, int is_expr
);
98 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
99 static void decl(int l
);
100 static int decl0(int l
, int is_for_loop_init
, Sym
*);
101 static void expr_eq(void);
102 static void vla_runtime_type_size(CType
*type
, int *a
);
103 static void vla_sp_restore(void);
104 static void vla_sp_restore_root(void);
105 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
106 static inline int64_t expr_const64(void);
107 static void vpush64(int ty
, unsigned long long v
);
108 static void vpush(CType
*type
);
109 static int gvtst(int inv
, int t
);
110 static void gen_inline_functions(TCCState
*s
);
111 static void skip_or_save_block(TokenString
**str
);
112 static void gv_dup(void);
113 static int get_temp_local_var(int size
,int align
);
114 static void clear_temp_local_var_list();
117 static void reset_local_scope(void)
119 if (current_cleanups
)
120 tcc_error("ICE current_cleanups");
121 sym_pop(&all_cleanups
, NULL
, 0);
125 ST_INLN
int is_float(int t
)
129 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
132 /* we use our own 'finite' function to avoid potential problems with
133 non standard math libs */
134 /* XXX: endianness dependent */
135 ST_FUNC
int ieee_finite(double d
)
138 memcpy(p
, &d
, sizeof(double));
139 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
142 /* compiling intel long double natively */
143 #if (defined __i386__ || defined __x86_64__) \
144 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
145 # define TCC_IS_NATIVE_387
148 ST_FUNC
void test_lvalue(void)
150 if (!(vtop
->r
& VT_LVAL
))
154 ST_FUNC
void check_vstack(void)
157 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
160 /* ------------------------------------------------------------------------- */
161 /* vstack debugging aid */
164 void pv (const char *lbl
, int a
, int b
)
167 for (i
= a
; i
< a
+ b
; ++i
) {
168 SValue
*p
= &vtop
[-i
];
169 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
170 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
175 /* ------------------------------------------------------------------------- */
176 /* start of translation unit info */
177 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
182 /* file info: full path + filename */
183 section_sym
= put_elf_sym(symtab_section
, 0, 0,
184 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
185 text_section
->sh_num
, NULL
);
186 getcwd(buf
, sizeof(buf
));
188 normalize_slashes(buf
);
190 pstrcat(buf
, sizeof(buf
), "/");
191 put_stabs_r(buf
, N_SO
, 0, 0,
192 text_section
->data_offset
, text_section
, section_sym
);
193 put_stabs_r(file
->filename
, N_SO
, 0, 0,
194 text_section
->data_offset
, text_section
, section_sym
);
199 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
200 symbols can be safely used */
201 put_elf_sym(symtab_section
, 0, 0,
202 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
203 SHN_ABS
, file
->filename
);
206 /* put end of translation unit info */
207 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
211 put_stabs_r(NULL
, N_SO
, 0, 0,
212 text_section
->data_offset
, text_section
, section_sym
);
216 /* generate line number info */
217 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
221 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
222 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
224 last_line_num
= file
->line_num
;
228 /* put function symbol */
229 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
237 /* XXX: we put here a dummy type */
238 snprintf(buf
, sizeof(buf
), "%s:%c1",
239 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
240 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
241 cur_text_section
, sym
->c
);
242 /* //gr gdb wants a line at the function */
243 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
249 /* put function size */
250 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
254 put_stabn(N_FUN
, 0, 0, size
);
257 /* ------------------------------------------------------------------------- */
258 ST_FUNC
int tccgen_compile(TCCState
*s1
)
260 cur_text_section
= NULL
;
262 anon_sym
= SYM_FIRST_ANOM
;
265 nocode_wanted
= 0x80000000;
267 /* define some often used types */
269 char_pointer_type
.t
= VT_BYTE
;
270 mk_pointer(&char_pointer_type
);
272 size_type
.t
= VT_INT
| VT_UNSIGNED
;
273 ptrdiff_type
.t
= VT_INT
;
275 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
276 ptrdiff_type
.t
= VT_LLONG
;
278 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
279 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
281 func_old_type
.t
= VT_FUNC
;
282 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
283 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
284 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
288 #ifdef TCC_TARGET_ARM
293 printf("%s: **** new file\n", file
->filename
);
296 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
299 gen_inline_functions(s1
);
301 /* end of translation unit info */
306 /* ------------------------------------------------------------------------- */
307 ST_FUNC ElfSym
*elfsym(Sym
*s
)
311 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
314 /* apply storage attributes to Elf symbol */
315 ST_FUNC
void update_storage(Sym
*sym
)
318 int sym_bind
, old_sym_bind
;
324 if (sym
->a
.visibility
)
325 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
328 if (sym
->type
.t
& VT_STATIC
)
329 sym_bind
= STB_LOCAL
;
330 else if (sym
->a
.weak
)
333 sym_bind
= STB_GLOBAL
;
334 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
335 if (sym_bind
!= old_sym_bind
) {
336 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
340 if (sym
->a
.dllimport
)
341 esym
->st_other
|= ST_PE_IMPORT
;
342 if (sym
->a
.dllexport
)
343 esym
->st_other
|= ST_PE_EXPORT
;
347 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
348 get_tok_str(sym
->v
, NULL
),
349 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
357 /* ------------------------------------------------------------------------- */
358 /* update sym->c so that it points to an external symbol in section
359 'section' with value 'value' */
361 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
362 addr_t value
, unsigned long size
,
363 int can_add_underscore
)
365 int sym_type
, sym_bind
, info
, other
, t
;
369 #ifdef CONFIG_TCC_BCHECK
374 name
= get_tok_str(sym
->v
, NULL
);
375 #ifdef CONFIG_TCC_BCHECK
376 if (tcc_state
->do_bounds_check
) {
377 /* XXX: avoid doing that for statics ? */
378 /* if bound checking is activated, we change some function
379 names by adding the "__bound" prefix */
382 /* XXX: we rely only on malloc hooks */
395 strcpy(buf
, "__bound_");
403 if ((t
& VT_BTYPE
) == VT_FUNC
) {
405 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
406 sym_type
= STT_NOTYPE
;
408 sym_type
= STT_OBJECT
;
411 sym_bind
= STB_LOCAL
;
413 sym_bind
= STB_GLOBAL
;
416 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
417 Sym
*ref
= sym
->type
.ref
;
418 if (ref
->a
.nodecorate
) {
419 can_add_underscore
= 0;
421 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
422 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
424 other
|= ST_PE_STDCALL
;
425 can_add_underscore
= 0;
429 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
431 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
435 name
= get_tok_str(sym
->asm_label
, NULL
);
436 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
437 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
440 esym
->st_value
= value
;
441 esym
->st_size
= size
;
442 esym
->st_shndx
= sh_num
;
447 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
448 addr_t value
, unsigned long size
)
450 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
451 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
454 /* add a new relocation entry to symbol 'sym' in section 's' */
455 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
460 if (nocode_wanted
&& s
== cur_text_section
)
465 put_extern_sym(sym
, NULL
, 0, 0);
469 /* now we can add ELF relocation info */
470 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
474 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
476 greloca(s
, sym
, offset
, type
, 0);
480 /* ------------------------------------------------------------------------- */
481 /* symbol allocator */
482 static Sym
*__sym_malloc(void)
484 Sym
*sym_pool
, *sym
, *last_sym
;
487 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
488 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
490 last_sym
= sym_free_first
;
492 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
493 sym
->next
= last_sym
;
497 sym_free_first
= last_sym
;
501 static inline Sym
*sym_malloc(void)
505 sym
= sym_free_first
;
507 sym
= __sym_malloc();
508 sym_free_first
= sym
->next
;
511 sym
= tcc_malloc(sizeof(Sym
));
516 ST_INLN
void sym_free(Sym
*sym
)
519 sym
->next
= sym_free_first
;
520 sym_free_first
= sym
;
526 /* push, without hashing */
527 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
532 memset(s
, 0, sizeof *s
);
542 /* find a symbol and return its associated structure. 's' is the top
543 of the symbol stack */
544 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
556 /* structure lookup */
557 ST_INLN Sym
*struct_find(int v
)
560 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
562 return table_ident
[v
]->sym_struct
;
565 /* find an identifier */
566 ST_INLN Sym
*sym_find(int v
)
569 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
571 return table_ident
[v
]->sym_identifier
;
574 static int sym_scope(Sym
*s
)
576 if (IS_ENUM_VAL (s
->type
.t
))
577 return s
->type
.ref
->sym_scope
;
582 /* push a given symbol on the symbol stack */
583 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
592 s
= sym_push2(ps
, v
, type
->t
, c
);
593 s
->type
.ref
= type
->ref
;
595 /* don't record fields or anonymous symbols */
597 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
598 /* record symbol in token array */
599 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
601 ps
= &ts
->sym_struct
;
603 ps
= &ts
->sym_identifier
;
606 s
->sym_scope
= local_scope
;
607 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
608 tcc_error("redeclaration of '%s'",
609 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
614 /* push a global identifier */
615 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
618 s
= sym_push2(&global_stack
, v
, t
, c
);
619 /* don't record anonymous symbol */
620 if (v
< SYM_FIRST_ANOM
) {
621 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
622 /* modify the top most local identifier, so that
623 sym_identifier will point to 's' when popped */
624 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
625 ps
= &(*ps
)->prev_tok
;
632 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
633 pop them yet from the list, but do remove them from the token array. */
634 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
644 /* remove symbol in token array */
646 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
647 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
649 ps
= &ts
->sym_struct
;
651 ps
= &ts
->sym_identifier
;
662 /* ------------------------------------------------------------------------- */
664 static void vsetc(CType
*type
, int r
, CValue
*vc
)
668 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
669 tcc_error("memory full (vstack)");
670 /* cannot let cpu flags if other instruction are generated. Also
671 avoid leaving VT_JMP anywhere except on the top of the stack
672 because it would complicate the code generator.
674 Don't do this when nocode_wanted. vtop might come from
675 !nocode_wanted regions (see 88_codeopt.c) and transforming
676 it to a register without actually generating code is wrong
677 as their value might still be used for real. All values
678 we push under nocode_wanted will eventually be popped
679 again, so that the VT_CMP/VT_JMP value will be in vtop
680 when code is unsuppressed again.
682 Same logic below in vswap(); */
683 if (vtop
>= vstack
&& !nocode_wanted
) {
684 v
= vtop
->r
& VT_VALMASK
;
685 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
697 ST_FUNC
void vswap(void)
700 /* cannot vswap cpu flags. See comment at vsetc() above */
701 if (vtop
>= vstack
&& !nocode_wanted
) {
702 int v
= vtop
->r
& VT_VALMASK
;
703 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
711 /* pop stack value */
712 ST_FUNC
void vpop(void)
715 v
= vtop
->r
& VT_VALMASK
;
716 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
717 /* for x86, we need to pop the FP stack */
719 o(0xd8dd); /* fstp %st(0) */
722 if (v
== VT_JMP
|| v
== VT_JMPI
) {
723 /* need to put correct jump if && or || without test */
729 /* push constant of type "type" with useless value */
730 ST_FUNC
void vpush(CType
*type
)
732 vset(type
, VT_CONST
, 0);
735 /* push integer constant */
736 ST_FUNC
void vpushi(int v
)
740 vsetc(&int_type
, VT_CONST
, &cval
);
743 /* push a pointer sized constant */
744 static void vpushs(addr_t v
)
748 vsetc(&size_type
, VT_CONST
, &cval
);
751 /* push arbitrary 64bit constant */
752 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
759 vsetc(&ctype
, VT_CONST
, &cval
);
762 /* push long long constant */
763 static inline void vpushll(long long v
)
765 vpush64(VT_LLONG
, v
);
768 ST_FUNC
void vset(CType
*type
, int r
, int v
)
773 vsetc(type
, r
, &cval
);
776 static void vseti(int r
, int v
)
784 ST_FUNC
void vpushv(SValue
*v
)
786 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
787 tcc_error("memory full (vstack)");
792 static void vdup(void)
797 /* rotate n first stack elements to the bottom
798 I1 ... In -> I2 ... In I1 [top is right]
800 ST_FUNC
void vrotb(int n
)
811 /* rotate the n elements before entry e towards the top
812 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
814 ST_FUNC
void vrote(SValue
*e
, int n
)
820 for(i
= 0;i
< n
- 1; i
++)
825 /* rotate n first stack elements to the top
826 I1 ... In -> In I1 ... I(n-1) [top is right]
828 ST_FUNC
void vrott(int n
)
833 /* push a symbol value of TYPE */
834 static inline void vpushsym(CType
*type
, Sym
*sym
)
838 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
842 /* Return a static symbol pointing to a section */
843 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
849 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
850 sym
->type
.ref
= type
->ref
;
851 sym
->r
= VT_CONST
| VT_SYM
;
852 put_extern_sym(sym
, sec
, offset
, size
);
856 /* push a reference to a section offset by adding a dummy symbol */
857 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
859 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
862 /* define a new external reference to a symbol 'v' of type 'u' */
863 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
869 /* push forward reference */
870 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
871 s
->type
.ref
= type
->ref
;
872 s
->r
= r
| VT_CONST
| VT_SYM
;
873 } else if (IS_ASM_SYM(s
)) {
874 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
875 s
->type
.ref
= type
->ref
;
881 /* Merge symbol attributes. */
882 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
884 if (sa1
->aligned
&& !sa
->aligned
)
885 sa
->aligned
= sa1
->aligned
;
886 sa
->packed
|= sa1
->packed
;
887 sa
->weak
|= sa1
->weak
;
888 if (sa1
->visibility
!= STV_DEFAULT
) {
889 int vis
= sa
->visibility
;
890 if (vis
== STV_DEFAULT
891 || vis
> sa1
->visibility
)
892 vis
= sa1
->visibility
;
893 sa
->visibility
= vis
;
895 sa
->dllexport
|= sa1
->dllexport
;
896 sa
->nodecorate
|= sa1
->nodecorate
;
897 sa
->dllimport
|= sa1
->dllimport
;
900 /* Merge function attributes. */
901 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
903 if (fa1
->func_call
&& !fa
->func_call
)
904 fa
->func_call
= fa1
->func_call
;
905 if (fa1
->func_type
&& !fa
->func_type
)
906 fa
->func_type
= fa1
->func_type
;
907 if (fa1
->func_args
&& !fa
->func_args
)
908 fa
->func_args
= fa1
->func_args
;
911 /* Merge attributes. */
912 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
914 merge_symattr(&ad
->a
, &ad1
->a
);
915 merge_funcattr(&ad
->f
, &ad1
->f
);
918 ad
->section
= ad1
->section
;
919 if (ad1
->alias_target
)
920 ad
->alias_target
= ad1
->alias_target
;
922 ad
->asm_label
= ad1
->asm_label
;
924 ad
->attr_mode
= ad1
->attr_mode
;
927 /* Merge some type attributes. */
928 static void patch_type(Sym
*sym
, CType
*type
)
930 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
931 if (!(sym
->type
.t
& VT_EXTERN
))
932 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
933 sym
->type
.t
&= ~VT_EXTERN
;
936 if (IS_ASM_SYM(sym
)) {
937 /* stay static if both are static */
938 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
939 sym
->type
.ref
= type
->ref
;
942 if (!is_compatible_types(&sym
->type
, type
)) {
943 tcc_error("incompatible types for redefinition of '%s'",
944 get_tok_str(sym
->v
, NULL
));
946 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
947 int static_proto
= sym
->type
.t
& VT_STATIC
;
948 /* warn if static follows non-static function declaration */
949 if ((type
->t
& VT_STATIC
) && !static_proto
&& !(type
->t
& VT_INLINE
))
950 tcc_warning("static storage ignored for redefinition of '%s'",
951 get_tok_str(sym
->v
, NULL
));
953 if (0 == (type
->t
& VT_EXTERN
)) {
954 /* put complete type, use static from prototype */
955 sym
->type
.t
= (type
->t
& ~VT_STATIC
) | static_proto
;
956 if (type
->t
& VT_INLINE
)
957 sym
->type
.t
= type
->t
;
958 sym
->type
.ref
= type
->ref
;
962 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
963 /* set array size if it was omitted in extern declaration */
964 if (sym
->type
.ref
->c
< 0)
965 sym
->type
.ref
->c
= type
->ref
->c
;
966 else if (sym
->type
.ref
->c
!= type
->ref
->c
)
967 tcc_error("conflicting type for '%s'", get_tok_str(sym
->v
, NULL
));
969 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
970 tcc_warning("storage mismatch for redefinition of '%s'",
971 get_tok_str(sym
->v
, NULL
));
976 /* Merge some storage attributes. */
977 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
980 patch_type(sym
, type
);
983 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
984 tcc_error("incompatible dll linkage for redefinition of '%s'",
985 get_tok_str(sym
->v
, NULL
));
987 merge_symattr(&sym
->a
, &ad
->a
);
989 sym
->asm_label
= ad
->asm_label
;
993 /* define a new external reference to a symbol 'v' */
994 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
999 /* push forward reference */
1000 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
1001 s
->type
.t
|= VT_EXTERN
;
1005 if (s
->type
.ref
== func_old_type
.ref
) {
1006 s
->type
.ref
= type
->ref
;
1007 s
->r
= r
| VT_CONST
| VT_SYM
;
1008 s
->type
.t
|= VT_EXTERN
;
1010 patch_storage(s
, ad
, type
);
1015 /* push a reference to global symbol v */
1016 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1018 vpushsym(type
, external_global_sym(v
, type
, 0));
1021 /* save registers up to (vtop - n) stack entry */
1022 ST_FUNC
void save_regs(int n
)
1025 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1029 /* save r to the memory stack, and mark it as being free */
1030 ST_FUNC
void save_reg(int r
)
1032 save_reg_upstack(r
, 0);
1035 /* save r to the memory stack, and mark it as being free,
1036 if seen up to (vtop - n) stack entry */
1037 ST_FUNC
void save_reg_upstack(int r
, int n
)
1039 int l
, saved
, size
, align
;
1043 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1048 /* modify all stack values */
1051 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1052 if ((p
->r
& VT_VALMASK
) == r
||
1053 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
1054 /* must save value on stack if not already done */
1056 /* NOTE: must reload 'r' because r might be equal to r2 */
1057 r
= p
->r
& VT_VALMASK
;
1058 /* store register in the stack */
1060 if ((p
->r
& VT_LVAL
) ||
1061 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
1063 type
= &char_pointer_type
;
1067 size
= type_size(type
, &align
);
1068 l
=get_temp_local_var(size
,align
);
1069 sv
.type
.t
= type
->t
;
1070 sv
.r
= VT_LOCAL
| VT_LVAL
;
1073 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1074 /* x86 specific: need to pop fp register ST0 if saved */
1075 if (r
== TREG_ST0
) {
1076 o(0xd8dd); /* fstp %st(0) */
1080 /* special long long case */
1081 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
1088 /* mark that stack entry as being saved on the stack */
1089 if (p
->r
& VT_LVAL
) {
1090 /* also clear the bounded flag because the
1091 relocation address of the function was stored in
1093 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1095 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1103 #ifdef TCC_TARGET_ARM
1104 /* find a register of class 'rc2' with at most one reference on stack.
1105 * If none, call get_reg(rc) */
1106 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1111 for(r
=0;r
<NB_REGS
;r
++) {
1112 if (reg_classes
[r
] & rc2
) {
1115 for(p
= vstack
; p
<= vtop
; p
++) {
1116 if ((p
->r
& VT_VALMASK
) == r
||
1117 (p
->r2
& VT_VALMASK
) == r
)
1128 /* find a free register of class 'rc'. If none, save one register */
1129 ST_FUNC
int get_reg(int rc
)
1134 /* find a free register */
1135 for(r
=0;r
<NB_REGS
;r
++) {
1136 if (reg_classes
[r
] & rc
) {
1139 for(p
=vstack
;p
<=vtop
;p
++) {
1140 if ((p
->r
& VT_VALMASK
) == r
||
1141 (p
->r2
& VT_VALMASK
) == r
)
1149 /* no register left : free the first one on the stack (VERY
1150 IMPORTANT to start from the bottom to ensure that we don't
1151 spill registers used in gen_opi()) */
1152 for(p
=vstack
;p
<=vtop
;p
++) {
1153 /* look at second register (if long long) */
1154 r
= p
->r2
& VT_VALMASK
;
1155 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1157 r
= p
->r
& VT_VALMASK
;
1158 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1164 /* Should never comes here */
1168 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1169 static int get_temp_local_var(int size
,int align
){
1171 struct temp_local_variable
*temp_var
;
1178 for(i
=0;i
<nb_temp_local_vars
;i
++){
1179 temp_var
=&arr_temp_local_vars
[i
];
1180 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1183 /*check if temp_var is free*/
1185 for(p
=vstack
;p
<=vtop
;p
++) {
1187 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1188 if(p
->c
.i
==temp_var
->location
){
1195 found_var
=temp_var
->location
;
1201 loc
= (loc
- size
) & -align
;
1202 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1203 temp_var
=&arr_temp_local_vars
[i
];
1204 temp_var
->location
=loc
;
1205 temp_var
->size
=size
;
1206 temp_var
->align
=align
;
1207 nb_temp_local_vars
++;
1214 static void clear_temp_local_var_list(){
1215 nb_temp_local_vars
=0;
1218 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1220 static void move_reg(int r
, int s
, int t
)
1234 /* get address of vtop (vtop MUST BE an lvalue) */
1235 ST_FUNC
void gaddrof(void)
1237 vtop
->r
&= ~VT_LVAL
;
1238 /* tricky: if saved lvalue, then we can go back to lvalue */
1239 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1240 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1245 #ifdef CONFIG_TCC_BCHECK
1246 /* generate lvalue bound code */
1247 static void gbound(void)
1252 vtop
->r
&= ~VT_MUSTBOUND
;
1253 /* if lvalue, then use checking code before dereferencing */
1254 if (vtop
->r
& VT_LVAL
) {
1255 /* if not VT_BOUNDED value, then make one */
1256 if (!(vtop
->r
& VT_BOUNDED
)) {
1257 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1258 /* must save type because we must set it to int to get pointer */
1260 vtop
->type
.t
= VT_PTR
;
1263 gen_bounded_ptr_add();
1264 vtop
->r
|= lval_type
;
1267 /* then check for dereferencing */
1268 gen_bounded_ptr_deref();
1273 static void incr_bf_adr(int o
)
1275 vtop
->type
= char_pointer_type
;
1279 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1280 | (VT_BYTE
|VT_UNSIGNED
);
1281 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1282 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1285 /* single-byte load mode for packed or otherwise unaligned bitfields */
1286 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1289 save_reg_upstack(vtop
->r
, 1);
1290 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1291 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1300 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1302 vpushi((1 << n
) - 1), gen_op('&');
1305 vpushi(bits
), gen_op(TOK_SHL
);
1308 bits
+= n
, bit_size
-= n
, o
= 1;
1311 if (!(type
->t
& VT_UNSIGNED
)) {
1312 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1313 vpushi(n
), gen_op(TOK_SHL
);
1314 vpushi(n
), gen_op(TOK_SAR
);
1318 /* single-byte store mode for packed or otherwise unaligned bitfields */
1319 static void store_packed_bf(int bit_pos
, int bit_size
)
1321 int bits
, n
, o
, m
, c
;
1323 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1325 save_reg_upstack(vtop
->r
, 1);
1326 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1328 incr_bf_adr(o
); // X B
1330 c
? vdup() : gv_dup(); // B V X
1333 vpushi(bits
), gen_op(TOK_SHR
);
1335 vpushi(bit_pos
), gen_op(TOK_SHL
);
1340 m
= ((1 << n
) - 1) << bit_pos
;
1341 vpushi(m
), gen_op('&'); // X B V1
1342 vpushv(vtop
-1); // X B V1 B
1343 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1344 gen_op('&'); // X B V1 B1
1345 gen_op('|'); // X B V2
1347 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1348 vstore(), vpop(); // X B
1349 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1354 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1357 if (0 == sv
->type
.ref
)
1359 t
= sv
->type
.ref
->auxtype
;
1360 if (t
!= -1 && t
!= VT_STRUCT
) {
1361 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1362 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1367 /* store vtop a register belonging to class 'rc'. lvalues are
1368 converted to values. Cannot be used if cannot be converted to
1369 register value (such as structures). */
1370 ST_FUNC
int gv(int rc
)
1372 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1374 /* NOTE: get_reg can modify vstack[] */
1375 if (vtop
->type
.t
& VT_BITFIELD
) {
1378 bit_pos
= BIT_POS(vtop
->type
.t
);
1379 bit_size
= BIT_SIZE(vtop
->type
.t
);
1380 /* remove bit field info to avoid loops */
1381 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1384 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1385 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1386 type
.t
|= VT_UNSIGNED
;
1388 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1390 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1395 if (r
== VT_STRUCT
) {
1396 load_packed_bf(&type
, bit_pos
, bit_size
);
1398 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1399 /* cast to int to propagate signedness in following ops */
1401 /* generate shifts */
1402 vpushi(bits
- (bit_pos
+ bit_size
));
1404 vpushi(bits
- bit_size
);
1405 /* NOTE: transformed to SHR if unsigned */
1410 if (is_float(vtop
->type
.t
) &&
1411 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1412 unsigned long offset
;
1413 /* CPUs usually cannot use float constants, so we store them
1414 generically in data segment */
1415 size
= type_size(&vtop
->type
, &align
);
1417 size
= 0, align
= 1;
1418 offset
= section_add(data_section
, size
, align
);
1419 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1421 init_putv(&vtop
->type
, data_section
, offset
);
1424 #ifdef CONFIG_TCC_BCHECK
1425 if (vtop
->r
& VT_MUSTBOUND
)
1429 r
= vtop
->r
& VT_VALMASK
;
1430 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1431 #ifndef TCC_TARGET_ARM64
1434 #ifdef TCC_TARGET_X86_64
1435 else if (rc
== RC_FRET
)
1439 /* need to reload if:
1441 - lvalue (need to dereference pointer)
1442 - already a register, but not in the right class */
1444 || (vtop
->r
& VT_LVAL
)
1445 || !(reg_classes
[r
] & rc
)
1447 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1448 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1450 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1456 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1457 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1459 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1460 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1461 unsigned long long ll
;
1463 int r2
, original_type
;
1464 original_type
= vtop
->type
.t
;
1465 /* two register type load : expand to two words
1468 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1471 vtop
->c
.i
= ll
; /* first word */
1473 vtop
->r
= r
; /* save register value */
1474 vpushi(ll
>> 32); /* second word */
1477 if (vtop
->r
& VT_LVAL
) {
1478 /* We do not want to modifier the long long
1479 pointer here, so the safest (and less
1480 efficient) is to save all the other registers
1481 in the stack. XXX: totally inefficient. */
1485 /* lvalue_save: save only if used further down the stack */
1486 save_reg_upstack(vtop
->r
, 1);
1488 /* load from memory */
1489 vtop
->type
.t
= load_type
;
1492 vtop
[-1].r
= r
; /* save register value */
1493 /* increment pointer to get second word */
1494 vtop
->type
.t
= addr_type
;
1499 vtop
->type
.t
= load_type
;
1501 /* move registers */
1504 vtop
[-1].r
= r
; /* save register value */
1505 vtop
->r
= vtop
[-1].r2
;
1507 /* Allocate second register. Here we rely on the fact that
1508 get_reg() tries first to free r2 of an SValue. */
1512 /* write second register */
1514 vtop
->type
.t
= original_type
;
1515 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1517 /* lvalue of scalar type : need to use lvalue type
1518 because of possible cast */
1521 /* compute memory access type */
1522 if (vtop
->r
& VT_LVAL_BYTE
)
1524 else if (vtop
->r
& VT_LVAL_SHORT
)
1526 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1530 /* restore wanted type */
1533 /* one register type load */
1538 #ifdef TCC_TARGET_C67
1539 /* uses register pairs for doubles */
1540 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1547 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1548 ST_FUNC
void gv2(int rc1
, int rc2
)
1552 /* generate more generic register first. But VT_JMP or VT_CMP
1553 values must be generated first in all cases to avoid possible
1555 v
= vtop
[0].r
& VT_VALMASK
;
1556 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1561 /* test if reload is needed for first register */
1562 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1572 /* test if reload is needed for first register */
1573 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1579 #ifndef TCC_TARGET_ARM64
1580 /* wrapper around RC_FRET to return a register by type */
1581 static int rc_fret(int t
)
1583 #ifdef TCC_TARGET_X86_64
1584 if (t
== VT_LDOUBLE
) {
1592 /* wrapper around REG_FRET to return a register by type */
1593 static int reg_fret(int t
)
1595 #ifdef TCC_TARGET_X86_64
1596 if (t
== VT_LDOUBLE
) {
1604 /* expand 64bit on stack in two ints */
1605 ST_FUNC
void lexpand(void)
1608 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1609 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1610 if (v
== VT_CONST
) {
1613 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1619 vtop
[0].r
= vtop
[-1].r2
;
1620 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1622 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1627 /* build a long long from two ints */
1628 static void lbuild(int t
)
1630 gv2(RC_INT
, RC_INT
);
1631 vtop
[-1].r2
= vtop
[0].r
;
1632 vtop
[-1].type
.t
= t
;
1637 /* convert stack entry to register and duplicate its value in another
1639 static void gv_dup(void)
1646 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1647 if (t
& VT_BITFIELD
) {
1657 /* stack: H L L1 H1 */
1667 /* duplicate value */
1672 #ifdef TCC_TARGET_X86_64
1673 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1683 load(r1
, &sv
); /* move r to r1 */
1685 /* duplicates value */
1691 /* Generate value test
1693 * Generate a test for any value (jump, comparison and integers) */
1694 ST_FUNC
int gvtst(int inv
, int t
)
1696 int v
= vtop
->r
& VT_VALMASK
;
1697 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1701 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1702 /* constant jmp optimization */
1703 if ((vtop
->c
.i
!= 0) != inv
)
1708 return gtst(inv
, t
);
1712 /* generate CPU independent (unsigned) long long operations */
1713 static void gen_opl(int op
)
1715 int t
, a
, b
, op1
, c
, i
;
1717 unsigned short reg_iret
= REG_IRET
;
1718 unsigned short reg_lret
= REG_LRET
;
1724 func
= TOK___divdi3
;
1727 func
= TOK___udivdi3
;
1730 func
= TOK___moddi3
;
1733 func
= TOK___umoddi3
;
1740 /* call generic long long function */
1741 vpush_global_sym(&func_old_type
, func
);
1746 vtop
->r2
= reg_lret
;
1754 //pv("gen_opl A",0,2);
1760 /* stack: L1 H1 L2 H2 */
1765 vtop
[-2] = vtop
[-3];
1768 /* stack: H1 H2 L1 L2 */
1769 //pv("gen_opl B",0,4);
1775 /* stack: H1 H2 L1 L2 ML MH */
1778 /* stack: ML MH H1 H2 L1 L2 */
1782 /* stack: ML MH H1 L2 H2 L1 */
1787 /* stack: ML MH M1 M2 */
1790 } else if (op
== '+' || op
== '-') {
1791 /* XXX: add non carry method too (for MIPS or alpha) */
1797 /* stack: H1 H2 (L1 op L2) */
1800 gen_op(op1
+ 1); /* TOK_xxxC2 */
1803 /* stack: H1 H2 (L1 op L2) */
1806 /* stack: (L1 op L2) H1 H2 */
1808 /* stack: (L1 op L2) (H1 op H2) */
1816 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1817 t
= vtop
[-1].type
.t
;
1821 /* stack: L H shift */
1823 /* constant: simpler */
1824 /* NOTE: all comments are for SHL. the other cases are
1825 done by swapping words */
1836 if (op
!= TOK_SAR
) {
1869 /* XXX: should provide a faster fallback on x86 ? */
1872 func
= TOK___ashrdi3
;
1875 func
= TOK___lshrdi3
;
1878 func
= TOK___ashldi3
;
1884 /* compare operations */
1890 /* stack: L1 H1 L2 H2 */
1892 vtop
[-1] = vtop
[-2];
1894 /* stack: L1 L2 H1 H2 */
1897 /* when values are equal, we need to compare low words. since
1898 the jump is inverted, we invert the test too. */
1901 else if (op1
== TOK_GT
)
1903 else if (op1
== TOK_ULT
)
1905 else if (op1
== TOK_UGT
)
1915 /* generate non equal test */
1921 /* compare low. Always unsigned */
1925 else if (op1
== TOK_LE
)
1927 else if (op1
== TOK_GT
)
1929 else if (op1
== TOK_GE
)
/* Emulate signed 64-bit division on unsigned operands: divide the
   magnitudes, then restore the sign of the quotient. The caller is
   responsible for ruling out division by zero beforehand. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? -a : a;   /* |a| (wraps correctly for INT64_MIN) */
    uint64_t ub = (b >> 63) ? -b : b;   /* |b| */
    uint64_t q = ua / ub;
    /* quotient is negative exactly when operand signs differ */
    return ((a ^ b) >> 63) ? -q : q;
}
/* Signed '<' comparison implemented on unsigned operands: XOR-ing the
   sign bit maps the signed ordering onto the unsigned ordering. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
1951 /* handle integer constant optimizations and various machine
1953 static void gen_opic(int op
)
1955 SValue
*v1
= vtop
- 1;
1957 int t1
= v1
->type
.t
& VT_BTYPE
;
1958 int t2
= v2
->type
.t
& VT_BTYPE
;
1959 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1960 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1961 uint64_t l1
= c1
? v1
->c
.i
: 0;
1962 uint64_t l2
= c2
? v2
->c
.i
: 0;
1963 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1965 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1966 l1
= ((uint32_t)l1
|
1967 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1968 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1969 l2
= ((uint32_t)l2
|
1970 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1974 case '+': l1
+= l2
; break;
1975 case '-': l1
-= l2
; break;
1976 case '&': l1
&= l2
; break;
1977 case '^': l1
^= l2
; break;
1978 case '|': l1
|= l2
; break;
1979 case '*': l1
*= l2
; break;
1986 /* if division by zero, generate explicit division */
1989 tcc_error("division by zero in constant");
1993 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1994 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1995 case TOK_UDIV
: l1
= l1
/ l2
; break;
1996 case TOK_UMOD
: l1
= l1
% l2
; break;
1999 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2000 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2002 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2005 case TOK_ULT
: l1
= l1
< l2
; break;
2006 case TOK_UGE
: l1
= l1
>= l2
; break;
2007 case TOK_EQ
: l1
= l1
== l2
; break;
2008 case TOK_NE
: l1
= l1
!= l2
; break;
2009 case TOK_ULE
: l1
= l1
<= l2
; break;
2010 case TOK_UGT
: l1
= l1
> l2
; break;
2011 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2012 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2013 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2014 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2016 case TOK_LAND
: l1
= l1
&& l2
; break;
2017 case TOK_LOR
: l1
= l1
|| l2
; break;
2021 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2022 l1
= ((uint32_t)l1
|
2023 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2027 /* if commutative ops, put c2 as constant */
2028 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2029 op
== '|' || op
== '*')) {
2031 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2032 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2034 if (!const_wanted
&&
2036 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2037 (l1
== -1 && op
== TOK_SAR
))) {
2038 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2040 } else if (!const_wanted
&&
2041 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2043 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2044 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2045 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2050 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2053 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2054 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2057 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2058 /* filter out NOP operations like x*1, x-0, x&-1... */
2060 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2061 /* try to use shifts instead of muls or divs */
2062 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2071 else if (op
== TOK_PDIV
)
2077 } else if (c2
&& (op
== '+' || op
== '-') &&
2078 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2079 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2080 /* symbol + constant case */
2084 /* The backends can't always deal with addends to symbols
2085 larger than +-1<<31. Don't construct such. */
2092 /* call low level op generator */
2093 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2094 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2102 /* generate a floating point operation with constant propagation */
2103 static void gen_opif(int op
)
2107 #if defined _MSC_VER && defined _AMD64_
2108 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2115 /* currently, we cannot do computations with forward symbols */
2116 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2117 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2119 if (v1
->type
.t
== VT_FLOAT
) {
2122 } else if (v1
->type
.t
== VT_DOUBLE
) {
2130 /* NOTE: we only do constant propagation if finite number (not
2131 NaN or infinity) (ANSI spec) */
2132 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2136 case '+': f1
+= f2
; break;
2137 case '-': f1
-= f2
; break;
2138 case '*': f1
*= f2
; break;
2141 /* If not in initializer we need to potentially generate
2142 FP exceptions at runtime, otherwise we want to fold. */
2148 /* XXX: also handles tests ? */
2152 /* XXX: overflow test ? */
2153 if (v1
->type
.t
== VT_FLOAT
) {
2155 } else if (v1
->type
.t
== VT_DOUBLE
) {
2167 static int pointed_size(CType
*type
)
2170 return type_size(pointed_type(type
), &align
);
2173 static void vla_runtime_pointed_size(CType
*type
)
2176 vla_runtime_type_size(pointed_type(type
), &align
);
2179 static inline int is_null_pointer(SValue
*p
)
2181 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2183 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2184 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2185 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2186 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2187 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2188 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2191 static inline int is_integer_btype(int bt
)
2193 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2194 bt
== VT_INT
|| bt
== VT_LLONG
);
2197 /* check types for comparison or subtraction of pointers */
2198 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2200 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2203 /* null pointers are accepted for all comparisons as gcc */
2204 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2208 bt1
= type1
->t
& VT_BTYPE
;
2209 bt2
= type2
->t
& VT_BTYPE
;
2210 /* accept comparison between pointer and integer with a warning */
2211 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2212 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2213 tcc_warning("comparison between pointer and integer");
2217 /* both must be pointers or implicit function pointers */
2218 if (bt1
== VT_PTR
) {
2219 type1
= pointed_type(type1
);
2220 } else if (bt1
!= VT_FUNC
)
2221 goto invalid_operands
;
2223 if (bt2
== VT_PTR
) {
2224 type2
= pointed_type(type2
);
2225 } else if (bt2
!= VT_FUNC
) {
2227 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2229 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2230 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2234 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2235 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2236 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2237 /* gcc-like error if '-' is used */
2239 goto invalid_operands
;
2241 tcc_warning("comparison of distinct pointer types lacks a cast");
2245 /* generic gen_op: handles types problems */
2246 ST_FUNC
void gen_op(int op
)
2248 int u
, t1
, t2
, bt1
, bt2
, t
;
2252 t1
= vtop
[-1].type
.t
;
2253 t2
= vtop
[0].type
.t
;
2254 bt1
= t1
& VT_BTYPE
;
2255 bt2
= t2
& VT_BTYPE
;
2257 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2258 tcc_error("operation on a struct");
2259 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2260 if (bt2
== VT_FUNC
) {
2261 mk_pointer(&vtop
->type
);
2264 if (bt1
== VT_FUNC
) {
2266 mk_pointer(&vtop
->type
);
2271 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2272 /* at least one operand is a pointer */
2273 /* relational op: must be both pointers */
2274 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2275 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2276 /* pointers are handled are unsigned */
2278 t
= VT_LLONG
| VT_UNSIGNED
;
2280 t
= VT_INT
| VT_UNSIGNED
;
2284 /* if both pointers, then it must be the '-' op */
2285 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2287 tcc_error("cannot use pointers here");
2288 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2289 /* XXX: check that types are compatible */
2290 if (vtop
[-1].type
.t
& VT_VLA
) {
2291 vla_runtime_pointed_size(&vtop
[-1].type
);
2293 vpushi(pointed_size(&vtop
[-1].type
));
2297 vtop
->type
.t
= ptrdiff_type
.t
;
2301 /* exactly one pointer : must be '+' or '-'. */
2302 if (op
!= '-' && op
!= '+')
2303 tcc_error("cannot use pointers here");
2304 /* Put pointer as first operand */
2305 if (bt2
== VT_PTR
) {
2307 t
= t1
, t1
= t2
, t2
= t
;
2310 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2311 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2314 type1
= vtop
[-1].type
;
2315 type1
.t
&= ~VT_ARRAY
;
2316 if (vtop
[-1].type
.t
& VT_VLA
)
2317 vla_runtime_pointed_size(&vtop
[-1].type
);
2319 u
= pointed_size(&vtop
[-1].type
);
2321 tcc_error("unknown array element size");
2325 /* XXX: cast to int ? (long long case) */
2331 /* #ifdef CONFIG_TCC_BCHECK
2332 The main reason to removing this code:
2339 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2340 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2342 When this code is on. then the output looks like
2344 v+(i-j) = 0xbff84000
2346 /* if evaluating constant expression, no code should be
2347 generated, so no bound check */
2348 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2349 /* if bounded pointers, we generate a special code to
2356 gen_bounded_ptr_add();
2362 /* put again type if gen_opic() swaped operands */
2365 } else if (is_float(bt1
) || is_float(bt2
)) {
2366 /* compute bigger type and do implicit casts */
2367 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2369 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2374 /* floats can only be used for a few operations */
2375 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2376 (op
< TOK_ULT
|| op
> TOK_GT
))
2377 tcc_error("invalid operands for binary operation");
2379 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2380 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2381 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2383 t
|= (VT_LONG
& t1
);
2385 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2386 /* cast to biggest op */
2387 t
= VT_LLONG
| VT_LONG
;
2388 if (bt1
== VT_LLONG
)
2390 if (bt2
== VT_LLONG
)
2392 /* convert to unsigned if it does not fit in a long long */
2393 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2394 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2398 /* integer operations */
2399 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2400 /* convert to unsigned if it does not fit in an integer */
2401 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2402 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2405 /* XXX: currently, some unsigned operations are explicit, so
2406 we modify them here */
2407 if (t
& VT_UNSIGNED
) {
2414 else if (op
== TOK_LT
)
2416 else if (op
== TOK_GT
)
2418 else if (op
== TOK_LE
)
2420 else if (op
== TOK_GE
)
2428 /* special case for shifts and long long: we keep the shift as
2430 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2437 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2438 /* relational op: the result is an int */
2439 vtop
->type
.t
= VT_INT
;
2444 // Make sure that we have converted to an rvalue:
2445 if (vtop
->r
& VT_LVAL
)
2446 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2449 #ifndef TCC_TARGET_ARM
2450 /* generic itof for unsigned long long case */
2451 static void gen_cvt_itof1(int t
)
2453 #ifdef TCC_TARGET_ARM64
2456 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2457 (VT_LLONG
| VT_UNSIGNED
)) {
2460 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2461 #if LDOUBLE_SIZE != 8
2462 else if (t
== VT_LDOUBLE
)
2463 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2466 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2470 vtop
->r
= reg_fret(t
);
2478 /* generic ftoi for unsigned long long case */
2479 static void gen_cvt_ftoi1(int t
)
2481 #ifdef TCC_TARGET_ARM64
2486 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2487 /* not handled natively */
2488 st
= vtop
->type
.t
& VT_BTYPE
;
2490 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2491 #if LDOUBLE_SIZE != 8
2492 else if (st
== VT_LDOUBLE
)
2493 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2496 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2501 vtop
->r2
= REG_LRET
;
2508 /* force char or short cast */
2509 static void force_charshort_cast(int t
)
2513 /* cannot cast static initializers */
2514 if (STATIC_DATA_WANTED
)
2518 /* XXX: add optimization if lvalue : just change type and offset */
2523 if (t
& VT_UNSIGNED
) {
2524 vpushi((1 << bits
) - 1);
2527 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2533 /* result must be signed or the SAR is converted to an SHL
2534 This was not the case when "t" was a signed short
2535 and the last value on the stack was an unsigned int */
2536 vtop
->type
.t
&= ~VT_UNSIGNED
;
2542 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2543 static void gen_cast_s(int t
)
2551 static void gen_cast(CType
*type
)
2553 int sbt
, dbt
, sf
, df
, c
, p
;
2555 /* special delayed cast for char/short */
2556 /* XXX: in some cases (multiple cascaded casts), it may still
2558 if (vtop
->r
& VT_MUSTCAST
) {
2559 vtop
->r
&= ~VT_MUSTCAST
;
2560 force_charshort_cast(vtop
->type
.t
);
2563 /* bitfields first get cast to ints */
2564 if (vtop
->type
.t
& VT_BITFIELD
) {
2568 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2569 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2574 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2575 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2576 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2577 c
&= dbt
!= VT_LDOUBLE
;
2580 /* constant case: we can do it now */
2581 /* XXX: in ISOC, cannot do it if error in convert */
2582 if (sbt
== VT_FLOAT
)
2583 vtop
->c
.ld
= vtop
->c
.f
;
2584 else if (sbt
== VT_DOUBLE
)
2585 vtop
->c
.ld
= vtop
->c
.d
;
2588 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2589 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2590 vtop
->c
.ld
= vtop
->c
.i
;
2592 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2594 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2595 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2597 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2600 if (dbt
== VT_FLOAT
)
2601 vtop
->c
.f
= (float)vtop
->c
.ld
;
2602 else if (dbt
== VT_DOUBLE
)
2603 vtop
->c
.d
= (double)vtop
->c
.ld
;
2604 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2605 vtop
->c
.i
= vtop
->c
.ld
;
2606 } else if (sf
&& dbt
== VT_BOOL
) {
2607 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2610 vtop
->c
.i
= vtop
->c
.ld
;
2611 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2613 else if (sbt
& VT_UNSIGNED
)
2614 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2616 else if (sbt
== VT_PTR
)
2619 else if (sbt
!= VT_LLONG
)
2620 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2621 -(vtop
->c
.i
& 0x80000000));
2623 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2625 else if (dbt
== VT_BOOL
)
2626 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2628 else if (dbt
== VT_PTR
)
2631 else if (dbt
!= VT_LLONG
) {
2632 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2633 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2636 if (!(dbt
& VT_UNSIGNED
))
2637 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2640 } else if (p
&& dbt
== VT_BOOL
) {
2644 /* non constant case: generate code */
2646 /* convert from fp to fp */
2649 /* convert int to fp */
2652 /* convert fp to int */
2653 if (dbt
== VT_BOOL
) {
2657 /* we handle char/short/etc... with generic code */
2658 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2659 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2663 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2664 /* additional cast for char/short... */
2670 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2671 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2672 /* scalar to long long */
2673 /* machine independent conversion */
2675 /* generate high word */
2676 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2680 if (sbt
== VT_PTR
) {
2681 /* cast from pointer to int before we apply
2682 shift operation, which pointers don't support*/
2689 /* patch second register */
2690 vtop
[-1].r2
= vtop
->r
;
2694 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2695 (dbt
& VT_BTYPE
) == VT_PTR
||
2696 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2697 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2698 (sbt
& VT_BTYPE
) != VT_PTR
&&
2699 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2700 /* need to convert from 32bit to 64bit */
2702 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2703 #if defined(TCC_TARGET_ARM64)
2705 #elif defined(TCC_TARGET_X86_64)
2707 /* x86_64 specific: movslq */
2709 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2716 } else if (dbt
== VT_BOOL
) {
2717 /* scalar to bool */
2720 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2721 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2722 if (sbt
== VT_PTR
) {
2723 vtop
->type
.t
= VT_INT
;
2724 tcc_warning("nonportable conversion from pointer to char/short");
2726 force_charshort_cast(dbt
);
2727 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2729 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2731 /* from long long: just take low order word */
2736 vtop
->type
.t
|= VT_UNSIGNED
;
2740 /* if lvalue and single word type, nothing to do because
2741 the lvalue already contains the real type size (see
2742 VT_LVAL_xxx constants) */
2745 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2746 /* if we are casting between pointer types,
2747 we must update the VT_LVAL_xxx size */
2748 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2749 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2752 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
2755 /* return type size as known at compile time. Put alignment at 'a' */
2756 ST_FUNC
int type_size(CType
*type
, int *a
)
2761 bt
= type
->t
& VT_BTYPE
;
2762 if (bt
== VT_STRUCT
) {
2767 } else if (bt
== VT_PTR
) {
2768 if (type
->t
& VT_ARRAY
) {
2772 ts
= type_size(&s
->type
, a
);
2774 if (ts
< 0 && s
->c
< 0)
2782 } else if (IS_ENUM(type
->t
) && type
->ref
->c
== -1) {
2783 return -1; /* incomplete enum */
2784 } else if (bt
== VT_LDOUBLE
) {
2786 return LDOUBLE_SIZE
;
2787 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2788 #ifdef TCC_TARGET_I386
2789 #ifdef TCC_TARGET_PE
2794 #elif defined(TCC_TARGET_ARM)
2804 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2807 } else if (bt
== VT_SHORT
) {
2810 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2814 /* char, void, function, _Bool */
2820 /* push type size as known at runtime time on top of value stack. Put
2822 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2824 if (type
->t
& VT_VLA
) {
2825 type_size(&type
->ref
->type
, a
);
2826 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2828 vpushi(type_size(type
, a
));
2832 static void vla_sp_restore(void) {
2833 if (vlas_in_scope
) {
2834 gen_vla_sp_restore(vla_sp_loc
);
2838 static void vla_sp_restore_root(void) {
2839 if (vlas_in_scope
) {
2840 gen_vla_sp_restore(vla_sp_root_loc
);
2844 /* return the pointed type of t */
2845 static inline CType
*pointed_type(CType
*type
)
2847 return &type
->ref
->type
;
2850 /* modify type so that its it is a pointer to type. */
2851 ST_FUNC
void mk_pointer(CType
*type
)
2854 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2855 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2859 /* compare function types. OLD functions match any new functions */
2860 static int is_compatible_func(CType
*type1
, CType
*type2
)
2866 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2868 /* check func_call */
2869 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2871 /* XXX: not complete */
2872 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2874 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2876 while (s1
!= NULL
) {
2879 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2889 /* return true if type1 and type2 are the same. If unqualified is
2890 true, qualifiers on the types are ignored.
2892 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2896 t1
= type1
->t
& VT_TYPE
;
2897 t2
= type2
->t
& VT_TYPE
;
2899 /* strip qualifiers before comparing */
2900 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2901 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2904 /* Default Vs explicit signedness only matters for char */
2905 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2909 /* XXX: bitfields ? */
2912 /* test more complicated cases */
2913 bt1
= t1
& (VT_BTYPE
| VT_ARRAY
);
2914 if (bt1
== VT_PTR
) {
2915 type1
= pointed_type(type1
);
2916 type2
= pointed_type(type2
);
2917 return is_compatible_types(type1
, type2
);
2918 } else if (bt1
& VT_ARRAY
) {
2919 return type1
->ref
->c
< 0 || type2
->ref
->c
< 0
2920 || type1
->ref
->c
== type2
->ref
->c
;
2921 } else if (bt1
== VT_STRUCT
) {
2922 return (type1
->ref
== type2
->ref
);
2923 } else if (bt1
== VT_FUNC
) {
2924 return is_compatible_func(type1
, type2
);
2925 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
2926 return type1
->ref
== type2
->ref
;
2932 /* return true if type1 and type2 are exactly the same (including
2935 static int is_compatible_types(CType
*type1
, CType
*type2
)
2937 return compare_types(type1
,type2
,0);
2940 /* return true if type1 and type2 are the same (ignoring qualifiers).
2942 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2944 return compare_types(type1
,type2
,1);
2947 /* print a type. If 'varstr' is not NULL, then the variable is also
2948 printed in the type */
2950 /* XXX: add array and function pointers */
2951 static void type_to_str(char *buf
, int buf_size
,
2952 CType
*type
, const char *varstr
)
2964 pstrcat(buf
, buf_size
, "extern ");
2966 pstrcat(buf
, buf_size
, "static ");
2968 pstrcat(buf
, buf_size
, "typedef ");
2970 pstrcat(buf
, buf_size
, "inline ");
2971 if (t
& VT_VOLATILE
)
2972 pstrcat(buf
, buf_size
, "volatile ");
2973 if (t
& VT_CONSTANT
)
2974 pstrcat(buf
, buf_size
, "const ");
2976 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2977 || ((t
& VT_UNSIGNED
)
2978 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2981 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2983 buf_size
-= strlen(buf
);
3018 tstr
= "long double";
3020 pstrcat(buf
, buf_size
, tstr
);
3027 pstrcat(buf
, buf_size
, tstr
);
3028 v
= type
->ref
->v
& ~SYM_STRUCT
;
3029 if (v
>= SYM_FIRST_ANOM
)
3030 pstrcat(buf
, buf_size
, "<anonymous>");
3032 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3037 if (varstr
&& '*' == *varstr
) {
3038 pstrcat(buf1
, sizeof(buf1
), "(");
3039 pstrcat(buf1
, sizeof(buf1
), varstr
);
3040 pstrcat(buf1
, sizeof(buf1
), ")");
3042 pstrcat(buf1
, buf_size
, "(");
3044 while (sa
!= NULL
) {
3046 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3047 pstrcat(buf1
, sizeof(buf1
), buf2
);
3050 pstrcat(buf1
, sizeof(buf1
), ", ");
3052 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3053 pstrcat(buf1
, sizeof(buf1
), ", ...");
3054 pstrcat(buf1
, sizeof(buf1
), ")");
3055 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3060 if (varstr
&& '*' == *varstr
)
3061 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3063 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3064 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3067 pstrcpy(buf1
, sizeof(buf1
), "*");
3068 if (t
& VT_CONSTANT
)
3069 pstrcat(buf1
, buf_size
, "const ");
3070 if (t
& VT_VOLATILE
)
3071 pstrcat(buf1
, buf_size
, "volatile ");
3073 pstrcat(buf1
, sizeof(buf1
), varstr
);
3074 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3078 pstrcat(buf
, buf_size
, " ");
3079 pstrcat(buf
, buf_size
, varstr
);
3084 /* verify type compatibility to store vtop in 'dt' type, and generate
3086 static void gen_assign_cast(CType
*dt
)
3088 CType
*st
, *type1
, *type2
;
3089 char buf1
[256], buf2
[256];
3090 int dbt
, sbt
, qualwarn
, lvl
;
3092 st
= &vtop
->type
; /* source type */
3093 dbt
= dt
->t
& VT_BTYPE
;
3094 sbt
= st
->t
& VT_BTYPE
;
3095 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3096 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3097 ; /* It is Ok if both are void */
3099 tcc_error("cannot cast from/to void");
3101 if (dt
->t
& VT_CONSTANT
)
3102 tcc_warning("assignment of read-only location");
3105 /* special cases for pointers */
3106 /* '0' can also be a pointer */
3107 if (is_null_pointer(vtop
))
3109 /* accept implicit pointer to integer cast with warning */
3110 if (is_integer_btype(sbt
)) {
3111 tcc_warning("assignment makes pointer from integer without a cast");
3114 type1
= pointed_type(dt
);
3116 type2
= pointed_type(st
);
3117 else if (sbt
== VT_FUNC
)
3118 type2
= st
; /* a function is implicitly a function pointer */
3121 if (is_compatible_types(type1
, type2
))
3123 for (qualwarn
= lvl
= 0;; ++lvl
) {
3124 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3125 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3127 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3128 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3129 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3131 type1
= pointed_type(type1
);
3132 type2
= pointed_type(type2
);
3134 if (!is_compatible_unqualified_types(type1
, type2
)) {
3135 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3136 /* void * can match anything */
3137 } else if (dbt
== sbt
3138 && is_integer_btype(sbt
& VT_BTYPE
)
3139 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3140 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3141 /* Like GCC don't warn by default for merely changes
3142 in pointer target signedness. Do warn for different
3143 base types, though, in particular for unsigned enums
3144 and signed int targets. */
3146 tcc_warning("assignment from incompatible pointer type");
3151 tcc_warning("assignment discards qualifiers from pointer target type");
3157 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3158 tcc_warning("assignment makes integer from pointer without a cast");
3159 } else if (sbt
== VT_STRUCT
) {
3160 goto case_VT_STRUCT
;
3162 /* XXX: more tests */
3166 if (!is_compatible_unqualified_types(dt
, st
)) {
3168 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3169 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3170 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3177 /* store vtop in lvalue pushed on stack */
3178 ST_FUNC
void vstore(void)
3180 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3182 ft
= vtop
[-1].type
.t
;
3183 sbt
= vtop
->type
.t
& VT_BTYPE
;
3184 dbt
= ft
& VT_BTYPE
;
3185 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3186 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3187 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3188 /* optimize char/short casts */
3189 delayed_cast
= VT_MUSTCAST
;
3190 vtop
->type
.t
= ft
& VT_TYPE
;
3191 /* XXX: factorize */
3192 if (ft
& VT_CONSTANT
)
3193 tcc_warning("assignment of read-only location");
3196 if (!(ft
& VT_BITFIELD
))
3197 gen_assign_cast(&vtop
[-1].type
);
3200 if (sbt
== VT_STRUCT
) {
3201 /* if structure, only generate pointer */
3202 /* structure assignment : generate memcpy */
3203 /* XXX: optimize if small size */
3204 size
= type_size(&vtop
->type
, &align
);
3208 vtop
->type
.t
= VT_PTR
;
3211 /* address of memcpy() */
3214 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3215 else if(!(align
& 3))
3216 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3219 /* Use memmove, rather than memcpy, as dest and src may be same: */
3220 vpush_global_sym(&func_old_type
, TOK_memmove
);
3225 vtop
->type
.t
= VT_PTR
;
3231 /* leave source on stack */
3232 } else if (ft
& VT_BITFIELD
) {
3233 /* bitfield store handling */
3235 /* save lvalue as expression result (example: s.b = s.a = n;) */
3236 vdup(), vtop
[-1] = vtop
[-2];
3238 bit_pos
= BIT_POS(ft
);
3239 bit_size
= BIT_SIZE(ft
);
3240 /* remove bit field info to avoid loops */
3241 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3243 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3244 gen_cast(&vtop
[-1].type
);
3245 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3248 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3249 if (r
== VT_STRUCT
) {
3250 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3251 store_packed_bf(bit_pos
, bit_size
);
3253 unsigned long long mask
= (1ULL << bit_size
) - 1;
3254 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3256 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3259 vpushi((unsigned)mask
);
3266 /* duplicate destination */
3269 /* load destination, mask and or with source */
3270 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3271 vpushll(~(mask
<< bit_pos
));
3273 vpushi(~((unsigned)mask
<< bit_pos
));
3278 /* ... and discard */
3281 } else if (dbt
== VT_VOID
) {
3284 #ifdef CONFIG_TCC_BCHECK
3285 /* bound check case */
3286 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3295 #ifdef TCC_TARGET_X86_64
3296 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3298 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3303 r
= gv(rc
); /* generate value */
3304 /* if lvalue was saved on stack, must read it */
3305 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3307 t
= get_reg(RC_INT
);
3313 sv
.r
= VT_LOCAL
| VT_LVAL
;
3314 sv
.c
.i
= vtop
[-1].c
.i
;
3316 vtop
[-1].r
= t
| VT_LVAL
;
3318 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3320 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3321 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3323 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3324 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3326 vtop
[-1].type
.t
= load_type
;
3329 /* convert to int to increment easily */
3330 vtop
->type
.t
= addr_type
;
3336 vtop
[-1].type
.t
= load_type
;
3337 /* XXX: it works because r2 is spilled last ! */
3338 store(vtop
->r2
, vtop
- 1);
3344 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3345 vtop
->r
|= delayed_cast
;
3349 /* post defines POST/PRE add. c is the token ++ or -- */
3350 ST_FUNC
void inc(int post
, int c
)
3353 vdup(); /* save lvalue */
3355 gv_dup(); /* duplicate value */
3360 vpushi(c
- TOK_MID
);
3362 vstore(); /* store value */
3364 vpop(); /* if post op, return saved value */
3367 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3369 /* read the string */
3373 while (tok
== TOK_STR
) {
3374 /* XXX: add \0 handling too ? */
3375 cstr_cat(astr
, tokc
.str
.data
, -1);
3378 cstr_ccat(astr
, '\0');
3381 /* If I is >= 1 and a power of two, returns log2(i)+1.
3382 If I is 0 returns 0. */
3383 static int exact_log2p1(int i
)
3388 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3399 /* Parse __attribute__((...)) GNUC extension. */
3400 static void parse_attribute(AttributeDef
*ad
)
3406 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3411 while (tok
!= ')') {
3412 if (tok
< TOK_IDENT
)
3413 expect("attribute name");
3425 tcc_warning("implicit declaration of function '%s'",
3426 get_tok_str(tok
, &tokc
));
3427 s
= external_global_sym(tok
, &func_old_type
, 0);
3429 ad
->cleanup_func
= s
;
3437 parse_mult_str(&astr
, "section name");
3438 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3445 parse_mult_str(&astr
, "alias(\"target\")");
3446 ad
->alias_target
= /* save string as token, for later */
3447 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3451 case TOK_VISIBILITY1
:
3452 case TOK_VISIBILITY2
:
3454 parse_mult_str(&astr
,
3455 "visibility(\"default|hidden|internal|protected\")");
3456 if (!strcmp (astr
.data
, "default"))
3457 ad
->a
.visibility
= STV_DEFAULT
;
3458 else if (!strcmp (astr
.data
, "hidden"))
3459 ad
->a
.visibility
= STV_HIDDEN
;
3460 else if (!strcmp (astr
.data
, "internal"))
3461 ad
->a
.visibility
= STV_INTERNAL
;
3462 else if (!strcmp (astr
.data
, "protected"))
3463 ad
->a
.visibility
= STV_PROTECTED
;
3465 expect("visibility(\"default|hidden|internal|protected\")");
3474 if (n
<= 0 || (n
& (n
- 1)) != 0)
3475 tcc_error("alignment must be a positive power of two");
3480 ad
->a
.aligned
= exact_log2p1(n
);
3481 if (n
!= 1 << (ad
->a
.aligned
- 1))
3482 tcc_error("alignment of %d is larger than implemented", n
);
3494 /* currently, no need to handle it because tcc does not
3495 track unused objects */
3499 /* currently, no need to handle it because tcc does not
3500 track unused objects */
3505 ad
->f
.func_call
= FUNC_CDECL
;
3510 ad
->f
.func_call
= FUNC_STDCALL
;
3512 #ifdef TCC_TARGET_I386
3522 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3528 ad
->f
.func_call
= FUNC_FASTCALLW
;
3535 ad
->attr_mode
= VT_LLONG
+ 1;
3538 ad
->attr_mode
= VT_BYTE
+ 1;
3541 ad
->attr_mode
= VT_SHORT
+ 1;
3545 ad
->attr_mode
= VT_INT
+ 1;
3548 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3555 ad
->a
.dllexport
= 1;
3557 case TOK_NODECORATE
:
3558 ad
->a
.nodecorate
= 1;
3561 ad
->a
.dllimport
= 1;
3564 if (tcc_state
->warn_unsupported
)
3565 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3566 /* skip parameters */
3568 int parenthesis
= 0;
3572 else if (tok
== ')')
3575 } while (parenthesis
&& tok
!= -1);
3588 static Sym
* find_field (CType
*type
, int v
)
3592 while ((s
= s
->next
) != NULL
) {
3593 if ((s
->v
& SYM_FIELD
) &&
3594 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3595 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3596 Sym
*ret
= find_field (&s
->type
, v
);
3606 static void struct_add_offset (Sym
*s
, int offset
)
3608 while ((s
= s
->next
) != NULL
) {
3609 if ((s
->v
& SYM_FIELD
) &&
3610 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3611 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3612 struct_add_offset(s
->type
.ref
, offset
);
3618 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3620 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3621 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3622 int pcc
= !tcc_state
->ms_bitfields
;
3623 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3630 prevbt
= VT_STRUCT
; /* make it never match */
3635 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3636 if (f
->type
.t
& VT_BITFIELD
)
3637 bit_size
= BIT_SIZE(f
->type
.t
);
3640 size
= type_size(&f
->type
, &align
);
3641 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3644 if (pcc
&& bit_size
== 0) {
3645 /* in pcc mode, packing does not affect zero-width bitfields */
3648 /* in pcc mode, attribute packed overrides if set. */
3649 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3652 /* pragma pack overrides align if lesser and packs bitfields always */
3655 if (pragma_pack
< align
)
3656 align
= pragma_pack
;
3657 /* in pcc mode pragma pack also overrides individual align */
3658 if (pcc
&& pragma_pack
< a
)
3662 /* some individual align was specified */
3666 if (type
->ref
->type
.t
== VT_UNION
) {
3667 if (pcc
&& bit_size
>= 0)
3668 size
= (bit_size
+ 7) >> 3;
3673 } else if (bit_size
< 0) {
3675 c
+= (bit_pos
+ 7) >> 3;
3676 c
= (c
+ align
- 1) & -align
;
3685 /* A bit-field. Layout is more complicated. There are two
3686 options: PCC (GCC) compatible and MS compatible */
3688 /* In PCC layout a bit-field is placed adjacent to the
3689 preceding bit-fields, except if:
3691 - an individual alignment was given
3692 - it would overflow its base type container and
3693 there is no packing */
3694 if (bit_size
== 0) {
3696 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3698 } else if (f
->a
.aligned
) {
3700 } else if (!packed
) {
3702 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3703 if (ofs
> size
/ align
)
3707 /* in pcc mode, long long bitfields have type int if they fit */
3708 if (size
== 8 && bit_size
<= 32)
3709 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3711 while (bit_pos
>= align
* 8)
3712 c
+= align
, bit_pos
-= align
* 8;
3715 /* In PCC layout named bit-fields influence the alignment
3716 of the containing struct using the base types alignment,
3717 except for packed fields (which here have correct align). */
3718 if (f
->v
& SYM_FIRST_ANOM
3719 // && bit_size // ??? gcc on ARM/rpi does that
3724 bt
= f
->type
.t
& VT_BTYPE
;
3725 if ((bit_pos
+ bit_size
> size
* 8)
3726 || (bit_size
> 0) == (bt
!= prevbt
)
3728 c
= (c
+ align
- 1) & -align
;
3731 /* In MS bitfield mode a bit-field run always uses
3732 at least as many bits as the underlying type.
3733 To start a new run it's also required that this
3734 or the last bit-field had non-zero width. */
3735 if (bit_size
|| prev_bit_size
)
3738 /* In MS layout the records alignment is normally
3739 influenced by the field, except for a zero-width
3740 field at the start of a run (but by further zero-width
3741 fields it is again). */
3742 if (bit_size
== 0 && prevbt
!= bt
)
3745 prev_bit_size
= bit_size
;
3748 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3749 | (bit_pos
<< VT_STRUCT_SHIFT
);
3750 bit_pos
+= bit_size
;
3752 if (align
> maxalign
)
3756 printf("set field %s offset %-2d size %-2d align %-2d",
3757 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3758 if (f
->type
.t
& VT_BITFIELD
) {
3759 printf(" pos %-2d bits %-2d",
3767 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3769 /* An anonymous struct/union. Adjust member offsets
3770 to reflect the real offset of our containing struct.
3771 Also set the offset of this anon member inside
3772 the outer struct to be zero. Via this it
3773 works when accessing the field offset directly
3774 (from base object), as well as when recursing
3775 members in initializer handling. */
3776 int v2
= f
->type
.ref
->v
;
3777 if (!(v2
& SYM_FIELD
) &&
3778 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3780 /* This happens only with MS extensions. The
3781 anon member has a named struct type, so it
3782 potentially is shared with other references.
3783 We need to unshare members so we can modify
3786 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3787 &f
->type
.ref
->type
, 0,
3789 pps
= &f
->type
.ref
->next
;
3790 while ((ass
= ass
->next
) != NULL
) {
3791 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3792 pps
= &((*pps
)->next
);
3796 struct_add_offset(f
->type
.ref
, offset
);
3806 c
+= (bit_pos
+ 7) >> 3;
3808 /* store size and alignment */
3809 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3813 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3814 /* can happen if individual align for some member was given. In
3815 this case MSVC ignores maxalign when aligning the size */
3820 c
= (c
+ a
- 1) & -a
;
3824 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3827 /* check whether we can access bitfields by their type */
3828 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3832 if (0 == (f
->type
.t
& VT_BITFIELD
))
3836 bit_size
= BIT_SIZE(f
->type
.t
);
3839 bit_pos
= BIT_POS(f
->type
.t
);
3840 size
= type_size(&f
->type
, &align
);
3841 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3844 /* try to access the field using a different type */
3845 c0
= -1, s
= align
= 1;
3847 px
= f
->c
* 8 + bit_pos
;
3848 cx
= (px
>> 3) & -align
;
3849 px
= px
- (cx
<< 3);
3852 s
= (px
+ bit_size
+ 7) >> 3;
3862 s
= type_size(&t
, &align
);
3866 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3867 /* update offset and bit position */
3870 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3871 | (bit_pos
<< VT_STRUCT_SHIFT
);
3875 printf("FIX field %s offset %-2d size %-2d align %-2d "
3876 "pos %-2d bits %-2d\n",
3877 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3878 cx
, s
, align
, px
, bit_size
);
3881 /* fall back to load/store single-byte wise */
3882 f
->auxtype
= VT_STRUCT
;
3884 printf("FIX field %s : load byte-wise\n",
3885 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3891 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3892 static void struct_decl(CType
*type
, int u
)
3894 int v
, c
, size
, align
, flexible
;
3895 int bit_size
, bsize
, bt
;
3897 AttributeDef ad
, ad1
;
3900 memset(&ad
, 0, sizeof ad
);
3902 parse_attribute(&ad
);
3906 /* struct already defined ? return it */
3908 expect("struct/union/enum name");
3910 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3913 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3915 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3920 /* Record the original enum/struct/union token. */
3921 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3923 /* we put an undefined size for struct/union */
3924 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3925 s
->r
= 0; /* default alignment is zero as gcc */
3927 type
->t
= s
->type
.t
;
3933 tcc_error("struct/union/enum already defined");
3934 /* cannot be empty */
3935 /* non empty enums are not allowed */
3938 long long ll
= 0, pl
= 0, nl
= 0;
3941 /* enum symbols have static storage */
3942 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3946 expect("identifier");
3948 if (ss
&& !local_stack
)
3949 tcc_error("redefinition of enumerator '%s'",
3950 get_tok_str(v
, NULL
));
3954 ll
= expr_const64();
3956 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3958 *ps
= ss
, ps
= &ss
->next
;
3967 /* NOTE: we accept a trailing comma */
3972 /* set integral type of the enum */
3975 if (pl
!= (unsigned)pl
)
3976 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3978 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
3979 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3980 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
3982 /* set type for enum members */
3983 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
3985 if (ll
== (int)ll
) /* default is int if it fits */
3987 if (t
.t
& VT_UNSIGNED
) {
3988 ss
->type
.t
|= VT_UNSIGNED
;
3989 if (ll
== (unsigned)ll
)
3992 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
3993 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3998 while (tok
!= '}') {
3999 if (!parse_btype(&btype
, &ad1
)) {
4005 tcc_error("flexible array member '%s' not at the end of struct",
4006 get_tok_str(v
, NULL
));
4012 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4014 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4015 expect("identifier");
4017 int v
= btype
.ref
->v
;
4018 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4019 if (tcc_state
->ms_extensions
== 0)
4020 expect("identifier");
4024 if (type_size(&type1
, &align
) < 0) {
4025 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4028 tcc_error("field '%s' has incomplete type",
4029 get_tok_str(v
, NULL
));
4031 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4032 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4033 (type1
.t
& VT_STORAGE
))
4034 tcc_error("invalid type for '%s'",
4035 get_tok_str(v
, NULL
));
4039 bit_size
= expr_const();
4040 /* XXX: handle v = 0 case for messages */
4042 tcc_error("negative width in bit-field '%s'",
4043 get_tok_str(v
, NULL
));
4044 if (v
&& bit_size
== 0)
4045 tcc_error("zero width for bit-field '%s'",
4046 get_tok_str(v
, NULL
));
4047 parse_attribute(&ad1
);
4049 size
= type_size(&type1
, &align
);
4050 if (bit_size
>= 0) {
4051 bt
= type1
.t
& VT_BTYPE
;
4057 tcc_error("bitfields must have scalar type");
4059 if (bit_size
> bsize
) {
4060 tcc_error("width of '%s' exceeds its type",
4061 get_tok_str(v
, NULL
));
4062 } else if (bit_size
== bsize
4063 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4064 /* no need for bit fields */
4066 } else if (bit_size
== 64) {
4067 tcc_error("field width 64 not implemented");
4069 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4071 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4074 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4075 /* Remember we've seen a real field to check
4076 for placement of flexible array member. */
4079 /* If member is a struct or bit-field, enforce
4080 placing into the struct (as anonymous). */
4082 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4087 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4092 if (tok
== ';' || tok
== TOK_EOF
)
4099 parse_attribute(&ad
);
4100 struct_layout(type
, &ad
);
4105 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4107 merge_symattr(&ad
->a
, &s
->a
);
4108 merge_funcattr(&ad
->f
, &s
->f
);
4111 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4112 are added to the element type, copied because it could be a typedef. */
4113 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4115 while (type
->t
& VT_ARRAY
) {
4116 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4117 type
= &type
->ref
->type
;
4119 type
->t
|= qualifiers
;
4122 /* return 0 if no type declaration. otherwise, return the basic type
4125 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4127 int t
, u
, bt
, st
, type_found
, typespec_found
, g
;
4131 memset(ad
, 0, sizeof(AttributeDef
));
4141 /* currently, we really ignore extension */
4151 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4152 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4153 tmbt
: tcc_error("too many basic types");
4156 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4161 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4174 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4175 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4176 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4177 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4184 #ifdef TCC_TARGET_ARM64
4186 /* GCC's __uint128_t appears in some Linux header files. Make it a
4187 synonym for long double to get the size and alignment right. */
4198 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4199 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4207 struct_decl(&type1
, VT_ENUM
);
4210 type
->ref
= type1
.ref
;
4213 struct_decl(&type1
, VT_STRUCT
);
4216 struct_decl(&type1
, VT_UNION
);
4219 /* type modifiers */
4224 parse_btype_qualify(type
, VT_CONSTANT
);
4232 parse_btype_qualify(type
, VT_VOLATILE
);
4239 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4240 tcc_error("signed and unsigned modifier");
4253 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4254 tcc_error("signed and unsigned modifier");
4255 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4271 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4272 tcc_error("multiple storage classes");
4283 /* GNUC attribute */
4284 case TOK_ATTRIBUTE1
:
4285 case TOK_ATTRIBUTE2
:
4286 parse_attribute(ad
);
4287 if (ad
->attr_mode
) {
4288 u
= ad
->attr_mode
-1;
4289 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4297 parse_expr_type(&type1
);
4298 /* remove all storage modifiers except typedef */
4299 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4301 sym_to_attr(ad
, type1
.ref
);
4307 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4309 t
&= ~(VT_BTYPE
|VT_LONG
);
4310 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4311 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4312 type
->ref
= s
->type
.ref
;
4314 parse_btype_qualify(type
, t
);
4316 /* get attributes from typedef */
4326 if (tcc_state
->char_is_unsigned
) {
4327 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4330 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4331 bt
= t
& (VT_BTYPE
|VT_LONG
);
4333 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4334 #ifdef TCC_TARGET_PE
4335 if (bt
== VT_LDOUBLE
)
4336 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4342 /* convert a function parameter type (array to pointer and function to
4343 function pointer) */
4344 static inline void convert_parameter_type(CType
*pt
)
4346 /* remove const and volatile qualifiers (XXX: const could be used
4347 to indicate a const function parameter */
4348 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4349 /* array must be transformed to pointer according to ANSI C */
4351 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4356 ST_FUNC
void parse_asm_str(CString
*astr
)
4359 parse_mult_str(astr
, "string constant");
4362 /* Parse an asm label and return the token */
4363 static int asm_label_instr(void)
4369 parse_asm_str(&astr
);
4372 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4374 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4379 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4381 int n
, l
, t1
, arg_size
, align
;
4382 Sym
**plast
, *s
, *first
;
4387 /* function type, or recursive declarator (return if so) */
4389 if (td
&& !(td
& TYPE_ABSTRACT
))
4393 else if (parse_btype(&pt
, &ad1
))
4396 merge_attr (ad
, &ad1
);
4405 /* read param name and compute offset */
4406 if (l
!= FUNC_OLD
) {
4407 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4409 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4410 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4411 tcc_error("parameter declared as void");
4415 expect("identifier");
4416 pt
.t
= VT_VOID
; /* invalid type */
4419 convert_parameter_type(&pt
);
4420 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4421 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4427 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4432 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4433 tcc_error("invalid type");
4436 /* if no parameters, then old type prototype */
4439 /* NOTE: const is ignored in returned type as it has a special
4440 meaning in gcc / C++ */
4441 type
->t
&= ~VT_CONSTANT
;
4442 /* some ancient pre-K&R C allows a function to return an array
4443 and the array brackets to be put after the arguments, such
4444 that "int c()[]" means something like "int[] c()" */
4447 skip(']'); /* only handle simple "[]" */
4450 /* we push a anonymous symbol which will contain the function prototype */
4451 ad
->f
.func_args
= arg_size
;
4452 ad
->f
.func_type
= l
;
4453 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4459 } else if (tok
== '[') {
4460 int saved_nocode_wanted
= nocode_wanted
;
4461 /* array definition */
4464 /* XXX The optional type-quals and static should only be accepted
4465 in parameter decls. The '*' as well, and then even only
4466 in prototypes (not function defs). */
4468 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4483 if (!local_stack
|| (storage
& VT_STATIC
))
4484 vpushi(expr_const());
4486 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4487 length must always be evaluated, even under nocode_wanted,
4488 so that its size slot is initialized (e.g. under sizeof
4493 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4496 tcc_error("invalid array size");
4498 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4499 tcc_error("size of variable length array should be an integer");
4504 /* parse next post type */
4505 post_type(type
, ad
, storage
, 0);
4506 if (type
->t
== VT_FUNC
)
4507 tcc_error("declaration of an array of functions");
4508 t1
|= type
->t
& VT_VLA
;
4511 loc
-= type_size(&int_type
, &align
);
4515 vla_runtime_type_size(type
, &align
);
4517 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4523 nocode_wanted
= saved_nocode_wanted
;
4525 /* we push an anonymous symbol which will contain the array
4527 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4528 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4534 /* Parse a type declarator (except basic type), and return the type
4535 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4536 expected. 'type' should contain the basic type. 'ad' is the
4537 attribute definition of the basic type. It can be modified by
4538 type_decl(). If this (possibly abstract) declarator is a pointer chain
4539 it returns the innermost pointed to type (equals *type, but is a different
4540 pointer), otherwise returns type itself, that's used for recursive calls. */
4541 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4544 int qualifiers
, storage
;
4546 /* recursive type, remove storage bits first, apply them later again */
4547 storage
= type
->t
& VT_STORAGE
;
4548 type
->t
&= ~VT_STORAGE
;
4551 while (tok
== '*') {
4559 qualifiers
|= VT_CONSTANT
;
4564 qualifiers
|= VT_VOLATILE
;
4570 /* XXX: clarify attribute handling */
4571 case TOK_ATTRIBUTE1
:
4572 case TOK_ATTRIBUTE2
:
4573 parse_attribute(ad
);
4577 type
->t
|= qualifiers
;
4579 /* innermost pointed to type is the one for the first derivation */
4580 ret
= pointed_type(type
);
4584 /* This is possibly a parameter type list for abstract declarators
4585 ('int ()'), use post_type for testing this. */
4586 if (!post_type(type
, ad
, 0, td
)) {
4587 /* It's not, so it's a nested declarator, and the post operations
4588 apply to the innermost pointed to type (if any). */
4589 /* XXX: this is not correct to modify 'ad' at this point, but
4590 the syntax is not clear */
4591 parse_attribute(ad
);
4592 post
= type_decl(type
, ad
, v
, td
);
4596 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4597 /* type identifier */
4602 if (!(td
& TYPE_ABSTRACT
))
4603 expect("identifier");
4606 post_type(post
, ad
, storage
, 0);
4607 parse_attribute(ad
);
4612 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4613 ST_FUNC
int lvalue_type(int t
)
4618 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4620 else if (bt
== VT_SHORT
)
4624 if (t
& VT_UNSIGNED
)
4625 r
|= VT_LVAL_UNSIGNED
;
4629 /* indirection with full error checking and bound check */
4630 ST_FUNC
void indir(void)
4632 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4633 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4637 if (vtop
->r
& VT_LVAL
)
4639 vtop
->type
= *pointed_type(&vtop
->type
);
4640 /* Arrays and functions are never lvalues */
4641 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4642 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4643 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4644 /* if bound checking, the referenced pointer must be checked */
4645 #ifdef CONFIG_TCC_BCHECK
4646 if (tcc_state
->do_bounds_check
)
4647 vtop
->r
|= VT_MUSTBOUND
;
4652 /* pass a parameter to a function and do type checking and casting */
4653 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4658 func_type
= func
->f
.func_type
;
4659 if (func_type
== FUNC_OLD
||
4660 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4661 /* default casting : only need to convert float to double */
4662 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4663 gen_cast_s(VT_DOUBLE
);
4664 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4665 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4666 type
.ref
= vtop
->type
.ref
;
4669 } else if (arg
== NULL
) {
4670 tcc_error("too many arguments to function");
4673 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4674 gen_assign_cast(&type
);
4678 /* parse an expression and return its type without any side effect. */
4679 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4688 /* parse an expression of the form '(type)' or '(expr)' and return its
4690 static void parse_expr_type(CType
*type
)
4696 if (parse_btype(type
, &ad
)) {
4697 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4699 expr_type(type
, gexpr
);
4704 static void parse_type(CType
*type
)
4709 if (!parse_btype(type
, &ad
)) {
4712 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4715 static void parse_builtin_params(int nc
, const char *args
)
4722 while ((c
= *args
++)) {
4726 case 'e': expr_eq(); continue;
4727 case 't': parse_type(&t
); vpush(&t
); continue;
4728 default: tcc_error("internal error"); break;
4736 static void try_call_scope_cleanup(Sym
*stop
)
4738 Sym
*cls
= current_cleanups
;
4740 for (; cls
!= stop
; cls
= cls
->ncl
) {
4741 Sym
*fs
= cls
->next
;
4742 Sym
*vs
= cls
->prev_tok
;
4744 vpushsym(&fs
->type
, fs
);
4745 vset(&vs
->type
, vs
->r
, vs
->c
);
4747 mk_pointer(&vtop
->type
);
4753 static void try_call_cleanup_goto(Sym
*cleanupstate
)
4758 if (!current_cleanups
)
4761 /* search NCA of both cleanup chains given parents and initial depth */
4762 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
4763 for (ccd
= ncleanups
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
4765 for (cc
= current_cleanups
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
4767 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
4770 try_call_scope_cleanup(cc
);
4773 ST_FUNC
void unary(void)
4775 int n
, t
, align
, size
, r
, sizeof_caller
;
4780 sizeof_caller
= in_sizeof
;
4783 /* XXX: GCC 2.95.3 does not generate a table although it should be
4791 #ifdef TCC_TARGET_PE
4792 t
= VT_SHORT
|VT_UNSIGNED
;
4800 vsetc(&type
, VT_CONST
, &tokc
);
4804 t
= VT_INT
| VT_UNSIGNED
;
4810 t
= VT_LLONG
| VT_UNSIGNED
;
4822 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4825 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4827 case TOK___FUNCTION__
:
4829 goto tok_identifier
;
4835 /* special function name identifier */
4836 len
= strlen(funcname
) + 1;
4837 /* generate char[len] type */
4842 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4843 if (!NODATA_WANTED
) {
4844 ptr
= section_ptr_add(data_section
, len
);
4845 memcpy(ptr
, funcname
, len
);
4851 #ifdef TCC_TARGET_PE
4852 t
= VT_SHORT
| VT_UNSIGNED
;
4858 /* string parsing */
4860 if (tcc_state
->char_is_unsigned
)
4861 t
= VT_BYTE
| VT_UNSIGNED
;
4863 if (tcc_state
->warn_write_strings
)
4868 memset(&ad
, 0, sizeof(AttributeDef
));
4869 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4874 if (parse_btype(&type
, &ad
)) {
4875 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4877 /* check ISOC99 compound literal */
4879 /* data is allocated locally by default */
4884 /* all except arrays are lvalues */
4885 if (!(type
.t
& VT_ARRAY
))
4886 r
|= lvalue_type(type
.t
);
4887 memset(&ad
, 0, sizeof(AttributeDef
));
4888 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4890 if (sizeof_caller
) {
4897 } else if (tok
== '{') {
4898 int saved_nocode_wanted
= nocode_wanted
;
4900 tcc_error("expected constant");
4901 /* save all registers */
4903 /* statement expression : we do not accept break/continue
4904 inside as GCC does. We do retain the nocode_wanted state,
4905 as statement expressions can't ever be entered from the
4906 outside, so any reactivation of code emission (from labels
4907 or loop heads) can be disabled again after the end of it. */
4908 block(NULL
, NULL
, 1);
4909 nocode_wanted
= saved_nocode_wanted
;
4924 /* functions names must be treated as function pointers,
4925 except for unary '&' and sizeof. Since we consider that
4926 functions are not lvalues, we only have to handle it
4927 there and in function calls. */
4928 /* arrays can also be used although they are not lvalues */
4929 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4930 !(vtop
->type
.t
& VT_ARRAY
))
4932 mk_pointer(&vtop
->type
);
4938 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4939 gen_cast_s(VT_BOOL
);
4940 vtop
->c
.i
= !vtop
->c
.i
;
4941 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4945 vseti(VT_JMP
, gvtst(1, 0));
4957 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4958 tcc_error("pointer not accepted for unary plus");
4959 /* In order to force cast, we add zero, except for floating point
4960 where we really need a noop (otherwise -0.0 will be transformed
4962 if (!is_float(vtop
->type
.t
)) {
4974 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
4975 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
4976 size
= type_size(&type
, &align
);
4977 if (s
&& s
->a
.aligned
)
4978 align
= 1 << (s
->a
.aligned
- 1);
4979 if (t
== TOK_SIZEOF
) {
4980 if (!(type
.t
& VT_VLA
)) {
4982 tcc_error("sizeof applied to an incomplete type");
4985 vla_runtime_type_size(&type
, &align
);
4990 vtop
->type
.t
|= VT_UNSIGNED
;
4993 case TOK_builtin_expect
:
4994 /* __builtin_expect is a no-op for now */
4995 parse_builtin_params(0, "ee");
4998 case TOK_builtin_types_compatible_p
:
4999 parse_builtin_params(0, "tt");
5000 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5001 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5002 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5006 case TOK_builtin_choose_expr
:
5033 case TOK_builtin_constant_p
:
5034 parse_builtin_params(1, "e");
5035 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5039 case TOK_builtin_frame_address
:
5040 case TOK_builtin_return_address
:
5046 if (tok
!= TOK_CINT
) {
5047 tcc_error("%s only takes positive integers",
5048 tok1
== TOK_builtin_return_address
?
5049 "__builtin_return_address" :
5050 "__builtin_frame_address");
5052 level
= (uint32_t)tokc
.i
;
5057 vset(&type
, VT_LOCAL
, 0); /* local frame */
5059 mk_pointer(&vtop
->type
);
5060 indir(); /* -> parent frame */
5062 if (tok1
== TOK_builtin_return_address
) {
5063 // assume return address is just above frame pointer on stack
5066 mk_pointer(&vtop
->type
);
5071 #ifdef TCC_TARGET_X86_64
5072 #ifdef TCC_TARGET_PE
5073 case TOK_builtin_va_start
:
5074 parse_builtin_params(0, "ee");
5075 r
= vtop
->r
& VT_VALMASK
;
5079 tcc_error("__builtin_va_start expects a local variable");
5081 vtop
->type
= char_pointer_type
;
5086 case TOK_builtin_va_arg_types
:
5087 parse_builtin_params(0, "t");
5088 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5095 #ifdef TCC_TARGET_ARM64
5096 case TOK___va_start
: {
5097 parse_builtin_params(0, "ee");
5101 vtop
->type
.t
= VT_VOID
;
5104 case TOK___va_arg
: {
5105 parse_builtin_params(0, "et");
5113 case TOK___arm64_clear_cache
: {
5114 parse_builtin_params(0, "ee");
5117 vtop
->type
.t
= VT_VOID
;
5121 /* pre operations */
5132 t
= vtop
->type
.t
& VT_BTYPE
;
5134 /* In IEEE negate(x) isn't subtract(0,x), but rather
5138 vtop
->c
.f
= -1.0 * 0.0;
5139 else if (t
== VT_DOUBLE
)
5140 vtop
->c
.d
= -1.0 * 0.0;
5142 vtop
->c
.ld
= -1.0 * 0.0;
5150 goto tok_identifier
;
5152 /* allow to take the address of a label */
5153 if (tok
< TOK_UIDENT
)
5154 expect("label identifier");
5155 s
= label_find(tok
);
5157 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5159 if (s
->r
== LABEL_DECLARED
)
5160 s
->r
= LABEL_FORWARD
;
5163 s
->type
.t
= VT_VOID
;
5164 mk_pointer(&s
->type
);
5165 s
->type
.t
|= VT_STATIC
;
5167 vpushsym(&s
->type
, s
);
5173 CType controlling_type
;
5174 int has_default
= 0;
5177 TokenString
*str
= NULL
;
5178 int saved_const_wanted
= const_wanted
;
5183 expr_type(&controlling_type
, expr_eq
);
5184 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5185 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5186 mk_pointer(&controlling_type
);
5187 const_wanted
= saved_const_wanted
;
5191 if (tok
== TOK_DEFAULT
) {
5193 tcc_error("too many 'default'");
5199 AttributeDef ad_tmp
;
5202 parse_btype(&cur_type
, &ad_tmp
);
5203 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5204 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5206 tcc_error("type match twice");
5216 skip_or_save_block(&str
);
5218 skip_or_save_block(NULL
);
5225 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5226 tcc_error("type '%s' does not match any association", buf
);
5228 begin_macro(str
, 1);
5237 // special qnan , snan and infinity values
5242 vtop
->type
.t
= VT_FLOAT
;
5247 goto special_math_val
;
5250 goto special_math_val
;
5257 expect("identifier");
5259 if (!s
|| IS_ASM_SYM(s
)) {
5260 const char *name
= get_tok_str(t
, NULL
);
5262 tcc_error("'%s' undeclared", name
);
5263 /* for simple function calls, we tolerate undeclared
5264 external reference to int() function */
5265 if (tcc_state
->warn_implicit_function_declaration
5266 #ifdef TCC_TARGET_PE
5267 /* people must be warned about using undeclared WINAPI functions
5268 (which usually start with uppercase letter) */
5269 || (name
[0] >= 'A' && name
[0] <= 'Z')
5272 tcc_warning("implicit declaration of function '%s'", name
);
5273 s
= external_global_sym(t
, &func_old_type
, 0);
5277 /* A symbol that has a register is a local register variable,
5278 which starts out as VT_LOCAL value. */
5279 if ((r
& VT_VALMASK
) < VT_CONST
)
5280 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5282 vset(&s
->type
, r
, s
->c
);
5283 /* Point to s as backpointer (even without r&VT_SYM).
5284 Will be used by at least the x86 inline asm parser for
5290 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5291 vtop
->c
.i
= s
->enum_val
;
5296 /* post operations */
5298 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5301 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5304 if (tok
== TOK_ARROW
)
5306 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5309 /* expect pointer on structure */
5310 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5311 expect("struct or union");
5312 if (tok
== TOK_CDOUBLE
)
5313 expect("field name");
5315 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5316 expect("field name");
5317 s
= find_field(&vtop
->type
, tok
);
5319 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5320 /* add field offset to pointer */
5321 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5324 /* change type to field type, and set to lvalue */
5325 vtop
->type
= s
->type
;
5326 vtop
->type
.t
|= qualifiers
;
5327 /* an array is never an lvalue */
5328 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5329 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5330 #ifdef CONFIG_TCC_BCHECK
5331 /* if bound checking, the referenced pointer must be checked */
5332 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5333 vtop
->r
|= VT_MUSTBOUND
;
5337 } else if (tok
== '[') {
5343 } else if (tok
== '(') {
5346 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5349 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5350 /* pointer test (no array accepted) */
5351 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5352 vtop
->type
= *pointed_type(&vtop
->type
);
5353 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5357 expect("function pointer");
5360 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5362 /* get return type */
5365 sa
= s
->next
; /* first parameter */
5366 nb_args
= regsize
= 0;
5368 /* compute first implicit argument if a structure is returned */
5369 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5370 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5371 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5372 &ret_align
, ®size
);
5374 /* get some space for the returned structure */
5375 size
= type_size(&s
->type
, &align
);
5376 #ifdef TCC_TARGET_ARM64
5377 /* On arm64, a small struct is return in registers.
5378 It is much easier to write it to memory if we know
5379 that we are allowed to write some extra bytes, so
5380 round the allocated space up to a power of 2: */
5382 while (size
& (size
- 1))
5383 size
= (size
| (size
- 1)) + 1;
5385 loc
= (loc
- size
) & -align
;
5387 ret
.r
= VT_LOCAL
| VT_LVAL
;
5388 /* pass it as 'int' to avoid structure arg passing
5390 vseti(VT_LOCAL
, loc
);
5400 /* return in register */
5401 if (is_float(ret
.type
.t
)) {
5402 ret
.r
= reg_fret(ret
.type
.t
);
5403 #ifdef TCC_TARGET_X86_64
5404 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5408 #ifndef TCC_TARGET_ARM64
5409 #ifdef TCC_TARGET_X86_64
5410 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5412 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5423 gfunc_param_typed(s
, sa
);
5433 tcc_error("too few arguments to function");
5435 gfunc_call(nb_args
);
5438 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5439 vsetc(&ret
.type
, r
, &ret
.c
);
5440 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5443 /* handle packed struct return */
5444 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5447 size
= type_size(&s
->type
, &align
);
5448 /* We're writing whole regs often, make sure there's enough
5449 space. Assume register size is power of 2. */
5450 if (regsize
> align
)
5452 loc
= (loc
- size
) & -align
;
5456 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5460 if (--ret_nregs
== 0)
5464 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5472 ST_FUNC
void expr_prod(void)
5477 while (tok
== '*' || tok
== '/' || tok
== '%') {
5485 ST_FUNC
void expr_sum(void)
5490 while (tok
== '+' || tok
== '-') {
5498 static void expr_shift(void)
5503 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5511 static void expr_cmp(void)
5516 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5517 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5525 static void expr_cmpeq(void)
5530 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5538 static void expr_and(void)
5541 while (tok
== '&') {
5548 static void expr_xor(void)
5551 while (tok
== '^') {
5558 static void expr_or(void)
5561 while (tok
== '|') {
5568 static void expr_land(void)
5571 if (tok
== TOK_LAND
) {
5574 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5575 gen_cast_s(VT_BOOL
);
5580 while (tok
== TOK_LAND
) {
5596 if (tok
!= TOK_LAND
) {
5609 static void expr_lor(void)
5612 if (tok
== TOK_LOR
) {
5615 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5616 gen_cast_s(VT_BOOL
);
5621 while (tok
== TOK_LOR
) {
5637 if (tok
!= TOK_LOR
) {
5650 /* Assuming vtop is a value used in a conditional context
5651 (i.e. compared with zero) return 0 if it's false, 1 if
5652 true and -1 if it can't be statically determined. */
5653 static int condition_3way(void)
5656 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5657 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5659 gen_cast_s(VT_BOOL
);
5666 static void expr_cond(void)
5668 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5670 CType type
, type1
, type2
;
5675 c
= condition_3way();
5676 g
= (tok
== ':' && gnu_ext
);
5678 /* needed to avoid having different registers saved in
5680 if (is_float(vtop
->type
.t
)) {
5682 #ifdef TCC_TARGET_X86_64
5683 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5707 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5708 mk_pointer(&vtop
->type
);
5710 sv
= *vtop
; /* save value to handle it later */
5711 vtop
--; /* no vpop so that FP stack is not flushed */
5727 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5728 mk_pointer(&vtop
->type
);
5731 bt1
= t1
& VT_BTYPE
;
5733 bt2
= t2
& VT_BTYPE
;
5737 /* cast operands to correct type according to ISOC rules */
5738 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5739 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5740 } else if (is_float(bt1
) || is_float(bt2
)) {
5741 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5742 type
.t
= VT_LDOUBLE
;
5744 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5749 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5750 /* cast to biggest op */
5751 type
.t
= VT_LLONG
| VT_LONG
;
5752 if (bt1
== VT_LLONG
)
5754 if (bt2
== VT_LLONG
)
5756 /* convert to unsigned if it does not fit in a long long */
5757 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5758 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5759 type
.t
|= VT_UNSIGNED
;
5760 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5761 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5762 /* If one is a null ptr constant the result type
5764 if (is_null_pointer (vtop
)) type
= type1
;
5765 else if (is_null_pointer (&sv
)) type
= type2
;
5766 else if (bt1
!= bt2
)
5767 tcc_error("incompatible types in conditional expressions");
5769 CType
*pt1
= pointed_type(&type1
);
5770 CType
*pt2
= pointed_type(&type2
);
5771 int pbt1
= pt1
->t
& VT_BTYPE
;
5772 int pbt2
= pt2
->t
& VT_BTYPE
;
5773 int newquals
, copied
= 0;
5774 /* pointers to void get preferred, otherwise the
5775 pointed to types minus qualifs should be compatible */
5776 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5777 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5778 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5779 tcc_warning("pointer type mismatch in conditional expression\n");
5781 /* combine qualifs */
5782 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
5783 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
5786 /* copy the pointer target symbol */
5787 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5790 pointed_type(&type
)->t
|= newquals
;
5792 /* pointers to incomplete arrays get converted to
5793 pointers to completed ones if possible */
5794 if (pt1
->t
& VT_ARRAY
5795 && pt2
->t
& VT_ARRAY
5796 && pointed_type(&type
)->ref
->c
< 0
5797 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
5800 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5802 pointed_type(&type
)->ref
=
5803 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
5804 0, pointed_type(&type
)->ref
->c
);
5805 pointed_type(&type
)->ref
->c
=
5806 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
5809 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5810 /* XXX: test structure compatibility */
5811 type
= bt1
== VT_STRUCT
? type1
: type2
;
5813 /* integer operations */
5814 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5815 /* convert to unsigned if it does not fit in an integer */
5816 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5817 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5818 type
.t
|= VT_UNSIGNED
;
5820 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5821 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5822 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5824 /* now we convert second operand */
5828 mk_pointer(&vtop
->type
);
5830 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5835 if (is_float(type
.t
)) {
5837 #ifdef TCC_TARGET_X86_64
5838 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5842 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5843 /* for long longs, we use fixed registers to avoid having
5844 to handle a complicated move */
5855 /* this is horrible, but we must also convert first
5861 mk_pointer(&vtop
->type
);
5863 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5867 if (c
< 0 || islv
) {
5869 move_reg(r2
, r1
, type
.t
);
5879 static void expr_eq(void)
5885 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5886 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5887 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5902 ST_FUNC
void gexpr(void)
5913 /* parse a constant expression and return value in vtop. */
5914 static void expr_const1(void)
5923 /* parse an integer constant and return its value. */
5924 static inline int64_t expr_const64(void)
5928 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5929 expect("constant expression");
5935 /* parse an integer constant and return its value.
5936 Complain if it doesn't fit 32bit (signed or unsigned). */
5937 ST_FUNC
int expr_const(void)
5940 int64_t wc
= expr_const64();
5942 if (c
!= wc
&& (unsigned)c
!= wc
)
5943 tcc_error("constant exceeds 32 bit");
5947 /* return the label token if current token is a label, otherwise
5949 static int is_label(void)
5953 /* fast test first */
5954 if (tok
< TOK_UIDENT
)
5956 /* no need to save tokc because tok is an identifier */
5962 unget_tok(last_tok
);
5967 #ifndef TCC_TARGET_ARM64
5968 static void gfunc_return(CType
*func_type
)
5970 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5971 CType type
, ret_type
;
5972 int ret_align
, ret_nregs
, regsize
;
5973 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5974 &ret_align
, ®size
);
5975 if (0 == ret_nregs
) {
5976 /* if returning structure, must copy it to implicit
5977 first pointer arg location */
5980 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5983 /* copy structure value to pointer */
5986 /* returning structure packed into registers */
5987 int r
, size
, addr
, align
;
5988 size
= type_size(func_type
,&align
);
5989 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5990 (vtop
->c
.i
& (ret_align
-1)))
5991 && (align
& (ret_align
-1))) {
5992 loc
= (loc
- size
) & -ret_align
;
5995 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5999 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6001 vtop
->type
= ret_type
;
6002 if (is_float(ret_type
.t
))
6003 r
= rc_fret(ret_type
.t
);
6014 if (--ret_nregs
== 0)
6016 /* We assume that when a structure is returned in multiple
6017 registers, their classes are consecutive values of the
6020 vtop
->c
.i
+= regsize
;
6024 } else if (is_float(func_type
->t
)) {
6025 gv(rc_fret(func_type
->t
));
6029 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6033 static int case_cmp(const void *pa
, const void *pb
)
6035 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6036 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6037 return a
< b
? -1 : a
> b
;
6040 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6044 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6062 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6064 gcase(base
, len
/2, bsym
);
6065 if (cur_switch
->def_sym
)
6066 gjmp_addr(cur_switch
->def_sym
);
6068 *bsym
= gjmp(*bsym
);
6072 base
+= e
; len
-= e
;
6082 if (p
->v1
== p
->v2
) {
6084 gtst_addr(0, p
->sym
);
6094 gtst_addr(0, p
->sym
);
6100 static void block(int *bsym
, int *csym
, int is_expr
)
6102 int a
, b
, c
, d
, cond
;
6105 /* generate line number info */
6106 if (tcc_state
->do_debug
)
6107 tcc_debug_line(tcc_state
);
6110 /* default return value is (void) */
6112 vtop
->type
.t
= VT_VOID
;
6115 if (tok
== TOK_IF
) {
6117 int saved_nocode_wanted
= nocode_wanted
;
6122 cond
= condition_3way();
6128 nocode_wanted
|= 0x20000000;
6129 block(bsym
, csym
, 0);
6131 nocode_wanted
= saved_nocode_wanted
;
6132 if (tok
== TOK_ELSE
) {
6137 nocode_wanted
|= 0x20000000;
6138 block(bsym
, csym
, 0);
6139 gsym(d
); /* patch else jmp */
6141 nocode_wanted
= saved_nocode_wanted
;
6144 } else if (tok
== TOK_WHILE
) {
6145 int saved_nocode_wanted
;
6146 nocode_wanted
&= ~0x20000000;
6156 saved_nocode_wanted
= nocode_wanted
;
6158 nocode_wanted
= saved_nocode_wanted
;
6163 } else if (tok
== '{') {
6164 Sym
*llabel
, *lcleanup
;
6165 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
6166 int lncleanups
= ncleanups
;
6169 /* record local declaration stack position */
6171 llabel
= local_label_stack
;
6172 lcleanup
= current_cleanups
;
6175 /* handle local labels declarations */
6176 while (tok
== TOK_LABEL
) {
6179 if (tok
< TOK_UIDENT
)
6180 expect("label identifier");
6181 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6191 while (tok
!= '}') {
6192 if ((a
= is_label()))
6199 block(bsym
, csym
, is_expr
);
6203 if (current_cleanups
!= lcleanup
) {
6207 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> lncleanups
;)
6208 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6213 try_call_scope_cleanup(lcleanup
);
6214 pcl
->jnext
= gjmp(0);
6216 goto remove_pending
;
6225 if (!nocode_wanted
) {
6226 try_call_scope_cleanup(lcleanup
);
6230 current_cleanups
= lcleanup
;
6231 ncleanups
= lncleanups
;
6232 /* pop locally defined labels */
6233 label_pop(&local_label_stack
, llabel
, is_expr
);
6234 /* pop locally defined symbols */
6236 /* In the is_expr case (a statement expression is finished here),
6237 vtop might refer to symbols on the local_stack. Either via the
6238 type or via vtop->sym. We can't pop those nor any that in turn
6239 might be referred to. To make it easier we don't roll back
6240 any symbols in that case; some upper level call to block() will
6241 do that. We do have to remove such symbols from the lookup
6242 tables, though. sym_pop will do that. */
6243 sym_pop(&local_stack
, s
, is_expr
);
6245 /* Pop VLA frames and restore stack pointer if required */
6246 if (vlas_in_scope
> saved_vlas_in_scope
) {
6247 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
6250 vlas_in_scope
= saved_vlas_in_scope
;
6253 } else if (tok
== TOK_RETURN
) {
6257 gen_assign_cast(&func_vt
);
6258 try_call_scope_cleanup(NULL
);
6259 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6262 gfunc_return(&func_vt
);
6264 try_call_scope_cleanup(NULL
);
6267 /* jump unless last stmt in top-level block */
6268 if (tok
!= '}' || local_scope
!= 1)
6270 nocode_wanted
|= 0x20000000;
6271 } else if (tok
== TOK_BREAK
) {
6274 tcc_error("cannot break");
6275 *bsym
= gjmp(*bsym
);
6278 nocode_wanted
|= 0x20000000;
6279 } else if (tok
== TOK_CONTINUE
) {
6282 tcc_error("cannot continue");
6283 vla_sp_restore_root();
6284 *csym
= gjmp(*csym
);
6287 nocode_wanted
|= 0x20000000;
6288 } else if (tok
== TOK_FOR
) {
6290 int saved_nocode_wanted
;
6291 nocode_wanted
&= ~0x20000000;
6297 /* c99 for-loop init decl? */
6298 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6299 /* no, regular for-loop init expr */
6325 saved_nocode_wanted
= nocode_wanted
;
6327 nocode_wanted
= saved_nocode_wanted
;
6332 sym_pop(&local_stack
, s
, 0);
6335 if (tok
== TOK_DO
) {
6336 int saved_nocode_wanted
;
6337 nocode_wanted
&= ~0x20000000;
6343 saved_nocode_wanted
= nocode_wanted
;
6349 nocode_wanted
= saved_nocode_wanted
;
6353 nocode_wanted
= saved_nocode_wanted
;
6358 if (tok
== TOK_SWITCH
) {
6359 struct switch_t
*saved
, sw
;
6360 int saved_nocode_wanted
= nocode_wanted
;
6366 switchval
= *vtop
--;
6368 b
= gjmp(0); /* jump to first case */
6369 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6373 nocode_wanted
= saved_nocode_wanted
;
6374 a
= gjmp(a
); /* add implicit break */
6377 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6378 for (b
= 1; b
< sw
.n
; b
++)
6379 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6380 tcc_error("duplicate case value");
6381 /* Our switch table sorting is signed, so the compared
6382 value needs to be as well when it's 64bit. */
6383 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6384 switchval
.type
.t
&= ~VT_UNSIGNED
;
6386 gcase(sw
.p
, sw
.n
, &a
);
6389 gjmp_addr(sw
.def_sym
);
6390 dynarray_reset(&sw
.p
, &sw
.n
);
6395 if (tok
== TOK_CASE
) {
6396 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6399 nocode_wanted
&= ~0x20000000;
6401 cr
->v1
= cr
->v2
= expr_const64();
6402 if (gnu_ext
&& tok
== TOK_DOTS
) {
6404 cr
->v2
= expr_const64();
6405 if (cr
->v2
< cr
->v1
)
6406 tcc_warning("empty case range");
6409 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6412 goto block_after_label
;
6414 if (tok
== TOK_DEFAULT
) {
6419 if (cur_switch
->def_sym
)
6420 tcc_error("too many 'default'");
6421 cur_switch
->def_sym
= ind
;
6423 goto block_after_label
;
6425 if (tok
== TOK_GOTO
) {
6427 if (tok
== '*' && gnu_ext
) {
6431 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6434 } else if (tok
>= TOK_UIDENT
) {
6435 s
= label_find(tok
);
6436 /* put forward definition if needed */
6438 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6439 else if (s
->r
== LABEL_DECLARED
)
6440 s
->r
= LABEL_FORWARD
;
6442 vla_sp_restore_root();
6443 if (s
->r
& LABEL_FORWARD
) {
6444 /* start new goto chain for cleanups, linked via label->next */
6445 if (current_cleanups
) {
6446 sym_push2(&pending_gotos
, SYM_FIELD
, 0, ncleanups
);
6447 pending_gotos
->prev_tok
= s
;
6448 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
6449 pending_gotos
->next
= s
;
6451 s
->jnext
= gjmp(s
->jnext
);
6453 try_call_cleanup_goto(s
->cleanupstate
);
6454 gjmp_addr(s
->jnext
);
6458 expect("label identifier");
6461 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
6470 if (s
->r
== LABEL_DEFINED
)
6471 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6472 s
->r
= LABEL_DEFINED
;
6474 Sym
*pcl
; /* pending cleanup goto */
6475 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
6477 sym_pop(&s
->next
, NULL
, 0);
6481 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
6484 s
->cleanupstate
= current_cleanups
;
6486 /* we accept this, but it is a mistake */
6488 nocode_wanted
&= ~0x20000000;
6490 tcc_warning("deprecated use of label at end of compound statement");
6494 block(bsym
, csym
, is_expr
);
6497 /* expression case */
6512 /* This skips over a stream of tokens containing balanced {} and ()
6513 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6514 with a '{'). If STR then allocates and stores the skipped tokens
6515 in *STR. This doesn't check if () and {} are nested correctly,
6516 i.e. "({)}" is accepted. */
6517 static void skip_or_save_block(TokenString
**str
)
6519 int braces
= tok
== '{';
6522 *str
= tok_str_alloc();
6524 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6526 if (tok
== TOK_EOF
) {
6527 if (str
|| level
> 0)
6528 tcc_error("unexpected end of file");
6533 tok_str_add_tok(*str
);
6536 if (t
== '{' || t
== '(') {
6538 } else if (t
== '}' || t
== ')') {
6540 if (level
== 0 && braces
&& t
== '}')
6545 tok_str_add(*str
, -1);
6546 tok_str_add(*str
, 0);
6550 #define EXPR_CONST 1
6553 static void parse_init_elem(int expr_type
)
6555 int saved_global_expr
;
6558 /* compound literals must be allocated globally in this case */
6559 saved_global_expr
= global_expr
;
6562 global_expr
= saved_global_expr
;
6563 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6564 (compound literals). */
6565 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6566 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6567 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6568 #ifdef TCC_TARGET_PE
6569 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6572 tcc_error("initializer element is not constant");
6580 /* put zeros for variable based init */
6581 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6584 /* nothing to do because globals are already set to zero */
6586 vpush_global_sym(&func_old_type
, TOK_memset
);
6588 #ifdef TCC_TARGET_ARM
6600 #define DIF_SIZE_ONLY 2
6601 #define DIF_HAVE_ELEM 4
6603 /* t is the array or struct type. c is the array or struct
6604 address. cur_field is the pointer to the current
6605 field, for arrays the 'c' member contains the current start
6606 index. 'flags' is as in decl_initializer.
6607 'al' contains the already initialized length of the
6608 current container (starting at c). This returns the new length of that. */
6609 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6610 Sym
**cur_field
, int flags
, int al
)
6613 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6614 unsigned long corig
= c
;
6618 if (flags
& DIF_HAVE_ELEM
)
6620 if (gnu_ext
&& (l
= is_label()) != 0)
6622 /* NOTE: we only support ranges for last designator */
6623 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6625 if (!(type
->t
& VT_ARRAY
))
6626 expect("array type");
6628 index
= index_last
= expr_const();
6629 if (tok
== TOK_DOTS
&& gnu_ext
) {
6631 index_last
= expr_const();
6635 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6637 tcc_error("invalid index");
6639 (*cur_field
)->c
= index_last
;
6640 type
= pointed_type(type
);
6641 elem_size
= type_size(type
, &align
);
6642 c
+= index
* elem_size
;
6643 nb_elems
= index_last
- index
+ 1;
6649 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6650 expect("struct/union type");
6651 f
= find_field(type
, l
);
6664 } else if (!gnu_ext
) {
6669 if (type
->t
& VT_ARRAY
) {
6670 index
= (*cur_field
)->c
;
6671 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6672 tcc_error("index too large");
6673 type
= pointed_type(type
);
6674 c
+= index
* type_size(type
, &align
);
6677 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6678 *cur_field
= f
= f
->next
;
6680 tcc_error("too many field init");
6685 /* must put zero in holes (note that doing it that way
6686 ensures that it even works with designators) */
6687 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
6688 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6689 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
6691 /* XXX: make it more general */
6692 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
6693 unsigned long c_end
;
6698 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6699 for (i
= 1; i
< nb_elems
; i
++) {
6700 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6705 } else if (!NODATA_WANTED
) {
6706 c_end
= c
+ nb_elems
* elem_size
;
6707 if (c_end
> sec
->data_allocated
)
6708 section_realloc(sec
, c_end
);
6709 src
= sec
->data
+ c
;
6711 for(i
= 1; i
< nb_elems
; i
++) {
6713 memcpy(dst
, src
, elem_size
);
6717 c
+= nb_elems
* type_size(type
, &align
);
/*
 * NOTE(review): this listing is a fragmented extraction -- each original
 * source line is split across several physical lines, and gaps in the
 * fused line numbers (e.g. 6725-6730, 6802-6810) are lines missing from
 * this view.  Only comment lines are added below; no code token changed.
 *
 * init_putv stores the constant value currently on top of the value
 * stack (vtop) into storage at offset 'c' of section 'sec': it writes
 * through 'ptr = sec->data + c' and emits relocations (greloc/greloca)
 * when the value carries VT_SYM.  For stack-based (sec == NULL) storage
 * the fall-through path at the end generates a normal store via vset()
 * -- presumably followed by vstore(); the closing lines are not visible
 * here, confirm against upstream tccgen.c.
 */
6723 /* store a value or an expression directly in global data or in local array */
6724 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6731 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6735 /* XXX: not portable */
6736 /* XXX: generate error if incorrect relocation */
6737 gen_assign_cast(&dtype
);
6738 bt
= type
->t
& VT_BTYPE
;
/* A symbol-relative value can only be emitted into a pointer-sized
   integer slot (and never into a bit-field); anything else is rejected
   as not computable at load time.  (Exception below: anonymous-symbol
   constants, which are handled by the memcpy path further down.) */
6740 if ((vtop
->r
& VT_SYM
)
6743 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6744 || (type
->t
& VT_BITFIELD
))
6745 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6747 tcc_error("initializer element is not computable at load time");
/* With NODATA_WANTED no static data is emitted; the lines of this
   branch's body are missing from this extraction. */
6749 if (NODATA_WANTED
) {
6754 size
= type_size(type
, &align
);
6755 section_reserve(sec
, c
+ size
);
6756 ptr
= sec
->data
+ c
;
6758 /* XXX: make code faster ? */
6759 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6760 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6761 /* XXX This rejects compound literals like
6762 '(void *){ptr}'. The problem is that '&sym' is
6763 represented the same way, which would be ruled out
6764 by the SYM_FIRST_ANOM check above, but also '"string"'
6765 in 'char *p = "string"' is represented the same
6766 with the type being VT_PTR and the symbol being an
6767 anonymous one. That is, there's no difference in vtop
6768 between '(void *){x}' and '&(void *){x}'. Ignore
6769 pointer typed entities here. Hopefully no real code
6770 will every use compound literals with scalar type. */
6771 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6772 /* These come from compound literals, memcpy stuff over. */
6776 esym
= elfsym(vtop
->sym
);
6777 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6778 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6780 /* We need to copy over all memory contents, and that
6781 includes relocations. Use the fact that relocs are
6782 created it order, so look from the end of relocs
6783 until we hit one before the copied region. */
6784 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6785 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
/* Walk relocations backwards; relocs past the copied object are
   skipped, relocs before it terminate the scan (loop-control lines
   for these two tests are missing from this extraction). */
6786 while (num_relocs
--) {
6788 if (rel
->r_offset
>= esym
->st_value
+ size
)
6790 if (rel
->r_offset
< esym
->st_value
)
6792 /* Note: if the same fields are initialized multiple
6793 times (possible with designators) then we possibly
6794 add multiple relocations for the same offset here.
6795 That would lead to wrong code, the last reloc needs
6796 to win. We clean this up later after the whole
6797 initializer is parsed. */
6798 put_elf_reloca(symtab_section
, sec
,
6799 c
+ rel
->r_offset
- esym
->st_value
,
6800 ELFW(R_TYPE
)(rel
->r_info
),
6801 ELFW(R_SYM
)(rel
->r_info
),
/* Bit-field store: read-modify-write each target byte, merging the
   shifted value 'v' under mask 'm' so neighbouring fields survive. */
6811 if (type
->t
& VT_BITFIELD
) {
6812 int bit_pos
, bit_size
, bits
, n
;
6813 unsigned char *p
, v
, m
;
6814 bit_pos
= BIT_POS(vtop
->type
.t
);
6815 bit_size
= BIT_SIZE(vtop
->type
.t
);
6816 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6817 bit_pos
&= 7, bits
= 0;
6822 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6823 m
= ((1 << n
) - 1) << bit_pos
;
6824 *p
= (*p
& ~m
) | (v
& m
);
6825 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
/* Scalar stores by basic type follow; the switch/case framing lines
   are missing from this extraction, only the store expressions remain.
   '|=' (rather than '=') is used for integer slots -- presumably so a
   partially written bit-field byte is preserved; confirm upstream. */
6829 /* XXX: when cross-compiling we assume that each type has the
6830 same representation on host and target, which is likely to
6831 be wrong in the case of long double */
6833 vtop
->c
.i
= vtop
->c
.i
!= 0;
6835 *(char *)ptr
|= vtop
->c
.i
;
6838 *(short *)ptr
|= vtop
->c
.i
;
6841 *(float*)ptr
= vtop
->c
.f
;
6844 *(double *)ptr
= vtop
->c
.d
;
6847 #if defined TCC_IS_NATIVE_387
6848 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6849 memcpy(ptr
, &vtop
->c
.ld
, 10);
6851 else if (sizeof (long double) == sizeof (double))
6852 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
6854 else if (vtop
->c
.ld
== 0.0)
6858 if (sizeof(long double) == LDOUBLE_SIZE
)
6859 *(long double*)ptr
= vtop
->c
.ld
;
6860 else if (sizeof(double) == LDOUBLE_SIZE
)
6861 *(double *)ptr
= (double)vtop
->c
.ld
;
6863 tcc_error("can't cross compile long double constants");
6867 *(long long *)ptr
|= vtop
->c
.i
;
/* Pointer-sized slot: emit a relocation when the value references a
   symbol, then or-in the constant addend. */
6874 addr_t val
= vtop
->c
.i
;
6876 if (vtop
->r
& VT_SYM
)
6877 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6879 *(addr_t
*)ptr
|= val
;
6881 if (vtop
->r
& VT_SYM
)
6882 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6883 *(addr_t
*)ptr
|= val
;
6889 int val
= vtop
->c
.i
;
6891 if (vtop
->r
& VT_SYM
)
6892 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6896 if (vtop
->r
& VT_SYM
)
6897 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
/* No section: local (stack) storage -- push an lvalue reference so the
   value can be stored with ordinary generated code. */
6906 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
/*
 * NOTE(review): fragmented extraction -- original lines are split across
 * several physical lines and the fused line numbers have gaps (missing
 * lines, e.g. 6919-6920, 6999-7004, 7037-7044).  Only comments added.
 *
 * decl_initializer recursively parses one initializer for 'type' and
 * (unless DIF_SIZE_ONLY) emits the data via init_putv/init_putz.
 */
6913 /* 't' contains the type and storage info. 'c' is the offset of the
6914 object in section 'sec'. If 'sec' is NULL, it means stack based
6915 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
6916 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
6917 size only evaluation is wanted (only for arrays). */
6918 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6921 int len
, n
, no_oblock
, nb
, i
;
/* Eagerly parse a scalar initializer element, except for strings (the
   array path below consumes those itself) and size-only evaluation. */
6927 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
6928 /* In case of strings we have special handling for arrays, so
6929 don't consume them as initializer value (which would commit them
6930 to some anonymous symbol). */
6931 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6932 !(flags
& DIF_SIZE_ONLY
)) {
6933 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6934 flags
|= DIF_HAVE_ELEM
;
/* Scalar fast path: a parsed element whose unqualified type already
   matches the target is stored directly. */
6937 if ((flags
& DIF_HAVE_ELEM
) &&
6938 !(type
->t
& VT_ARRAY
) &&
6939 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6940 The source type might have VT_CONSTANT set, which is
6941 of course assignable to non-const elements. */
6942 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6943 init_putv(type
, sec
, c
);
/* Array initializer: string-literal handling first, then the generic
   brace-list loop shared with the struct/union case. */
6944 } else if (type
->t
& VT_ARRAY
) {
6947 t1
= pointed_type(type
);
6948 size1
= type_size(t1
, &align1
);
6951 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6954 tcc_error("character array initializer must be a literal,"
6955 " optionally enclosed in braces");
6960 /* only parse strings here if correct type (otherwise: handle
6961 them as ((w)char *) expressions */
6962 if ((tok
== TOK_LSTR
&&
6963 #ifdef TCC_TARGET_PE
6964 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6966 (t1
->t
& VT_BTYPE
) == VT_INT
6968 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
/* Consume adjacent string literals, truncating to the declared array
   size when known (n >= 0). */
6970 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6973 /* compute maximum number of chars wanted */
6975 cstr_len
= tokc
.str
.size
;
6977 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6980 if (n
>= 0 && nb
> (n
- len
))
6982 if (!(flags
& DIF_SIZE_ONLY
)) {
6984 tcc_warning("initializer-string for array is too long");
6985 /* in order to go faster for common case (char
6986 string in global variable, we handle it
6988 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6990 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
6994 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6996 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6998 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
7005 /* only add trailing zero if enough storage (no
7006 warning in this case since it is standard) */
7007 if (n
< 0 || len
< n
) {
7008 if (!(flags
& DIF_SIZE_ONLY
)) {
7010 init_putv(t1
, sec
, c
+ (len
* size1
));
/* Generic brace-list loop: decl_designator dispatches each element
   (designated or positional); also used for struct/union bodies. */
7021 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7022 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7023 flags
&= ~DIF_HAVE_ELEM
;
7024 if (type
->t
& VT_ARRAY
) {
7026 /* special test for multi dimensional arrays (may not
7027 be strictly correct if designators are used at the
7029 if (no_oblock
&& len
>= n
*size1
)
7032 if (s
->type
.t
== VT_UNION
)
7036 if (no_oblock
&& f
== NULL
)
7045 /* put zeros at the end */
7046 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7047 init_putz(sec
, c
+ len
, n
*size1
- len
);
7050 /* patch type size if needed, which happens only for array types */
7052 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
/* Struct/union initializer (framing lines partly missing here). */
7053 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7056 if ((flags
& DIF_FIRST
) || tok
== '{') {
7064 } else if (tok
== '{') {
7065 if (flags
& DIF_HAVE_ELEM
)
7068 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7070 } else if ((flags
& DIF_SIZE_ONLY
)) {
7071 /* If we supported only ISO C we wouldn't have to accept calling
7072 this on anything than an array if DIF_SIZE_ONLY (and even then
7073 only on the outermost level, so no recursion would be needed),
7074 because initializing a flex array member isn't supported.
7075 But GNU C supports it, so we need to recurse even into
7076 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7077 /* just skip expression */
7078 skip_or_save_block(NULL
);
7080 if (!(flags
& DIF_HAVE_ELEM
)) {
7081 /* This should happen only when we haven't parsed
7082 the init element above for fear of committing a
7083 string constant to memory too early. */
7084 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7085 expect("string constant");
7086 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7088 init_putv(type
, sec
, c
);
/*
 * NOTE(review): fragmented extraction -- lines split across physical
 * lines; gaps in the fused numbers (e.g. 7104-7105, 7129-7131,
 * 7227-7230, 7242-7255) are lines missing from this view.  Comments
 * only; no code token changed.
 */
7092 /* parse an initializer for type 't' if 'has_init' is non zero, and
7093 allocate space in local or global data space ('r' is either
7094 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7095 variable 'v' of scope 'scope' is declared before initializers
7096 are parsed. If 'v' is zero, then a reference to the new object
7097 is put in the value stack. If 'has_init' is 2, a special parsing
7098 is done to handle string constants. */
7099 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7100 int has_init
, int v
, int scope
)
7102 int size
, align
, addr
;
7103 TokenString
*init_str
= NULL
;
7106 Sym
*flexible_array
;
7108 int saved_nocode_wanted
= nocode_wanted
;
7109 #ifdef CONFIG_TCC_BCHECK
7113 /* Always allocate static or global variables */
7114 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7115 nocode_wanted
|= 0x80000000;
7117 #ifdef CONFIG_TCC_BCHECK
7118 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
/* Detect a trailing flexible array member (unsized array as the last
   struct field); its real size is discovered while pre-parsing the
   initializer below. */
7121 flexible_array
= NULL
;
7122 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7123 Sym
*field
= type
->ref
->next
;
7126 field
= field
->next
;
7127 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7128 flexible_array
= field
;
7132 size
= type_size(type
, &align
);
7133 /* If unknown size, we must evaluate it before
7134 evaluating initializers because
7135 initializers can generate global data too
7136 (e.g. string pointers or ISOC99 compound
7137 literals). It also simplifies local
7138 initializers handling */
7139 if (size
< 0 || (flexible_array
&& has_init
)) {
7141 tcc_error("unknown type size");
7142 /* get all init string */
7143 if (has_init
== 2) {
7144 init_str
= tok_str_alloc();
7145 /* only get strings */
7146 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7147 tok_str_add_tok(init_str
);
7150 tok_str_add(init_str
, -1);
7151 tok_str_add(init_str
, 0);
7153 skip_or_save_block(&init_str
);
/* First pass over the saved tokens: size-only evaluation to fix the
   array/flex-member size, then rewind for the real second pass. */
7158 begin_macro(init_str
, 1);
7160 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7161 /* prepare second initializer parsing */
7162 macro_ptr
= init_str
->str
;
7165 /* if still unknown size, error */
7166 size
= type_size(type
, &align
);
7168 tcc_error("unknown type size");
7170 /* If there's a flex member and it was used in the initializer
7172 if (flexible_array
&&
7173 flexible_array
->type
.ref
->c
> 0)
7174 size
+= flexible_array
->type
.ref
->c
7175 * pointed_size(&flexible_array
->type
);
7176 /* take into account specified alignment if bigger */
7177 if (ad
->a
.aligned
) {
7178 int speca
= 1 << (ad
->a
.aligned
- 1);
7181 } else if (ad
->a
.packed
) {
7185 if (!v
&& NODATA_WANTED
)
7186 size
= 0, align
= 1;
/* Local (stack) allocation path: carve space out of 'loc', optionally
   recording bounds-check info and a cleanup handler. */
7188 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7190 #ifdef CONFIG_TCC_BCHECK
7191 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7195 loc
= (loc
- size
) & -align
;
7197 #ifdef CONFIG_TCC_BCHECK
7198 /* handles bounds */
7199 /* XXX: currently, since we do only one pass, we cannot track
7200 '&' operators, so we add only arrays */
7201 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7203 /* add padding between regions */
7205 /* then add local bound info */
7206 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7207 bounds_ptr
[0] = addr
;
7208 bounds_ptr
[1] = size
;
7212 /* local variable */
7213 #ifdef CONFIG_TCC_ASM
7214 if (ad
->asm_label
) {
7215 int reg
= asm_parse_regvar(ad
->asm_label
);
7217 r
= (r
& ~VT_VALMASK
) | reg
;
7220 sym
= sym_push(v
, type
, r
, addr
);
/* __attribute__((cleanup(f))): chain a cleanup record so the function
   is invoked when the variable goes out of scope. */
7221 if (ad
->cleanup_func
) {
7222 Sym
*cls
= sym_push2(&all_cleanups
, SYM_FIELD
| ++ncleanups
, 0, 0);
7223 cls
->prev_tok
= sym
;
7224 cls
->next
= ad
->cleanup_func
;
7225 cls
->ncl
= current_cleanups
;
7226 current_cleanups
= cls
;
7231 /* push local reference */
7232 vset(type
, r
, addr
);
/* Global/static path: create or reuse the symbol and allocate section
   space (several branch lines missing from this extraction). */
7235 if (v
&& scope
== VT_CONST
) {
7236 /* see if the symbol was already defined */
7239 patch_storage(sym
, ad
, type
);
7240 /* we accept several definitions of the same global variable. */
7241 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7246 /* allocate symbol in corresponding section */
7251 else if (tcc_state
->nocommon
)
7256 addr
= section_add(sec
, size
, align
);
7257 #ifdef CONFIG_TCC_BCHECK
7258 /* add padding if bound check */
7260 section_add(sec
, 1, 1);
7263 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7264 sec
= common_section
;
7269 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7270 patch_storage(sym
, ad
, NULL
);
7272 /* Local statics have a scope until now (for
7273 warnings), remove it here. */
7275 /* update symbol definition */
7276 put_extern_sym(sym
, sec
, addr
, size
);
7278 /* push global reference */
7279 sym
= get_sym_ref(type
, sec
, addr
, size
);
7280 vpushsym(type
, sym
);
7284 #ifdef CONFIG_TCC_BCHECK
7285 /* handles bounds now because the symbol must be defined
7286 before for the relocation */
7290 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7291 /* then add global bound info */
7292 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7293 bounds_ptr
[0] = 0; /* relocated */
7294 bounds_ptr
[1] = size
;
/* VLA: runtime allocation -- save SP on first VLA in scope, compute the
   runtime size and adjust the stack. */
7299 if (type
->t
& VT_VLA
) {
7305 /* save current stack pointer */
7306 if (vlas_in_scope
== 0) {
7307 if (vla_sp_root_loc
== -1)
7308 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
7309 gen_vla_sp_save(vla_sp_root_loc
);
7312 vla_runtime_type_size(type
, &a
);
7313 gen_vla_alloc(type
, a
);
7314 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7315 /* on _WIN64, because of the function args scratch area, the
7316 result of alloca differs from RSP and is returned in RAX. */
7317 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7319 gen_vla_sp_save(addr
);
/* Finally parse the initializer proper, collapse duplicate relocations
   from re-initialized designated fields, and restore the flex-member
   size for later declarations of the same type. */
7323 } else if (has_init
) {
7324 size_t oldreloc_offset
= 0;
7325 if (sec
&& sec
->reloc
)
7326 oldreloc_offset
= sec
->reloc
->data_offset
;
7327 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
7328 if (sec
&& sec
->reloc
)
7329 squeeze_multi_relocs(sec
, oldreloc_offset
);
7330 /* patch flexible array member size back to -1, */
7331 /* for possible subsequent similar declarations */
7333 flexible_array
->type
.ref
->c
= -1;
7337 /* restore parse state if needed */
7343 nocode_wanted
= saved_nocode_wanted
;
/*
 * NOTE(review): fragmented extraction; gaps in the fused line numbers
 * (e.g. 7349-7350, 7377-7379, 7382-7385) are missing lines.  Comments
 * only; no code token changed.
 *
 * gen_function compiles the body of 'sym' into cur_text_section:
 * alignment padding, symbol emission, prolog, body via block(), an
 * implicit 'return 0' for main, then tear-down of the local symbol
 * stack and poisoning of the per-function globals.
 */
7346 /* parse a function defined by symbol 'sym' and generate its code in
7347 'cur_text_section' */
7348 static void gen_function(Sym
*sym
)
7351 ind
= cur_text_section
->data_offset
;
/* Honour __attribute__((aligned(n))) by padding the text section with
   NOPs up to the requested boundary. */
7352 if (sym
->a
.aligned
) {
7353 size_t newoff
= section_add(cur_text_section
, 0,
7354 1 << (sym
->a
.aligned
- 1));
7355 gen_fill_nops(newoff
- ind
);
7357 /* NOTE: we patch the symbol size later */
7358 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7359 funcname
= get_tok_str(sym
->v
, NULL
);
7361 /* Initialize VLA state */
7363 vla_sp_root_loc
= -1;
7364 /* put debug symbol */
7365 tcc_debug_funcstart(tcc_state
, sym
);
7366 /* push a dummy symbol to enable local sym storage */
7367 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7368 local_scope
= 1; /* for function parameters */
7369 gfunc_prolog(&sym
->type
);
7370 reset_local_scope();
7372 clear_temp_local_var_list();
7373 block(NULL
, NULL
, 0);
/* C99: falling off the end of main() returns 0 -- emit the implicit
   return value unless code generation is suppressed. */
7374 if (!(nocode_wanted
& 0x20000000)
7375 && ((func_vt
.t
& VT_BTYPE
) == VT_INT
)
7376 && !strcmp (funcname
, "main"))
7380 gen_assign_cast(&func_vt
);
7381 gfunc_return(&func_vt
);
7386 cur_text_section
->data_offset
= ind
;
7387 label_pop(&global_label_stack
, NULL
, 0);
7388 /* reset local stack */
7389 reset_local_scope();
7390 sym_pop(&local_stack
, NULL
, 0);
7391 /* end of function */
7392 /* patch symbol size */
7393 elfsym(sym
)->st_size
= ind
- func_ind
;
7394 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7395 /* It's better to crash than to generate wrong code */
7396 cur_text_section
= NULL
;
7397 funcname
= ""; /* for safety */
7398 func_vt
.t
= VT_VOID
; /* for safety */
7399 func_var
= 0; /* for safety */
7400 ind
= 0; /* for safety */
7401 nocode_wanted
= 0x80000000;
/*
 * NOTE(review): fragmented extraction; gaps (e.g. 7406-7407, 7410,
 * 7413, 7417, 7421-7422, 7425, 7427, 7429-7431, 7433-7434) are missing
 * lines -- including the do/while framing and the call that actually
 * generates the function body.  Comments only; no code token changed.
 *
 * gen_inline_functions: emit code for every recorded static-inline
 * function that was referenced (sym->c non-zero), repeating until a
 * full pass generates nothing new (generating one inline function may
 * reference another).  Saves/restores file->line_num around the
 * re-parsing of the saved token strings.
 */
7405 static void gen_inline_functions(TCCState
*s
)
7408 int inline_generated
, i
, ln
;
7409 struct InlineFunc
*fn
;
7411 ln
= file
->line_num
;
7412 /* iterate while inline function are referenced */
7414 inline_generated
= 0;
7415 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7416 fn
= s
->inline_fns
[i
];
7418 if (sym
&& sym
->c
) {
7419 /* the function was used: generate its code and
7420 convert it to a normal function */
7423 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7424 sym
->type
.t
&= ~VT_INLINE
;
7426 begin_macro(fn
->func_str
, 1);
7428 cur_text_section
= text_section
;
7432 inline_generated
= 1;
7435 } while (inline_generated
);
7436 file
->line_num
= ln
;
/*
 * NOTE(review): fragmented extraction; lines 7440-7441, 7445 and 7447
 * are missing (function brace, 'i' declaration, loop brace).  Comments
 * only; no code token changed.
 *
 * free_inline_functions: release the saved token strings of inline
 * functions that were never emitted, then free the dynarray itself.
 */
7439 ST_FUNC
void free_inline_functions(TCCState
*s
)
7442 /* free tokens of unused inline functions */
7443 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7444 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7446 tok_str_free(fn
->func_str
);
7448 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
/*
 * NOTE(review): fragmented extraction -- original lines split across
 * several physical lines; gaps in the fused numbers (e.g. 7454-7457,
 * 7466-7470, 7491-7493, 7533-7536, 7594-7598, 7682-7692) are missing
 * lines, including several closing braces and branch bodies.  Comments
 * only; no code token changed.
 *
 * decl0: parse a sequence of declarations/definitions at the storage
 * level given by 'l' (VT_CONST file scope, VT_LOCAL block scope, or
 * VT_CMP for an old-style K&R parameter declaration list, in which case
 * 'func_sym' is the function whose parameters are being typed).
 */
7451 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7452 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7453 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7458 AttributeDef ad
, adbase
;
/* No base type parsed: handle empty declarations, global asm blocks and
   K&R implicit-int prototypes before giving up. */
7461 if (!parse_btype(&btype
, &adbase
)) {
7462 if (is_for_loop_init
)
7464 /* skip redundant ';' if not in old parameter decl scope */
7465 if (tok
== ';' && l
!= VT_CMP
) {
7471 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7472 /* global asm block */
7476 if (tok
>= TOK_UIDENT
) {
7477 /* special test for old K&R protos without explicit int
7478 type. Only accepted when defining global data */
7482 expect("declaration");
/* Bare 'struct foo;' / enum declarations that declare no object. */
7487 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7488 int v
= btype
.ref
->v
;
7489 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7490 tcc_warning("unnamed struct/union that defines no instances");
7494 if (IS_ENUM(btype
.t
)) {
7499 while (1) { /* iterate thru each declaration */
7501 /* If the base type itself was an array type of unspecified
7502 size (like in 'typedef int arr[]; arr x = {1};') then
7503 we will overwrite the unknown size by the real one for
7504 this decl. We need to unshare the ref symbol holding
7506 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7507 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7510 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
7514 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7515 printf("type = '%s'\n", buf
);
/* Function definition (body follows): validate, handle old-style
   parameter lists, record static-inline for deferred emission, or
   generate the code now. */
7518 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7519 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
7520 tcc_error("function without file scope cannot be static");
7522 /* if old style function prototype, we accept a
7525 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7526 decl0(VT_CMP
, 0, sym
);
7529 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7530 ad
.asm_label
= asm_label_instr();
7531 /* parse one last attribute list, after asm label */
7532 parse_attribute(&ad
);
7537 #ifdef TCC_TARGET_PE
7538 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7539 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7540 tcc_error("cannot have dll linkage with static or typedef");
7541 if (ad
.a
.dllimport
) {
7542 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7545 type
.t
|= VT_EXTERN
;
7551 tcc_error("cannot use local functions");
7552 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7553 expect("function definition");
7555 /* reject abstract declarators in function definition
7556 make old style params without decl have int type */
7558 while ((sym
= sym
->next
) != NULL
) {
7559 if (!(sym
->v
& ~SYM_FIELD
))
7560 expect("identifier");
7561 if (sym
->type
.t
== VT_VOID
)
7562 sym
->type
= int_type
;
7565 /* XXX: cannot do better now: convert extern line to static inline */
7566 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7567 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7569 /* put function symbol */
7570 sym
= external_global_sym(v
, &type
, 0);
7571 type
.t
&= ~VT_EXTERN
;
7572 patch_storage(sym
, &ad
, &type
);
7574 /* static inline functions are just recorded as a kind
7575 of macro. Their code will be emitted at the end of
7576 the compilation unit only if they are used */
7577 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7578 (VT_INLINE
| VT_STATIC
)) {
7579 struct InlineFunc
*fn
;
7580 const char *filename
;
7582 filename
= file
? file
->filename
: "";
7583 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7584 strcpy(fn
->filename
, filename
);
7586 skip_or_save_block(&fn
->func_str
);
7587 dynarray_add(&tcc_state
->inline_fns
,
7588 &tcc_state
->nb_inline_fns
, fn
);
7590 /* compute text section */
7591 cur_text_section
= ad
.section
;
7592 if (!cur_text_section
)
7593 cur_text_section
= text_section
;
/* Old-style (K&R) parameter declaration: match the declarator against
   the function's parameter list and assign its real type. */
7599 /* find parameter in function parameter list */
7600 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7601 if ((sym
->v
& ~SYM_FIELD
) == v
)
7603 tcc_error("declaration for parameter '%s' but no such parameter",
7604 get_tok_str(v
, NULL
));
7606 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7607 tcc_error("storage class specified for '%s'",
7608 get_tok_str(v
, NULL
));
7609 if (sym
->type
.t
!= VT_VOID
)
7610 tcc_error("redefinition of parameter '%s'",
7611 get_tok_str(v
, NULL
));
7612 convert_parameter_type(&type
);
/* typedef: record the name; re-declaration in the same scope must be
   compatible. */
7614 } else if (type
.t
& VT_TYPEDEF
) {
7615 /* save typedefed type */
7616 /* XXX: test storage specifiers ? */
7618 if (sym
&& sym
->sym_scope
== local_scope
) {
7619 if (!is_compatible_types(&sym
->type
, &type
)
7620 || !(sym
->type
.t
& VT_TYPEDEF
))
7621 tcc_error("incompatible redefinition of '%s'",
7622 get_tok_str(v
, NULL
));
7625 sym
= sym_push(v
, &type
, 0, 0);
7629 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7630 && !(type
.t
& VT_EXTERN
)) {
7631 tcc_error("declaration of void object");
/* Object / function declaration without a body: decide between an
   external reference and an actual allocation with initializer. */
7634 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7635 /* external function definition */
7636 /* specific case for func_call attribute */
7638 } else if (!(type
.t
& VT_ARRAY
)) {
7639 /* not lvalue if array */
7640 r
|= lvalue_type(type
.t
);
7642 has_init
= (tok
== '=');
7643 if (has_init
&& (type
.t
& VT_VLA
))
7644 tcc_error("variable length array cannot be initialized");
7645 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
7646 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7647 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7648 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7649 /* external variable or function */
7650 /* NOTE: as GCC, uninitialized global static
7651 arrays of null size are considered as
7653 type
.t
|= VT_EXTERN
;
7654 sym
= external_sym(v
, &type
, r
, &ad
);
7655 if (ad
.alias_target
) {
7658 alias_target
= sym_find(ad
.alias_target
);
7659 esym
= elfsym(alias_target
);
7661 tcc_error("unsupported forward __alias__ attribute");
7662 /* Local statics have a scope until now (for
7663 warnings), remove it here. */
7665 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7668 if (type
.t
& VT_STATIC
)
7674 else if (l
== VT_CONST
)
7675 /* uninitialized global variables may be overridden */
7676 type
.t
|= VT_EXTERN
;
7677 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7681 if (is_for_loop_init
)
/* decl: external declaration-parsing entry point.
   NOTE(review): the body (original lines 7694-7697) is missing from this
   extraction -- only the signature fragment survives.  Presumably a thin
   wrapper delegating to decl0(); confirm against upstream tccgen.c. */
7693 static void decl(int l
)
7698 /* ------------------------------------------------------------------------- */