2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
28 anon_sym: anonymous symbol index
30 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
32 ST_DATA Sym
*sym_free_first
;
33 ST_DATA
void **sym_pools
;
34 ST_DATA
int nb_sym_pools
;
36 ST_DATA Sym
*global_stack
;
37 ST_DATA Sym
*local_stack
;
38 ST_DATA Sym
*define_stack
;
39 ST_DATA Sym
*global_label_stack
;
40 ST_DATA Sym
*local_label_stack
;
42 static Sym
*all_cleanups
, *current_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int section_sym
;
49 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
50 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
51 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
53 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
55 ST_DATA
int const_wanted
; /* true if constant wanted */
56 ST_DATA
int nocode_wanted
; /* no code generation wanted */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
59 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
60 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
61 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
63 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
64 ST_DATA
const char *funcname
;
67 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
69 ST_DATA
struct switch_t
{
73 } **p
; int n
; /* list of case ranges */
74 int def_sym
; /* default symbol */
75 } *cur_switch
; /* current switch */
77 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
78 /*list of temporary local variables on the stack in current function. */
79 ST_DATA
struct temp_local_variable
{
80 int location
; //offset on stack. Svalue.c.i
83 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
84 short nb_temp_local_vars
;
86 /* ------------------------------------------------------------------------- */
88 static void gen_cast(CType
*type
);
89 static void gen_cast_s(int t
);
90 static inline CType
*pointed_type(CType
*type
);
91 static int is_compatible_types(CType
*type1
, CType
*type2
);
92 static int parse_btype(CType
*type
, AttributeDef
*ad
);
93 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
94 static void parse_expr_type(CType
*type
);
95 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
96 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
97 static void block(int *bsym
, Sym
*bcl
, int *csym
, Sym
*ccl
, int is_expr
);
98 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
99 static void decl(int l
);
100 static int decl0(int l
, int is_for_loop_init
, Sym
*);
101 static void expr_eq(void);
102 static void vla_runtime_type_size(CType
*type
, int *a
);
103 static void vla_sp_restore(void);
104 static void vla_sp_restore_root(void);
105 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
106 static inline int64_t expr_const64(void);
107 static void vpush64(int ty
, unsigned long long v
);
108 static void vpush(CType
*type
);
109 static int gvtst(int inv
, int t
);
110 static void gen_inline_functions(TCCState
*s
);
111 static void skip_or_save_block(TokenString
**str
);
112 static void gv_dup(void);
113 static int get_temp_local_var(int size
,int align
);
114 static void clear_temp_local_var_list();
117 static void reset_local_scope(void)
119 if (current_cleanups
)
120 tcc_error("ICE current_cleanups");
121 sym_pop(&all_cleanups
, NULL
, 0);
125 ST_INLN
int is_float(int t
)
129 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
132 /* we use our own 'finite' function to avoid potential problems with
133 non standard math libs */
134 /* XXX: endianness dependent */
135 ST_FUNC
int ieee_finite(double d
)
138 memcpy(p
, &d
, sizeof(double));
139 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
142 /* compiling intel long double natively */
143 #if (defined __i386__ || defined __x86_64__) \
144 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
145 # define TCC_IS_NATIVE_387
148 ST_FUNC
void test_lvalue(void)
150 if (!(vtop
->r
& VT_LVAL
))
154 ST_FUNC
void check_vstack(void)
157 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
160 /* ------------------------------------------------------------------------- */
161 /* vstack debugging aid */
164 void pv (const char *lbl
, int a
, int b
)
167 for (i
= a
; i
< a
+ b
; ++i
) {
168 SValue
*p
= &vtop
[-i
];
169 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
170 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
175 /* ------------------------------------------------------------------------- */
176 /* start of translation unit info */
177 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
182 /* file info: full path + filename */
183 section_sym
= put_elf_sym(symtab_section
, 0, 0,
184 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
185 text_section
->sh_num
, NULL
);
186 getcwd(buf
, sizeof(buf
));
188 normalize_slashes(buf
);
190 pstrcat(buf
, sizeof(buf
), "/");
191 put_stabs_r(buf
, N_SO
, 0, 0,
192 text_section
->data_offset
, text_section
, section_sym
);
193 put_stabs_r(file
->filename
, N_SO
, 0, 0,
194 text_section
->data_offset
, text_section
, section_sym
);
199 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
200 symbols can be safely used */
201 put_elf_sym(symtab_section
, 0, 0,
202 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
203 SHN_ABS
, file
->filename
);
206 /* put end of translation unit info */
207 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
211 put_stabs_r(NULL
, N_SO
, 0, 0,
212 text_section
->data_offset
, text_section
, section_sym
);
216 /* generate line number info */
217 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
221 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
222 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
224 last_line_num
= file
->line_num
;
228 /* put function symbol */
229 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
237 /* XXX: we put here a dummy type */
238 snprintf(buf
, sizeof(buf
), "%s:%c1",
239 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
240 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
241 cur_text_section
, sym
->c
);
242 /* //gr gdb wants a line at the function */
243 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
249 /* put function size */
250 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
254 put_stabn(N_FUN
, 0, 0, size
);
257 /* ------------------------------------------------------------------------- */
258 ST_FUNC
int tccgen_compile(TCCState
*s1
)
260 cur_text_section
= NULL
;
262 anon_sym
= SYM_FIRST_ANOM
;
265 nocode_wanted
= 0x80000000;
267 /* define some often used types */
269 char_pointer_type
.t
= VT_BYTE
;
270 mk_pointer(&char_pointer_type
);
272 size_type
.t
= VT_INT
| VT_UNSIGNED
;
273 ptrdiff_type
.t
= VT_INT
;
275 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
276 ptrdiff_type
.t
= VT_LLONG
;
278 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
279 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
281 func_old_type
.t
= VT_FUNC
;
282 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
283 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
284 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
288 #ifdef TCC_TARGET_ARM
293 printf("%s: **** new file\n", file
->filename
);
296 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
299 gen_inline_functions(s1
);
301 /* end of translation unit info */
306 /* ------------------------------------------------------------------------- */
307 ST_FUNC ElfSym
*elfsym(Sym
*s
)
311 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
314 /* apply storage attributes to Elf symbol */
315 ST_FUNC
void update_storage(Sym
*sym
)
318 int sym_bind
, old_sym_bind
;
324 if (sym
->a
.visibility
)
325 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
328 if (sym
->type
.t
& VT_STATIC
)
329 sym_bind
= STB_LOCAL
;
330 else if (sym
->a
.weak
)
333 sym_bind
= STB_GLOBAL
;
334 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
335 if (sym_bind
!= old_sym_bind
) {
336 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
340 if (sym
->a
.dllimport
)
341 esym
->st_other
|= ST_PE_IMPORT
;
342 if (sym
->a
.dllexport
)
343 esym
->st_other
|= ST_PE_EXPORT
;
347 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
348 get_tok_str(sym
->v
, NULL
),
349 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
357 /* ------------------------------------------------------------------------- */
358 /* update sym->c so that it points to an external symbol in section
359 'section' with value 'value' */
361 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
362 addr_t value
, unsigned long size
,
363 int can_add_underscore
)
365 int sym_type
, sym_bind
, info
, other
, t
;
369 #ifdef CONFIG_TCC_BCHECK
374 name
= get_tok_str(sym
->v
, NULL
);
375 #ifdef CONFIG_TCC_BCHECK
376 if (tcc_state
->do_bounds_check
) {
377 /* XXX: avoid doing that for statics ? */
378 /* if bound checking is activated, we change some function
379 names by adding the "__bound" prefix */
382 /* XXX: we rely only on malloc hooks */
395 strcpy(buf
, "__bound_");
403 if ((t
& VT_BTYPE
) == VT_FUNC
) {
405 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
406 sym_type
= STT_NOTYPE
;
408 sym_type
= STT_OBJECT
;
411 sym_bind
= STB_LOCAL
;
413 sym_bind
= STB_GLOBAL
;
416 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
417 Sym
*ref
= sym
->type
.ref
;
418 if (ref
->a
.nodecorate
) {
419 can_add_underscore
= 0;
421 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
422 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
424 other
|= ST_PE_STDCALL
;
425 can_add_underscore
= 0;
429 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
431 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
435 name
= get_tok_str(sym
->asm_label
, NULL
);
436 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
437 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
440 esym
->st_value
= value
;
441 esym
->st_size
= size
;
442 esym
->st_shndx
= sh_num
;
447 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
448 addr_t value
, unsigned long size
)
450 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
451 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
454 /* add a new relocation entry to symbol 'sym' in section 's' */
455 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
460 if (nocode_wanted
&& s
== cur_text_section
)
465 put_extern_sym(sym
, NULL
, 0, 0);
469 /* now we can add ELF relocation info */
470 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
474 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
476 greloca(s
, sym
, offset
, type
, 0);
480 /* ------------------------------------------------------------------------- */
481 /* symbol allocator */
482 static Sym
*__sym_malloc(void)
484 Sym
*sym_pool
, *sym
, *last_sym
;
487 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
488 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
490 last_sym
= sym_free_first
;
492 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
493 sym
->next
= last_sym
;
497 sym_free_first
= last_sym
;
501 static inline Sym
*sym_malloc(void)
505 sym
= sym_free_first
;
507 sym
= __sym_malloc();
508 sym_free_first
= sym
->next
;
511 sym
= tcc_malloc(sizeof(Sym
));
516 ST_INLN
void sym_free(Sym
*sym
)
519 sym
->next
= sym_free_first
;
520 sym_free_first
= sym
;
526 /* push, without hashing */
527 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
532 memset(s
, 0, sizeof *s
);
542 /* find a symbol and return its associated structure. 's' is the top
543 of the symbol stack */
544 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
556 /* structure lookup */
557 ST_INLN Sym
*struct_find(int v
)
560 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
562 return table_ident
[v
]->sym_struct
;
565 /* find an identifier */
566 ST_INLN Sym
*sym_find(int v
)
569 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
571 return table_ident
[v
]->sym_identifier
;
574 static int sym_scope(Sym
*s
)
576 if (IS_ENUM_VAL (s
->type
.t
))
577 return s
->type
.ref
->sym_scope
;
582 /* push a given symbol on the symbol stack */
583 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
592 s
= sym_push2(ps
, v
, type
->t
, c
);
593 s
->type
.ref
= type
->ref
;
595 /* don't record fields or anonymous symbols */
597 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
598 /* record symbol in token array */
599 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
601 ps
= &ts
->sym_struct
;
603 ps
= &ts
->sym_identifier
;
606 s
->sym_scope
= local_scope
;
607 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
608 tcc_error("redeclaration of '%s'",
609 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
614 /* push a global identifier */
615 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
618 s
= sym_push2(&global_stack
, v
, t
, c
);
619 s
->r
= VT_CONST
| VT_SYM
;
620 /* don't record anonymous symbol */
621 if (v
< SYM_FIRST_ANOM
) {
622 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
623 /* modify the top most local identifier, so that sym_identifier will
624 point to 's' when popped; happens when called from inline asm */
625 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
626 ps
= &(*ps
)->prev_tok
;
633 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
634 pop them yet from the list, but do remove them from the token array. */
635 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
645 /* remove symbol in token array */
647 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
648 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
650 ps
= &ts
->sym_struct
;
652 ps
= &ts
->sym_identifier
;
663 /* ------------------------------------------------------------------------- */
665 static void vsetc(CType
*type
, int r
, CValue
*vc
)
669 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
670 tcc_error("memory full (vstack)");
671 /* cannot let cpu flags if other instruction are generated. Also
672 avoid leaving VT_JMP anywhere except on the top of the stack
673 because it would complicate the code generator.
675 Don't do this when nocode_wanted. vtop might come from
676 !nocode_wanted regions (see 88_codeopt.c) and transforming
677 it to a register without actually generating code is wrong
678 as their value might still be used for real. All values
679 we push under nocode_wanted will eventually be popped
680 again, so that the VT_CMP/VT_JMP value will be in vtop
681 when code is unsuppressed again.
683 Same logic below in vswap(); */
684 if (vtop
>= vstack
&& !nocode_wanted
) {
685 v
= vtop
->r
& VT_VALMASK
;
686 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
698 ST_FUNC
void vswap(void)
701 /* cannot vswap cpu flags. See comment at vsetc() above */
702 if (vtop
>= vstack
&& !nocode_wanted
) {
703 int v
= vtop
->r
& VT_VALMASK
;
704 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
712 /* pop stack value */
713 ST_FUNC
void vpop(void)
716 v
= vtop
->r
& VT_VALMASK
;
717 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
718 /* for x86, we need to pop the FP stack */
720 o(0xd8dd); /* fstp %st(0) */
723 if (v
== VT_JMP
|| v
== VT_JMPI
) {
724 /* need to put correct jump if && or || without test */
730 /* push constant of type "type" with useless value */
731 ST_FUNC
void vpush(CType
*type
)
733 vset(type
, VT_CONST
, 0);
736 /* push integer constant */
737 ST_FUNC
void vpushi(int v
)
741 vsetc(&int_type
, VT_CONST
, &cval
);
744 /* push a pointer sized constant */
745 static void vpushs(addr_t v
)
749 vsetc(&size_type
, VT_CONST
, &cval
);
752 /* push arbitrary 64bit constant */
753 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
760 vsetc(&ctype
, VT_CONST
, &cval
);
763 /* push long long constant */
764 static inline void vpushll(long long v
)
766 vpush64(VT_LLONG
, v
);
769 ST_FUNC
void vset(CType
*type
, int r
, int v
)
774 vsetc(type
, r
, &cval
);
777 static void vseti(int r
, int v
)
785 ST_FUNC
void vpushv(SValue
*v
)
787 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
788 tcc_error("memory full (vstack)");
793 static void vdup(void)
798 /* rotate n first stack elements to the bottom
799 I1 ... In -> I2 ... In I1 [top is right]
801 ST_FUNC
void vrotb(int n
)
812 /* rotate the n elements before entry e towards the top
813 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
815 ST_FUNC
void vrote(SValue
*e
, int n
)
821 for(i
= 0;i
< n
- 1; i
++)
826 /* rotate n first stack elements to the top
827 I1 ... In -> In I1 ... I(n-1) [top is right]
829 ST_FUNC
void vrott(int n
)
834 /* push a symbol value of TYPE */
835 static inline void vpushsym(CType
*type
, Sym
*sym
)
839 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
843 /* Return a static symbol pointing to a section */
844 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
850 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
851 sym
->type
.t
|= VT_STATIC
;
852 put_extern_sym(sym
, sec
, offset
, size
);
856 /* push a reference to a section offset by adding a dummy symbol */
857 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
859 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
862 /* define a new external reference to a symbol 'v' of type 'u' */
863 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
869 /* push forward reference */
870 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
871 s
->type
.ref
= type
->ref
;
872 } else if (IS_ASM_SYM(s
)) {
873 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
874 s
->type
.ref
= type
->ref
;
880 /* Merge symbol attributes. */
881 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
883 if (sa1
->aligned
&& !sa
->aligned
)
884 sa
->aligned
= sa1
->aligned
;
885 sa
->packed
|= sa1
->packed
;
886 sa
->weak
|= sa1
->weak
;
887 if (sa1
->visibility
!= STV_DEFAULT
) {
888 int vis
= sa
->visibility
;
889 if (vis
== STV_DEFAULT
890 || vis
> sa1
->visibility
)
891 vis
= sa1
->visibility
;
892 sa
->visibility
= vis
;
894 sa
->dllexport
|= sa1
->dllexport
;
895 sa
->nodecorate
|= sa1
->nodecorate
;
896 sa
->dllimport
|= sa1
->dllimport
;
899 /* Merge function attributes. */
900 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
902 if (fa1
->func_call
&& !fa
->func_call
)
903 fa
->func_call
= fa1
->func_call
;
904 if (fa1
->func_type
&& !fa
->func_type
)
905 fa
->func_type
= fa1
->func_type
;
906 if (fa1
->func_args
&& !fa
->func_args
)
907 fa
->func_args
= fa1
->func_args
;
910 /* Merge attributes. */
911 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
913 merge_symattr(&ad
->a
, &ad1
->a
);
914 merge_funcattr(&ad
->f
, &ad1
->f
);
917 ad
->section
= ad1
->section
;
918 if (ad1
->alias_target
)
919 ad
->alias_target
= ad1
->alias_target
;
921 ad
->asm_label
= ad1
->asm_label
;
923 ad
->attr_mode
= ad1
->attr_mode
;
926 /* Merge some type attributes. */
927 static void patch_type(Sym
*sym
, CType
*type
)
929 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
930 if (!(sym
->type
.t
& VT_EXTERN
))
931 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
932 sym
->type
.t
&= ~VT_EXTERN
;
935 if (IS_ASM_SYM(sym
)) {
936 /* stay static if both are static */
937 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
938 sym
->type
.ref
= type
->ref
;
941 if (!is_compatible_types(&sym
->type
, type
)) {
942 tcc_error("incompatible types for redefinition of '%s'",
943 get_tok_str(sym
->v
, NULL
));
945 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
946 int static_proto
= sym
->type
.t
& VT_STATIC
;
947 /* warn if static follows non-static function declaration */
948 if ((type
->t
& VT_STATIC
) && !static_proto
&& !(type
->t
& VT_INLINE
))
949 tcc_warning("static storage ignored for redefinition of '%s'",
950 get_tok_str(sym
->v
, NULL
));
952 if (0 == (type
->t
& VT_EXTERN
)) {
953 /* put complete type, use static from prototype */
954 sym
->type
.t
= (type
->t
& ~VT_STATIC
) | static_proto
;
955 if (type
->t
& VT_INLINE
)
956 sym
->type
.t
= type
->t
;
957 sym
->type
.ref
= type
->ref
;
961 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
962 /* set array size if it was omitted in extern declaration */
963 if (sym
->type
.ref
->c
< 0)
964 sym
->type
.ref
->c
= type
->ref
->c
;
965 else if (sym
->type
.ref
->c
!= type
->ref
->c
)
966 tcc_error("conflicting type for '%s'", get_tok_str(sym
->v
, NULL
));
968 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
969 tcc_warning("storage mismatch for redefinition of '%s'",
970 get_tok_str(sym
->v
, NULL
));
975 /* Merge some storage attributes. */
976 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
979 patch_type(sym
, type
);
982 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
983 tcc_error("incompatible dll linkage for redefinition of '%s'",
984 get_tok_str(sym
->v
, NULL
));
986 merge_symattr(&sym
->a
, &ad
->a
);
988 sym
->asm_label
= ad
->asm_label
;
992 /* define a new external reference to a symbol 'v' */
993 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
997 if (!s
|| (!IS_ASM_SYM(s
) && !(s
->type
.t
& VT_EXTERN
)
998 && (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)) {
999 if (s
&& !is_compatible_types(&s
->type
, type
))
1000 tcc_error("conflicting types for '%s'", get_tok_str(s
->v
, NULL
));
1001 /* push forward reference */
1002 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
1004 s
->asm_label
= ad
->asm_label
;
1007 if (s
->type
.ref
== func_old_type
.ref
) {
1008 s
->type
.ref
= type
->ref
;
1009 s
->r
= r
| VT_CONST
| VT_SYM
;
1010 s
->type
.t
|= VT_EXTERN
;
1012 patch_storage(s
, ad
, type
);
1017 /* push a reference to global symbol v */
1018 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1020 vpushsym(type
, external_global_sym(v
, type
));
1023 /* save registers up to (vtop - n) stack entry */
1024 ST_FUNC
void save_regs(int n
)
1027 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1031 /* save r to the memory stack, and mark it as being free */
1032 ST_FUNC
void save_reg(int r
)
1034 save_reg_upstack(r
, 0);
1037 /* save r to the memory stack, and mark it as being free,
1038 if seen up to (vtop - n) stack entry */
1039 ST_FUNC
void save_reg_upstack(int r
, int n
)
1041 int l
, saved
, size
, align
;
1045 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1050 /* modify all stack values */
1053 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1054 if ((p
->r
& VT_VALMASK
) == r
||
1055 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
1056 /* must save value on stack if not already done */
1058 /* NOTE: must reload 'r' because r might be equal to r2 */
1059 r
= p
->r
& VT_VALMASK
;
1060 /* store register in the stack */
1062 if ((p
->r
& VT_LVAL
) ||
1063 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
1065 type
= &char_pointer_type
;
1069 size
= type_size(type
, &align
);
1070 l
=get_temp_local_var(size
,align
);
1071 sv
.type
.t
= type
->t
;
1072 sv
.r
= VT_LOCAL
| VT_LVAL
;
1075 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1076 /* x86 specific: need to pop fp register ST0 if saved */
1077 if (r
== TREG_ST0
) {
1078 o(0xd8dd); /* fstp %st(0) */
1082 /* special long long case */
1083 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
1090 /* mark that stack entry as being saved on the stack */
1091 if (p
->r
& VT_LVAL
) {
1092 /* also clear the bounded flag because the
1093 relocation address of the function was stored in
1095 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1097 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1105 #ifdef TCC_TARGET_ARM
1106 /* find a register of class 'rc2' with at most one reference on stack.
1107 * If none, call get_reg(rc) */
1108 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1113 for(r
=0;r
<NB_REGS
;r
++) {
1114 if (reg_classes
[r
] & rc2
) {
1117 for(p
= vstack
; p
<= vtop
; p
++) {
1118 if ((p
->r
& VT_VALMASK
) == r
||
1119 (p
->r2
& VT_VALMASK
) == r
)
1130 /* find a free register of class 'rc'. If none, save one register */
1131 ST_FUNC
int get_reg(int rc
)
1136 /* find a free register */
1137 for(r
=0;r
<NB_REGS
;r
++) {
1138 if (reg_classes
[r
] & rc
) {
1141 for(p
=vstack
;p
<=vtop
;p
++) {
1142 if ((p
->r
& VT_VALMASK
) == r
||
1143 (p
->r2
& VT_VALMASK
) == r
)
1151 /* no register left : free the first one on the stack (VERY
1152 IMPORTANT to start from the bottom to ensure that we don't
1153 spill registers used in gen_opi()) */
1154 for(p
=vstack
;p
<=vtop
;p
++) {
1155 /* look at second register (if long long) */
1156 r
= p
->r2
& VT_VALMASK
;
1157 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1159 r
= p
->r
& VT_VALMASK
;
1160 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1166 /* Should never comes here */
1170 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1171 static int get_temp_local_var(int size
,int align
){
1173 struct temp_local_variable
*temp_var
;
1180 for(i
=0;i
<nb_temp_local_vars
;i
++){
1181 temp_var
=&arr_temp_local_vars
[i
];
1182 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1185 /*check if temp_var is free*/
1187 for(p
=vstack
;p
<=vtop
;p
++) {
1189 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1190 if(p
->c
.i
==temp_var
->location
){
1197 found_var
=temp_var
->location
;
1203 loc
= (loc
- size
) & -align
;
1204 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1205 temp_var
=&arr_temp_local_vars
[i
];
1206 temp_var
->location
=loc
;
1207 temp_var
->size
=size
;
1208 temp_var
->align
=align
;
1209 nb_temp_local_vars
++;
1216 static void clear_temp_local_var_list(){
1217 nb_temp_local_vars
=0;
1220 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1222 static void move_reg(int r
, int s
, int t
)
1236 /* get address of vtop (vtop MUST BE an lvalue) */
1237 ST_FUNC
void gaddrof(void)
1239 vtop
->r
&= ~VT_LVAL
;
1240 /* tricky: if saved lvalue, then we can go back to lvalue */
1241 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1242 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1247 #ifdef CONFIG_TCC_BCHECK
1248 /* generate lvalue bound code */
1249 static void gbound(void)
1254 vtop
->r
&= ~VT_MUSTBOUND
;
1255 /* if lvalue, then use checking code before dereferencing */
1256 if (vtop
->r
& VT_LVAL
) {
1257 /* if not VT_BOUNDED value, then make one */
1258 if (!(vtop
->r
& VT_BOUNDED
)) {
1259 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1260 /* must save type because we must set it to int to get pointer */
1262 vtop
->type
.t
= VT_PTR
;
1265 gen_bounded_ptr_add();
1266 vtop
->r
|= lval_type
;
1269 /* then check for dereferencing */
1270 gen_bounded_ptr_deref();
1275 static void incr_bf_adr(int o
)
1277 vtop
->type
= char_pointer_type
;
1281 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1282 | (VT_BYTE
|VT_UNSIGNED
);
1283 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1284 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1287 /* single-byte load mode for packed or otherwise unaligned bitfields */
1288 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1291 save_reg_upstack(vtop
->r
, 1);
1292 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1293 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1302 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1304 vpushi((1 << n
) - 1), gen_op('&');
1307 vpushi(bits
), gen_op(TOK_SHL
);
1310 bits
+= n
, bit_size
-= n
, o
= 1;
1313 if (!(type
->t
& VT_UNSIGNED
)) {
1314 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1315 vpushi(n
), gen_op(TOK_SHL
);
1316 vpushi(n
), gen_op(TOK_SAR
);
1320 /* single-byte store mode for packed or otherwise unaligned bitfields */
1321 static void store_packed_bf(int bit_pos
, int bit_size
)
1323 int bits
, n
, o
, m
, c
;
1325 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1327 save_reg_upstack(vtop
->r
, 1);
1328 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1330 incr_bf_adr(o
); // X B
1332 c
? vdup() : gv_dup(); // B V X
1335 vpushi(bits
), gen_op(TOK_SHR
);
1337 vpushi(bit_pos
), gen_op(TOK_SHL
);
1342 m
= ((1 << n
) - 1) << bit_pos
;
1343 vpushi(m
), gen_op('&'); // X B V1
1344 vpushv(vtop
-1); // X B V1 B
1345 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1346 gen_op('&'); // X B V1 B1
1347 gen_op('|'); // X B V2
1349 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1350 vstore(), vpop(); // X B
1351 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1356 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1359 if (0 == sv
->type
.ref
)
1361 t
= sv
->type
.ref
->auxtype
;
1362 if (t
!= -1 && t
!= VT_STRUCT
) {
1363 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1364 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1369 /* store vtop a register belonging to class 'rc'. lvalues are
1370 converted to values. Cannot be used if cannot be converted to
1371 register value (such as structures). */
1372 ST_FUNC
int gv(int rc
)
1374 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1376 /* NOTE: get_reg can modify vstack[] */
1377 if (vtop
->type
.t
& VT_BITFIELD
) {
1380 bit_pos
= BIT_POS(vtop
->type
.t
);
1381 bit_size
= BIT_SIZE(vtop
->type
.t
);
1382 /* remove bit field info to avoid loops */
1383 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1386 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1387 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1388 type
.t
|= VT_UNSIGNED
;
1390 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1392 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1397 if (r
== VT_STRUCT
) {
1398 load_packed_bf(&type
, bit_pos
, bit_size
);
1400 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1401 /* cast to int to propagate signedness in following ops */
1403 /* generate shifts */
1404 vpushi(bits
- (bit_pos
+ bit_size
));
1406 vpushi(bits
- bit_size
);
1407 /* NOTE: transformed to SHR if unsigned */
1412 if (is_float(vtop
->type
.t
) &&
1413 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1414 unsigned long offset
;
1415 /* CPUs usually cannot use float constants, so we store them
1416 generically in data segment */
1417 size
= type_size(&vtop
->type
, &align
);
1419 size
= 0, align
= 1;
1420 offset
= section_add(data_section
, size
, align
);
1421 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1423 init_putv(&vtop
->type
, data_section
, offset
);
1426 #ifdef CONFIG_TCC_BCHECK
1427 if (vtop
->r
& VT_MUSTBOUND
)
1431 r
= vtop
->r
& VT_VALMASK
;
1432 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1433 #ifndef TCC_TARGET_ARM64
1436 #ifdef TCC_TARGET_X86_64
1437 else if (rc
== RC_FRET
)
1441 /* need to reload if:
1443 - lvalue (need to dereference pointer)
1444 - already a register, but not in the right class */
1446 || (vtop
->r
& VT_LVAL
)
1447 || !(reg_classes
[r
] & rc
)
1449 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1450 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1452 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1458 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1459 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1461 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1462 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1463 unsigned long long ll
;
1465 int r2
, original_type
;
1466 original_type
= vtop
->type
.t
;
1467 /* two register type load : expand to two words
1470 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1473 vtop
->c
.i
= ll
; /* first word */
1475 vtop
->r
= r
; /* save register value */
1476 vpushi(ll
>> 32); /* second word */
1479 if (vtop
->r
& VT_LVAL
) {
1480 /* We do not want to modifier the long long
1481 pointer here, so the safest (and less
1482 efficient) is to save all the other registers
1483 in the stack. XXX: totally inefficient. */
1487 /* lvalue_save: save only if used further down the stack */
1488 save_reg_upstack(vtop
->r
, 1);
1490 /* load from memory */
1491 vtop
->type
.t
= load_type
;
1494 vtop
[-1].r
= r
; /* save register value */
1495 /* increment pointer to get second word */
1496 vtop
->type
.t
= addr_type
;
1501 vtop
->type
.t
= load_type
;
1503 /* move registers */
1506 vtop
[-1].r
= r
; /* save register value */
1507 vtop
->r
= vtop
[-1].r2
;
1509 /* Allocate second register. Here we rely on the fact that
1510 get_reg() tries first to free r2 of an SValue. */
1514 /* write second register */
1516 vtop
->type
.t
= original_type
;
1517 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1519 /* lvalue of scalar type : need to use lvalue type
1520 because of possible cast */
1523 /* compute memory access type */
1524 if (vtop
->r
& VT_LVAL_BYTE
)
1526 else if (vtop
->r
& VT_LVAL_SHORT
)
1528 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1532 /* restore wanted type */
1535 /* one register type load */
1540 #ifdef TCC_TARGET_C67
1541 /* uses register pairs for doubles */
1542 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1549 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1550 ST_FUNC
void gv2(int rc1
, int rc2
)
1554 /* generate more generic register first. But VT_JMP or VT_CMP
1555 values must be generated first in all cases to avoid possible
1557 v
= vtop
[0].r
& VT_VALMASK
;
1558 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1563 /* test if reload is needed for first register */
1564 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1574 /* test if reload is needed for first register */
1575 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1581 #ifndef TCC_TARGET_ARM64
1582 /* wrapper around RC_FRET to return a register by type */
1583 static int rc_fret(int t
)
1585 #ifdef TCC_TARGET_X86_64
1586 if (t
== VT_LDOUBLE
) {
1594 /* wrapper around REG_FRET to return a register by type */
1595 static int reg_fret(int t
)
1597 #ifdef TCC_TARGET_X86_64
1598 if (t
== VT_LDOUBLE
) {
1606 /* expand 64bit on stack in two ints */
1607 ST_FUNC
void lexpand(void)
1610 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1611 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1612 if (v
== VT_CONST
) {
1615 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1621 vtop
[0].r
= vtop
[-1].r2
;
1622 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1624 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1629 /* build a long long from two ints */
1630 static void lbuild(int t
)
1632 gv2(RC_INT
, RC_INT
);
1633 vtop
[-1].r2
= vtop
[0].r
;
1634 vtop
[-1].type
.t
= t
;
1639 /* convert stack entry to register and duplicate its value in another
1641 static void gv_dup(void)
1648 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1649 if (t
& VT_BITFIELD
) {
1659 /* stack: H L L1 H1 */
1669 /* duplicate value */
1674 #ifdef TCC_TARGET_X86_64
1675 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1685 load(r1
, &sv
); /* move r to r1 */
1687 /* duplicates value */
1693 /* Generate value test
1695 * Generate a test for any value (jump, comparison and integers) */
1696 ST_FUNC
int gvtst(int inv
, int t
)
1698 int v
= vtop
->r
& VT_VALMASK
;
1699 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1703 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1704 /* constant jmp optimization */
1705 if ((vtop
->c
.i
!= 0) != inv
)
1710 return gtst(inv
, t
);
1714 /* generate CPU independent (unsigned) long long operations */
1715 static void gen_opl(int op
)
1717 int t
, a
, b
, op1
, c
, i
;
1719 unsigned short reg_iret
= REG_IRET
;
1720 unsigned short reg_lret
= REG_LRET
;
1726 func
= TOK___divdi3
;
1729 func
= TOK___udivdi3
;
1732 func
= TOK___moddi3
;
1735 func
= TOK___umoddi3
;
1742 /* call generic long long function */
1743 vpush_global_sym(&func_old_type
, func
);
1748 vtop
->r2
= reg_lret
;
1756 //pv("gen_opl A",0,2);
1762 /* stack: L1 H1 L2 H2 */
1767 vtop
[-2] = vtop
[-3];
1770 /* stack: H1 H2 L1 L2 */
1771 //pv("gen_opl B",0,4);
1777 /* stack: H1 H2 L1 L2 ML MH */
1780 /* stack: ML MH H1 H2 L1 L2 */
1784 /* stack: ML MH H1 L2 H2 L1 */
1789 /* stack: ML MH M1 M2 */
1792 } else if (op
== '+' || op
== '-') {
1793 /* XXX: add non carry method too (for MIPS or alpha) */
1799 /* stack: H1 H2 (L1 op L2) */
1802 gen_op(op1
+ 1); /* TOK_xxxC2 */
1805 /* stack: H1 H2 (L1 op L2) */
1808 /* stack: (L1 op L2) H1 H2 */
1810 /* stack: (L1 op L2) (H1 op H2) */
1818 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1819 t
= vtop
[-1].type
.t
;
1823 /* stack: L H shift */
1825 /* constant: simpler */
1826 /* NOTE: all comments are for SHL. the other cases are
1827 done by swapping words */
1838 if (op
!= TOK_SAR
) {
1871 /* XXX: should provide a faster fallback on x86 ? */
1874 func
= TOK___ashrdi3
;
1877 func
= TOK___lshrdi3
;
1880 func
= TOK___ashldi3
;
1886 /* compare operations */
1892 /* stack: L1 H1 L2 H2 */
1894 vtop
[-1] = vtop
[-2];
1896 /* stack: L1 L2 H1 H2 */
1899 /* when values are equal, we need to compare low words. since
1900 the jump is inverted, we invert the test too. */
1903 else if (op1
== TOK_GT
)
1905 else if (op1
== TOK_ULT
)
1907 else if (op1
== TOK_UGT
)
1917 /* generate non equal test */
1923 /* compare low. Always unsigned */
1927 else if (op1
== TOK_LE
)
1929 else if (op1
== TOK_GT
)
1931 else if (op1
== TOK_GE
)
1942 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
1944 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
1945 return (a
^ b
) >> 63 ? -x
: x
;
1948 static int gen_opic_lt(uint64_t a
, uint64_t b
)
1950 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
1953 /* handle integer constant optimizations and various machine
1955 static void gen_opic(int op
)
1957 SValue
*v1
= vtop
- 1;
1959 int t1
= v1
->type
.t
& VT_BTYPE
;
1960 int t2
= v2
->type
.t
& VT_BTYPE
;
1961 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1962 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1963 uint64_t l1
= c1
? v1
->c
.i
: 0;
1964 uint64_t l2
= c2
? v2
->c
.i
: 0;
1965 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1967 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1968 l1
= ((uint32_t)l1
|
1969 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1970 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1971 l2
= ((uint32_t)l2
|
1972 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1976 case '+': l1
+= l2
; break;
1977 case '-': l1
-= l2
; break;
1978 case '&': l1
&= l2
; break;
1979 case '^': l1
^= l2
; break;
1980 case '|': l1
|= l2
; break;
1981 case '*': l1
*= l2
; break;
1988 /* if division by zero, generate explicit division */
1991 tcc_error("division by zero in constant");
1995 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1996 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1997 case TOK_UDIV
: l1
= l1
/ l2
; break;
1998 case TOK_UMOD
: l1
= l1
% l2
; break;
2001 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2002 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2004 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2007 case TOK_ULT
: l1
= l1
< l2
; break;
2008 case TOK_UGE
: l1
= l1
>= l2
; break;
2009 case TOK_EQ
: l1
= l1
== l2
; break;
2010 case TOK_NE
: l1
= l1
!= l2
; break;
2011 case TOK_ULE
: l1
= l1
<= l2
; break;
2012 case TOK_UGT
: l1
= l1
> l2
; break;
2013 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2014 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2015 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2016 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2018 case TOK_LAND
: l1
= l1
&& l2
; break;
2019 case TOK_LOR
: l1
= l1
|| l2
; break;
2023 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2024 l1
= ((uint32_t)l1
|
2025 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2029 /* if commutative ops, put c2 as constant */
2030 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2031 op
== '|' || op
== '*')) {
2033 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2034 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2036 if (!const_wanted
&&
2038 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2039 (l1
== -1 && op
== TOK_SAR
))) {
2040 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2042 } else if (!const_wanted
&&
2043 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2045 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2046 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2047 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2052 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2055 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2056 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2059 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2060 /* filter out NOP operations like x*1, x-0, x&-1... */
2062 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2063 /* try to use shifts instead of muls or divs */
2064 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2073 else if (op
== TOK_PDIV
)
2079 } else if (c2
&& (op
== '+' || op
== '-') &&
2080 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2081 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2082 /* symbol + constant case */
2086 /* The backends can't always deal with addends to symbols
2087 larger than +-1<<31. Don't construct such. */
2094 /* call low level op generator */
2095 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2096 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2104 /* generate a floating point operation with constant propagation */
2105 static void gen_opif(int op
)
2109 #if defined _MSC_VER && defined _AMD64_
2110 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2117 /* currently, we cannot do computations with forward symbols */
2118 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2119 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2121 if (v1
->type
.t
== VT_FLOAT
) {
2124 } else if (v1
->type
.t
== VT_DOUBLE
) {
2132 /* NOTE: we only do constant propagation if finite number (not
2133 NaN or infinity) (ANSI spec) */
2134 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2138 case '+': f1
+= f2
; break;
2139 case '-': f1
-= f2
; break;
2140 case '*': f1
*= f2
; break;
2143 /* If not in initializer we need to potentially generate
2144 FP exceptions at runtime, otherwise we want to fold. */
2150 /* XXX: also handles tests ? */
2154 /* XXX: overflow test ? */
2155 if (v1
->type
.t
== VT_FLOAT
) {
2157 } else if (v1
->type
.t
== VT_DOUBLE
) {
2169 static int pointed_size(CType
*type
)
2172 return type_size(pointed_type(type
), &align
);
2175 static void vla_runtime_pointed_size(CType
*type
)
2178 vla_runtime_type_size(pointed_type(type
), &align
);
2181 static inline int is_null_pointer(SValue
*p
)
2183 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2185 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2186 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2187 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2188 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2189 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2190 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2193 static inline int is_integer_btype(int bt
)
2195 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2196 bt
== VT_INT
|| bt
== VT_LLONG
);
2199 /* check types for comparison or subtraction of pointers */
2200 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2202 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2205 /* null pointers are accepted for all comparisons as gcc */
2206 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2210 bt1
= type1
->t
& VT_BTYPE
;
2211 bt2
= type2
->t
& VT_BTYPE
;
2212 /* accept comparison between pointer and integer with a warning */
2213 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2214 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2215 tcc_warning("comparison between pointer and integer");
2219 /* both must be pointers or implicit function pointers */
2220 if (bt1
== VT_PTR
) {
2221 type1
= pointed_type(type1
);
2222 } else if (bt1
!= VT_FUNC
)
2223 goto invalid_operands
;
2225 if (bt2
== VT_PTR
) {
2226 type2
= pointed_type(type2
);
2227 } else if (bt2
!= VT_FUNC
) {
2229 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2231 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2232 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2236 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2237 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2238 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2239 /* gcc-like error if '-' is used */
2241 goto invalid_operands
;
2243 tcc_warning("comparison of distinct pointer types lacks a cast");
2247 /* generic gen_op: handles types problems */
2248 ST_FUNC
void gen_op(int op
)
2250 int u
, t1
, t2
, bt1
, bt2
, t
;
2254 t1
= vtop
[-1].type
.t
;
2255 t2
= vtop
[0].type
.t
;
2256 bt1
= t1
& VT_BTYPE
;
2257 bt2
= t2
& VT_BTYPE
;
2259 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2260 tcc_error("operation on a struct");
2261 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2262 if (bt2
== VT_FUNC
) {
2263 mk_pointer(&vtop
->type
);
2266 if (bt1
== VT_FUNC
) {
2268 mk_pointer(&vtop
->type
);
2273 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2274 /* at least one operand is a pointer */
2275 /* relational op: must be both pointers */
2276 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2277 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2278 /* pointers are handled are unsigned */
2280 t
= VT_LLONG
| VT_UNSIGNED
;
2282 t
= VT_INT
| VT_UNSIGNED
;
2286 /* if both pointers, then it must be the '-' op */
2287 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2289 tcc_error("cannot use pointers here");
2290 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2291 /* XXX: check that types are compatible */
2292 if (vtop
[-1].type
.t
& VT_VLA
) {
2293 vla_runtime_pointed_size(&vtop
[-1].type
);
2295 vpushi(pointed_size(&vtop
[-1].type
));
2299 vtop
->type
.t
= ptrdiff_type
.t
;
2303 /* exactly one pointer : must be '+' or '-'. */
2304 if (op
!= '-' && op
!= '+')
2305 tcc_error("cannot use pointers here");
2306 /* Put pointer as first operand */
2307 if (bt2
== VT_PTR
) {
2309 t
= t1
, t1
= t2
, t2
= t
;
2312 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2313 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2316 type1
= vtop
[-1].type
;
2317 type1
.t
&= ~VT_ARRAY
;
2318 if (vtop
[-1].type
.t
& VT_VLA
)
2319 vla_runtime_pointed_size(&vtop
[-1].type
);
2321 u
= pointed_size(&vtop
[-1].type
);
2323 tcc_error("unknown array element size");
2327 /* XXX: cast to int ? (long long case) */
2333 /* #ifdef CONFIG_TCC_BCHECK
2334 The main reason to removing this code:
2341 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2342 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2344 When this code is on. then the output looks like
2346 v+(i-j) = 0xbff84000
2348 /* if evaluating constant expression, no code should be
2349 generated, so no bound check */
2350 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2351 /* if bounded pointers, we generate a special code to
2358 gen_bounded_ptr_add();
2364 /* put again type if gen_opic() swaped operands */
2367 } else if (is_float(bt1
) || is_float(bt2
)) {
2368 /* compute bigger type and do implicit casts */
2369 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2371 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2376 /* floats can only be used for a few operations */
2377 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2378 (op
< TOK_ULT
|| op
> TOK_GT
))
2379 tcc_error("invalid operands for binary operation");
2381 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2382 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2383 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2385 t
|= (VT_LONG
& t1
);
2387 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2388 /* cast to biggest op */
2389 t
= VT_LLONG
| VT_LONG
;
2390 if (bt1
== VT_LLONG
)
2392 if (bt2
== VT_LLONG
)
2394 /* convert to unsigned if it does not fit in a long long */
2395 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2396 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2400 /* integer operations */
2401 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2402 /* convert to unsigned if it does not fit in an integer */
2403 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2404 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2407 /* XXX: currently, some unsigned operations are explicit, so
2408 we modify them here */
2409 if (t
& VT_UNSIGNED
) {
2416 else if (op
== TOK_LT
)
2418 else if (op
== TOK_GT
)
2420 else if (op
== TOK_LE
)
2422 else if (op
== TOK_GE
)
2430 /* special case for shifts and long long: we keep the shift as
2432 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2439 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2440 /* relational op: the result is an int */
2441 vtop
->type
.t
= VT_INT
;
2446 // Make sure that we have converted to an rvalue:
2447 if (vtop
->r
& VT_LVAL
)
2448 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2451 #ifndef TCC_TARGET_ARM
2452 /* generic itof for unsigned long long case */
2453 static void gen_cvt_itof1(int t
)
2455 #ifdef TCC_TARGET_ARM64
2458 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2459 (VT_LLONG
| VT_UNSIGNED
)) {
2462 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2463 #if LDOUBLE_SIZE != 8
2464 else if (t
== VT_LDOUBLE
)
2465 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2468 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2472 vtop
->r
= reg_fret(t
);
2480 /* generic ftoi for unsigned long long case */
2481 static void gen_cvt_ftoi1(int t
)
2483 #ifdef TCC_TARGET_ARM64
2488 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2489 /* not handled natively */
2490 st
= vtop
->type
.t
& VT_BTYPE
;
2492 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2493 #if LDOUBLE_SIZE != 8
2494 else if (st
== VT_LDOUBLE
)
2495 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2498 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2503 vtop
->r2
= REG_LRET
;
2510 /* force char or short cast */
2511 static void force_charshort_cast(int t
)
2515 /* cannot cast static initializers */
2516 if (STATIC_DATA_WANTED
)
2520 /* XXX: add optimization if lvalue : just change type and offset */
2525 if (t
& VT_UNSIGNED
) {
2526 vpushi((1 << bits
) - 1);
2529 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2535 /* result must be signed or the SAR is converted to an SHL
2536 This was not the case when "t" was a signed short
2537 and the last value on the stack was an unsigned int */
2538 vtop
->type
.t
&= ~VT_UNSIGNED
;
2544 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2545 static void gen_cast_s(int t
)
2553 static void gen_cast(CType
*type
)
2555 int sbt
, dbt
, sf
, df
, c
, p
;
2557 /* special delayed cast for char/short */
2558 /* XXX: in some cases (multiple cascaded casts), it may still
2560 if (vtop
->r
& VT_MUSTCAST
) {
2561 vtop
->r
&= ~VT_MUSTCAST
;
2562 force_charshort_cast(vtop
->type
.t
);
2565 /* bitfields first get cast to ints */
2566 if (vtop
->type
.t
& VT_BITFIELD
) {
2570 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2571 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2576 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2577 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2578 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2579 c
&= dbt
!= VT_LDOUBLE
;
2582 /* constant case: we can do it now */
2583 /* XXX: in ISOC, cannot do it if error in convert */
2584 if (sbt
== VT_FLOAT
)
2585 vtop
->c
.ld
= vtop
->c
.f
;
2586 else if (sbt
== VT_DOUBLE
)
2587 vtop
->c
.ld
= vtop
->c
.d
;
2590 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2591 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2592 vtop
->c
.ld
= vtop
->c
.i
;
2594 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2596 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2597 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2599 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2602 if (dbt
== VT_FLOAT
)
2603 vtop
->c
.f
= (float)vtop
->c
.ld
;
2604 else if (dbt
== VT_DOUBLE
)
2605 vtop
->c
.d
= (double)vtop
->c
.ld
;
2606 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2607 vtop
->c
.i
= vtop
->c
.ld
;
2608 } else if (sf
&& dbt
== VT_BOOL
) {
2609 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2612 vtop
->c
.i
= vtop
->c
.ld
;
2613 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2615 else if (sbt
& VT_UNSIGNED
)
2616 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2618 else if (sbt
== VT_PTR
)
2621 else if (sbt
!= VT_LLONG
)
2622 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2623 -(vtop
->c
.i
& 0x80000000));
2625 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2627 else if (dbt
== VT_BOOL
)
2628 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2630 else if (dbt
== VT_PTR
)
2633 else if (dbt
!= VT_LLONG
) {
2634 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2635 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2638 if (!(dbt
& VT_UNSIGNED
))
2639 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2642 } else if (p
&& dbt
== VT_BOOL
) {
2646 /* non constant case: generate code */
2648 /* convert from fp to fp */
2651 /* convert int to fp */
2654 /* convert fp to int */
2655 if (dbt
== VT_BOOL
) {
2659 /* we handle char/short/etc... with generic code */
2660 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2661 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2665 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2666 /* additional cast for char/short... */
2672 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2673 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2674 /* scalar to long long */
2675 /* machine independent conversion */
2677 /* generate high word */
2678 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2682 if (sbt
== VT_PTR
) {
2683 /* cast from pointer to int before we apply
2684 shift operation, which pointers don't support*/
2691 /* patch second register */
2692 vtop
[-1].r2
= vtop
->r
;
2696 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2697 (dbt
& VT_BTYPE
) == VT_PTR
||
2698 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2699 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2700 (sbt
& VT_BTYPE
) != VT_PTR
&&
2701 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2702 /* need to convert from 32bit to 64bit */
2704 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2705 #if defined(TCC_TARGET_ARM64)
2707 #elif defined(TCC_TARGET_X86_64)
2709 /* x86_64 specific: movslq */
2711 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2718 } else if (dbt
== VT_BOOL
) {
2719 /* scalar to bool */
2722 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2723 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2724 if (sbt
== VT_PTR
) {
2725 vtop
->type
.t
= VT_INT
;
2726 tcc_warning("nonportable conversion from pointer to char/short");
2728 force_charshort_cast(dbt
);
2729 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2731 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2733 /* from long long: just take low order word */
2738 vtop
->type
.t
|= VT_UNSIGNED
;
2742 /* if lvalue and single word type, nothing to do because
2743 the lvalue already contains the real type size (see
2744 VT_LVAL_xxx constants) */
2747 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2748 /* if we are casting between pointer types,
2749 we must update the VT_LVAL_xxx size */
2750 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2751 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2754 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
2757 /* return type size as known at compile time. Put alignment at 'a' */
2758 ST_FUNC
int type_size(CType
*type
, int *a
)
2763 bt
= type
->t
& VT_BTYPE
;
2764 if (bt
== VT_STRUCT
) {
2769 } else if (bt
== VT_PTR
) {
2770 if (type
->t
& VT_ARRAY
) {
2774 ts
= type_size(&s
->type
, a
);
2776 if (ts
< 0 && s
->c
< 0)
2784 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
2785 return -1; /* incomplete enum */
2786 } else if (bt
== VT_LDOUBLE
) {
2788 return LDOUBLE_SIZE
;
2789 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2790 #ifdef TCC_TARGET_I386
2791 #ifdef TCC_TARGET_PE
2796 #elif defined(TCC_TARGET_ARM)
2806 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2809 } else if (bt
== VT_SHORT
) {
2812 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2816 /* char, void, function, _Bool */
2822 /* push type size as known at runtime time on top of value stack. Put
2824 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2826 if (type
->t
& VT_VLA
) {
2827 type_size(&type
->ref
->type
, a
);
2828 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2830 vpushi(type_size(type
, a
));
2834 static void vla_sp_restore(void) {
2835 if (vlas_in_scope
) {
2836 gen_vla_sp_restore(vla_sp_loc
);
2840 static void vla_sp_restore_root(void) {
2841 if (vlas_in_scope
) {
2842 gen_vla_sp_restore(vla_sp_root_loc
);
2846 /* return the pointed type of t */
2847 static inline CType
*pointed_type(CType
*type
)
2849 return &type
->ref
->type
;
2852 /* modify type so that its it is a pointer to type. */
2853 ST_FUNC
void mk_pointer(CType
*type
)
2856 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2857 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2861 /* compare function types. OLD functions match any new functions */
2862 static int is_compatible_func(CType
*type1
, CType
*type2
)
2868 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2870 /* check func_call */
2871 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2873 /* XXX: not complete */
2874 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2876 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2878 while (s1
!= NULL
) {
2881 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2891 /* return true if type1 and type2 are the same. If unqualified is
2892 true, qualifiers on the types are ignored.
2894 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2898 t1
= type1
->t
& VT_TYPE
;
2899 t2
= type2
->t
& VT_TYPE
;
2901 /* strip qualifiers before comparing */
2902 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2903 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2906 /* Default Vs explicit signedness only matters for char */
2907 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2911 /* XXX: bitfields ? */
2914 /* test more complicated cases */
2915 bt1
= t1
& (VT_BTYPE
| VT_ARRAY
);
2916 if (bt1
== VT_PTR
) {
2917 type1
= pointed_type(type1
);
2918 type2
= pointed_type(type2
);
2919 return is_compatible_types(type1
, type2
);
2920 } else if (bt1
& VT_ARRAY
) {
2921 return type1
->ref
->c
< 0 || type2
->ref
->c
< 0
2922 || type1
->ref
->c
== type2
->ref
->c
;
2923 } else if (bt1
== VT_STRUCT
) {
2924 return (type1
->ref
== type2
->ref
);
2925 } else if (bt1
== VT_FUNC
) {
2926 return is_compatible_func(type1
, type2
);
2927 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
2928 return type1
->ref
== type2
->ref
;
2934 /* return true if type1 and type2 are exactly the same (including
2937 static int is_compatible_types(CType
*type1
, CType
*type2
)
2939 return compare_types(type1
,type2
,0);
2942 /* return true if type1 and type2 are the same (ignoring qualifiers).
2944 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2946 return compare_types(type1
,type2
,1);
2949 /* print a type. If 'varstr' is not NULL, then the variable is also
2950 printed in the type */
2952 /* XXX: add array and function pointers */
2953 static void type_to_str(char *buf
, int buf_size
,
2954 CType
*type
, const char *varstr
)
2966 pstrcat(buf
, buf_size
, "extern ");
2968 pstrcat(buf
, buf_size
, "static ");
2970 pstrcat(buf
, buf_size
, "typedef ");
2972 pstrcat(buf
, buf_size
, "inline ");
2973 if (t
& VT_VOLATILE
)
2974 pstrcat(buf
, buf_size
, "volatile ");
2975 if (t
& VT_CONSTANT
)
2976 pstrcat(buf
, buf_size
, "const ");
2978 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2979 || ((t
& VT_UNSIGNED
)
2980 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2983 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2985 buf_size
-= strlen(buf
);
3020 tstr
= "long double";
3022 pstrcat(buf
, buf_size
, tstr
);
3029 pstrcat(buf
, buf_size
, tstr
);
3030 v
= type
->ref
->v
& ~SYM_STRUCT
;
3031 if (v
>= SYM_FIRST_ANOM
)
3032 pstrcat(buf
, buf_size
, "<anonymous>");
3034 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3039 if (varstr
&& '*' == *varstr
) {
3040 pstrcat(buf1
, sizeof(buf1
), "(");
3041 pstrcat(buf1
, sizeof(buf1
), varstr
);
3042 pstrcat(buf1
, sizeof(buf1
), ")");
3044 pstrcat(buf1
, buf_size
, "(");
3046 while (sa
!= NULL
) {
3048 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3049 pstrcat(buf1
, sizeof(buf1
), buf2
);
3052 pstrcat(buf1
, sizeof(buf1
), ", ");
3054 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3055 pstrcat(buf1
, sizeof(buf1
), ", ...");
3056 pstrcat(buf1
, sizeof(buf1
), ")");
3057 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3062 if (varstr
&& '*' == *varstr
)
3063 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3065 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3066 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3069 pstrcpy(buf1
, sizeof(buf1
), "*");
3070 if (t
& VT_CONSTANT
)
3071 pstrcat(buf1
, buf_size
, "const ");
3072 if (t
& VT_VOLATILE
)
3073 pstrcat(buf1
, buf_size
, "volatile ");
3075 pstrcat(buf1
, sizeof(buf1
), varstr
);
3076 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3080 pstrcat(buf
, buf_size
, " ");
3081 pstrcat(buf
, buf_size
, varstr
);
3086 /* verify type compatibility to store vtop in 'dt' type, and generate
3088 static void gen_assign_cast(CType
*dt
)
3090 CType
*st
, *type1
, *type2
;
3091 char buf1
[256], buf2
[256];
3092 int dbt
, sbt
, qualwarn
, lvl
;
3094 st
= &vtop
->type
; /* source type */
3095 dbt
= dt
->t
& VT_BTYPE
;
3096 sbt
= st
->t
& VT_BTYPE
;
3097 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3098 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3099 ; /* It is Ok if both are void */
3101 tcc_error("cannot cast from/to void");
3103 if (dt
->t
& VT_CONSTANT
)
3104 tcc_warning("assignment of read-only location");
3107 /* special cases for pointers */
3108 /* '0' can also be a pointer */
3109 if (is_null_pointer(vtop
))
3111 /* accept implicit pointer to integer cast with warning */
3112 if (is_integer_btype(sbt
)) {
3113 tcc_warning("assignment makes pointer from integer without a cast");
3116 type1
= pointed_type(dt
);
3118 type2
= pointed_type(st
);
3119 else if (sbt
== VT_FUNC
)
3120 type2
= st
; /* a function is implicitly a function pointer */
3123 if (is_compatible_types(type1
, type2
))
3125 for (qualwarn
= lvl
= 0;; ++lvl
) {
3126 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3127 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3129 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3130 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3131 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3133 type1
= pointed_type(type1
);
3134 type2
= pointed_type(type2
);
3136 if (!is_compatible_unqualified_types(type1
, type2
)) {
3137 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3138 /* void * can match anything */
3139 } else if (dbt
== sbt
3140 && is_integer_btype(sbt
& VT_BTYPE
)
3141 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3142 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3143 /* Like GCC don't warn by default for merely changes
3144 in pointer target signedness. Do warn for different
3145 base types, though, in particular for unsigned enums
3146 and signed int targets. */
3148 tcc_warning("assignment from incompatible pointer type");
3153 tcc_warning("assignment discards qualifiers from pointer target type");
3159 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3160 tcc_warning("assignment makes integer from pointer without a cast");
3161 } else if (sbt
== VT_STRUCT
) {
3162 goto case_VT_STRUCT
;
3164 /* XXX: more tests */
3168 if (!is_compatible_unqualified_types(dt
, st
)) {
3170 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3171 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3172 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3179 /* store vtop in lvalue pushed on stack */
3180 ST_FUNC
void vstore(void)
3182 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3184 ft
= vtop
[-1].type
.t
;
3185 sbt
= vtop
->type
.t
& VT_BTYPE
;
3186 dbt
= ft
& VT_BTYPE
;
3187 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3188 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3189 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3190 /* optimize char/short casts */
3191 delayed_cast
= VT_MUSTCAST
;
3192 vtop
->type
.t
= ft
& VT_TYPE
;
3193 /* XXX: factorize */
3194 if (ft
& VT_CONSTANT
)
3195 tcc_warning("assignment of read-only location");
3198 if (!(ft
& VT_BITFIELD
))
3199 gen_assign_cast(&vtop
[-1].type
);
3202 if (sbt
== VT_STRUCT
) {
3203 /* if structure, only generate pointer */
3204 /* structure assignment : generate memcpy */
3205 /* XXX: optimize if small size */
3206 size
= type_size(&vtop
->type
, &align
);
3210 vtop
->type
.t
= VT_PTR
;
3213 /* address of memcpy() */
3216 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3217 else if(!(align
& 3))
3218 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3221 /* Use memmove, rather than memcpy, as dest and src may be same: */
3222 vpush_global_sym(&func_old_type
, TOK_memmove
);
3227 vtop
->type
.t
= VT_PTR
;
3233 /* leave source on stack */
3234 } else if (ft
& VT_BITFIELD
) {
3235 /* bitfield store handling */
3237 /* save lvalue as expression result (example: s.b = s.a = n;) */
3238 vdup(), vtop
[-1] = vtop
[-2];
3240 bit_pos
= BIT_POS(ft
);
3241 bit_size
= BIT_SIZE(ft
);
3242 /* remove bit field info to avoid loops */
3243 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3245 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3246 gen_cast(&vtop
[-1].type
);
3247 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3250 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3251 if (r
== VT_STRUCT
) {
3252 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3253 store_packed_bf(bit_pos
, bit_size
);
3255 unsigned long long mask
= (1ULL << bit_size
) - 1;
3256 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3258 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3261 vpushi((unsigned)mask
);
3268 /* duplicate destination */
3271 /* load destination, mask and or with source */
3272 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3273 vpushll(~(mask
<< bit_pos
));
3275 vpushi(~((unsigned)mask
<< bit_pos
));
3280 /* ... and discard */
3283 } else if (dbt
== VT_VOID
) {
3286 #ifdef CONFIG_TCC_BCHECK
3287 /* bound check case */
3288 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3297 #ifdef TCC_TARGET_X86_64
3298 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3300 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3305 r
= gv(rc
); /* generate value */
3306 /* if lvalue was saved on stack, must read it */
3307 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3309 t
= get_reg(RC_INT
);
3315 sv
.r
= VT_LOCAL
| VT_LVAL
;
3316 sv
.c
.i
= vtop
[-1].c
.i
;
3318 vtop
[-1].r
= t
| VT_LVAL
;
3320 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3322 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3323 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3325 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3326 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3328 vtop
[-1].type
.t
= load_type
;
3331 /* convert to int to increment easily */
3332 vtop
->type
.t
= addr_type
;
3338 vtop
[-1].type
.t
= load_type
;
3339 /* XXX: it works because r2 is spilled last ! */
3340 store(vtop
->r2
, vtop
- 1);
3346 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3347 vtop
->r
|= delayed_cast
;
3351 /* post defines POST/PRE add. c is the token ++ or -- */
3352 ST_FUNC
void inc(int post
, int c
)
3355 vdup(); /* save lvalue */
3357 gv_dup(); /* duplicate value */
3362 vpushi(c
- TOK_MID
);
3364 vstore(); /* store value */
3366 vpop(); /* if post op, return saved value */
3369 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3371 /* read the string */
3375 while (tok
== TOK_STR
) {
3376 /* XXX: add \0 handling too ? */
3377 cstr_cat(astr
, tokc
.str
.data
, -1);
3380 cstr_ccat(astr
, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.
   (For any positive I it returns the position of the highest set bit,
   i.e. floor(log2(i))+1; callers that require an exact power of two
   verify it themselves via `n != 1 << (aligned - 1)`.) */
static int exact_log2p1(int i)
{
    int ret;
    if (!i)
        return 0;
    /* consume 8 bits at a time, then binary-search the remaining byte */
    for (ret = 1; i >= 1 << 8; ret += 8)
        i >>= 8;
    if (i >= 1 << 4)
        ret += 4, i >>= 4;
    if (i >= 1 << 2)
        ret += 2, i >>= 2;
    if (i >= 1 << 1)
        ret++;
    return ret;
}
3401 /* Parse __attribute__((...)) GNUC extension. */
3402 static void parse_attribute(AttributeDef
*ad
)
3408 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3413 while (tok
!= ')') {
3414 if (tok
< TOK_IDENT
)
3415 expect("attribute name");
3427 tcc_warning("implicit declaration of function '%s'",
3428 get_tok_str(tok
, &tokc
));
3429 s
= external_global_sym(tok
, &func_old_type
);
3431 ad
->cleanup_func
= s
;
3439 parse_mult_str(&astr
, "section name");
3440 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3447 parse_mult_str(&astr
, "alias(\"target\")");
3448 ad
->alias_target
= /* save string as token, for later */
3449 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3453 case TOK_VISIBILITY1
:
3454 case TOK_VISIBILITY2
:
3456 parse_mult_str(&astr
,
3457 "visibility(\"default|hidden|internal|protected\")");
3458 if (!strcmp (astr
.data
, "default"))
3459 ad
->a
.visibility
= STV_DEFAULT
;
3460 else if (!strcmp (astr
.data
, "hidden"))
3461 ad
->a
.visibility
= STV_HIDDEN
;
3462 else if (!strcmp (astr
.data
, "internal"))
3463 ad
->a
.visibility
= STV_INTERNAL
;
3464 else if (!strcmp (astr
.data
, "protected"))
3465 ad
->a
.visibility
= STV_PROTECTED
;
3467 expect("visibility(\"default|hidden|internal|protected\")");
3476 if (n
<= 0 || (n
& (n
- 1)) != 0)
3477 tcc_error("alignment must be a positive power of two");
3482 ad
->a
.aligned
= exact_log2p1(n
);
3483 if (n
!= 1 << (ad
->a
.aligned
- 1))
3484 tcc_error("alignment of %d is larger than implemented", n
);
3496 /* currently, no need to handle it because tcc does not
3497 track unused objects */
3501 /* currently, no need to handle it because tcc does not
3502 track unused objects */
3507 ad
->f
.func_call
= FUNC_CDECL
;
3512 ad
->f
.func_call
= FUNC_STDCALL
;
3514 #ifdef TCC_TARGET_I386
3524 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3530 ad
->f
.func_call
= FUNC_FASTCALLW
;
3537 ad
->attr_mode
= VT_LLONG
+ 1;
3540 ad
->attr_mode
= VT_BYTE
+ 1;
3543 ad
->attr_mode
= VT_SHORT
+ 1;
3547 ad
->attr_mode
= VT_INT
+ 1;
3550 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3557 ad
->a
.dllexport
= 1;
3559 case TOK_NODECORATE
:
3560 ad
->a
.nodecorate
= 1;
3563 ad
->a
.dllimport
= 1;
3566 if (tcc_state
->warn_unsupported
)
3567 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3568 /* skip parameters */
3570 int parenthesis
= 0;
3574 else if (tok
== ')')
3577 } while (parenthesis
&& tok
!= -1);
3590 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3594 while ((s
= s
->next
) != NULL
) {
3595 if ((s
->v
& SYM_FIELD
) &&
3596 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3597 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3598 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
3610 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3612 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3613 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3614 int pcc
= !tcc_state
->ms_bitfields
;
3615 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3622 prevbt
= VT_STRUCT
; /* make it never match */
3627 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3628 if (f
->type
.t
& VT_BITFIELD
)
3629 bit_size
= BIT_SIZE(f
->type
.t
);
3632 size
= type_size(&f
->type
, &align
);
3633 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3636 if (pcc
&& bit_size
== 0) {
3637 /* in pcc mode, packing does not affect zero-width bitfields */
3640 /* in pcc mode, attribute packed overrides if set. */
3641 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3644 /* pragma pack overrides align if lesser and packs bitfields always */
3647 if (pragma_pack
< align
)
3648 align
= pragma_pack
;
3649 /* in pcc mode pragma pack also overrides individual align */
3650 if (pcc
&& pragma_pack
< a
)
3654 /* some individual align was specified */
3658 if (type
->ref
->type
.t
== VT_UNION
) {
3659 if (pcc
&& bit_size
>= 0)
3660 size
= (bit_size
+ 7) >> 3;
3665 } else if (bit_size
< 0) {
3667 c
+= (bit_pos
+ 7) >> 3;
3668 c
= (c
+ align
- 1) & -align
;
3677 /* A bit-field. Layout is more complicated. There are two
3678 options: PCC (GCC) compatible and MS compatible */
3680 /* In PCC layout a bit-field is placed adjacent to the
3681 preceding bit-fields, except if:
3683 - an individual alignment was given
3684 - it would overflow its base type container and
3685 there is no packing */
3686 if (bit_size
== 0) {
3688 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3690 } else if (f
->a
.aligned
) {
3692 } else if (!packed
) {
3694 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3695 if (ofs
> size
/ align
)
3699 /* in pcc mode, long long bitfields have type int if they fit */
3700 if (size
== 8 && bit_size
<= 32)
3701 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3703 while (bit_pos
>= align
* 8)
3704 c
+= align
, bit_pos
-= align
* 8;
3707 /* In PCC layout named bit-fields influence the alignment
3708 of the containing struct using the base types alignment,
3709 except for packed fields (which here have correct align). */
3710 if (f
->v
& SYM_FIRST_ANOM
3711 // && bit_size // ??? gcc on ARM/rpi does that
3716 bt
= f
->type
.t
& VT_BTYPE
;
3717 if ((bit_pos
+ bit_size
> size
* 8)
3718 || (bit_size
> 0) == (bt
!= prevbt
)
3720 c
= (c
+ align
- 1) & -align
;
3723 /* In MS bitfield mode a bit-field run always uses
3724 at least as many bits as the underlying type.
3725 To start a new run it's also required that this
3726 or the last bit-field had non-zero width. */
3727 if (bit_size
|| prev_bit_size
)
3730 /* In MS layout the records alignment is normally
3731 influenced by the field, except for a zero-width
3732 field at the start of a run (but by further zero-width
3733 fields it is again). */
3734 if (bit_size
== 0 && prevbt
!= bt
)
3737 prev_bit_size
= bit_size
;
3740 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3741 | (bit_pos
<< VT_STRUCT_SHIFT
);
3742 bit_pos
+= bit_size
;
3744 if (align
> maxalign
)
3748 printf("set field %s offset %-2d size %-2d align %-2d",
3749 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3750 if (f
->type
.t
& VT_BITFIELD
) {
3751 printf(" pos %-2d bits %-2d",
3764 c
+= (bit_pos
+ 7) >> 3;
3766 /* store size and alignment */
3767 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3771 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3772 /* can happen if individual align for some member was given. In
3773 this case MSVC ignores maxalign when aligning the size */
3778 c
= (c
+ a
- 1) & -a
;
3782 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3785 /* check whether we can access bitfields by their type */
3786 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3790 if (0 == (f
->type
.t
& VT_BITFIELD
))
3794 bit_size
= BIT_SIZE(f
->type
.t
);
3797 bit_pos
= BIT_POS(f
->type
.t
);
3798 size
= type_size(&f
->type
, &align
);
3799 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3802 /* try to access the field using a different type */
3803 c0
= -1, s
= align
= 1;
3805 px
= f
->c
* 8 + bit_pos
;
3806 cx
= (px
>> 3) & -align
;
3807 px
= px
- (cx
<< 3);
3810 s
= (px
+ bit_size
+ 7) >> 3;
3820 s
= type_size(&t
, &align
);
3824 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3825 /* update offset and bit position */
3828 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3829 | (bit_pos
<< VT_STRUCT_SHIFT
);
3833 printf("FIX field %s offset %-2d size %-2d align %-2d "
3834 "pos %-2d bits %-2d\n",
3835 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3836 cx
, s
, align
, px
, bit_size
);
3839 /* fall back to load/store single-byte wise */
3840 f
->auxtype
= VT_STRUCT
;
3842 printf("FIX field %s : load byte-wise\n",
3843 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3849 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3850 static void struct_decl(CType
*type
, int u
)
3852 int v
, c
, size
, align
, flexible
;
3853 int bit_size
, bsize
, bt
;
3855 AttributeDef ad
, ad1
;
3858 memset(&ad
, 0, sizeof ad
);
3860 parse_attribute(&ad
);
3864 /* struct already defined ? return it */
3866 expect("struct/union/enum name");
3868 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3871 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3873 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3878 /* Record the original enum/struct/union token. */
3879 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3881 /* we put an undefined size for struct/union */
3882 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3883 s
->r
= 0; /* default alignment is zero as gcc */
3885 type
->t
= s
->type
.t
;
3891 tcc_error("struct/union/enum already defined");
3893 /* cannot be empty */
3894 /* non empty enums are not allowed */
3897 long long ll
= 0, pl
= 0, nl
= 0;
3900 /* enum symbols have static storage */
3901 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3905 expect("identifier");
3907 if (ss
&& !local_stack
)
3908 tcc_error("redefinition of enumerator '%s'",
3909 get_tok_str(v
, NULL
));
3913 ll
= expr_const64();
3915 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3917 *ps
= ss
, ps
= &ss
->next
;
3926 /* NOTE: we accept a trailing comma */
3931 /* set integral type of the enum */
3934 if (pl
!= (unsigned)pl
)
3935 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3937 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
3938 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3939 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
3941 /* set type for enum members */
3942 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
3944 if (ll
== (int)ll
) /* default is int if it fits */
3946 if (t
.t
& VT_UNSIGNED
) {
3947 ss
->type
.t
|= VT_UNSIGNED
;
3948 if (ll
== (unsigned)ll
)
3951 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
3952 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3957 while (tok
!= '}') {
3958 if (!parse_btype(&btype
, &ad1
)) {
3964 tcc_error("flexible array member '%s' not at the end of struct",
3965 get_tok_str(v
, NULL
));
3971 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
3973 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3974 expect("identifier");
3976 int v
= btype
.ref
->v
;
3977 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3978 if (tcc_state
->ms_extensions
== 0)
3979 expect("identifier");
3983 if (type_size(&type1
, &align
) < 0) {
3984 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3987 tcc_error("field '%s' has incomplete type",
3988 get_tok_str(v
, NULL
));
3990 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3991 (type1
.t
& VT_BTYPE
) == VT_VOID
||
3992 (type1
.t
& VT_STORAGE
))
3993 tcc_error("invalid type for '%s'",
3994 get_tok_str(v
, NULL
));
3998 bit_size
= expr_const();
3999 /* XXX: handle v = 0 case for messages */
4001 tcc_error("negative width in bit-field '%s'",
4002 get_tok_str(v
, NULL
));
4003 if (v
&& bit_size
== 0)
4004 tcc_error("zero width for bit-field '%s'",
4005 get_tok_str(v
, NULL
));
4006 parse_attribute(&ad1
);
4008 size
= type_size(&type1
, &align
);
4009 if (bit_size
>= 0) {
4010 bt
= type1
.t
& VT_BTYPE
;
4016 tcc_error("bitfields must have scalar type");
4018 if (bit_size
> bsize
) {
4019 tcc_error("width of '%s' exceeds its type",
4020 get_tok_str(v
, NULL
));
4021 } else if (bit_size
== bsize
4022 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4023 /* no need for bit fields */
4025 } else if (bit_size
== 64) {
4026 tcc_error("field width 64 not implemented");
4028 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4030 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4033 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4034 /* Remember we've seen a real field to check
4035 for placement of flexible array member. */
4038 /* If member is a struct or bit-field, enforce
4039 placing into the struct (as anonymous). */
4041 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4046 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4051 if (tok
== ';' || tok
== TOK_EOF
)
4058 parse_attribute(&ad
);
4059 struct_layout(type
, &ad
);
4064 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4066 merge_symattr(&ad
->a
, &s
->a
);
4067 merge_funcattr(&ad
->f
, &s
->f
);
4070 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4071 are added to the element type, copied because it could be a typedef. */
4072 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4074 while (type
->t
& VT_ARRAY
) {
4075 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4076 type
= &type
->ref
->type
;
4078 type
->t
|= qualifiers
;
4081 /* return 0 if no type declaration. otherwise, return the basic type
4084 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4086 int t
, u
, bt
, st
, type_found
, typespec_found
, g
;
4090 memset(ad
, 0, sizeof(AttributeDef
));
4100 /* currently, we really ignore extension */
4110 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4111 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4112 tmbt
: tcc_error("too many basic types");
4115 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4120 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4137 memset(&ad1
, 0, sizeof(AttributeDef
));
4138 if (parse_btype(&type1
, &ad1
)) {
4139 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4141 n
= 1 << (ad1
.a
.aligned
- 1);
4143 type_size(&type1
, &n
);
4146 if (n
<= 0 || (n
& (n
- 1)) != 0)
4147 tcc_error("alignment must be a positive power of two");
4150 ad
->a
.aligned
= exact_log2p1(n
);
4154 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4155 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4156 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4157 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4164 #ifdef TCC_TARGET_ARM64
4166 /* GCC's __uint128_t appears in some Linux header files. Make it a
4167 synonym for long double to get the size and alignment right. */
4178 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4179 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4187 struct_decl(&type1
, VT_ENUM
);
4190 type
->ref
= type1
.ref
;
4193 struct_decl(&type1
, VT_STRUCT
);
4196 struct_decl(&type1
, VT_UNION
);
4199 /* type modifiers */
4204 parse_btype_qualify(type
, VT_CONSTANT
);
4212 parse_btype_qualify(type
, VT_VOLATILE
);
4219 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4220 tcc_error("signed and unsigned modifier");
4233 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4234 tcc_error("signed and unsigned modifier");
4235 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4251 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4252 tcc_error("multiple storage classes");
4263 /* currently, no need to handle it because tcc does not
4264 track unused objects */
4267 /* GNUC attribute */
4268 case TOK_ATTRIBUTE1
:
4269 case TOK_ATTRIBUTE2
:
4270 parse_attribute(ad
);
4271 if (ad
->attr_mode
) {
4272 u
= ad
->attr_mode
-1;
4273 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4281 parse_expr_type(&type1
);
4282 /* remove all storage modifiers except typedef */
4283 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4285 sym_to_attr(ad
, type1
.ref
);
4291 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4293 t
&= ~(VT_BTYPE
|VT_LONG
);
4294 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4295 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4296 type
->ref
= s
->type
.ref
;
4298 parse_btype_qualify(type
, t
);
4300 /* get attributes from typedef */
4310 if (tcc_state
->char_is_unsigned
) {
4311 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4314 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4315 bt
= t
& (VT_BTYPE
|VT_LONG
);
4317 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4318 #ifdef TCC_TARGET_PE
4319 if (bt
== VT_LDOUBLE
)
4320 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4326 /* convert a function parameter type (array to pointer and function to
4327 function pointer) */
4328 static inline void convert_parameter_type(CType
*pt
)
4330 /* remove const and volatile qualifiers (XXX: const could be used
4331 to indicate a const function parameter */
4332 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4333 /* array must be transformed to pointer according to ANSI C */
4335 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4340 ST_FUNC
void parse_asm_str(CString
*astr
)
4343 parse_mult_str(astr
, "string constant");
4346 /* Parse an asm label and return the token */
4347 static int asm_label_instr(void)
4353 parse_asm_str(&astr
);
4356 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4358 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4363 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4365 int n
, l
, t1
, arg_size
, align
;
4366 Sym
**plast
, *s
, *first
;
4371 /* function type, or recursive declarator (return if so) */
4373 if (td
&& !(td
& TYPE_ABSTRACT
))
4377 else if (parse_btype(&pt
, &ad1
))
4380 merge_attr (ad
, &ad1
);
4389 /* read param name and compute offset */
4390 if (l
!= FUNC_OLD
) {
4391 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4393 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4394 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4395 tcc_error("parameter declared as void");
4399 expect("identifier");
4400 pt
.t
= VT_VOID
; /* invalid type */
4403 convert_parameter_type(&pt
);
4404 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4405 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4411 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4416 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4417 tcc_error("invalid type");
4420 /* if no parameters, then old type prototype */
4423 /* NOTE: const is ignored in returned type as it has a special
4424 meaning in gcc / C++ */
4425 type
->t
&= ~VT_CONSTANT
;
4426 /* some ancient pre-K&R C allows a function to return an array
4427 and the array brackets to be put after the arguments, such
4428 that "int c()[]" means something like "int[] c()" */
4431 skip(']'); /* only handle simple "[]" */
4434 /* we push a anonymous symbol which will contain the function prototype */
4435 ad
->f
.func_args
= arg_size
;
4436 ad
->f
.func_type
= l
;
4437 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4443 } else if (tok
== '[') {
4444 int saved_nocode_wanted
= nocode_wanted
;
4445 /* array definition */
4448 /* XXX The optional type-quals and static should only be accepted
4449 in parameter decls. The '*' as well, and then even only
4450 in prototypes (not function defs). */
4452 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4467 if (!local_stack
|| (storage
& VT_STATIC
))
4468 vpushi(expr_const());
4470 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4471 length must always be evaluated, even under nocode_wanted,
4472 so that its size slot is initialized (e.g. under sizeof
4477 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4480 tcc_error("invalid array size");
4482 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4483 tcc_error("size of variable length array should be an integer");
4489 /* parse next post type */
4490 post_type(type
, ad
, storage
, 0);
4491 if (type
->t
== VT_FUNC
)
4492 tcc_error("declaration of an array of functions");
4493 t1
|= type
->t
& VT_VLA
;
4497 tcc_error("need explicit inner array size in VLAs");
4498 loc
-= type_size(&int_type
, &align
);
4502 vla_runtime_type_size(type
, &align
);
4504 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4510 nocode_wanted
= saved_nocode_wanted
;
4512 /* we push an anonymous symbol which will contain the array
4514 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4515 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4521 /* Parse a type declarator (except basic type), and return the type
4522 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4523 expected. 'type' should contain the basic type. 'ad' is the
4524 attribute definition of the basic type. It can be modified by
4525 type_decl(). If this (possibly abstract) declarator is a pointer chain
4526 it returns the innermost pointed to type (equals *type, but is a different
4527 pointer), otherwise returns type itself, that's used for recursive calls. */
4528 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4531 int qualifiers
, storage
;
4533 /* recursive type, remove storage bits first, apply them later again */
4534 storage
= type
->t
& VT_STORAGE
;
4535 type
->t
&= ~VT_STORAGE
;
4538 while (tok
== '*') {
4546 qualifiers
|= VT_CONSTANT
;
4551 qualifiers
|= VT_VOLATILE
;
4557 /* XXX: clarify attribute handling */
4558 case TOK_ATTRIBUTE1
:
4559 case TOK_ATTRIBUTE2
:
4560 parse_attribute(ad
);
4564 type
->t
|= qualifiers
;
4566 /* innermost pointed to type is the one for the first derivation */
4567 ret
= pointed_type(type
);
4571 /* This is possibly a parameter type list for abstract declarators
4572 ('int ()'), use post_type for testing this. */
4573 if (!post_type(type
, ad
, 0, td
)) {
4574 /* It's not, so it's a nested declarator, and the post operations
4575 apply to the innermost pointed to type (if any). */
4576 /* XXX: this is not correct to modify 'ad' at this point, but
4577 the syntax is not clear */
4578 parse_attribute(ad
);
4579 post
= type_decl(type
, ad
, v
, td
);
4583 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4584 /* type identifier */
4589 if (!(td
& TYPE_ABSTRACT
))
4590 expect("identifier");
4593 post_type(post
, ad
, storage
, 0);
4594 parse_attribute(ad
);
4599 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4600 ST_FUNC
int lvalue_type(int t
)
4605 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4607 else if (bt
== VT_SHORT
)
4611 if (t
& VT_UNSIGNED
)
4612 r
|= VT_LVAL_UNSIGNED
;
4616 /* indirection with full error checking and bound check */
4617 ST_FUNC
void indir(void)
4619 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4620 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4624 if (vtop
->r
& VT_LVAL
)
4626 vtop
->type
= *pointed_type(&vtop
->type
);
4627 /* Arrays and functions are never lvalues */
4628 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4629 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4630 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4631 /* if bound checking, the referenced pointer must be checked */
4632 #ifdef CONFIG_TCC_BCHECK
4633 if (tcc_state
->do_bounds_check
)
4634 vtop
->r
|= VT_MUSTBOUND
;
4639 /* pass a parameter to a function and do type checking and casting */
4640 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4645 func_type
= func
->f
.func_type
;
4646 if (func_type
== FUNC_OLD
||
4647 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4648 /* default casting : only need to convert float to double */
4649 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4650 gen_cast_s(VT_DOUBLE
);
4651 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4652 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4653 type
.ref
= vtop
->type
.ref
;
4656 } else if (arg
== NULL
) {
4657 tcc_error("too many arguments to function");
4660 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4661 gen_assign_cast(&type
);
4665 /* parse an expression and return its type without any side effect. */
4666 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4675 /* parse an expression of the form '(type)' or '(expr)' and return its
4677 static void parse_expr_type(CType
*type
)
4683 if (parse_btype(type
, &ad
)) {
4684 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4686 expr_type(type
, gexpr
);
4691 static void parse_type(CType
*type
)
4696 if (!parse_btype(type
, &ad
)) {
4699 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4702 static void parse_builtin_params(int nc
, const char *args
)
4709 while ((c
= *args
++)) {
4713 case 'e': expr_eq(); continue;
4714 case 't': parse_type(&t
); vpush(&t
); continue;
4715 default: tcc_error("internal error"); break;
4723 static void try_call_scope_cleanup(Sym
*stop
)
4725 Sym
*cls
= current_cleanups
;
4727 for (; cls
!= stop
; cls
= cls
->ncl
) {
4728 Sym
*fs
= cls
->next
;
4729 Sym
*vs
= cls
->prev_tok
;
4731 vpushsym(&fs
->type
, fs
);
4732 vset(&vs
->type
, vs
->r
, vs
->c
);
4734 mk_pointer(&vtop
->type
);
4740 static void try_call_cleanup_goto(Sym
*cleanupstate
)
4745 if (!current_cleanups
)
4748 /* search NCA of both cleanup chains given parents and initial depth */
4749 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
4750 for (ccd
= ncleanups
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
4752 for (cc
= current_cleanups
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
4754 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
4757 try_call_scope_cleanup(cc
);
4760 ST_FUNC
void unary(void)
4762 int n
, t
, align
, size
, r
, sizeof_caller
;
4767 sizeof_caller
= in_sizeof
;
4770 /* XXX: GCC 2.95.3 does not generate a table although it should be
4778 #ifdef TCC_TARGET_PE
4779 t
= VT_SHORT
|VT_UNSIGNED
;
4787 vsetc(&type
, VT_CONST
, &tokc
);
4791 t
= VT_INT
| VT_UNSIGNED
;
4797 t
= VT_LLONG
| VT_UNSIGNED
;
4809 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4812 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4814 case TOK___FUNCTION__
:
4816 goto tok_identifier
;
4822 /* special function name identifier */
4823 len
= strlen(funcname
) + 1;
4824 /* generate char[len] type */
4829 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4830 if (!NODATA_WANTED
) {
4831 ptr
= section_ptr_add(data_section
, len
);
4832 memcpy(ptr
, funcname
, len
);
4838 #ifdef TCC_TARGET_PE
4839 t
= VT_SHORT
| VT_UNSIGNED
;
4845 /* string parsing */
4847 if (tcc_state
->char_is_unsigned
)
4848 t
= VT_BYTE
| VT_UNSIGNED
;
4850 if (tcc_state
->warn_write_strings
)
4855 memset(&ad
, 0, sizeof(AttributeDef
));
4856 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4861 if (parse_btype(&type
, &ad
)) {
4862 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4864 /* check ISOC99 compound literal */
4866 /* data is allocated locally by default */
4871 /* all except arrays are lvalues */
4872 if (!(type
.t
& VT_ARRAY
))
4873 r
|= lvalue_type(type
.t
);
4874 memset(&ad
, 0, sizeof(AttributeDef
));
4875 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4877 if (sizeof_caller
) {
4884 } else if (tok
== '{') {
4885 int saved_nocode_wanted
= nocode_wanted
;
4887 tcc_error("expected constant");
4888 /* save all registers */
4890 /* statement expression : we do not accept break/continue
4891 inside as GCC does. We do retain the nocode_wanted state,
4892 as statement expressions can't ever be entered from the
4893 outside, so any reactivation of code emission (from labels
4894 or loop heads) can be disabled again after the end of it. */
4895 block(NULL
, NULL
, NULL
, NULL
, 1);
4896 nocode_wanted
= saved_nocode_wanted
;
4911 /* functions names must be treated as function pointers,
4912 except for unary '&' and sizeof. Since we consider that
4913 functions are not lvalues, we only have to handle it
4914 there and in function calls. */
4915 /* arrays can also be used although they are not lvalues */
4916 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4917 !(vtop
->type
.t
& VT_ARRAY
))
4919 mk_pointer(&vtop
->type
);
4925 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4926 gen_cast_s(VT_BOOL
);
4927 vtop
->c
.i
= !vtop
->c
.i
;
4928 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4932 vseti(VT_JMP
, gvtst(1, 0));
4944 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4945 tcc_error("pointer not accepted for unary plus");
4946 /* In order to force cast, we add zero, except for floating point
4947 where we really need an noop (otherwise -0.0 will be transformed
4949 if (!is_float(vtop
->type
.t
)) {
4961 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
4962 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
4963 size
= type_size(&type
, &align
);
4964 if (s
&& s
->a
.aligned
)
4965 align
= 1 << (s
->a
.aligned
- 1);
4966 if (t
== TOK_SIZEOF
) {
4967 if (!(type
.t
& VT_VLA
)) {
4969 tcc_error("sizeof applied to an incomplete type");
4972 vla_runtime_type_size(&type
, &align
);
4977 vtop
->type
.t
|= VT_UNSIGNED
;
4980 case TOK_builtin_expect
:
4981 /* __builtin_expect is a no-op for now */
4982 parse_builtin_params(0, "ee");
4985 case TOK_builtin_types_compatible_p
:
4986 parse_builtin_params(0, "tt");
4987 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4988 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4989 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4993 case TOK_builtin_choose_expr
:
5020 case TOK_builtin_constant_p
:
5021 parse_builtin_params(1, "e");
5022 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5026 case TOK_builtin_frame_address
:
5027 case TOK_builtin_return_address
:
5033 if (tok
!= TOK_CINT
) {
5034 tcc_error("%s only takes positive integers",
5035 tok1
== TOK_builtin_return_address
?
5036 "__builtin_return_address" :
5037 "__builtin_frame_address");
5039 level
= (uint32_t)tokc
.i
;
5044 vset(&type
, VT_LOCAL
, 0); /* local frame */
5046 mk_pointer(&vtop
->type
);
5047 indir(); /* -> parent frame */
5049 if (tok1
== TOK_builtin_return_address
) {
5050 // assume return address is just above frame pointer on stack
5053 mk_pointer(&vtop
->type
);
5058 #ifdef TCC_TARGET_X86_64
5059 #ifdef TCC_TARGET_PE
5060 case TOK_builtin_va_start
:
5061 parse_builtin_params(0, "ee");
5062 r
= vtop
->r
& VT_VALMASK
;
5066 tcc_error("__builtin_va_start expects a local variable");
5068 vtop
->type
= char_pointer_type
;
5073 case TOK_builtin_va_arg_types
:
5074 parse_builtin_params(0, "t");
5075 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5082 #ifdef TCC_TARGET_ARM64
5083 case TOK___va_start
: {
5084 parse_builtin_params(0, "ee");
5088 vtop
->type
.t
= VT_VOID
;
5091 case TOK___va_arg
: {
5092 parse_builtin_params(0, "et");
5100 case TOK___arm64_clear_cache
: {
5101 parse_builtin_params(0, "ee");
5104 vtop
->type
.t
= VT_VOID
;
5108 /* pre operations */
5119 t
= vtop
->type
.t
& VT_BTYPE
;
5121 /* In IEEE negate(x) isn't subtract(0,x), but rather
5125 vtop
->c
.f
= -1.0 * 0.0;
5126 else if (t
== VT_DOUBLE
)
5127 vtop
->c
.d
= -1.0 * 0.0;
5129 vtop
->c
.ld
= -1.0 * 0.0;
5137 goto tok_identifier
;
5139 /* allow to take the address of a label */
5140 if (tok
< TOK_UIDENT
)
5141 expect("label identifier");
5142 s
= label_find(tok
);
5144 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5146 if (s
->r
== LABEL_DECLARED
)
5147 s
->r
= LABEL_FORWARD
;
5150 s
->type
.t
= VT_VOID
;
5151 mk_pointer(&s
->type
);
5152 s
->type
.t
|= VT_STATIC
;
5154 vpushsym(&s
->type
, s
);
5160 CType controlling_type
;
5161 int has_default
= 0;
5164 TokenString
*str
= NULL
;
5165 int saved_const_wanted
= const_wanted
;
5170 expr_type(&controlling_type
, expr_eq
);
5171 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5172 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5173 mk_pointer(&controlling_type
);
5174 const_wanted
= saved_const_wanted
;
5178 if (tok
== TOK_DEFAULT
) {
5180 tcc_error("too many 'default'");
5186 AttributeDef ad_tmp
;
5189 parse_btype(&cur_type
, &ad_tmp
);
5190 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5191 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5193 tcc_error("type match twice");
5203 skip_or_save_block(&str
);
5205 skip_or_save_block(NULL
);
5212 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5213 tcc_error("type '%s' does not match any association", buf
);
5215 begin_macro(str
, 1);
5224 // special qnan , snan and infinity values
5229 vtop
->type
.t
= VT_FLOAT
;
5234 goto special_math_val
;
5237 goto special_math_val
;
5244 expect("identifier");
5246 if (!s
|| IS_ASM_SYM(s
)) {
5247 const char *name
= get_tok_str(t
, NULL
);
5249 tcc_error("'%s' undeclared", name
);
5250 /* for simple function calls, we tolerate undeclared
5251 external reference to int() function */
5252 if (tcc_state
->warn_implicit_function_declaration
5253 #ifdef TCC_TARGET_PE
5254 /* people must be warned about using undeclared WINAPI functions
5255 (which usually start with uppercase letter) */
5256 || (name
[0] >= 'A' && name
[0] <= 'Z')
5259 tcc_warning("implicit declaration of function '%s'", name
);
5260 s
= external_global_sym(t
, &func_old_type
);
5264 /* A symbol that has a register is a local register variable,
5265 which starts out as VT_LOCAL value. */
5266 if ((r
& VT_VALMASK
) < VT_CONST
)
5267 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5269 vset(&s
->type
, r
, s
->c
);
5270 /* Point to s as backpointer (even without r&VT_SYM).
5271 Will be used by at least the x86 inline asm parser for
5277 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5278 vtop
->c
.i
= s
->enum_val
;
5283 /* post operations */
5285 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5288 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5289 int qualifiers
, cumofs
= 0;
5291 if (tok
== TOK_ARROW
)
5293 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5296 /* expect pointer on structure */
5297 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5298 expect("struct or union");
5299 if (tok
== TOK_CDOUBLE
)
5300 expect("field name");
5302 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5303 expect("field name");
5304 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5306 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5307 /* add field offset to pointer */
5308 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5309 vpushi(cumofs
+ s
->c
);
5311 /* change type to field type, and set to lvalue */
5312 vtop
->type
= s
->type
;
5313 vtop
->type
.t
|= qualifiers
;
5314 /* an array is never an lvalue */
5315 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5316 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5317 #ifdef CONFIG_TCC_BCHECK
5318 /* if bound checking, the referenced pointer must be checked */
5319 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5320 vtop
->r
|= VT_MUSTBOUND
;
5324 } else if (tok
== '[') {
5330 } else if (tok
== '(') {
5333 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5336 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5337 /* pointer test (no array accepted) */
5338 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5339 vtop
->type
= *pointed_type(&vtop
->type
);
5340 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5344 expect("function pointer");
5347 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5349 /* get return type */
5352 sa
= s
->next
; /* first parameter */
5353 nb_args
= regsize
= 0;
5355 /* compute first implicit argument if a structure is returned */
5356 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5357 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5358 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5359 &ret_align
, ®size
);
5361 /* get some space for the returned structure */
5362 size
= type_size(&s
->type
, &align
);
5363 #ifdef TCC_TARGET_ARM64
5364 /* On arm64, a small struct is return in registers.
5365 It is much easier to write it to memory if we know
5366 that we are allowed to write some extra bytes, so
5367 round the allocated space up to a power of 2: */
5369 while (size
& (size
- 1))
5370 size
= (size
| (size
- 1)) + 1;
5372 loc
= (loc
- size
) & -align
;
5374 ret
.r
= VT_LOCAL
| VT_LVAL
;
5375 /* pass it as 'int' to avoid structure arg passing
5377 vseti(VT_LOCAL
, loc
);
5387 /* return in register */
5388 if (is_float(ret
.type
.t
)) {
5389 ret
.r
= reg_fret(ret
.type
.t
);
5390 #ifdef TCC_TARGET_X86_64
5391 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5395 #ifndef TCC_TARGET_ARM64
5396 #ifdef TCC_TARGET_X86_64
5397 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5399 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5410 gfunc_param_typed(s
, sa
);
5420 tcc_error("too few arguments to function");
5422 gfunc_call(nb_args
);
5425 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5426 vsetc(&ret
.type
, r
, &ret
.c
);
5427 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5430 /* handle packed struct return */
5431 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5434 size
= type_size(&s
->type
, &align
);
5435 /* We're writing whole regs often, make sure there's enough
5436 space. Assume register size is power of 2. */
5437 if (regsize
> align
)
5439 loc
= (loc
- size
) & -align
;
5443 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5447 if (--ret_nregs
== 0)
5451 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5459 ST_FUNC
void expr_prod(void)
5464 while (tok
== '*' || tok
== '/' || tok
== '%') {
5472 ST_FUNC
void expr_sum(void)
5477 while (tok
== '+' || tok
== '-') {
5485 static void expr_shift(void)
5490 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5498 static void expr_cmp(void)
5503 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5504 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5512 static void expr_cmpeq(void)
5517 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5525 static void expr_and(void)
5528 while (tok
== '&') {
5535 static void expr_xor(void)
5538 while (tok
== '^') {
5545 static void expr_or(void)
5548 while (tok
== '|') {
5555 static void expr_land(void)
5558 if (tok
== TOK_LAND
) {
5561 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5562 gen_cast_s(VT_BOOL
);
5567 while (tok
== TOK_LAND
) {
5583 if (tok
!= TOK_LAND
) {
5596 static void expr_lor(void)
5599 if (tok
== TOK_LOR
) {
5602 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5603 gen_cast_s(VT_BOOL
);
5608 while (tok
== TOK_LOR
) {
5624 if (tok
!= TOK_LOR
) {
5637 /* Assuming vtop is a value used in a conditional context
5638 (i.e. compared with zero) return 0 if it's false, 1 if
5639 true and -1 if it can't be statically determined. */
5640 static int condition_3way(void)
5643 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5644 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5646 gen_cast_s(VT_BOOL
);
5653 static void expr_cond(void)
5655 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5657 CType type
, type1
, type2
;
5662 c
= condition_3way();
5663 g
= (tok
== ':' && gnu_ext
);
5665 /* needed to avoid having different registers saved in
5667 if (is_float(vtop
->type
.t
)) {
5669 #ifdef TCC_TARGET_X86_64
5670 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5694 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5695 mk_pointer(&vtop
->type
);
5697 sv
= *vtop
; /* save value to handle it later */
5698 vtop
--; /* no vpop so that FP stack is not flushed */
5714 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5715 mk_pointer(&vtop
->type
);
5718 bt1
= t1
& VT_BTYPE
;
5720 bt2
= t2
& VT_BTYPE
;
5724 /* cast operands to correct type according to ISOC rules */
5725 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5726 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5727 } else if (is_float(bt1
) || is_float(bt2
)) {
5728 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5729 type
.t
= VT_LDOUBLE
;
5731 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5736 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5737 /* cast to biggest op */
5738 type
.t
= VT_LLONG
| VT_LONG
;
5739 if (bt1
== VT_LLONG
)
5741 if (bt2
== VT_LLONG
)
5743 /* convert to unsigned if it does not fit in a long long */
5744 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5745 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5746 type
.t
|= VT_UNSIGNED
;
5747 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5748 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5749 /* If one is a null ptr constant the result type
5751 if (is_null_pointer (vtop
)) type
= type1
;
5752 else if (is_null_pointer (&sv
)) type
= type2
;
5753 else if (bt1
!= bt2
)
5754 tcc_error("incompatible types in conditional expressions");
5756 CType
*pt1
= pointed_type(&type1
);
5757 CType
*pt2
= pointed_type(&type2
);
5758 int pbt1
= pt1
->t
& VT_BTYPE
;
5759 int pbt2
= pt2
->t
& VT_BTYPE
;
5760 int newquals
, copied
= 0;
5761 /* pointers to void get preferred, otherwise the
5762 pointed to types minus qualifs should be compatible */
5763 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5764 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5765 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5766 tcc_warning("pointer type mismatch in conditional expression\n");
5768 /* combine qualifs */
5769 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
5770 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
5773 /* copy the pointer target symbol */
5774 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5777 pointed_type(&type
)->t
|= newquals
;
5779 /* pointers to incomplete arrays get converted to
5780 pointers to completed ones if possible */
5781 if (pt1
->t
& VT_ARRAY
5782 && pt2
->t
& VT_ARRAY
5783 && pointed_type(&type
)->ref
->c
< 0
5784 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
5787 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5789 pointed_type(&type
)->ref
=
5790 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
5791 0, pointed_type(&type
)->ref
->c
);
5792 pointed_type(&type
)->ref
->c
=
5793 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
5796 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5797 /* XXX: test structure compatibility */
5798 type
= bt1
== VT_STRUCT
? type1
: type2
;
5800 /* integer operations */
5801 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5802 /* convert to unsigned if it does not fit in an integer */
5803 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5804 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5805 type
.t
|= VT_UNSIGNED
;
5807 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5808 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5809 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5811 /* now we convert second operand */
5815 mk_pointer(&vtop
->type
);
5817 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5822 if (is_float(type
.t
)) {
5824 #ifdef TCC_TARGET_X86_64
5825 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5829 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5830 /* for long longs, we use fixed registers to avoid having
5831 to handle a complicated move */
5842 /* this is horrible, but we must also convert first
5848 mk_pointer(&vtop
->type
);
5850 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5854 if (c
< 0 || islv
) {
5856 move_reg(r2
, r1
, type
.t
);
5866 static void expr_eq(void)
5872 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5873 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5874 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5889 ST_FUNC
void gexpr(void)
5900 /* parse a constant expression and return value in vtop. */
5901 static void expr_const1(void)
5910 /* parse an integer constant and return its value. */
5911 static inline int64_t expr_const64(void)
5915 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5916 expect("constant expression");
5922 /* parse an integer constant and return its value.
5923 Complain if it doesn't fit 32bit (signed or unsigned). */
5924 ST_FUNC
int expr_const(void)
5927 int64_t wc
= expr_const64();
5929 if (c
!= wc
&& (unsigned)c
!= wc
)
5930 tcc_error("constant exceeds 32 bit");
5934 /* return the label token if current token is a label, otherwise
5936 static int is_label(void)
5940 /* fast test first */
5941 if (tok
< TOK_UIDENT
)
5943 /* no need to save tokc because tok is an identifier */
5949 unget_tok(last_tok
);
5954 #ifndef TCC_TARGET_ARM64
5955 static void gfunc_return(CType
*func_type
)
5957 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5958 CType type
, ret_type
;
5959 int ret_align
, ret_nregs
, regsize
;
5960 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5961 &ret_align
, ®size
);
5962 if (0 == ret_nregs
) {
5963 /* if returning structure, must copy it to implicit
5964 first pointer arg location */
5967 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5970 /* copy structure value to pointer */
5973 /* returning structure packed into registers */
5974 int r
, size
, addr
, align
;
5975 size
= type_size(func_type
,&align
);
5976 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5977 (vtop
->c
.i
& (ret_align
-1)))
5978 && (align
& (ret_align
-1))) {
5979 loc
= (loc
- size
) & -ret_align
;
5982 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5986 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5988 vtop
->type
= ret_type
;
5989 if (is_float(ret_type
.t
))
5990 r
= rc_fret(ret_type
.t
);
6001 if (--ret_nregs
== 0)
6003 /* We assume that when a structure is returned in multiple
6004 registers, their classes are consecutive values of the
6007 vtop
->c
.i
+= regsize
;
6011 } else if (is_float(func_type
->t
)) {
6012 gv(rc_fret(func_type
->t
));
6016 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6020 static int case_cmp(const void *pa
, const void *pb
)
6022 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6023 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6024 return a
< b
? -1 : a
> b
;
6027 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6031 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6049 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6051 gcase(base
, len
/2, bsym
);
6052 if (cur_switch
->def_sym
)
6053 gjmp_addr(cur_switch
->def_sym
);
6055 *bsym
= gjmp(*bsym
);
6059 base
+= e
; len
-= e
;
6069 if (p
->v1
== p
->v2
) {
6071 gtst_addr(0, p
->sym
);
6081 gtst_addr(0, p
->sym
);
6087 static void block(int *bsym
, Sym
*bcl
, int *csym
, Sym
*ccl
, int is_expr
)
6089 int a
, b
, c
, d
, cond
;
6092 /* generate line number info */
6093 if (tcc_state
->do_debug
)
6094 tcc_debug_line(tcc_state
);
6097 /* default return value is (void) */
6099 vtop
->type
.t
= VT_VOID
;
6102 if (tok
== TOK_IF
) {
6104 int saved_nocode_wanted
= nocode_wanted
;
6109 cond
= condition_3way();
6115 nocode_wanted
|= 0x20000000;
6116 block(bsym
, bcl
, csym
, ccl
, 0);
6118 nocode_wanted
= saved_nocode_wanted
;
6119 if (tok
== TOK_ELSE
) {
6124 nocode_wanted
|= 0x20000000;
6125 block(bsym
, bcl
, csym
, ccl
, 0);
6126 gsym(d
); /* patch else jmp */
6128 nocode_wanted
= saved_nocode_wanted
;
6131 } else if (tok
== TOK_WHILE
) {
6132 int saved_nocode_wanted
;
6133 nocode_wanted
&= ~0x20000000;
6143 saved_nocode_wanted
= nocode_wanted
;
6144 block(&a
, current_cleanups
, &b
, current_cleanups
, 0);
6145 nocode_wanted
= saved_nocode_wanted
;
6150 } else if (tok
== '{') {
6151 Sym
*llabel
, *lcleanup
;
6152 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
6153 int lncleanups
= ncleanups
;
6156 /* record local declaration stack position */
6158 llabel
= local_label_stack
;
6159 lcleanup
= current_cleanups
;
6162 /* handle local labels declarations */
6163 while (tok
== TOK_LABEL
) {
6166 if (tok
< TOK_UIDENT
)
6167 expect("label identifier");
6168 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6178 while (tok
!= '}') {
6179 if ((a
= is_label()))
6186 block(bsym
, bcl
, csym
, ccl
, is_expr
);
6190 if (current_cleanups
!= lcleanup
) {
6194 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> lncleanups
;)
6195 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6200 try_call_scope_cleanup(lcleanup
);
6201 pcl
->jnext
= gjmp(0);
6203 goto remove_pending
;
6212 if (!nocode_wanted
) {
6213 try_call_scope_cleanup(lcleanup
);
6217 current_cleanups
= lcleanup
;
6218 ncleanups
= lncleanups
;
6219 /* pop locally defined labels */
6220 label_pop(&local_label_stack
, llabel
, is_expr
);
6221 /* pop locally defined symbols */
6223 /* In the is_expr case (a statement expression is finished here),
6224 vtop might refer to symbols on the local_stack. Either via the
6225 type or via vtop->sym. We can't pop those nor any that in turn
6226 might be referred to. To make it easier we don't roll back
6227 any symbols in that case; some upper level call to block() will
6228 do that. We do have to remove such symbols from the lookup
6229 tables, though. sym_pop will do that. */
6230 sym_pop(&local_stack
, s
, is_expr
);
6232 /* Pop VLA frames and restore stack pointer if required */
6233 if (vlas_in_scope
> saved_vlas_in_scope
) {
6234 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
6237 vlas_in_scope
= saved_vlas_in_scope
;
6240 } else if (tok
== TOK_RETURN
) {
6244 gen_assign_cast(&func_vt
);
6245 try_call_scope_cleanup(NULL
);
6246 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6249 gfunc_return(&func_vt
);
6251 try_call_scope_cleanup(NULL
);
6254 /* jump unless last stmt in top-level block */
6255 if (tok
!= '}' || local_scope
!= 1)
6257 nocode_wanted
|= 0x20000000;
6258 } else if (tok
== TOK_BREAK
) {
6261 tcc_error("cannot break");
6262 try_call_scope_cleanup(bcl
);
6263 *bsym
= gjmp(*bsym
);
6266 nocode_wanted
|= 0x20000000;
6267 } else if (tok
== TOK_CONTINUE
) {
6270 tcc_error("cannot continue");
6271 try_call_scope_cleanup(ccl
);
6272 vla_sp_restore_root();
6273 *csym
= gjmp(*csym
);
6276 nocode_wanted
|= 0x20000000;
6277 } else if (tok
== TOK_FOR
) {
6279 int saved_nocode_wanted
;
6280 Sym
*lcleanup
= current_cleanups
;
6281 int lncleanups
= ncleanups
;
6283 nocode_wanted
&= ~0x20000000;
6289 /* c99 for-loop init decl? */
6290 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6291 /* no, regular for-loop init expr */
6317 saved_nocode_wanted
= nocode_wanted
;
6318 block(&a
, current_cleanups
, &b
, current_cleanups
, 0);
6319 nocode_wanted
= saved_nocode_wanted
;
6324 try_call_scope_cleanup(lcleanup
);
6325 ncleanups
= lncleanups
;
6326 current_cleanups
= lcleanup
;
6327 sym_pop(&local_stack
, s
, 0);
6330 if (tok
== TOK_DO
) {
6331 int saved_nocode_wanted
;
6332 nocode_wanted
&= ~0x20000000;
6338 saved_nocode_wanted
= nocode_wanted
;
6339 block(&a
, current_cleanups
, &b
, current_cleanups
, 0);
6344 nocode_wanted
= saved_nocode_wanted
;
6348 nocode_wanted
= saved_nocode_wanted
;
6353 if (tok
== TOK_SWITCH
) {
6354 struct switch_t
*saved
, sw
;
6355 int saved_nocode_wanted
= nocode_wanted
;
6361 switchval
= *vtop
--;
6363 b
= gjmp(0); /* jump to first case */
6364 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6367 block(&a
, current_cleanups
, csym
, ccl
, 0);
6368 nocode_wanted
= saved_nocode_wanted
;
6369 a
= gjmp(a
); /* add implicit break */
6372 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6373 for (b
= 1; b
< sw
.n
; b
++)
6374 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6375 tcc_error("duplicate case value");
6376 /* Our switch table sorting is signed, so the compared
6377 value needs to be as well when it's 64bit. */
6378 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6379 switchval
.type
.t
&= ~VT_UNSIGNED
;
6381 gcase(sw
.p
, sw
.n
, &a
);
6384 gjmp_addr(sw
.def_sym
);
6385 dynarray_reset(&sw
.p
, &sw
.n
);
6390 if (tok
== TOK_CASE
) {
6391 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6394 nocode_wanted
&= ~0x20000000;
6396 cr
->v1
= cr
->v2
= expr_const64();
6397 if (gnu_ext
&& tok
== TOK_DOTS
) {
6399 cr
->v2
= expr_const64();
6400 if (cr
->v2
< cr
->v1
)
6401 tcc_warning("empty case range");
6404 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6407 goto block_after_label
;
6409 if (tok
== TOK_DEFAULT
) {
6414 if (cur_switch
->def_sym
)
6415 tcc_error("too many 'default'");
6416 cur_switch
->def_sym
= ind
;
6418 goto block_after_label
;
6420 if (tok
== TOK_GOTO
) {
6422 if (tok
== '*' && gnu_ext
) {
6426 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6429 } else if (tok
>= TOK_UIDENT
) {
6430 s
= label_find(tok
);
6431 /* put forward definition if needed */
6433 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6434 else if (s
->r
== LABEL_DECLARED
)
6435 s
->r
= LABEL_FORWARD
;
6437 vla_sp_restore_root();
6438 if (s
->r
& LABEL_FORWARD
) {
6439 /* start new goto chain for cleanups, linked via label->next */
6440 if (current_cleanups
) {
6441 sym_push2(&pending_gotos
, SYM_FIELD
, 0, ncleanups
);
6442 pending_gotos
->prev_tok
= s
;
6443 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
6444 pending_gotos
->next
= s
;
6446 s
->jnext
= gjmp(s
->jnext
);
6448 try_call_cleanup_goto(s
->cleanupstate
);
6449 gjmp_addr(s
->jnext
);
6453 expect("label identifier");
6456 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
6465 if (s
->r
== LABEL_DEFINED
)
6466 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6467 s
->r
= LABEL_DEFINED
;
6469 Sym
*pcl
; /* pending cleanup goto */
6470 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
6472 sym_pop(&s
->next
, NULL
, 0);
6476 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
6479 s
->cleanupstate
= current_cleanups
;
6481 /* we accept this, but it is a mistake */
6483 nocode_wanted
&= ~0x20000000;
6485 tcc_warning("deprecated use of label at end of compound statement");
6489 block(bsym
, bcl
, csym
, ccl
, is_expr
);
6492 /* expression case */
6507 /* This skips over a stream of tokens containing balanced {} and ()
6508 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6509 with a '{'). If STR then allocates and stores the skipped tokens
6510 in *STR. This doesn't check if () and {} are nested correctly,
6511 i.e. "({)}" is accepted. */
6512 static void skip_or_save_block(TokenString
**str
)
6514 int braces
= tok
== '{';
6517 *str
= tok_str_alloc();
6519 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6521 if (tok
== TOK_EOF
) {
6522 if (str
|| level
> 0)
6523 tcc_error("unexpected end of file");
6528 tok_str_add_tok(*str
);
6531 if (t
== '{' || t
== '(') {
6533 } else if (t
== '}' || t
== ')') {
6535 if (level
== 0 && braces
&& t
== '}')
6540 tok_str_add(*str
, -1);
6541 tok_str_add(*str
, 0);
6545 #define EXPR_CONST 1
6548 static void parse_init_elem(int expr_type
)
6550 int saved_global_expr
;
6553 /* compound literals must be allocated globally in this case */
6554 saved_global_expr
= global_expr
;
6557 global_expr
= saved_global_expr
;
6558 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6559 (compound literals). */
6560 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6561 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6562 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6563 #ifdef TCC_TARGET_PE
6564 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6567 tcc_error("initializer element is not constant");
6575 /* put zeros for variable based init */
6576 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6579 /* nothing to do because globals are already set to zero */
6581 vpush_global_sym(&func_old_type
, TOK_memset
);
6583 #ifdef TCC_TARGET_ARM
6595 #define DIF_SIZE_ONLY 2
6596 #define DIF_HAVE_ELEM 4
6598 /* t is the array or struct type. c is the array or struct
6599 address. cur_field is the pointer to the current
6600 field, for arrays the 'c' member contains the current start
6601 index. 'flags' is as in decl_initializer.
6602 'al' contains the already initialized length of the
6603 current container (starting at c). This returns the new length of that. */
/* NOTE(review): this chunk is a lossy extraction -- gaps in the embedded
   original line numbers (e.g. 6606-6607, 6610-6612, 6639-6644) mark
   statements that are not visible here, so the control flow below is
   incomplete; do not assume the visible lines are contiguous. */
6604 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6605 Sym
**cur_field
, int flags
, int al
)
6608 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6609 unsigned long corig
= c
;
/* DIF_HAVE_ELEM means the caller already parsed an initializer element
   for us (see decl_initializer, which sets it after parse_init_elem). */
6613 if (flags
& DIF_HAVE_ELEM
)
6615 if (gnu_ext
&& (l
= is_label()) != 0)
6617 /* NOTE: we only support ranges for last designator */
/* loop over a chain of designators: '[' index (or GNU 'lo ... hi' range)
   for arrays, '.' field for structs/unions */
6618 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6620 if (!(type
->t
& VT_ARRAY
))
6621 expect("array type");
6623 index
= index_last
= expr_const();
/* GNU extension: '[lo ... hi]' range designator */
6624 if (tok
== TOK_DOTS
&& gnu_ext
) {
6626 index_last
= expr_const();
6630 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6632 tcc_error("invalid index");
6634 (*cur_field
)->c
= index_last
;
6635 type
= pointed_type(type
);
6636 elem_size
= type_size(type
, &align
);
6637 c
+= index
* elem_size
;
/* a range designator initializes (index_last - index + 1) elements
   from a single parsed value; the copy happens further below */
6638 nb_elems
= index_last
- index
+ 1;
6645 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6646 expect("struct/union type");
6647 f
= find_field(type
, l
, &cumofs
);
6660 } else if (!gnu_ext
) {
/* no designator: advance through the current implicit position */
6665 if (type
->t
& VT_ARRAY
) {
6666 index
= (*cur_field
)->c
;
6667 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6668 tcc_error("index too large");
6669 type
= pointed_type(type
);
6670 c
+= index
* type_size(type
, &align
);
/* skip anonymous bit-field padding members, they take no initializer */
6673 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6674 *cur_field
= f
= f
->next
;
6676 tcc_error("too many field init");
6681 /* must put zero in holes (note that doing it that way
6682 ensures that it even works with designators) */
6683 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
6684 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6685 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
6687 /* XXX: make it more general */
/* replicate the single parsed element across a GNU range designator */
6688 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
6689 unsigned long c_end
;
/* stack-based object: copy via generated code (vset pushes an lvalue
   at local offset c; presumably a store is emitted in elided lines) */
6694 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6695 for (i
= 1; i
< nb_elems
; i
++) {
6696 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6701 } else if (!NODATA_WANTED
) {
/* static data: replicate the bytes directly inside the section */
6702 c_end
= c
+ nb_elems
* elem_size
;
6703 if (c_end
> sec
->data_allocated
)
6704 section_realloc(sec
, c_end
);
6705 src
= sec
->data
+ c
;
6707 for(i
= 1; i
< nb_elems
; i
++) {
6709 memcpy(dst
, src
, elem_size
);
/* return value (new initialized length) is computed from the elided
   tail; this line advances past all elements of the range */
6713 c
+= nb_elems
* type_size(type
, &align
);
6719 /* store a value or an expression directly in global data or in local array */
/* NOTE(review): lossy extraction -- the switch's 'case' labels and several
   branches fall in elided original lines; the assignments below are the
   per-basic-type arms of that switch. The value to store is on top of the
   value stack (vtop). */
6720 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6727 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6731 /* XXX: not portable */
6732 /* XXX: generate error if incorrect relocation */
6733 gen_assign_cast(&dtype
);
6734 bt
= type
->t
& VT_BTYPE
;
/* a symbol-relative value can only be emitted into static data as a
   pointer-sized relocation; reject other shapes at compile time */
6736 if ((vtop
->r
& VT_SYM
)
6739 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6740 || (type
->t
& VT_BITFIELD
))
6741 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6743 tcc_error("initializer element is not computable at load time");
6745 if (NODATA_WANTED
) {
6750 size
= type_size(type
, &align
);
6751 section_reserve(sec
, c
+ size
);
6752 ptr
= sec
->data
+ c
;
6754 /* XXX: make code faster ? */
6755 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6756 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6757 /* XXX This rejects compound literals like
6758 '(void *){ptr}'. The problem is that '&sym' is
6759 represented the same way, which would be ruled out
6760 by the SYM_FIRST_ANOM check above, but also '"string"'
6761 in 'char *p = "string"' is represented the same
6762 with the type being VT_PTR and the symbol being an
6763 anonymous one. That is, there's no difference in vtop
6764 between '(void *){x}' and '&(void *){x}'. Ignore
6765 pointer typed entities here. Hopefully no real code
6766 will every use compound literals with scalar type. */
6767 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6768 /* These come from compound literals, memcpy stuff over. */
6772 esym
= elfsym(vtop
->sym
);
6773 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6774 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6776 /* We need to copy over all memory contents, and that
6777 includes relocations. Use the fact that relocs are
6778 created it order, so look from the end of relocs
6779 until we hit one before the copied region. */
6780 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6781 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6782 while (num_relocs
--) {
6784 if (rel
->r_offset
>= esym
->st_value
+ size
)
6786 if (rel
->r_offset
< esym
->st_value
)
6788 /* Note: if the same fields are initialized multiple
6789 times (possible with designators) then we possibly
6790 add multiple relocations for the same offset here.
6791 That would lead to wrong code, the last reloc needs
6792 to win. We clean this up later after the whole
6793 initializer is parsed. */
/* re-emit the copied reloc, rebased from the literal's address
   to the destination offset c within 'sec' */
6794 put_elf_reloca(symtab_section
, sec
,
6795 c
+ rel
->r_offset
- esym
->st_value
,
6796 ELFW(R_TYPE
)(rel
->r_info
),
6797 ELFW(R_SYM
)(rel
->r_info
),
/* bit-field store: merge the constant into the target bytes without
   disturbing neighbouring fields (read-modify-write under mask m) */
6807 if (type
->t
& VT_BITFIELD
) {
6808 int bit_pos
, bit_size
, bits
, n
;
6809 unsigned char *p
, v
, m
;
6810 bit_pos
= BIT_POS(vtop
->type
.t
);
6811 bit_size
= BIT_SIZE(vtop
->type
.t
);
6812 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6813 bit_pos
&= 7, bits
= 0;
6818 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6819 m
= ((1 << n
) - 1) << bit_pos
;
6820 *p
= (*p
& ~m
) | (v
& m
);
6821 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6825 /* XXX: when cross-compiling we assume that each type has the
6826 same representation on host and target, which is likely to
6827 be wrong in the case of long double */
/* _Bool normalisation: any nonzero becomes 1 */
6829 vtop
->c
.i
= vtop
->c
.i
!= 0;
/* the |= (rather than =) preserves bits already written at this
   offset, e.g. by an earlier overlapping bit-field store */
6831 *(char *)ptr
|= vtop
->c
.i
;
6834 *(short *)ptr
|= vtop
->c
.i
;
6837 *(float*)ptr
= vtop
->c
.f
;
6840 *(double *)ptr
= vtop
->c
.d
;
/* long double: representation differs between host and target; only a
   few cases can be emitted when cross-compiling (see error below) */
6843 #if defined TCC_IS_NATIVE_387
6844 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6845 memcpy(ptr
, &vtop
->c
.ld
, 10);
6847 else if (sizeof (long double) == sizeof (double))
6848 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
6850 else if (vtop
->c
.ld
== 0.0)
6854 if (sizeof(long double) == LDOUBLE_SIZE
)
6855 *(long double*)ptr
= vtop
->c
.ld
;
6856 else if (sizeof(double) == LDOUBLE_SIZE
)
6857 *(double *)ptr
= (double)vtop
->c
.ld
;
6859 tcc_error("can't cross compile long double constants");
6863 *(long long *)ptr
|= vtop
->c
.i
;
/* pointer-sized case: emit a R_DATA_PTR relocation when the value is
   symbol-relative (RELA targets carry the addend in the reloc, REL
   targets keep it in the data word) */
6870 addr_t val
= vtop
->c
.i
;
6872 if (vtop
->r
& VT_SYM
)
6873 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6875 *(addr_t
*)ptr
|= val
;
6877 if (vtop
->r
& VT_SYM
)
6878 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6879 *(addr_t
*)ptr
|= val
;
6885 int val
= vtop
->c
.i
;
6887 if (vtop
->r
& VT_SYM
)
6888 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6892 if (vtop
->r
& VT_SYM
)
6893 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
/* non-constant local initializer: emit an ordinary runtime store via
   the value stack instead of writing section data */
6902 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6909 /* 't' contains the type and storage info. 'c' is the offset of the
6910 object in section 'sec'. If 'sec' is NULL, it means stack based
6911 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
6912 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
6913 size only evaluation is wanted (only for arrays). */
/* NOTE(review): lossy extraction -- gaps in the embedded numbering hide
   several statements (brace handling, 'skip' calls, loop tails). This is
   the recursive heart of initializer parsing; it calls decl_designator,
   which recurses back into this function for each element. */
6914 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6917 int len
, n
, no_oblock
, nb
, i
;
/* eagerly parse a scalar initializer expression -- except strings,
   which arrays consume specially below */
6923 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
6924 /* In case of strings we have special handling for arrays, so
6925 don't consume them as initializer value (which would commit them
6926 to some anonymous symbol). */
6927 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6928 !(flags
& DIF_SIZE_ONLY
)) {
6929 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6930 flags
|= DIF_HAVE_ELEM
;
/* scalar (or struct-from-struct) assignment case */
6933 if ((flags
& DIF_HAVE_ELEM
) &&
6934 !(type
->t
& VT_ARRAY
) &&
6935 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6936 The source type might have VT_CONSTANT set, which is
6937 of course assignable to non-const elements. */
6938 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6939 init_putv(type
, sec
, c
);
6940 } else if (type
->t
& VT_ARRAY
) {
6943 t1
= pointed_type(type
);
6944 size1
= type_size(t1
, &align1
);
6947 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6950 tcc_error("character array initializer must be a literal,"
6951 " optionally enclosed in braces");
6956 /* only parse strings here if correct type (otherwise: handle
6957 them as ((w)char *) expressions */
6958 if ((tok
== TOK_LSTR
&&
6959 #ifdef TCC_TARGET_PE
6960 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6962 (t1
->t
& VT_BTYPE
) == VT_INT
6964 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
/* string-literal initializer: adjacent literals concatenate */
6966 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6969 /* compute maximum number of chars wanted */
6971 cstr_len
= tokc
.str
.size
;
6973 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
/* clamp to the declared array size (n < 0 means unknown size) */
6976 if (n
>= 0 && nb
> (n
- len
))
6978 if (!(flags
& DIF_SIZE_ONLY
)) {
6980 tcc_warning("initializer-string for array is too long");
6981 /* in order to go faster for common case (char
6982 string in global variable, we handle it
6984 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6986 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
/* generic path: store characters one by one via init_putv */
6990 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6992 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6994 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
7001 /* only add trailing zero if enough storage (no
7002 warning in this case since it is standard) */
7003 if (n
< 0 || len
< n
) {
7004 if (!(flags
& DIF_SIZE_ONLY
)) {
7006 init_putv(t1
, sec
, c
+ (len
* size1
));
/* brace-enclosed (or implicit) element list: one decl_designator call
   per element; it returns the new initialized length */
7017 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7018 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7019 flags
&= ~DIF_HAVE_ELEM
;
7020 if (type
->t
& VT_ARRAY
) {
7022 /* special test for multi dimensional arrays (may not
7023 be strictly correct if designators are used at the
7025 if (no_oblock
&& len
>= n
*size1
)
/* a union initializes only its first (or designated) member */
7028 if (s
->type
.t
== VT_UNION
)
7032 if (no_oblock
&& f
== NULL
)
7041 /* put zeros at the end */
7042 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7043 init_putz(sec
, c
+ len
, n
*size1
- len
);
7046 /* patch type size if needed, which happens only for array types */
/* len is in bytes here; convert to an element count, rounding up */
7048 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7049 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7052 if ((flags
& DIF_FIRST
) || tok
== '{') {
7060 } else if (tok
== '{') {
7061 if (flags
& DIF_HAVE_ELEM
)
/* scalar wrapped in redundant braces: recurse without HAVE_ELEM */
7064 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7066 } else if ((flags
& DIF_SIZE_ONLY
)) {
7067 /* If we supported only ISO C we wouldn't have to accept calling
7068 this on anything than an array if DIF_SIZE_ONLY (and even then
7069 only on the outermost level, so no recursion would be needed),
7070 because initializing a flex array member isn't supported.
7071 But GNU C supports it, so we need to recurse even into
7072 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7073 /* just skip expression */
7074 skip_or_save_block(NULL
);
7076 if (!(flags
& DIF_HAVE_ELEM
)) {
7077 /* This should happen only when we haven't parsed
7078 the init element above for fear of committing a
7079 string constant to memory too early. */
7080 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7081 expect("string constant");
7082 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7084 init_putv(type
, sec
, c
);
7088 /* parse an initializer for type 't' if 'has_init' is non zero, and
7089 allocate space in local or global data space ('r' is either
7090 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7091 variable 'v' of scope 'scope' is declared before initializers
7092 are parsed. If 'v' is zero, then a reference to the new object
7093 is put in the value stack. If 'has_init' is 2, a special parsing
7094 is done to handle string constants. */
/* NOTE(review): lossy extraction -- multiple #else/#endif lines and some
   statements (e.g. the section-selection logic around 7242-7251) fall in
   elided original lines; read the embedded numbering gaps accordingly. */
7095 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7096 int has_init
, int v
, int scope
)
7098 int size
, align
, addr
;
7099 TokenString
*init_str
= NULL
;
7102 Sym
*flexible_array
;
/* nocode_wanted is saved so the static-data mode set below can be
   undone at the end of this function */
7104 int saved_nocode_wanted
= nocode_wanted
;
7105 #ifdef CONFIG_TCC_BCHECK
7109 /* Always allocate static or global variables */
7110 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7111 nocode_wanted
|= 0x80000000;
7113 #ifdef CONFIG_TCC_BCHECK
7114 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
/* detect a trailing flexible array member (size < 0 on its ref) */
7117 flexible_array
= NULL
;
7118 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7119 Sym
*field
= type
->ref
->next
;
7122 field
= field
->next
;
7123 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7124 flexible_array
= field
;
7128 size
= type_size(type
, &align
);
7129 /* If unknown size, we must evaluate it before
7130 evaluating initializers because
7131 initializers can generate global data too
7132 (e.g. string pointers or ISOC99 compound
7133 literals). It also simplifies local
7134 initializers handling */
7135 if (size
< 0 || (flexible_array
&& has_init
)) {
7137 tcc_error("unknown type size");
7138 /* get all init string */
7139 if (has_init
== 2) {
7140 init_str
= tok_str_alloc();
7141 /* only get strings */
7142 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7143 tok_str_add_tok(init_str
);
7146 tok_str_add(init_str
, -1);
7147 tok_str_add(init_str
, 0);
7149 skip_or_save_block(&init_str
);
/* first pass over the saved tokens: size-only evaluation to
   determine the real object size */
7154 begin_macro(init_str
, 1);
7156 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7157 /* prepare second initializer parsing */
7158 macro_ptr
= init_str
->str
;
7161 /* if still unknown size, error */
7162 size
= type_size(type
, &align
);
7164 tcc_error("unknown type size");
7166 /* If there's a flex member and it was used in the initializer
7168 if (flexible_array
&&
7169 flexible_array
->type
.ref
->c
> 0)
7170 size
+= flexible_array
->type
.ref
->c
7171 * pointed_size(&flexible_array
->type
);
7172 /* take into account specified alignment if bigger */
7173 if (ad
->a
.aligned
) {
7174 int speca
= 1 << (ad
->a
.aligned
- 1);
7177 } else if (ad
->a
.packed
) {
7181 if (!v
&& NODATA_WANTED
)
7182 size
= 0, align
= 1;
/* ---- stack (automatic) object ---- */
7184 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7186 #ifdef CONFIG_TCC_BCHECK
7187 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
/* grow the frame downwards, keeping the requested alignment */
7191 loc
= (loc
- size
) & -align
;
7193 #ifdef CONFIG_TCC_BCHECK
7194 /* handles bounds */
7195 /* XXX: currently, since we do only one pass, we cannot track
7196 '&' operators, so we add only arrays */
7197 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7199 /* add padding between regions */
7201 /* then add local bound info */
7202 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7203 bounds_ptr
[0] = addr
;
7204 bounds_ptr
[1] = size
;
7208 /* local variable */
7209 #ifdef CONFIG_TCC_ASM
7210 if (ad
->asm_label
) {
7211 int reg
= asm_parse_regvar(ad
->asm_label
);
7213 r
= (r
& ~VT_VALMASK
) | reg
;
7216 sym
= sym_push(v
, type
, r
, addr
);
/* attribute((cleanup)): record the cleanup function so block() can
   invoke it when the variable goes out of scope */
7217 if (ad
->cleanup_func
) {
7218 Sym
*cls
= sym_push2(&all_cleanups
, SYM_FIELD
| ++ncleanups
, 0, 0);
7219 cls
->prev_tok
= sym
;
7220 cls
->next
= ad
->cleanup_func
;
7221 cls
->ncl
= current_cleanups
;
7222 current_cleanups
= cls
;
7227 /* push local reference */
7228 vset(type
, r
, addr
);
/* ---- static / global object ---- */
7231 if (v
&& scope
== VT_CONST
) {
7232 /* see if the symbol was already defined */
7235 patch_storage(sym
, ad
, type
);
7236 /* we accept several definitions of the same global variable. */
7237 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7242 /* allocate symbol in corresponding section */
7247 else if (tcc_state
->nocommon
)
7252 addr
= section_add(sec
, size
, align
);
7253 #ifdef CONFIG_TCC_BCHECK
7254 /* add padding if bound check */
7256 section_add(sec
, 1, 1);
7259 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7260 sec
= common_section
;
7265 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7266 patch_storage(sym
, ad
, NULL
);
7268 /* Local statics have a scope until now (for
7269 warnings), remove it here. */
7271 /* update symbol definition */
7272 put_extern_sym(sym
, sec
, addr
, size
);
7274 /* push global reference */
7275 vpush_ref(type
, sec
, addr
, size
);
7280 #ifdef CONFIG_TCC_BCHECK
7281 /* handles bounds now because the symbol must be defined
7282 before for the relocation */
7286 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7287 /* then add global bound info */
7288 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7289 bounds_ptr
[0] = 0; /* relocated */
7290 bounds_ptr
[1] = size
;
/* ---- VLA: size only known at run time; allocate with alloca-style
   stack adjustment and remember the SP so scopes can restore it ---- */
7295 if (type
->t
& VT_VLA
) {
7301 /* save current stack pointer */
7302 if (vlas_in_scope
== 0) {
7303 if (vla_sp_root_loc
== -1)
7304 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
7305 gen_vla_sp_save(vla_sp_root_loc
);
7308 vla_runtime_type_size(type
, &a
);
7309 gen_vla_alloc(type
, a
);
7310 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7311 /* on _WIN64, because of the function args scratch area, the
7312 result of alloca differs from RSP and is returned in RAX. */
7313 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7315 gen_vla_sp_save(addr
);
/* ---- ordinary initializer: second (real) parsing pass ---- */
7319 } else if (has_init
) {
7320 size_t oldreloc_offset
= 0;
7321 if (sec
&& sec
->reloc
)
7322 oldreloc_offset
= sec
->reloc
->data_offset
;
7323 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
/* designators may have emitted duplicate relocs for one offset;
   keep only the last one for each (see init_putv note) */
7324 if (sec
&& sec
->reloc
)
7325 squeeze_multi_relocs(sec
, oldreloc_offset
);
7326 /* patch flexible array member size back to -1, */
7327 /* for possible subsequent similar declarations */
7329 flexible_array
->type
.ref
->c
= -1;
7333 /* restore parse state if needed */
7339 nocode_wanted
= saved_nocode_wanted
;
7342 /* parse a function defined by symbol 'sym' and generate its code in
7343 'cur_text_section' */
/* NOTE(review): lossy extraction -- a few statements (e.g. around
   7345-7346, 7352, 7356-7358, 7373-7381) are elided; the visible flow is
   prologue -> body (block()) -> implicit-return handling -> epilogue and
   global-state reset. */
7344 static void gen_function(Sym
*sym
)
7347 ind
= cur_text_section
->data_offset
;
/* honour __attribute__((aligned(n))) on the function entry by padding
   the text section with NOPs up to the requested boundary */
7348 if (sym
->a
.aligned
) {
7349 size_t newoff
= section_add(cur_text_section
, 0,
7350 1 << (sym
->a
.aligned
- 1));
7351 gen_fill_nops(newoff
- ind
);
7353 /* NOTE: we patch the symbol size later */
7354 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7355 funcname
= get_tok_str(sym
->v
, NULL
);
7357 /* Initialize VLA state */
7359 vla_sp_root_loc
= -1;
7360 /* put debug symbol */
7361 tcc_debug_funcstart(tcc_state
, sym
);
7362 /* push a dummy symbol to enable local sym storage */
7363 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7364 local_scope
= 1; /* for function parameters */
7365 gfunc_prolog(&sym
->type
);
7366 reset_local_scope();
7368 clear_temp_local_var_list();
7369 block(NULL
, NULL
, NULL
, NULL
, 0);
/* C99 5.1.2.2.3: falling off the end of main() returns 0; emit the
   implicit return value when the body can reach the end */
7370 if (!(nocode_wanted
& 0x20000000)
7371 && ((func_vt
.t
& VT_BTYPE
) == VT_INT
)
7372 && !strcmp (funcname
, "main"))
7376 gen_assign_cast(&func_vt
);
7377 gfunc_return(&func_vt
);
7382 cur_text_section
->data_offset
= ind
;
7383 label_pop(&global_label_stack
, NULL
, 0);
7384 /* reset local stack */
7385 reset_local_scope();
7386 sym_pop(&local_stack
, NULL
, 0);
7387 /* end of function */
7388 /* patch symbol size */
7389 elfsym(sym
)->st_size
= ind
- func_ind
;
7390 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7391 /* It's better to crash than to generate wrong code */
7392 cur_text_section
= NULL
;
7393 funcname
= ""; /* for safety */
7394 func_vt
.t
= VT_VOID
; /* for safety */
7395 func_var
= 0; /* for safety */
7396 ind
= 0; /* for safety */
/* outside of any function, no code may be generated */
7397 nocode_wanted
= 0x80000000;
/* Emit code for every 'static inline' function that was actually
   referenced. Such functions were only saved as token strings by decl0;
   referencing one may in turn reference others, hence the fixed-point
   loop below (iterate until a full pass generates nothing new).
   NOTE(review): lossy extraction -- some statements (e.g. 7413, 7417-7418,
   7421, 7423, 7425-7427) are elided. */
7401 static void gen_inline_functions(TCCState
*s
)
7404 int inline_generated
, i
, ln
;
7405 struct InlineFunc
*fn
;
/* save/restore the current line number: parsing the saved token
   streams below clobbers file state */
7407 ln
= file
->line_num
;
7408 /* iterate while inline function are referenced */
7410 inline_generated
= 0;
7411 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7412 fn
= s
->inline_fns
[i
];
/* sym->c != 0 means the symbol was referenced (has an ELF slot) */
7414 if (sym
&& sym
->c
) {
7415 /* the function was used: generate its code and
7416 convert it to a normal function */
7419 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7420 sym
->type
.t
&= ~VT_INLINE
;
/* replay the saved token stream as if it were source text */
7422 begin_macro(fn
->func_str
, 1);
7424 cur_text_section
= text_section
;
7428 inline_generated
= 1;
7431 } while (inline_generated
);
7432 file
->line_num
= ln
;
/* Release the saved token strings of inline functions that were never
   referenced (used ones had their func_str consumed/cleared earlier --
   the guard condition at original line 7441 is elided in this extraction),
   then free the inline_fns dynarray itself. */
7435 ST_FUNC
void free_inline_functions(TCCState
*s
)
7438 /* free tokens of unused inline functions */
7439 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7440 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7442 tok_str_free(fn
->func_str
);
7444 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7447 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7448 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* NOTE(review): top-level declaration parser. Lossy extraction -- many
   statements (skip() calls, 'break'/'goto' targets, the function-body
   branch around 7602-7606) are elided; the embedded numbering gaps show
   where. The visible flow: parse base type, then loop over declarators,
   dispatching on function definition / typedef / parameter decl /
   variable definition. */
7449 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7454 AttributeDef ad
, adbase
;
/* C11 _Static_assert handling (condition check elided here) */
7457 if (tok
== TOK_STATIC_ASSERT
) {
7465 tcc_error("%s", get_tok_str(tok
, &tokc
));
7471 if (!parse_btype(&btype
, &adbase
)) {
7472 if (is_for_loop_init
)
7474 /* skip redundant ';' if not in old parameter decl scope */
7475 if (tok
== ';' && l
!= VT_CMP
) {
7481 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7482 /* global asm block */
7486 if (tok
>= TOK_UIDENT
) {
7487 /* special test for old K&R protos without explicit int
7488 type. Only accepted when defining global data */
7492 expect("declaration");
/* base type parsed but no declarator follows: diagnose useless
   anonymous struct/union declarations */
7497 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7498 int v
= btype
.ref
->v
;
7499 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7500 tcc_warning("unnamed struct/union that defines no instances");
7504 if (IS_ENUM(btype
.t
)) {
7509 while (1) { /* iterate thru each declaration */
7511 /* If the base type itself was an array type of unspecified
7512 size (like in 'typedef int arr[]; arr x = {1};') then
7513 we will overwrite the unknown size by the real one for
7514 this decl. We need to unshare the ref symbol holding
7516 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7517 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7520 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed declaration (guard elided) */
7524 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7525 printf("type = '%s'\n", buf
);
7528 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7529 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
7530 tcc_error("function without file scope cannot be static");
7532 /* if old style function prototype, we accept a
7535 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
/* recurse to parse the K&R parameter declaration list */
7536 decl0(VT_CMP
, 0, sym
);
7539 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7540 ad
.asm_label
= asm_label_instr();
7541 /* parse one last attribute list, after asm label */
7542 parse_attribute(&ad
);
7547 #ifdef TCC_TARGET_PE
7548 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7549 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7550 tcc_error("cannot have dll linkage with static or typedef");
7551 if (ad
.a
.dllimport
) {
7552 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7555 type
.t
|= VT_EXTERN
;
/* ---- function definition (body follows) ---- */
7561 tcc_error("cannot use local functions");
7562 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7563 expect("function definition");
7565 /* reject abstract declarators in function definition
7566 make old style params without decl have int type */
7568 while ((sym
= sym
->next
) != NULL
) {
7569 if (!(sym
->v
& ~SYM_FIELD
))
7570 expect("identifier");
/* VT_VOID marks a K&R parameter with no declaration: default int */
7571 if (sym
->type
.t
== VT_VOID
)
7572 sym
->type
= int_type
;
7575 /* XXX: cannot do better now: convert extern line to static inline */
7576 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7577 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7579 /* put function symbol */
7580 sym
= external_sym(v
, &type
, 0, &ad
);
7582 /* static inline functions are just recorded as a kind
7583 of macro. Their code will be emitted at the end of
7584 the compilation unit only if they are used */
7585 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7586 (VT_INLINE
| VT_STATIC
)) {
7587 struct InlineFunc
*fn
;
7588 const char *filename
;
7590 filename
= file
? file
->filename
: "";
/* InlineFunc ends in a flexible filename buffer; allocate both
   in one block (struct declares room for the terminating NUL) */
7591 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7592 strcpy(fn
->filename
, filename
);
7594 skip_or_save_block(&fn
->func_str
);
7595 dynarray_add(&tcc_state
->inline_fns
,
7596 &tcc_state
->nb_inline_fns
, fn
);
7598 /* compute text section */
7599 cur_text_section
= ad
.section
;
7600 if (!cur_text_section
)
7601 cur_text_section
= text_section
;
/* ---- old-style (K&R) parameter declaration: l == VT_CMP ---- */
7607 /* find parameter in function parameter list */
7608 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7609 if ((sym
->v
& ~SYM_FIELD
) == v
)
7611 tcc_error("declaration for parameter '%s' but no such parameter",
7612 get_tok_str(v
, NULL
));
7614 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7615 tcc_error("storage class specified for '%s'",
7616 get_tok_str(v
, NULL
));
7617 if (sym
->type
.t
!= VT_VOID
)
7618 tcc_error("redefinition of parameter '%s'",
7619 get_tok_str(v
, NULL
));
7620 convert_parameter_type(&type
);
7622 } else if (type
.t
& VT_TYPEDEF
) {
7623 /* save typedefed type */
7624 /* XXX: test storage specifiers ? */
7626 if (sym
&& sym
->sym_scope
== local_scope
) {
7627 if (!is_compatible_types(&sym
->type
, &type
)
7628 || !(sym
->type
.t
& VT_TYPEDEF
))
7629 tcc_error("incompatible redefinition of '%s'",
7630 get_tok_str(v
, NULL
));
7633 sym
= sym_push(v
, &type
, 0, 0);
7637 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7638 && !(type
.t
& VT_EXTERN
)) {
7639 tcc_error("declaration of void object");
/* ---- ordinary variable / prototype declaration ---- */
7642 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7643 /* external function definition */
7644 /* specific case for func_call attribute */
7646 } else if (!(type
.t
& VT_ARRAY
)) {
7647 /* not lvalue if array */
7648 r
|= lvalue_type(type
.t
);
7650 has_init
= (tok
== '=');
7651 if (has_init
&& (type
.t
& VT_VLA
))
7652 tcc_error("variable length array cannot be initialized");
7653 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
7654 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7655 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7656 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7657 /* external variable or function */
7658 /* NOTE: as GCC, uninitialized global static
7659 arrays of null size are considered as
7661 type
.t
|= VT_EXTERN
;
7662 sym
= external_sym(v
, &type
, r
, &ad
);
7663 if (ad
.alias_target
) {
7666 alias_target
= sym_find(ad
.alias_target
);
7667 esym
= elfsym(alias_target
);
7669 tcc_error("unsupported forward __alias__ attribute");
7670 /* Local statics have a scope until now (for
7671 warnings), remove it here. */
/* alias: point this symbol at the target's ELF section/value */
7673 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7676 if (type
.t
& VT_STATIC
)
7682 else if (l
== VT_CONST
)
7683 /* uninitialized global variables may be overridden */
7684 type
.t
|= VT_EXTERN
;
7685 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7689 if (is_for_loop_init
)
7701 static void decl(int l
)
7706 /* ------------------------------------------------------------------------- */