2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
28 anon_sym: anonymous symbol index
30 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
32 ST_DATA Sym
*sym_free_first
;
33 ST_DATA
void **sym_pools
;
34 ST_DATA
int nb_sym_pools
;
36 ST_DATA Sym
*global_stack
;
37 ST_DATA Sym
*local_stack
;
38 ST_DATA Sym
*define_stack
;
39 ST_DATA Sym
*global_label_stack
;
40 ST_DATA Sym
*local_label_stack
;
42 static Sym
*all_cleanups
, *pending_gotos
;
43 static int local_scope
;
45 static int in_generic
;
46 static int section_sym
;
48 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
50 ST_DATA
int const_wanted
; /* true if constant wanted */
51 ST_DATA
int nocode_wanted
; /* no code generation wanted */
52 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
53 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 /* Automagical code suppression ----> */
56 #define CODE_OFF() (nocode_wanted |= 0x20000000)
57 #define CODE_ON() (nocode_wanted &= ~0x20000000)
59 /* Clear 'nocode_wanted' at label if it was used */
60 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
61 static int gind(void) { CODE_ON(); return ind
; }
63 /* Set 'nocode_wanted' after unconditional jumps */
/* Emit an unconditional jump to the known address 't'; everything that
   would follow is unreachable, so suppress further code output. */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit a forward unconditional jump chained onto 't', suppress code
   output for the unreachable region, and return the new chain head. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
67 /* These are #undef'd at the end of this file */
68 #define gjmp_addr gjmp_addr_acs
72 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
73 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
74 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
76 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
77 ST_DATA
const char *funcname
;
80 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
82 ST_DATA
struct switch_t
{
86 } **p
; int n
; /* list of case ranges */
87 int def_sym
; /* default symbol */
90 } *cur_switch
; /* current switch */
92 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
93 /*list of temporary local variables on the stack in current function. */
94 ST_DATA
struct temp_local_variable
{
95 int location
; //offset on stack. Svalue.c.i
98 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
99 short nb_temp_local_vars
;
101 static struct scope
{
103 struct { int loc
, num
; } vla
;
104 struct { Sym
*s
; int n
; } cl
;
107 } *cur_scope
, *loop_scope
, *root_scope
;
109 /* ------------------------------------------------------------------------- */
111 static void gen_cast(CType
*type
);
112 static void gen_cast_s(int t
);
113 static inline CType
*pointed_type(CType
*type
);
114 static int is_compatible_types(CType
*type1
, CType
*type2
);
115 static int parse_btype(CType
*type
, AttributeDef
*ad
);
116 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
117 static void parse_expr_type(CType
*type
);
118 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
119 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
120 static void block(int is_expr
);
121 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
122 static void decl(int l
);
123 static int decl0(int l
, int is_for_loop_init
, Sym
*);
124 static void expr_eq(void);
125 static void vla_runtime_type_size(CType
*type
, int *a
);
126 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
127 static inline int64_t expr_const64(void);
128 static void vpush64(int ty
, unsigned long long v
);
129 static void vpush(CType
*type
);
130 static int gvtst(int inv
, int t
);
131 static void gen_inline_functions(TCCState
*s
);
132 static void skip_or_save_block(TokenString
**str
);
133 static void gv_dup(void);
134 static int get_temp_local_var(int size
,int align
);
135 static void clear_temp_local_var_list();
137 ST_INLN
int is_float(int t
)
141 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
144 /* we use our own 'finite' function to avoid potential problems with
145 non standard math libs */
146 /* XXX: endianness dependent */
147 ST_FUNC
int ieee_finite(double d
)
150 memcpy(p
, &d
, sizeof(double));
151 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
154 /* compiling intel long double natively */
155 #if (defined __i386__ || defined __x86_64__) \
156 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
157 # define TCC_IS_NATIVE_387
160 ST_FUNC
void test_lvalue(void)
162 if (!(vtop
->r
& VT_LVAL
))
166 ST_FUNC
void check_vstack(void)
169 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
172 /* ------------------------------------------------------------------------- */
173 /* vstack debugging aid */
176 void pv (const char *lbl
, int a
, int b
)
179 for (i
= a
; i
< a
+ b
; ++i
) {
180 SValue
*p
= &vtop
[-i
];
181 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
182 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
187 /* ------------------------------------------------------------------------- */
188 /* start of translation unit info */
189 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
194 /* file info: full path + filename */
195 section_sym
= put_elf_sym(symtab_section
, 0, 0,
196 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
197 text_section
->sh_num
, NULL
);
198 getcwd(buf
, sizeof(buf
));
200 normalize_slashes(buf
);
202 pstrcat(buf
, sizeof(buf
), "/");
203 put_stabs_r(buf
, N_SO
, 0, 0,
204 text_section
->data_offset
, text_section
, section_sym
);
205 put_stabs_r(file
->filename
, N_SO
, 0, 0,
206 text_section
->data_offset
, text_section
, section_sym
);
211 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
212 symbols can be safely used */
213 put_elf_sym(symtab_section
, 0, 0,
214 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
215 SHN_ABS
, file
->filename
);
218 /* put end of translation unit info */
219 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
223 put_stabs_r(NULL
, N_SO
, 0, 0,
224 text_section
->data_offset
, text_section
, section_sym
);
228 /* generate line number info */
229 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
233 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
234 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
236 last_line_num
= file
->line_num
;
240 /* put function symbol */
241 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
249 /* XXX: we put here a dummy type */
250 snprintf(buf
, sizeof(buf
), "%s:%c1",
251 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
252 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
253 cur_text_section
, sym
->c
);
254 /* //gr gdb wants a line at the function */
255 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
261 /* put function size */
262 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
266 put_stabn(N_FUN
, 0, 0, size
);
269 /* ------------------------------------------------------------------------- */
270 ST_FUNC
int tccgen_compile(TCCState
*s1
)
272 cur_text_section
= NULL
;
274 anon_sym
= SYM_FIRST_ANOM
;
277 nocode_wanted
= 0x80000000;
280 /* define some often used types */
282 char_pointer_type
.t
= VT_BYTE
;
283 mk_pointer(&char_pointer_type
);
285 size_type
.t
= VT_INT
| VT_UNSIGNED
;
286 ptrdiff_type
.t
= VT_INT
;
288 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
289 ptrdiff_type
.t
= VT_LLONG
;
291 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
292 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
294 func_old_type
.t
= VT_FUNC
;
295 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
296 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
297 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
301 #ifdef TCC_TARGET_ARM
306 printf("%s: **** new file\n", file
->filename
);
309 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
312 gen_inline_functions(s1
);
314 /* end of translation unit info */
319 /* ------------------------------------------------------------------------- */
320 ST_FUNC ElfSym
*elfsym(Sym
*s
)
324 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
327 /* apply storage attributes to Elf symbol */
328 ST_FUNC
void update_storage(Sym
*sym
)
331 int sym_bind
, old_sym_bind
;
337 if (sym
->a
.visibility
)
338 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
341 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
342 sym_bind
= STB_LOCAL
;
343 else if (sym
->a
.weak
)
346 sym_bind
= STB_GLOBAL
;
347 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
348 if (sym_bind
!= old_sym_bind
) {
349 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
353 if (sym
->a
.dllimport
)
354 esym
->st_other
|= ST_PE_IMPORT
;
355 if (sym
->a
.dllexport
)
356 esym
->st_other
|= ST_PE_EXPORT
;
360 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
361 get_tok_str(sym
->v
, NULL
),
362 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
370 /* ------------------------------------------------------------------------- */
371 /* update sym->c so that it points to an external symbol in section
372 'section' with value 'value' */
374 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
375 addr_t value
, unsigned long size
,
376 int can_add_underscore
)
378 int sym_type
, sym_bind
, info
, other
, t
;
382 #ifdef CONFIG_TCC_BCHECK
387 name
= get_tok_str(sym
->v
, NULL
);
388 #ifdef CONFIG_TCC_BCHECK
389 if (tcc_state
->do_bounds_check
) {
390 /* XXX: avoid doing that for statics ? */
391 /* if bound checking is activated, we change some function
392 names by adding the "__bound" prefix */
395 /* XXX: we rely only on malloc hooks */
408 strcpy(buf
, "__bound_");
416 if ((t
& VT_BTYPE
) == VT_FUNC
) {
418 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
419 sym_type
= STT_NOTYPE
;
421 sym_type
= STT_OBJECT
;
423 if (t
& (VT_STATIC
| VT_INLINE
))
424 sym_bind
= STB_LOCAL
;
426 sym_bind
= STB_GLOBAL
;
429 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
430 Sym
*ref
= sym
->type
.ref
;
431 if (ref
->a
.nodecorate
) {
432 can_add_underscore
= 0;
434 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
435 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
437 other
|= ST_PE_STDCALL
;
438 can_add_underscore
= 0;
442 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
444 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
448 name
= get_tok_str(sym
->asm_label
, NULL
);
449 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
450 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
453 esym
->st_value
= value
;
454 esym
->st_size
= size
;
455 esym
->st_shndx
= sh_num
;
460 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
461 addr_t value
, unsigned long size
)
463 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
464 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
467 /* add a new relocation entry to symbol 'sym' in section 's' */
468 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
473 if (nocode_wanted
&& s
== cur_text_section
)
478 put_extern_sym(sym
, NULL
, 0, 0);
482 /* now we can add ELF relocation info */
483 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
487 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
489 greloca(s
, sym
, offset
, type
, 0);
493 /* ------------------------------------------------------------------------- */
494 /* symbol allocator */
495 static Sym
*__sym_malloc(void)
497 Sym
*sym_pool
, *sym
, *last_sym
;
500 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
501 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
503 last_sym
= sym_free_first
;
505 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
506 sym
->next
= last_sym
;
510 sym_free_first
= last_sym
;
514 static inline Sym
*sym_malloc(void)
518 sym
= sym_free_first
;
520 sym
= __sym_malloc();
521 sym_free_first
= sym
->next
;
524 sym
= tcc_malloc(sizeof(Sym
));
529 ST_INLN
void sym_free(Sym
*sym
)
532 sym
->next
= sym_free_first
;
533 sym_free_first
= sym
;
539 /* push, without hashing */
540 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
545 memset(s
, 0, sizeof *s
);
555 /* find a symbol and return its associated structure. 's' is the top
556 of the symbol stack */
557 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
569 /* structure lookup */
570 ST_INLN Sym
*struct_find(int v
)
573 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
575 return table_ident
[v
]->sym_struct
;
578 /* find an identifier */
579 ST_INLN Sym
*sym_find(int v
)
582 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
584 return table_ident
[v
]->sym_identifier
;
587 static int sym_scope(Sym
*s
)
589 if (IS_ENUM_VAL (s
->type
.t
))
590 return s
->type
.ref
->sym_scope
;
595 /* push a given symbol on the symbol stack */
596 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
605 s
= sym_push2(ps
, v
, type
->t
, c
);
606 s
->type
.ref
= type
->ref
;
608 /* don't record fields or anonymous symbols */
610 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
611 /* record symbol in token array */
612 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
614 ps
= &ts
->sym_struct
;
616 ps
= &ts
->sym_identifier
;
619 s
->sym_scope
= local_scope
;
620 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
621 tcc_error("redeclaration of '%s'",
622 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
627 /* push a global identifier */
628 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
631 s
= sym_push2(&global_stack
, v
, t
, c
);
632 s
->r
= VT_CONST
| VT_SYM
;
633 /* don't record anonymous symbol */
634 if (v
< SYM_FIRST_ANOM
) {
635 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
636 /* modify the top most local identifier, so that sym_identifier will
637 point to 's' when popped; happens when called from inline asm */
638 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
639 ps
= &(*ps
)->prev_tok
;
646 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
647 pop them yet from the list, but do remove them from the token array. */
648 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
658 /* remove symbol in token array */
660 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
661 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
663 ps
= &ts
->sym_struct
;
665 ps
= &ts
->sym_identifier
;
676 /* ------------------------------------------------------------------------- */
677 static void vcheck_cmp(void)
679 /* cannot let cpu flags if other instruction are generated. Also
680 avoid leaving VT_JMP anywhere except on the top of the stack
681 because it would complicate the code generator.
683 Don't do this when nocode_wanted. vtop might come from
684 !nocode_wanted regions (see 88_codeopt.c) and transforming
685 it to a register without actually generating code is wrong
686 as their value might still be used for real. All values
687 we push under nocode_wanted will eventually be popped
688 again, so that the VT_CMP/VT_JMP value will be in vtop
689 when code is unsuppressed again. */
691 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
695 static void vsetc(CType
*type
, int r
, CValue
*vc
)
697 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
698 tcc_error("memory full (vstack)");
708 ST_FUNC
void vswap(void)
718 /* pop stack value */
719 ST_FUNC
void vpop(void)
722 v
= vtop
->r
& VT_VALMASK
;
723 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
724 /* for x86, we need to pop the FP stack */
726 o(0xd8dd); /* fstp %st(0) */
730 /* need to put correct jump if && or || without test */
737 /* push constant of type "type" with useless value */
738 ST_FUNC
void vpush(CType
*type
)
740 vset(type
, VT_CONST
, 0);
743 /* push integer constant */
744 ST_FUNC
void vpushi(int v
)
748 vsetc(&int_type
, VT_CONST
, &cval
);
751 /* push a pointer sized constant */
752 static void vpushs(addr_t v
)
756 vsetc(&size_type
, VT_CONST
, &cval
);
759 /* push arbitrary 64bit constant */
760 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
767 vsetc(&ctype
, VT_CONST
, &cval
);
770 /* push long long constant */
771 static inline void vpushll(long long v
)
773 vpush64(VT_LLONG
, v
);
776 ST_FUNC
void vset(CType
*type
, int r
, int v
)
781 vsetc(type
, r
, &cval
);
784 static void vseti(int r
, int v
)
792 ST_FUNC
void vpushv(SValue
*v
)
794 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
795 tcc_error("memory full (vstack)");
800 static void vdup(void)
805 /* rotate n first stack elements to the bottom
806 I1 ... In -> I2 ... In I1 [top is right]
808 ST_FUNC
void vrotb(int n
)
820 /* rotate the n elements before entry e towards the top
821 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
823 ST_FUNC
void vrote(SValue
*e
, int n
)
830 for(i
= 0;i
< n
- 1; i
++)
835 /* rotate n first stack elements to the top
836 I1 ... In -> In I1 ... I(n-1) [top is right]
838 ST_FUNC
void vrott(int n
)
843 /* ------------------------------------------------------------------------- */
844 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
846 /* called from generators to set the result from relational ops */
847 ST_FUNC
void vset_VT_CMP(int op
)
855 /* called once before asking generators to load VT_CMP to a register */
856 static void vset_VT_JMP(void)
858 int op
= vtop
->cmp_op
;
859 if (vtop
->jtrue
|| vtop
->jfalse
) {
860 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
861 int inv
= op
& (op
< 2); /* small optimization */
862 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
864 /* otherwise convert flags (rsp. 0/1) to register */
866 if (op
< 2) /* doesn't seem to happen */
871 /* Set CPU Flags, doesn't yet jump */
872 static void gvtst_set(int inv
, int t
)
875 if (vtop
->r
!= VT_CMP
) {
878 if (vtop
->r
== VT_CMP
) /* must be VT_CONST otherwise */
880 else if (vtop
->r
== VT_CONST
)
881 vset_VT_CMP(vtop
->c
.i
!= 0);
885 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
886 *p
= gjmp_append(*p
, t
);
889 /* Generate value test
891 * Generate a test for any value (jump, comparison and integers) */
892 static int gvtst(int inv
, int t
)
898 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
903 /* jump to the wanted target */
905 t
= gjmp_cond(op
^ inv
, t
);
908 /* resolve complementary jumps to here */
915 /* ------------------------------------------------------------------------- */
916 /* push a symbol value of TYPE */
917 static inline void vpushsym(CType
*type
, Sym
*sym
)
921 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
925 /* Return a static symbol pointing to a section */
926 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
932 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
933 sym
->type
.t
|= VT_STATIC
;
934 put_extern_sym(sym
, sec
, offset
, size
);
938 /* push a reference to a section offset by adding a dummy symbol */
939 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
941 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
944 /* define a new external reference to a symbol 'v' of type 'u' */
945 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
951 /* push forward reference */
952 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
953 s
->type
.ref
= type
->ref
;
954 } else if (IS_ASM_SYM(s
)) {
955 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
956 s
->type
.ref
= type
->ref
;
962 /* Merge symbol attributes. */
963 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
965 if (sa1
->aligned
&& !sa
->aligned
)
966 sa
->aligned
= sa1
->aligned
;
967 sa
->packed
|= sa1
->packed
;
968 sa
->weak
|= sa1
->weak
;
969 if (sa1
->visibility
!= STV_DEFAULT
) {
970 int vis
= sa
->visibility
;
971 if (vis
== STV_DEFAULT
972 || vis
> sa1
->visibility
)
973 vis
= sa1
->visibility
;
974 sa
->visibility
= vis
;
976 sa
->dllexport
|= sa1
->dllexport
;
977 sa
->nodecorate
|= sa1
->nodecorate
;
978 sa
->dllimport
|= sa1
->dllimport
;
981 /* Merge function attributes. */
982 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
984 if (fa1
->func_call
&& !fa
->func_call
)
985 fa
->func_call
= fa1
->func_call
;
986 if (fa1
->func_type
&& !fa
->func_type
)
987 fa
->func_type
= fa1
->func_type
;
988 if (fa1
->func_args
&& !fa
->func_args
)
989 fa
->func_args
= fa1
->func_args
;
992 /* Merge attributes. */
993 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
995 merge_symattr(&ad
->a
, &ad1
->a
);
996 merge_funcattr(&ad
->f
, &ad1
->f
);
999 ad
->section
= ad1
->section
;
1000 if (ad1
->alias_target
)
1001 ad
->alias_target
= ad1
->alias_target
;
1003 ad
->asm_label
= ad1
->asm_label
;
1005 ad
->attr_mode
= ad1
->attr_mode
;
1008 /* Merge some type attributes. */
1009 static void patch_type(Sym
*sym
, CType
*type
)
1011 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1012 if (!(sym
->type
.t
& VT_EXTERN
))
1013 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1014 sym
->type
.t
&= ~VT_EXTERN
;
1017 if (IS_ASM_SYM(sym
)) {
1018 /* stay static if both are static */
1019 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1020 sym
->type
.ref
= type
->ref
;
1023 if (!is_compatible_types(&sym
->type
, type
)) {
1024 tcc_error("incompatible types for redefinition of '%s'",
1025 get_tok_str(sym
->v
, NULL
));
1027 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1028 int static_proto
= sym
->type
.t
& VT_STATIC
;
1029 /* warn if static follows non-static function declaration */
1030 if ((type
->t
& VT_STATIC
) && !static_proto
1031 /* XXX this test for inline shouldn't be here. Until we
1032 implement gnu-inline mode again it silences a warning for
1033 mingw caused by our workarounds. */
1034 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1035 tcc_warning("static storage ignored for redefinition of '%s'",
1036 get_tok_str(sym
->v
, NULL
));
1038 /* set 'inline' if both agree or if one has static */
1039 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1040 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1041 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1042 static_proto
|= VT_INLINE
;
1045 if (0 == (type
->t
& VT_EXTERN
)) {
1046 /* put complete type, use static from prototype */
1047 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1048 sym
->type
.ref
= type
->ref
;
1050 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1053 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1054 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1055 sym
->type
.ref
= type
->ref
;
1059 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1060 /* set array size if it was omitted in extern declaration */
1061 sym
->type
.ref
->c
= type
->ref
->c
;
1063 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1064 tcc_warning("storage mismatch for redefinition of '%s'",
1065 get_tok_str(sym
->v
, NULL
));
1069 /* Merge some storage attributes. */
1070 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1073 patch_type(sym
, type
);
1075 #ifdef TCC_TARGET_PE
1076 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1077 tcc_error("incompatible dll linkage for redefinition of '%s'",
1078 get_tok_str(sym
->v
, NULL
));
1080 merge_symattr(&sym
->a
, &ad
->a
);
1082 sym
->asm_label
= ad
->asm_label
;
1083 update_storage(sym
);
1086 /* copy sym to other stack */
1087 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1090 s
= sym_malloc(), *s
= *s0
;
1091 s
->prev
= *ps
, *ps
= s
;
1092 if (s
->v
< SYM_FIRST_ANOM
) {
1093 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1094 s
->prev_tok
= *ps
, *ps
= s
;
1099 /* copy a list of syms */
1100 static void sym_copy_ref(Sym
*s0
, Sym
**ps
)
1102 Sym
*s
, **sp
= &s0
->type
.ref
;
1103 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
)
1104 sp
= &(*sp
= sym_copy(s
, ps
))->next
;
1107 /* define a new external reference to a symbol 'v' */
1108 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1112 /* look for global symbol */
1114 while (s
&& s
->sym_scope
)
1118 /* push forward reference */
1119 s
= global_identifier_push(v
, type
->t
, 0);
1122 s
->asm_label
= ad
->asm_label
;
1123 s
->type
.ref
= type
->ref
;
1124 bt
= s
->type
.t
& (VT_BTYPE
|VT_ARRAY
);
1125 /* copy type to the global stack also */
1126 if (local_scope
&& (bt
== VT_FUNC
|| (bt
& VT_ARRAY
)))
1127 sym_copy_ref(s
, &global_stack
);
1129 patch_storage(s
, ad
, type
);
1130 bt
= s
->type
.t
& VT_BTYPE
;
1132 /* push variables to local scope if any */
1133 if (local_stack
&& bt
!= VT_FUNC
)
1134 s
= sym_copy(s
, &local_stack
);
1138 /* push a reference to global symbol v */
1139 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1141 vpushsym(type
, external_global_sym(v
, type
));
1144 /* save registers up to (vtop - n) stack entry */
1145 ST_FUNC
void save_regs(int n
)
1148 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1152 /* save r to the memory stack, and mark it as being free */
1153 ST_FUNC
void save_reg(int r
)
1155 save_reg_upstack(r
, 0);
1158 /* save r to the memory stack, and mark it as being free,
1159 if seen up to (vtop - n) stack entry */
1160 ST_FUNC
void save_reg_upstack(int r
, int n
)
1162 int l
, saved
, size
, align
;
1166 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1171 /* modify all stack values */
1174 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1175 if ((p
->r
& VT_VALMASK
) == r
||
1176 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
1177 /* must save value on stack if not already done */
1179 /* NOTE: must reload 'r' because r might be equal to r2 */
1180 r
= p
->r
& VT_VALMASK
;
1181 /* store register in the stack */
1183 if ((p
->r
& VT_LVAL
) ||
1184 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
1186 type
= &char_pointer_type
;
1190 size
= type_size(type
, &align
);
1191 l
=get_temp_local_var(size
,align
);
1192 sv
.type
.t
= type
->t
;
1193 sv
.r
= VT_LOCAL
| VT_LVAL
;
1196 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1197 /* x86 specific: need to pop fp register ST0 if saved */
1198 if (r
== TREG_ST0
) {
1199 o(0xd8dd); /* fstp %st(0) */
1203 /* special long long case */
1204 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
1211 /* mark that stack entry as being saved on the stack */
1212 if (p
->r
& VT_LVAL
) {
1213 /* also clear the bounded flag because the
1214 relocation address of the function was stored in
1216 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1218 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1226 #ifdef TCC_TARGET_ARM
1227 /* find a register of class 'rc2' with at most one reference on stack.
1228 * If none, call get_reg(rc) */
1229 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1234 for(r
=0;r
<NB_REGS
;r
++) {
1235 if (reg_classes
[r
] & rc2
) {
1238 for(p
= vstack
; p
<= vtop
; p
++) {
1239 if ((p
->r
& VT_VALMASK
) == r
||
1240 (p
->r2
& VT_VALMASK
) == r
)
1251 /* find a free register of class 'rc'. If none, save one register */
1252 ST_FUNC
int get_reg(int rc
)
1257 /* find a free register */
1258 for(r
=0;r
<NB_REGS
;r
++) {
1259 if (reg_classes
[r
] & rc
) {
1262 for(p
=vstack
;p
<=vtop
;p
++) {
1263 if ((p
->r
& VT_VALMASK
) == r
||
1264 (p
->r2
& VT_VALMASK
) == r
)
1272 /* no register left : free the first one on the stack (VERY
1273 IMPORTANT to start from the bottom to ensure that we don't
1274 spill registers used in gen_opi()) */
1275 for(p
=vstack
;p
<=vtop
;p
++) {
1276 /* look at second register (if long long) */
1277 r
= p
->r2
& VT_VALMASK
;
1278 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1280 r
= p
->r
& VT_VALMASK
;
1281 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1287 /* Should never comes here */
1291 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1292 static int get_temp_local_var(int size
,int align
){
1294 struct temp_local_variable
*temp_var
;
1301 for(i
=0;i
<nb_temp_local_vars
;i
++){
1302 temp_var
=&arr_temp_local_vars
[i
];
1303 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1306 /*check if temp_var is free*/
1308 for(p
=vstack
;p
<=vtop
;p
++) {
1310 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1311 if(p
->c
.i
==temp_var
->location
){
1318 found_var
=temp_var
->location
;
1324 loc
= (loc
- size
) & -align
;
1325 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1326 temp_var
=&arr_temp_local_vars
[i
];
1327 temp_var
->location
=loc
;
1328 temp_var
->size
=size
;
1329 temp_var
->align
=align
;
1330 nb_temp_local_vars
++;
1337 static void clear_temp_local_var_list(){
1338 nb_temp_local_vars
=0;
1341 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1343 static void move_reg(int r
, int s
, int t
)
1357 /* get address of vtop (vtop MUST BE an lvalue) */
1358 ST_FUNC
void gaddrof(void)
1360 vtop
->r
&= ~VT_LVAL
;
1361 /* tricky: if saved lvalue, then we can go back to lvalue */
1362 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1363 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1368 #ifdef CONFIG_TCC_BCHECK
1369 /* generate lvalue bound code */
1370 static void gbound(void)
1375 vtop
->r
&= ~VT_MUSTBOUND
;
1376 /* if lvalue, then use checking code before dereferencing */
1377 if (vtop
->r
& VT_LVAL
) {
1378 /* if not VT_BOUNDED value, then make one */
1379 if (!(vtop
->r
& VT_BOUNDED
)) {
1380 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1381 /* must save type because we must set it to int to get pointer */
1383 vtop
->type
.t
= VT_PTR
;
1386 gen_bounded_ptr_add();
1387 vtop
->r
|= lval_type
;
1390 /* then check for dereferencing */
1391 gen_bounded_ptr_deref();
1396 static void incr_bf_adr(int o
)
1398 vtop
->type
= char_pointer_type
;
1402 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1403 | (VT_BYTE
|VT_UNSIGNED
);
1404 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1405 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1408 /* single-byte load mode for packed or otherwise unaligned bitfields */
1409 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1412 save_reg_upstack(vtop
->r
, 1);
1413 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1414 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1423 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1425 vpushi((1 << n
) - 1), gen_op('&');
1428 vpushi(bits
), gen_op(TOK_SHL
);
1431 bits
+= n
, bit_size
-= n
, o
= 1;
1434 if (!(type
->t
& VT_UNSIGNED
)) {
1435 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1436 vpushi(n
), gen_op(TOK_SHL
);
1437 vpushi(n
), gen_op(TOK_SAR
);
1441 /* single-byte store mode for packed or otherwise unaligned bitfields */
1442 static void store_packed_bf(int bit_pos
, int bit_size
)
1444 int bits
, n
, o
, m
, c
;
1446 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1448 save_reg_upstack(vtop
->r
, 1);
1449 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1451 incr_bf_adr(o
); // X B
1453 c
? vdup() : gv_dup(); // B V X
1456 vpushi(bits
), gen_op(TOK_SHR
);
1458 vpushi(bit_pos
), gen_op(TOK_SHL
);
1463 m
= ((1 << n
) - 1) << bit_pos
;
1464 vpushi(m
), gen_op('&'); // X B V1
1465 vpushv(vtop
-1); // X B V1 B
1466 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1467 gen_op('&'); // X B V1 B1
1468 gen_op('|'); // X B V2
1470 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1471 vstore(), vpop(); // X B
1472 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1477 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1480 if (0 == sv
->type
.ref
)
1482 t
= sv
->type
.ref
->auxtype
;
1483 if (t
!= -1 && t
!= VT_STRUCT
) {
1484 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1485 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1490 /* store vtop a register belonging to class 'rc'. lvalues are
1491 converted to values. Cannot be used if cannot be converted to
1492 register value (such as structures). */
1493 ST_FUNC
int gv(int rc
)
1495 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1497 /* NOTE: get_reg can modify vstack[] */
1498 if (vtop
->type
.t
& VT_BITFIELD
) {
1501 bit_pos
= BIT_POS(vtop
->type
.t
);
1502 bit_size
= BIT_SIZE(vtop
->type
.t
);
1503 /* remove bit field info to avoid loops */
1504 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1507 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1508 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1509 type
.t
|= VT_UNSIGNED
;
1511 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1513 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1518 if (r
== VT_STRUCT
) {
1519 load_packed_bf(&type
, bit_pos
, bit_size
);
1521 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1522 /* cast to int to propagate signedness in following ops */
1524 /* generate shifts */
1525 vpushi(bits
- (bit_pos
+ bit_size
));
1527 vpushi(bits
- bit_size
);
1528 /* NOTE: transformed to SHR if unsigned */
1533 if (is_float(vtop
->type
.t
) &&
1534 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1535 unsigned long offset
;
1536 /* CPUs usually cannot use float constants, so we store them
1537 generically in data segment */
1538 size
= type_size(&vtop
->type
, &align
);
1540 size
= 0, align
= 1;
1541 offset
= section_add(data_section
, size
, align
);
1542 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1544 init_putv(&vtop
->type
, data_section
, offset
);
1547 #ifdef CONFIG_TCC_BCHECK
1548 if (vtop
->r
& VT_MUSTBOUND
)
1551 #ifdef TCC_TARGET_RISCV64
1553 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
&& rc
== RC_FLOAT
)
1557 r
= vtop
->r
& VT_VALMASK
;
1558 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1559 #ifndef TCC_TARGET_ARM64
1560 #ifndef TCC_TARGET_RISCV64 /* XXX: remove the whole LRET/QRET class */
1563 #ifdef TCC_TARGET_X86_64
1564 else if (rc
== RC_FRET
)
1569 /* need to reload if:
1571 - lvalue (need to dereference pointer)
1572 - already a register, but not in the right class */
1574 || (vtop
->r
& VT_LVAL
)
1575 || !(reg_classes
[r
] & rc
)
1576 #ifdef TCC_TARGET_RISCV64
1577 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& (vtop
->r2
>= NB_REGS
|| !(reg_classes
[vtop
->r2
] & rc2
)))
1578 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
&& (vtop
->r2
>= NB_REGS
|| !(reg_classes
[vtop
->r2
] & rc2
)))
1580 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1581 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1583 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1588 #ifdef TCC_TARGET_RISCV64
1589 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
)) {
1590 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= VT_LLONG
;
1592 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1593 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1595 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1596 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1597 unsigned long long ll
;
1599 int r2
, original_type
;
1600 original_type
= vtop
->type
.t
;
1601 /* two register type load : expand to two words
1604 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1607 vtop
->c
.i
= ll
; /* first word */
1609 vtop
->r
= r
; /* save register value */
1610 vpushi(ll
>> 32); /* second word */
1613 if (vtop
->r
& VT_LVAL
) {
1614 /* We do not want to modifier the long long
1615 pointer here, so the safest (and less
1616 efficient) is to save all the other registers
1617 in the stack. XXX: totally inefficient. */
1621 /* lvalue_save: save only if used further down the stack */
1622 save_reg_upstack(vtop
->r
, 1);
1624 /* load from memory */
1625 vtop
->type
.t
= load_type
;
1628 vtop
[-1].r
= r
; /* save register value */
1629 /* increment pointer to get second word */
1630 vtop
->type
.t
= addr_type
;
1635 vtop
->type
.t
= load_type
;
1637 /* move registers */
1640 vtop
[-1].r
= r
; /* save register value */
1641 vtop
->r
= vtop
[-1].r2
;
1643 /* Allocate second register. Here we rely on the fact that
1644 get_reg() tries first to free r2 of an SValue. */
1648 /* write second register */
1650 vtop
->type
.t
= original_type
;
1651 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1653 /* lvalue of scalar type : need to use lvalue type
1654 because of possible cast */
1657 /* compute memory access type */
1658 if (vtop
->r
& VT_LVAL_BYTE
)
1660 else if (vtop
->r
& VT_LVAL_SHORT
)
1662 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1666 /* restore wanted type */
1669 if (vtop
->r
== VT_CMP
)
1671 /* one register type load */
1676 #ifdef TCC_TARGET_C67
1677 /* uses register pairs for doubles */
1678 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1685 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1686 ST_FUNC
void gv2(int rc1
, int rc2
)
1688 /* generate more generic register first. But VT_JMP or VT_CMP
1689 values must be generated first in all cases to avoid possible
1691 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1696 /* test if reload is needed for first register */
1697 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1707 /* test if reload is needed for first register */
1708 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1714 #ifndef TCC_TARGET_ARM64
1715 /* wrapper around RC_FRET to return a register by type */
1716 static int rc_fret(int t
)
1718 #ifdef TCC_TARGET_X86_64
1719 if (t
== VT_LDOUBLE
) {
1722 #elif defined TCC_TARGET_RISCV64
1723 if (t
== VT_LDOUBLE
)
1730 /* wrapper around REG_FRET to return a register by type */
1731 static int reg_fret(int t
)
1733 #ifdef TCC_TARGET_X86_64
1734 if (t
== VT_LDOUBLE
) {
1737 #elif defined TCC_TARGET_RISCV64
1738 if (t
== VT_LDOUBLE
)
1745 /* expand 64bit on stack in two ints */
1746 ST_FUNC
void lexpand(void)
1749 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1750 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1751 if (v
== VT_CONST
) {
1754 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1760 vtop
[0].r
= vtop
[-1].r2
;
1761 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1763 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1768 /* build a long long from two ints */
1769 static void lbuild(int t
)
1771 gv2(RC_INT
, RC_INT
);
1772 vtop
[-1].r2
= vtop
[0].r
;
1773 vtop
[-1].type
.t
= t
;
1778 /* convert stack entry to register and duplicate its value in another
1780 static void gv_dup(void)
1787 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1788 if (t
& VT_BITFIELD
) {
1798 /* stack: H L L1 H1 */
1808 /* duplicate value */
1813 #ifdef TCC_TARGET_X86_64
1814 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1817 #elif defined TCC_TARGET_RISCV64
1818 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
1827 load(r1
, &sv
); /* move r to r1 */
1829 /* duplicates value */
1836 /* generate CPU independent (unsigned) long long operations */
1837 static void gen_opl(int op
)
1839 int t
, a
, b
, op1
, c
, i
;
1841 unsigned short reg_iret
= REG_IRET
;
1842 unsigned short reg_lret
= REG_LRET
;
1848 func
= TOK___divdi3
;
1851 func
= TOK___udivdi3
;
1854 func
= TOK___moddi3
;
1857 func
= TOK___umoddi3
;
1864 /* call generic long long function */
1865 vpush_global_sym(&func_old_type
, func
);
1870 vtop
->r2
= reg_lret
;
1878 //pv("gen_opl A",0,2);
1884 /* stack: L1 H1 L2 H2 */
1889 vtop
[-2] = vtop
[-3];
1892 /* stack: H1 H2 L1 L2 */
1893 //pv("gen_opl B",0,4);
1899 /* stack: H1 H2 L1 L2 ML MH */
1902 /* stack: ML MH H1 H2 L1 L2 */
1906 /* stack: ML MH H1 L2 H2 L1 */
1911 /* stack: ML MH M1 M2 */
1914 } else if (op
== '+' || op
== '-') {
1915 /* XXX: add non carry method too (for MIPS or alpha) */
1921 /* stack: H1 H2 (L1 op L2) */
1924 gen_op(op1
+ 1); /* TOK_xxxC2 */
1927 /* stack: H1 H2 (L1 op L2) */
1930 /* stack: (L1 op L2) H1 H2 */
1932 /* stack: (L1 op L2) (H1 op H2) */
1940 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1941 t
= vtop
[-1].type
.t
;
1945 /* stack: L H shift */
1947 /* constant: simpler */
1948 /* NOTE: all comments are for SHL. the other cases are
1949 done by swapping words */
1960 if (op
!= TOK_SAR
) {
1993 /* XXX: should provide a faster fallback on x86 ? */
1996 func
= TOK___ashrdi3
;
1999 func
= TOK___lshrdi3
;
2002 func
= TOK___ashldi3
;
2008 /* compare operations */
2014 /* stack: L1 H1 L2 H2 */
2016 vtop
[-1] = vtop
[-2];
2018 /* stack: L1 L2 H1 H2 */
2022 /* when values are equal, we need to compare low words. since
2023 the jump is inverted, we invert the test too. */
2026 else if (op1
== TOK_GT
)
2028 else if (op1
== TOK_ULT
)
2030 else if (op1
== TOK_UGT
)
2040 /* generate non equal test */
2042 vset_VT_CMP(TOK_NE
);
2046 /* compare low. Always unsigned */
2050 else if (op1
== TOK_LE
)
2052 else if (op1
== TOK_GT
)
2054 else if (op1
== TOK_GE
)
2057 #if 0//def TCC_TARGET_I386
2058 if (op
== TOK_NE
) { gsym(b
); break; }
2059 if (op
== TOK_EQ
) { gsym(a
); break; }
2068 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
2070 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
2071 return (a
^ b
) >> 63 ? -x
: x
;
2074 static int gen_opic_lt(uint64_t a
, uint64_t b
)
2076 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
2079 /* handle integer constant optimizations and various machine
2081 static void gen_opic(int op
)
2083 SValue
*v1
= vtop
- 1;
2085 int t1
= v1
->type
.t
& VT_BTYPE
;
2086 int t2
= v2
->type
.t
& VT_BTYPE
;
2087 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2088 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2089 uint64_t l1
= c1
? v1
->c
.i
: 0;
2090 uint64_t l2
= c2
? v2
->c
.i
: 0;
2091 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2093 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2094 l1
= ((uint32_t)l1
|
2095 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2096 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2097 l2
= ((uint32_t)l2
|
2098 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2102 case '+': l1
+= l2
; break;
2103 case '-': l1
-= l2
; break;
2104 case '&': l1
&= l2
; break;
2105 case '^': l1
^= l2
; break;
2106 case '|': l1
|= l2
; break;
2107 case '*': l1
*= l2
; break;
2114 /* if division by zero, generate explicit division */
2117 tcc_error("division by zero in constant");
2121 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2122 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2123 case TOK_UDIV
: l1
= l1
/ l2
; break;
2124 case TOK_UMOD
: l1
= l1
% l2
; break;
2127 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2128 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2130 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2133 case TOK_ULT
: l1
= l1
< l2
; break;
2134 case TOK_UGE
: l1
= l1
>= l2
; break;
2135 case TOK_EQ
: l1
= l1
== l2
; break;
2136 case TOK_NE
: l1
= l1
!= l2
; break;
2137 case TOK_ULE
: l1
= l1
<= l2
; break;
2138 case TOK_UGT
: l1
= l1
> l2
; break;
2139 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2140 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2141 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2142 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2144 case TOK_LAND
: l1
= l1
&& l2
; break;
2145 case TOK_LOR
: l1
= l1
|| l2
; break;
2149 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2150 l1
= ((uint32_t)l1
|
2151 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2155 /* if commutative ops, put c2 as constant */
2156 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2157 op
== '|' || op
== '*')) {
2159 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2160 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2162 if (!const_wanted
&&
2164 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2165 (l1
== -1 && op
== TOK_SAR
))) {
2166 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2168 } else if (!const_wanted
&&
2169 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2171 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2172 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2173 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2178 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2181 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2182 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2185 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2186 /* filter out NOP operations like x*1, x-0, x&-1... */
2188 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2189 /* try to use shifts instead of muls or divs */
2190 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2199 else if (op
== TOK_PDIV
)
2205 } else if (c2
&& (op
== '+' || op
== '-') &&
2206 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2207 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2208 /* symbol + constant case */
2212 /* The backends can't always deal with addends to symbols
2213 larger than +-1<<31. Don't construct such. */
2220 /* call low level op generator */
2221 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2222 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2230 /* generate a floating point operation with constant propagation */
2231 static void gen_opif(int op
)
2235 #if defined _MSC_VER && defined __x86_64__
2236 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2243 /* currently, we cannot do computations with forward symbols */
2244 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2245 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2247 if (v1
->type
.t
== VT_FLOAT
) {
2250 } else if (v1
->type
.t
== VT_DOUBLE
) {
2258 /* NOTE: we only do constant propagation if finite number (not
2259 NaN or infinity) (ANSI spec) */
2260 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2264 case '+': f1
+= f2
; break;
2265 case '-': f1
-= f2
; break;
2266 case '*': f1
*= f2
; break;
2269 /* If not in initializer we need to potentially generate
2270 FP exceptions at runtime, otherwise we want to fold. */
2276 /* XXX: also handles tests ? */
2280 /* XXX: overflow test ? */
2281 if (v1
->type
.t
== VT_FLOAT
) {
2283 } else if (v1
->type
.t
== VT_DOUBLE
) {
2295 static int pointed_size(CType
*type
)
2298 return type_size(pointed_type(type
), &align
);
2301 static void vla_runtime_pointed_size(CType
*type
)
2304 vla_runtime_type_size(pointed_type(type
), &align
);
2307 static inline int is_null_pointer(SValue
*p
)
2309 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2311 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2312 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2313 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2314 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2315 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2316 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2319 static inline int is_integer_btype(int bt
)
2321 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2322 bt
== VT_INT
|| bt
== VT_LLONG
);
2325 /* check types for comparison or subtraction of pointers */
2326 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2328 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2331 /* null pointers are accepted for all comparisons as gcc */
2332 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2336 bt1
= type1
->t
& VT_BTYPE
;
2337 bt2
= type2
->t
& VT_BTYPE
;
2338 /* accept comparison between pointer and integer with a warning */
2339 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2340 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2341 tcc_warning("comparison between pointer and integer");
2345 /* both must be pointers or implicit function pointers */
2346 if (bt1
== VT_PTR
) {
2347 type1
= pointed_type(type1
);
2348 } else if (bt1
!= VT_FUNC
)
2349 goto invalid_operands
;
2351 if (bt2
== VT_PTR
) {
2352 type2
= pointed_type(type2
);
2353 } else if (bt2
!= VT_FUNC
) {
2355 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2357 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2358 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2362 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2363 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2364 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2365 /* gcc-like error if '-' is used */
2367 goto invalid_operands
;
2369 tcc_warning("comparison of distinct pointer types lacks a cast");
2373 /* generic gen_op: handles types problems */
2374 ST_FUNC
void gen_op(int op
)
2376 int u
, t1
, t2
, bt1
, bt2
, t
;
2380 t1
= vtop
[-1].type
.t
;
2381 t2
= vtop
[0].type
.t
;
2382 bt1
= t1
& VT_BTYPE
;
2383 bt2
= t2
& VT_BTYPE
;
2385 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2386 tcc_error("operation on a struct");
2387 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2388 if (bt2
== VT_FUNC
) {
2389 mk_pointer(&vtop
->type
);
2392 if (bt1
== VT_FUNC
) {
2394 mk_pointer(&vtop
->type
);
2399 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2400 /* at least one operand is a pointer */
2401 /* relational op: must be both pointers */
2402 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2403 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2404 /* pointers are handled are unsigned */
2406 t
= VT_LLONG
| VT_UNSIGNED
;
2408 t
= VT_INT
| VT_UNSIGNED
;
2412 /* if both pointers, then it must be the '-' op */
2413 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2415 tcc_error("cannot use pointers here");
2416 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2417 /* XXX: check that types are compatible */
2418 if (vtop
[-1].type
.t
& VT_VLA
) {
2419 vla_runtime_pointed_size(&vtop
[-1].type
);
2421 vpushi(pointed_size(&vtop
[-1].type
));
2425 vtop
->type
.t
= ptrdiff_type
.t
;
2429 /* exactly one pointer : must be '+' or '-'. */
2430 if (op
!= '-' && op
!= '+')
2431 tcc_error("cannot use pointers here");
2432 /* Put pointer as first operand */
2433 if (bt2
== VT_PTR
) {
2435 t
= t1
, t1
= t2
, t2
= t
;
2438 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2439 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2442 type1
= vtop
[-1].type
;
2443 type1
.t
&= ~VT_ARRAY
;
2444 if (vtop
[-1].type
.t
& VT_VLA
)
2445 vla_runtime_pointed_size(&vtop
[-1].type
);
2447 u
= pointed_size(&vtop
[-1].type
);
2449 tcc_error("unknown array element size");
2453 /* XXX: cast to int ? (long long case) */
2459 /* #ifdef CONFIG_TCC_BCHECK
2460 The main reason to removing this code:
2467 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2468 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2470 When this code is on. then the output looks like
2472 v+(i-j) = 0xbff84000
2474 /* if evaluating constant expression, no code should be
2475 generated, so no bound check */
2476 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2477 /* if bounded pointers, we generate a special code to
2484 gen_bounded_ptr_add();
2490 /* put again type if gen_opic() swaped operands */
2493 } else if (is_float(bt1
) || is_float(bt2
)) {
2494 /* compute bigger type and do implicit casts */
2495 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2497 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2502 /* floats can only be used for a few operations */
2503 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2504 (op
< TOK_ULT
|| op
> TOK_GT
))
2505 tcc_error("invalid operands for binary operation");
2507 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2508 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2509 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2511 t
|= (VT_LONG
& t1
);
2513 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2514 /* cast to biggest op */
2515 t
= VT_LLONG
| VT_LONG
;
2516 if (bt1
== VT_LLONG
)
2518 if (bt2
== VT_LLONG
)
2520 /* convert to unsigned if it does not fit in a long long */
2521 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2522 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2526 /* integer operations */
2527 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2528 /* convert to unsigned if it does not fit in an integer */
2529 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2530 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2533 /* XXX: currently, some unsigned operations are explicit, so
2534 we modify them here */
2535 if (t
& VT_UNSIGNED
) {
2542 else if (op
== TOK_LT
)
2544 else if (op
== TOK_GT
)
2546 else if (op
== TOK_LE
)
2548 else if (op
== TOK_GE
)
2556 /* special case for shifts and long long: we keep the shift as
2558 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2565 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2566 /* relational op: the result is an int */
2567 vtop
->type
.t
= VT_INT
;
2572 // Make sure that we have converted to an rvalue:
2573 if (vtop
->r
& VT_LVAL
)
2574 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2577 #ifndef TCC_TARGET_ARM
2578 /* generic itof for unsigned long long case */
2579 static void gen_cvt_itof1(int t
)
2581 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2584 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2585 (VT_LLONG
| VT_UNSIGNED
)) {
2588 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2589 #if LDOUBLE_SIZE != 8
2590 else if (t
== VT_LDOUBLE
)
2591 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2594 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2598 vtop
->r
= reg_fret(t
);
2606 /* generic ftoi for unsigned long long case */
2607 static void gen_cvt_ftoi1(int t
)
2609 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2614 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2615 /* not handled natively */
2616 st
= vtop
->type
.t
& VT_BTYPE
;
2618 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2619 #if LDOUBLE_SIZE != 8
2620 else if (st
== VT_LDOUBLE
)
2621 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2624 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2629 vtop
->r2
= REG_LRET
;
2636 /* force char or short cast */
2637 static void force_charshort_cast(int t
)
2641 /* cannot cast static initializers */
2642 if (STATIC_DATA_WANTED
)
2646 /* XXX: add optimization if lvalue : just change type and offset */
2651 if (t
& VT_UNSIGNED
) {
2652 vpushi((1 << bits
) - 1);
2655 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2661 /* result must be signed or the SAR is converted to an SHL
2662 This was not the case when "t" was a signed short
2663 and the last value on the stack was an unsigned int */
2664 vtop
->type
.t
&= ~VT_UNSIGNED
;
2670 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2671 static void gen_cast_s(int t
)
2679 static void gen_cast(CType
*type
)
2681 int sbt
, dbt
, sf
, df
, c
, p
;
2683 /* special delayed cast for char/short */
2684 /* XXX: in some cases (multiple cascaded casts), it may still
2686 if (vtop
->r
& VT_MUSTCAST
) {
2687 vtop
->r
&= ~VT_MUSTCAST
;
2688 force_charshort_cast(vtop
->type
.t
);
2691 /* bitfields first get cast to ints */
2692 if (vtop
->type
.t
& VT_BITFIELD
) {
2696 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2697 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2702 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2703 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2704 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2705 c
&= dbt
!= VT_LDOUBLE
;
2708 /* constant case: we can do it now */
2709 /* XXX: in ISOC, cannot do it if error in convert */
2710 if (sbt
== VT_FLOAT
)
2711 vtop
->c
.ld
= vtop
->c
.f
;
2712 else if (sbt
== VT_DOUBLE
)
2713 vtop
->c
.ld
= vtop
->c
.d
;
2716 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2717 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2718 vtop
->c
.ld
= vtop
->c
.i
;
2720 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2722 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2723 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2725 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2728 if (dbt
== VT_FLOAT
)
2729 vtop
->c
.f
= (float)vtop
->c
.ld
;
2730 else if (dbt
== VT_DOUBLE
)
2731 vtop
->c
.d
= (double)vtop
->c
.ld
;
2732 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2733 vtop
->c
.i
= vtop
->c
.ld
;
2734 } else if (sf
&& dbt
== VT_BOOL
) {
2735 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2738 vtop
->c
.i
= vtop
->c
.ld
;
2739 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2741 else if (sbt
& VT_UNSIGNED
)
2742 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2744 else if (sbt
== VT_PTR
)
2747 else if (sbt
!= VT_LLONG
)
2748 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2749 -(vtop
->c
.i
& 0x80000000));
2751 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2753 else if (dbt
== VT_BOOL
)
2754 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2756 else if (dbt
== VT_PTR
)
2759 else if (dbt
!= VT_LLONG
) {
2760 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2761 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2764 if (!(dbt
& VT_UNSIGNED
))
2765 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2768 } else if (p
&& dbt
== VT_BOOL
) {
2772 /* non constant case: generate code */
2774 /* convert from fp to fp */
2777 /* convert int to fp */
2780 /* convert fp to int */
2781 if (dbt
== VT_BOOL
) {
2785 /* we handle char/short/etc... with generic code */
2786 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2787 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2791 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2792 /* additional cast for char/short... */
2798 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2799 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2800 /* scalar to long long */
2801 /* machine independent conversion */
2803 /* generate high word */
2804 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2808 if (sbt
== VT_PTR
) {
2809 /* cast from pointer to int before we apply
2810 shift operation, which pointers don't support*/
2817 /* patch second register */
2818 vtop
[-1].r2
= vtop
->r
;
2822 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2823 (dbt
& VT_BTYPE
) == VT_PTR
||
2824 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2825 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2826 (sbt
& VT_BTYPE
) != VT_PTR
&&
2827 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2828 /* need to convert from 32bit to 64bit */
2830 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2831 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_RISCV64)
2833 #elif defined(TCC_TARGET_X86_64)
2835 /* x86_64 specific: movslq */
2837 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2844 } else if (dbt
== VT_BOOL
) {
2845 /* scalar to bool */
2848 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2849 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2850 if (sbt
== VT_PTR
) {
2851 vtop
->type
.t
= VT_INT
;
2852 tcc_warning("nonportable conversion from pointer to char/short");
2854 force_charshort_cast(dbt
);
2855 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2857 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2859 /* from long long: just take low order word */
2863 /* XXX some architectures (e.g. risc-v) would like it
2864 better for this merely being a 32-to-64 sign or zero-
2867 vtop
->type
.t
|= VT_UNSIGNED
;
2871 /* if lvalue and single word type, nothing to do because
2872 the lvalue already contains the real type size (see
2873 VT_LVAL_xxx constants) */
2876 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2877 /* if we are casting between pointer types,
2878 we must update the VT_LVAL_xxx size */
2879 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2880 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2883 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
2886 /* return type size as known at compile time. Put alignment at 'a' */
2887 ST_FUNC
int type_size(CType
*type
, int *a
)
2892 bt
= type
->t
& VT_BTYPE
;
2893 if (bt
== VT_STRUCT
) {
2898 } else if (bt
== VT_PTR
) {
2899 if (type
->t
& VT_ARRAY
) {
2903 ts
= type_size(&s
->type
, a
);
2905 if (ts
< 0 && s
->c
< 0)
2913 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
2914 return -1; /* incomplete enum */
2915 } else if (bt
== VT_LDOUBLE
) {
2917 return LDOUBLE_SIZE
;
2918 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2919 #ifdef TCC_TARGET_I386
2920 #ifdef TCC_TARGET_PE
2925 #elif defined(TCC_TARGET_ARM)
2935 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2938 } else if (bt
== VT_SHORT
) {
2941 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2945 /* char, void, function, _Bool */
2951 /* push type size as known at runtime time on top of value stack. Put
2953 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2955 if (type
->t
& VT_VLA
) {
2956 type_size(&type
->ref
->type
, a
);
2957 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2959 vpushi(type_size(type
, a
));
2963 /* return the pointed type of t */
2964 static inline CType
*pointed_type(CType
*type
)
2966 return &type
->ref
->type
;
2969 /* modify type so that its it is a pointer to type. */
2970 ST_FUNC
void mk_pointer(CType
*type
)
2973 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2974 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2978 /* compare function types. OLD functions match any new functions */
2979 static int is_compatible_func(CType
*type1
, CType
*type2
)
2985 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2987 if (s1
->f
.func_type
!= s2
->f
.func_type
2988 && s1
->f
.func_type
!= FUNC_OLD
2989 && s2
->f
.func_type
!= FUNC_OLD
)
2991 /* we should check the function return type for FUNC_OLD too
2992 but that causes problems with the internally used support
2993 functions such as TOK_memmove */
2994 if (s1
->f
.func_type
== FUNC_OLD
&& !s1
->next
)
2996 if (s2
->f
.func_type
== FUNC_OLD
&& !s2
->next
)
2999 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
3010 /* return true if type1 and type2 are the same. If unqualified is
3011 true, qualifiers on the types are ignored.
3013 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3017 t1
= type1
->t
& VT_TYPE
;
3018 t2
= type2
->t
& VT_TYPE
;
3020 /* strip qualifiers before comparing */
3021 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3022 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3025 /* Default Vs explicit signedness only matters for char */
3026 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3030 /* XXX: bitfields ? */
3035 && !(type1
->ref
->c
< 0
3036 || type2
->ref
->c
< 0
3037 || type1
->ref
->c
== type2
->ref
->c
))
3040 /* test more complicated cases */
3041 bt1
= t1
& VT_BTYPE
;
3042 if (bt1
== VT_PTR
) {
3043 type1
= pointed_type(type1
);
3044 type2
= pointed_type(type2
);
3045 return is_compatible_types(type1
, type2
);
3046 } else if (bt1
== VT_STRUCT
) {
3047 return (type1
->ref
== type2
->ref
);
3048 } else if (bt1
== VT_FUNC
) {
3049 return is_compatible_func(type1
, type2
);
3050 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
3051 return type1
->ref
== type2
->ref
;
3057 /* return true if type1 and type2 are exactly the same (including
3060 static int is_compatible_types(CType
*type1
, CType
*type2
)
3062 return compare_types(type1
,type2
,0);
3065 /* return true if type1 and type2 are the same (ignoring qualifiers).
3067 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3069 return compare_types(type1
,type2
,1);
3072 /* print a type. If 'varstr' is not NULL, then the variable is also
3073 printed in the type */
3075 /* XXX: add array and function pointers */
3076 static void type_to_str(char *buf
, int buf_size
,
3077 CType
*type
, const char *varstr
)
3089 pstrcat(buf
, buf_size
, "extern ");
3091 pstrcat(buf
, buf_size
, "static ");
3093 pstrcat(buf
, buf_size
, "typedef ");
3095 pstrcat(buf
, buf_size
, "inline ");
3096 if (t
& VT_VOLATILE
)
3097 pstrcat(buf
, buf_size
, "volatile ");
3098 if (t
& VT_CONSTANT
)
3099 pstrcat(buf
, buf_size
, "const ");
3101 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
3102 || ((t
& VT_UNSIGNED
)
3103 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
3106 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
3108 buf_size
-= strlen(buf
);
3143 tstr
= "long double";
3145 pstrcat(buf
, buf_size
, tstr
);
3152 pstrcat(buf
, buf_size
, tstr
);
3153 v
= type
->ref
->v
& ~SYM_STRUCT
;
3154 if (v
>= SYM_FIRST_ANOM
)
3155 pstrcat(buf
, buf_size
, "<anonymous>");
3157 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3162 if (varstr
&& '*' == *varstr
) {
3163 pstrcat(buf1
, sizeof(buf1
), "(");
3164 pstrcat(buf1
, sizeof(buf1
), varstr
);
3165 pstrcat(buf1
, sizeof(buf1
), ")");
3167 pstrcat(buf1
, buf_size
, "(");
3169 while (sa
!= NULL
) {
3171 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3172 pstrcat(buf1
, sizeof(buf1
), buf2
);
3175 pstrcat(buf1
, sizeof(buf1
), ", ");
3177 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3178 pstrcat(buf1
, sizeof(buf1
), ", ...");
3179 pstrcat(buf1
, sizeof(buf1
), ")");
3180 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3185 if (varstr
&& '*' == *varstr
)
3186 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3188 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3189 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3192 pstrcpy(buf1
, sizeof(buf1
), "*");
3193 if (t
& VT_CONSTANT
)
3194 pstrcat(buf1
, buf_size
, "const ");
3195 if (t
& VT_VOLATILE
)
3196 pstrcat(buf1
, buf_size
, "volatile ");
3198 pstrcat(buf1
, sizeof(buf1
), varstr
);
3199 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3203 pstrcat(buf
, buf_size
, " ");
3204 pstrcat(buf
, buf_size
, varstr
);
3209 /* verify type compatibility to store vtop in 'dt' type, and generate
3211 static void gen_assign_cast(CType
*dt
)
3213 CType
*st
, *type1
, *type2
;
3214 char buf1
[256], buf2
[256];
3215 int dbt
, sbt
, qualwarn
, lvl
;
3217 st
= &vtop
->type
; /* source type */
3218 dbt
= dt
->t
& VT_BTYPE
;
3219 sbt
= st
->t
& VT_BTYPE
;
3220 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3221 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3222 ; /* It is Ok if both are void */
3224 tcc_error("cannot cast from/to void");
3226 if (dt
->t
& VT_CONSTANT
)
3227 tcc_warning("assignment of read-only location");
3230 /* special cases for pointers */
3231 /* '0' can also be a pointer */
3232 if (is_null_pointer(vtop
))
3234 /* accept implicit pointer to integer cast with warning */
3235 if (is_integer_btype(sbt
)) {
3236 tcc_warning("assignment makes pointer from integer without a cast");
3239 type1
= pointed_type(dt
);
3241 type2
= pointed_type(st
);
3242 else if (sbt
== VT_FUNC
)
3243 type2
= st
; /* a function is implicitly a function pointer */
3246 if (is_compatible_types(type1
, type2
))
3248 for (qualwarn
= lvl
= 0;; ++lvl
) {
3249 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3250 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3252 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3253 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3254 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3256 type1
= pointed_type(type1
);
3257 type2
= pointed_type(type2
);
3259 if (!is_compatible_unqualified_types(type1
, type2
)) {
3260 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3261 /* void * can match anything */
3262 } else if (dbt
== sbt
3263 && is_integer_btype(sbt
& VT_BTYPE
)
3264 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3265 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3266 /* Like GCC don't warn by default for merely changes
3267 in pointer target signedness. Do warn for different
3268 base types, though, in particular for unsigned enums
3269 and signed int targets. */
3271 tcc_warning("assignment from incompatible pointer type");
3276 tcc_warning("assignment discards qualifiers from pointer target type");
3282 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3283 tcc_warning("assignment makes integer from pointer without a cast");
3284 } else if (sbt
== VT_STRUCT
) {
3285 goto case_VT_STRUCT
;
3287 /* XXX: more tests */
3291 if (!is_compatible_unqualified_types(dt
, st
)) {
3293 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3294 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3295 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3302 /* store vtop in lvalue pushed on stack */
3303 ST_FUNC
void vstore(void)
3305 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3307 ft
= vtop
[-1].type
.t
;
3308 sbt
= vtop
->type
.t
& VT_BTYPE
;
3309 dbt
= ft
& VT_BTYPE
;
3310 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3311 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3312 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3313 /* optimize char/short casts */
3314 delayed_cast
= VT_MUSTCAST
;
3315 vtop
->type
.t
= ft
& VT_TYPE
;
3316 /* XXX: factorize */
3317 if (ft
& VT_CONSTANT
)
3318 tcc_warning("assignment of read-only location");
3321 if (!(ft
& VT_BITFIELD
))
3322 gen_assign_cast(&vtop
[-1].type
);
3325 if (sbt
== VT_STRUCT
) {
3326 /* if structure, only generate pointer */
3327 /* structure assignment : generate memcpy */
3328 /* XXX: optimize if small size */
3329 size
= type_size(&vtop
->type
, &align
);
3333 vtop
->type
.t
= VT_PTR
;
3336 /* address of memcpy() */
3339 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3340 else if(!(align
& 3))
3341 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3344 /* Use memmove, rather than memcpy, as dest and src may be same: */
3345 vpush_global_sym(&func_old_type
, TOK_memmove
);
3350 vtop
->type
.t
= VT_PTR
;
3356 /* leave source on stack */
3357 } else if (ft
& VT_BITFIELD
) {
3358 /* bitfield store handling */
3360 /* save lvalue as expression result (example: s.b = s.a = n;) */
3361 vdup(), vtop
[-1] = vtop
[-2];
3363 bit_pos
= BIT_POS(ft
);
3364 bit_size
= BIT_SIZE(ft
);
3365 /* remove bit field info to avoid loops */
3366 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3368 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3369 gen_cast(&vtop
[-1].type
);
3370 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3373 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3374 if (r
== VT_STRUCT
) {
3375 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3376 store_packed_bf(bit_pos
, bit_size
);
3378 unsigned long long mask
= (1ULL << bit_size
) - 1;
3379 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3381 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3384 vpushi((unsigned)mask
);
3391 /* duplicate destination */
3394 /* load destination, mask and or with source */
3395 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3396 vpushll(~(mask
<< bit_pos
));
3398 vpushi(~((unsigned)mask
<< bit_pos
));
3403 /* ... and discard */
3406 } else if (dbt
== VT_VOID
) {
3409 #ifdef CONFIG_TCC_BCHECK
3410 /* bound check case */
3411 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3420 #ifdef TCC_TARGET_X86_64
3421 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3423 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3426 #elif defined TCC_TARGET_RISCV64
3427 if (dbt
== VT_LDOUBLE
)
3431 r
= gv(rc
); /* generate value */
3432 /* if lvalue was saved on stack, must read it */
3433 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3435 t
= get_reg(RC_INT
);
3441 sv
.r
= VT_LOCAL
| VT_LVAL
;
3442 sv
.c
.i
= vtop
[-1].c
.i
;
3444 vtop
[-1].r
= t
| VT_LVAL
;
3446 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3447 #ifdef TCC_TARGET_RISCV64
3448 if (dbt
== VT_QLONG
|| dbt
== VT_LDOUBLE
) {
3449 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= VT_LLONG
;
3451 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3452 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3454 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3455 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3457 vtop
[-1].type
.t
= load_type
;
3460 /* convert to int to increment easily */
3461 vtop
->type
.t
= addr_type
;
3467 vtop
[-1].type
.t
= load_type
;
3468 /* XXX: it works because r2 is spilled last ! */
3469 store(vtop
->r2
, vtop
- 1);
3475 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3476 vtop
->r
|= delayed_cast
;
3480 /* post defines POST/PRE add. c is the token ++ or -- */
3481 ST_FUNC
void inc(int post
, int c
)
3484 vdup(); /* save lvalue */
3486 gv_dup(); /* duplicate value */
3491 vpushi(c
- TOK_MID
);
3493 vstore(); /* store value */
3495 vpop(); /* if post op, return saved value */
3498 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3500 /* read the string */
3504 while (tok
== TOK_STR
) {
3505 /* XXX: add \0 handling too ? */
3506 cstr_cat(astr
, tokc
.str
.data
, -1);
3509 cstr_ccat(astr
, '\0');
3512 /* If I is >= 1 and a power of two, returns log2(i)+1.
3513 If I is 0 returns 0. */
3514 static int exact_log2p1(int i
)
3519 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3530 /* Parse __attribute__((...)) GNUC extension. */
3531 static void parse_attribute(AttributeDef
*ad
)
3537 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3542 while (tok
!= ')') {
3543 if (tok
< TOK_IDENT
)
3544 expect("attribute name");
3556 tcc_warning("implicit declaration of function '%s'",
3557 get_tok_str(tok
, &tokc
));
3558 s
= external_global_sym(tok
, &func_old_type
);
3560 ad
->cleanup_func
= s
;
3568 parse_mult_str(&astr
, "section name");
3569 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3576 parse_mult_str(&astr
, "alias(\"target\")");
3577 ad
->alias_target
= /* save string as token, for later */
3578 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3582 case TOK_VISIBILITY1
:
3583 case TOK_VISIBILITY2
:
3585 parse_mult_str(&astr
,
3586 "visibility(\"default|hidden|internal|protected\")");
3587 if (!strcmp (astr
.data
, "default"))
3588 ad
->a
.visibility
= STV_DEFAULT
;
3589 else if (!strcmp (astr
.data
, "hidden"))
3590 ad
->a
.visibility
= STV_HIDDEN
;
3591 else if (!strcmp (astr
.data
, "internal"))
3592 ad
->a
.visibility
= STV_INTERNAL
;
3593 else if (!strcmp (astr
.data
, "protected"))
3594 ad
->a
.visibility
= STV_PROTECTED
;
3596 expect("visibility(\"default|hidden|internal|protected\")");
3605 if (n
<= 0 || (n
& (n
- 1)) != 0)
3606 tcc_error("alignment must be a positive power of two");
3611 ad
->a
.aligned
= exact_log2p1(n
);
3612 if (n
!= 1 << (ad
->a
.aligned
- 1))
3613 tcc_error("alignment of %d is larger than implemented", n
);
3625 /* currently, no need to handle it because tcc does not
3626 track unused objects */
3630 ad
->f
.func_noreturn
= 1;
3635 ad
->f
.func_call
= FUNC_CDECL
;
3640 ad
->f
.func_call
= FUNC_STDCALL
;
3642 #ifdef TCC_TARGET_I386
3652 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3658 ad
->f
.func_call
= FUNC_FASTCALLW
;
3665 ad
->attr_mode
= VT_LLONG
+ 1;
3668 ad
->attr_mode
= VT_BYTE
+ 1;
3671 ad
->attr_mode
= VT_SHORT
+ 1;
3675 ad
->attr_mode
= VT_INT
+ 1;
3678 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3685 ad
->a
.dllexport
= 1;
3687 case TOK_NODECORATE
:
3688 ad
->a
.nodecorate
= 1;
3691 ad
->a
.dllimport
= 1;
3694 if (tcc_state
->warn_unsupported
)
3695 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3696 /* skip parameters */
3698 int parenthesis
= 0;
3702 else if (tok
== ')')
3705 } while (parenthesis
&& tok
!= -1);
3718 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3722 while ((s
= s
->next
) != NULL
) {
3723 if ((s
->v
& SYM_FIELD
) &&
3724 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3725 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3726 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
3738 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3740 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3741 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3742 int pcc
= !tcc_state
->ms_bitfields
;
3743 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3750 prevbt
= VT_STRUCT
; /* make it never match */
3755 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3756 if (f
->type
.t
& VT_BITFIELD
)
3757 bit_size
= BIT_SIZE(f
->type
.t
);
3760 size
= type_size(&f
->type
, &align
);
3761 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3764 if (pcc
&& bit_size
== 0) {
3765 /* in pcc mode, packing does not affect zero-width bitfields */
3768 /* in pcc mode, attribute packed overrides if set. */
3769 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3772 /* pragma pack overrides align if lesser and packs bitfields always */
3775 if (pragma_pack
< align
)
3776 align
= pragma_pack
;
3777 /* in pcc mode pragma pack also overrides individual align */
3778 if (pcc
&& pragma_pack
< a
)
3782 /* some individual align was specified */
3786 if (type
->ref
->type
.t
== VT_UNION
) {
3787 if (pcc
&& bit_size
>= 0)
3788 size
= (bit_size
+ 7) >> 3;
3793 } else if (bit_size
< 0) {
3795 c
+= (bit_pos
+ 7) >> 3;
3796 c
= (c
+ align
- 1) & -align
;
3805 /* A bit-field. Layout is more complicated. There are two
3806 options: PCC (GCC) compatible and MS compatible */
3808 /* In PCC layout a bit-field is placed adjacent to the
3809 preceding bit-fields, except if:
3811 - an individual alignment was given
3812 - it would overflow its base type container and
3813 there is no packing */
3814 if (bit_size
== 0) {
3816 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3818 } else if (f
->a
.aligned
) {
3820 } else if (!packed
) {
3822 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3823 if (ofs
> size
/ align
)
3827 /* in pcc mode, long long bitfields have type int if they fit */
3828 if (size
== 8 && bit_size
<= 32)
3829 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3831 while (bit_pos
>= align
* 8)
3832 c
+= align
, bit_pos
-= align
* 8;
3835 /* In PCC layout named bit-fields influence the alignment
3836 of the containing struct using the base types alignment,
3837 except for packed fields (which here have correct align). */
3838 if (f
->v
& SYM_FIRST_ANOM
3839 // && bit_size // ??? gcc on ARM/rpi does that
3844 bt
= f
->type
.t
& VT_BTYPE
;
3845 if ((bit_pos
+ bit_size
> size
* 8)
3846 || (bit_size
> 0) == (bt
!= prevbt
)
3848 c
= (c
+ align
- 1) & -align
;
3851 /* In MS bitfield mode a bit-field run always uses
3852 at least as many bits as the underlying type.
3853 To start a new run it's also required that this
3854 or the last bit-field had non-zero width. */
3855 if (bit_size
|| prev_bit_size
)
3858 /* In MS layout the records alignment is normally
3859 influenced by the field, except for a zero-width
3860 field at the start of a run (but by further zero-width
3861 fields it is again). */
3862 if (bit_size
== 0 && prevbt
!= bt
)
3865 prev_bit_size
= bit_size
;
3868 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3869 | (bit_pos
<< VT_STRUCT_SHIFT
);
3870 bit_pos
+= bit_size
;
3872 if (align
> maxalign
)
3876 printf("set field %s offset %-2d size %-2d align %-2d",
3877 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3878 if (f
->type
.t
& VT_BITFIELD
) {
3879 printf(" pos %-2d bits %-2d",
3892 c
+= (bit_pos
+ 7) >> 3;
3894 /* store size and alignment */
3895 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3899 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3900 /* can happen if individual align for some member was given. In
3901 this case MSVC ignores maxalign when aligning the size */
3906 c
= (c
+ a
- 1) & -a
;
3910 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3913 /* check whether we can access bitfields by their type */
3914 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3918 if (0 == (f
->type
.t
& VT_BITFIELD
))
3922 bit_size
= BIT_SIZE(f
->type
.t
);
3925 bit_pos
= BIT_POS(f
->type
.t
);
3926 size
= type_size(&f
->type
, &align
);
3927 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3930 /* try to access the field using a different type */
3931 c0
= -1, s
= align
= 1;
3933 px
= f
->c
* 8 + bit_pos
;
3934 cx
= (px
>> 3) & -align
;
3935 px
= px
- (cx
<< 3);
3938 s
= (px
+ bit_size
+ 7) >> 3;
3948 s
= type_size(&t
, &align
);
3952 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3953 /* update offset and bit position */
3956 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3957 | (bit_pos
<< VT_STRUCT_SHIFT
);
3961 printf("FIX field %s offset %-2d size %-2d align %-2d "
3962 "pos %-2d bits %-2d\n",
3963 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3964 cx
, s
, align
, px
, bit_size
);
3967 /* fall back to load/store single-byte wise */
3968 f
->auxtype
= VT_STRUCT
;
3970 printf("FIX field %s : load byte-wise\n",
3971 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3977 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3978 static void struct_decl(CType
*type
, int u
)
3980 int v
, c
, size
, align
, flexible
;
3981 int bit_size
, bsize
, bt
;
3983 AttributeDef ad
, ad1
;
3986 memset(&ad
, 0, sizeof ad
);
3988 parse_attribute(&ad
);
3992 /* struct already defined ? return it */
3994 expect("struct/union/enum name");
3996 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3999 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4001 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4006 /* Record the original enum/struct/union token. */
4007 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4009 /* we put an undefined size for struct/union */
4010 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4011 s
->r
= 0; /* default alignment is zero as gcc */
4013 type
->t
= s
->type
.t
;
4019 tcc_error("struct/union/enum already defined");
4021 /* cannot be empty */
4022 /* non empty enums are not allowed */
4025 long long ll
= 0, pl
= 0, nl
= 0;
4028 /* enum symbols have static storage */
4029 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4033 expect("identifier");
4035 if (ss
&& !local_stack
)
4036 tcc_error("redefinition of enumerator '%s'",
4037 get_tok_str(v
, NULL
));
4041 ll
= expr_const64();
4043 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4045 *ps
= ss
, ps
= &ss
->next
;
4054 /* NOTE: we accept a trailing comma */
4059 /* set integral type of the enum */
4062 if (pl
!= (unsigned)pl
)
4063 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4065 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4066 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4067 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4069 /* set type for enum members */
4070 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4072 if (ll
== (int)ll
) /* default is int if it fits */
4074 if (t
.t
& VT_UNSIGNED
) {
4075 ss
->type
.t
|= VT_UNSIGNED
;
4076 if (ll
== (unsigned)ll
)
4079 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4080 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4085 while (tok
!= '}') {
4086 if (!parse_btype(&btype
, &ad1
)) {
4092 tcc_error("flexible array member '%s' not at the end of struct",
4093 get_tok_str(v
, NULL
));
4099 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4101 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4102 expect("identifier");
4104 int v
= btype
.ref
->v
;
4105 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4106 if (tcc_state
->ms_extensions
== 0)
4107 expect("identifier");
4111 if (type_size(&type1
, &align
) < 0) {
4112 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4115 tcc_error("field '%s' has incomplete type",
4116 get_tok_str(v
, NULL
));
4118 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4119 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4120 (type1
.t
& VT_STORAGE
))
4121 tcc_error("invalid type for '%s'",
4122 get_tok_str(v
, NULL
));
4126 bit_size
= expr_const();
4127 /* XXX: handle v = 0 case for messages */
4129 tcc_error("negative width in bit-field '%s'",
4130 get_tok_str(v
, NULL
));
4131 if (v
&& bit_size
== 0)
4132 tcc_error("zero width for bit-field '%s'",
4133 get_tok_str(v
, NULL
));
4134 parse_attribute(&ad1
);
4136 size
= type_size(&type1
, &align
);
4137 if (bit_size
>= 0) {
4138 bt
= type1
.t
& VT_BTYPE
;
4144 tcc_error("bitfields must have scalar type");
4146 if (bit_size
> bsize
) {
4147 tcc_error("width of '%s' exceeds its type",
4148 get_tok_str(v
, NULL
));
4149 } else if (bit_size
== bsize
4150 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4151 /* no need for bit fields */
4153 } else if (bit_size
== 64) {
4154 tcc_error("field width 64 not implemented");
4156 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4158 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4161 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4162 /* Remember we've seen a real field to check
4163 for placement of flexible array member. */
4166 /* If member is a struct or bit-field, enforce
4167 placing into the struct (as anonymous). */
4169 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4174 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4179 if (tok
== ';' || tok
== TOK_EOF
)
4186 parse_attribute(&ad
);
4187 struct_layout(type
, &ad
);
4192 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4194 merge_symattr(&ad
->a
, &s
->a
);
4195 merge_funcattr(&ad
->f
, &s
->f
);
4198 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4199 are added to the element type, copied because it could be a typedef. */
4200 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4202 while (type
->t
& VT_ARRAY
) {
4203 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4204 type
= &type
->ref
->type
;
4206 type
->t
|= qualifiers
;
4209 /* return 0 if no type declaration. otherwise, return the basic type
4212 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4214 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4218 memset(ad
, 0, sizeof(AttributeDef
));
4228 /* currently, we really ignore extension */
4238 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4239 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4240 tmbt
: tcc_error("too many basic types");
4243 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4248 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4265 memset(&ad1
, 0, sizeof(AttributeDef
));
4266 if (parse_btype(&type1
, &ad1
)) {
4267 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4269 n
= 1 << (ad1
.a
.aligned
- 1);
4271 type_size(&type1
, &n
);
4274 if (n
<= 0 || (n
& (n
- 1)) != 0)
4275 tcc_error("alignment must be a positive power of two");
4278 ad
->a
.aligned
= exact_log2p1(n
);
4282 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4283 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4284 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4285 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4292 #ifdef TCC_TARGET_ARM64
4294 /* GCC's __uint128_t appears in some Linux header files. Make it a
4295 synonym for long double to get the size and alignment right. */
4306 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4307 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4315 struct_decl(&type1
, VT_ENUM
);
4318 type
->ref
= type1
.ref
;
4321 struct_decl(&type1
, VT_STRUCT
);
4324 struct_decl(&type1
, VT_UNION
);
4327 /* type modifiers */
4332 parse_btype_qualify(type
, VT_CONSTANT
);
4340 parse_btype_qualify(type
, VT_VOLATILE
);
4347 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4348 tcc_error("signed and unsigned modifier");
4361 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4362 tcc_error("signed and unsigned modifier");
4363 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4379 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4380 tcc_error("multiple storage classes");
4391 /* currently, no need to handle it because tcc does not
4392 track unused objects */
4395 /* GNUC attribute */
4396 case TOK_ATTRIBUTE1
:
4397 case TOK_ATTRIBUTE2
:
4398 parse_attribute(ad
);
4399 if (ad
->attr_mode
) {
4400 u
= ad
->attr_mode
-1;
4401 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4409 parse_expr_type(&type1
);
4410 /* remove all storage modifiers except typedef */
4411 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4413 sym_to_attr(ad
, type1
.ref
);
4419 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4423 if (tok
== ':' && !in_generic
) {
4424 /* ignore if it's a label */
4429 t
&= ~(VT_BTYPE
|VT_LONG
);
4430 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4431 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4432 type
->ref
= s
->type
.ref
;
4434 parse_btype_qualify(type
, t
);
4436 /* get attributes from typedef */
4445 if (tcc_state
->char_is_unsigned
) {
4446 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4449 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4450 bt
= t
& (VT_BTYPE
|VT_LONG
);
4452 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4453 #ifdef TCC_TARGET_PE
4454 if (bt
== VT_LDOUBLE
)
4455 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4461 /* convert a function parameter type (array to pointer and function to
4462 function pointer) */
4463 static inline void convert_parameter_type(CType
*pt
)
4465 /* remove const and volatile qualifiers (XXX: const could be used
4466 to indicate a const function parameter */
4467 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4468 /* array must be transformed to pointer according to ANSI C */
4470 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4475 ST_FUNC
void parse_asm_str(CString
*astr
)
4478 parse_mult_str(astr
, "string constant");
4481 /* Parse an asm label and return the token */
4482 static int asm_label_instr(void)
4488 parse_asm_str(&astr
);
4491 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4493 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4498 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4500 int n
, l
, t1
, arg_size
, align
, unused_align
;
4501 Sym
**plast
, *s
, *first
;
4506 /* function type, or recursive declarator (return if so) */
4508 if (td
&& !(td
& TYPE_ABSTRACT
))
4512 else if (parse_btype(&pt
, &ad1
))
4515 merge_attr (ad
, &ad1
);
4524 /* read param name and compute offset */
4525 if (l
!= FUNC_OLD
) {
4526 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4528 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4529 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4530 tcc_error("parameter declared as void");
4534 expect("identifier");
4535 pt
.t
= VT_VOID
; /* invalid type */
4539 convert_parameter_type(&pt
);
4540 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4541 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4547 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4552 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4553 tcc_error("invalid type");
4556 /* if no parameters, then old type prototype */
4559 /* NOTE: const is ignored in returned type as it has a special
4560 meaning in gcc / C++ */
4561 type
->t
&= ~VT_CONSTANT
;
4562 /* some ancient pre-K&R C allows a function to return an array
4563 and the array brackets to be put after the arguments, such
4564 that "int c()[]" means something like "int[] c()" */
4567 skip(']'); /* only handle simple "[]" */
4570 /* we push a anonymous symbol which will contain the function prototype */
4571 ad
->f
.func_args
= arg_size
;
4572 ad
->f
.func_type
= l
;
4573 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4579 } else if (tok
== '[') {
4580 int saved_nocode_wanted
= nocode_wanted
;
4581 /* array definition */
4584 /* XXX The optional type-quals and static should only be accepted
4585 in parameter decls. The '*' as well, and then even only
4586 in prototypes (not function defs). */
4588 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4603 if (!local_stack
|| (storage
& VT_STATIC
))
4604 vpushi(expr_const());
4606 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4607 length must always be evaluated, even under nocode_wanted,
4608 so that its size slot is initialized (e.g. under sizeof
4613 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4616 tcc_error("invalid array size");
4618 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4619 tcc_error("size of variable length array should be an integer");
4625 /* parse next post type */
4626 post_type(type
, ad
, storage
, 0);
4628 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4629 tcc_error("declaration of an array of functions");
4630 if ((type
->t
& VT_BTYPE
) == VT_VOID
4631 || type_size(type
, &unused_align
) < 0)
4632 tcc_error("declaration of an array of incomplete type elements");
4634 t1
|= type
->t
& VT_VLA
;
4638 tcc_error("need explicit inner array size in VLAs");
4639 loc
-= type_size(&int_type
, &align
);
4643 vla_runtime_type_size(type
, &align
);
4645 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4651 nocode_wanted
= saved_nocode_wanted
;
4653 /* we push an anonymous symbol which will contain the array
4655 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4656 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4662 /* Parse a type declarator (except basic type), and return the type
4663 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4664 expected. 'type' should contain the basic type. 'ad' is the
4665 attribute definition of the basic type. It can be modified by
4666 type_decl(). If this (possibly abstract) declarator is a pointer chain
4667 it returns the innermost pointed to type (equals *type, but is a different
4668 pointer), otherwise returns type itself, that's used for recursive calls. */
4669 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4672 int qualifiers
, storage
;
4674 /* recursive type, remove storage bits first, apply them later again */
4675 storage
= type
->t
& VT_STORAGE
;
4676 type
->t
&= ~VT_STORAGE
;
4679 while (tok
== '*') {
4687 qualifiers
|= VT_CONSTANT
;
4692 qualifiers
|= VT_VOLATILE
;
4698 /* XXX: clarify attribute handling */
4699 case TOK_ATTRIBUTE1
:
4700 case TOK_ATTRIBUTE2
:
4701 parse_attribute(ad
);
4705 type
->t
|= qualifiers
;
4707 /* innermost pointed to type is the one for the first derivation */
4708 ret
= pointed_type(type
);
4712 /* This is possibly a parameter type list for abstract declarators
4713 ('int ()'), use post_type for testing this. */
4714 if (!post_type(type
, ad
, 0, td
)) {
4715 /* It's not, so it's a nested declarator, and the post operations
4716 apply to the innermost pointed to type (if any). */
4717 /* XXX: this is not correct to modify 'ad' at this point, but
4718 the syntax is not clear */
4719 parse_attribute(ad
);
4720 post
= type_decl(type
, ad
, v
, td
);
4724 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4725 /* type identifier */
4730 if (!(td
& TYPE_ABSTRACT
))
4731 expect("identifier");
4734 post_type(post
, ad
, storage
, 0);
4735 parse_attribute(ad
);
4740 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4741 ST_FUNC
int lvalue_type(int t
)
4746 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4748 else if (bt
== VT_SHORT
)
4752 if (t
& VT_UNSIGNED
)
4753 r
|= VT_LVAL_UNSIGNED
;
4757 /* indirection with full error checking and bound check */
4758 ST_FUNC
void indir(void)
4760 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4761 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4765 if (vtop
->r
& VT_LVAL
)
4767 vtop
->type
= *pointed_type(&vtop
->type
);
4768 /* Arrays and functions are never lvalues */
4769 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4770 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4771 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4772 /* if bound checking, the referenced pointer must be checked */
4773 #ifdef CONFIG_TCC_BCHECK
4774 if (tcc_state
->do_bounds_check
)
4775 vtop
->r
|= VT_MUSTBOUND
;
4780 /* pass a parameter to a function and do type checking and casting */
4781 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4786 func_type
= func
->f
.func_type
;
4787 if (func_type
== FUNC_OLD
||
4788 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4789 /* default casting : only need to convert float to double */
4790 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4791 gen_cast_s(VT_DOUBLE
);
4792 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4793 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4794 type
.ref
= vtop
->type
.ref
;
4797 } else if (arg
== NULL
) {
4798 tcc_error("too many arguments to function");
4801 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4802 gen_assign_cast(&type
);
4806 /* parse an expression and return its type without any side effect. */
4807 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4816 /* parse an expression of the form '(type)' or '(expr)' and return its
4818 static void parse_expr_type(CType
*type
)
4824 if (parse_btype(type
, &ad
)) {
4825 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4827 expr_type(type
, gexpr
);
4832 static void parse_type(CType
*type
)
4837 if (!parse_btype(type
, &ad
)) {
4840 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4843 static void parse_builtin_params(int nc
, const char *args
)
4850 while ((c
= *args
++)) {
4854 case 'e': expr_eq(); continue;
4855 case 't': parse_type(&t
); vpush(&t
); continue;
4856 default: tcc_error("internal error"); break;
4864 ST_FUNC
void unary(void)
4866 int n
, t
, align
, size
, r
, sizeof_caller
;
4871 sizeof_caller
= in_sizeof
;
4874 /* XXX: GCC 2.95.3 does not generate a table although it should be
4882 #ifdef TCC_TARGET_PE
4883 t
= VT_SHORT
|VT_UNSIGNED
;
4891 vsetc(&type
, VT_CONST
, &tokc
);
4895 t
= VT_INT
| VT_UNSIGNED
;
4901 t
= VT_LLONG
| VT_UNSIGNED
;
4913 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4916 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4918 case TOK___FUNCTION__
:
4920 goto tok_identifier
;
4926 /* special function name identifier */
4927 len
= strlen(funcname
) + 1;
4928 /* generate char[len] type */
4933 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4934 if (!NODATA_WANTED
) {
4935 ptr
= section_ptr_add(data_section
, len
);
4936 memcpy(ptr
, funcname
, len
);
4942 #ifdef TCC_TARGET_PE
4943 t
= VT_SHORT
| VT_UNSIGNED
;
4949 /* string parsing */
4951 if (tcc_state
->char_is_unsigned
)
4952 t
= VT_BYTE
| VT_UNSIGNED
;
4954 if (tcc_state
->warn_write_strings
)
4959 memset(&ad
, 0, sizeof(AttributeDef
));
4960 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4965 if (parse_btype(&type
, &ad
)) {
4966 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4968 /* check ISOC99 compound literal */
4970 /* data is allocated locally by default */
4975 /* all except arrays are lvalues */
4976 if (!(type
.t
& VT_ARRAY
))
4977 r
|= lvalue_type(type
.t
);
4978 memset(&ad
, 0, sizeof(AttributeDef
));
4979 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4981 if (sizeof_caller
) {
4988 } else if (tok
== '{') {
4989 int saved_nocode_wanted
= nocode_wanted
;
4991 tcc_error("expected constant");
4992 /* save all registers */
4994 /* statement expression : we do not accept break/continue
4995 inside as GCC does. We do retain the nocode_wanted state,
4996 as statement expressions can't ever be entered from the
4997 outside, so any reactivation of code emission (from labels
4998 or loop heads) can be disabled again after the end of it. */
5000 nocode_wanted
= saved_nocode_wanted
;
5015 /* functions names must be treated as function pointers,
5016 except for unary '&' and sizeof. Since we consider that
5017 functions are not lvalues, we only have to handle it
5018 there and in function calls. */
5019 /* arrays can also be used although they are not lvalues */
5020 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5021 !(vtop
->type
.t
& VT_ARRAY
))
5023 mk_pointer(&vtop
->type
);
5029 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5030 gen_cast_s(VT_BOOL
);
5031 vtop
->c
.i
= !vtop
->c
.i
;
5032 } else if (vtop
->r
== VT_CMP
) {
5034 n
= vtop
->jfalse
, vtop
->jfalse
= vtop
->jtrue
, vtop
->jtrue
= n
;
5049 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5050 tcc_error("pointer not accepted for unary plus");
5051 /* In order to force cast, we add zero, except for floating point
5052 where we really need an noop (otherwise -0.0 will be transformed
5054 if (!is_float(vtop
->type
.t
)) {
5066 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5068 if (vtop
[1].r
& VT_SYM
)
5069 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5070 size
= type_size(&type
, &align
);
5071 if (s
&& s
->a
.aligned
)
5072 align
= 1 << (s
->a
.aligned
- 1);
5073 if (t
== TOK_SIZEOF
) {
5074 if (!(type
.t
& VT_VLA
)) {
5076 tcc_error("sizeof applied to an incomplete type");
5079 vla_runtime_type_size(&type
, &align
);
5084 vtop
->type
.t
|= VT_UNSIGNED
;
5087 case TOK_builtin_expect
:
5088 /* __builtin_expect is a no-op for now */
5089 parse_builtin_params(0, "ee");
5092 case TOK_builtin_types_compatible_p
:
5093 parse_builtin_params(0, "tt");
5094 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5095 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5096 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5100 case TOK_builtin_choose_expr
:
5127 case TOK_builtin_constant_p
:
5128 parse_builtin_params(1, "e");
5129 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5133 case TOK_builtin_frame_address
:
5134 case TOK_builtin_return_address
:
5140 if (tok
!= TOK_CINT
) {
5141 tcc_error("%s only takes positive integers",
5142 tok1
== TOK_builtin_return_address
?
5143 "__builtin_return_address" :
5144 "__builtin_frame_address");
5146 level
= (uint32_t)tokc
.i
;
5151 vset(&type
, VT_LOCAL
, 0); /* local frame */
5153 mk_pointer(&vtop
->type
);
5154 indir(); /* -> parent frame */
5156 if (tok1
== TOK_builtin_return_address
) {
5157 // assume return address is just above frame pointer on stack
5160 mk_pointer(&vtop
->type
);
5165 #ifdef TCC_TARGET_X86_64
5166 #ifdef TCC_TARGET_PE
5167 case TOK_builtin_va_start
:
5168 parse_builtin_params(0, "ee");
5169 r
= vtop
->r
& VT_VALMASK
;
5173 tcc_error("__builtin_va_start expects a local variable");
5175 vtop
->type
= char_pointer_type
;
5180 case TOK_builtin_va_arg_types
:
5181 parse_builtin_params(0, "t");
5182 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5189 #ifdef TCC_TARGET_ARM64
5190 case TOK___va_start
: {
5191 parse_builtin_params(0, "ee");
5195 vtop
->type
.t
= VT_VOID
;
5198 case TOK___va_arg
: {
5199 parse_builtin_params(0, "et");
5207 case TOK___arm64_clear_cache
: {
5208 parse_builtin_params(0, "ee");
5211 vtop
->type
.t
= VT_VOID
;
5215 /* pre operations */
5226 t
= vtop
->type
.t
& VT_BTYPE
;
5228 /* In IEEE negate(x) isn't subtract(0,x), but rather
5232 vtop
->c
.f
= -1.0 * 0.0;
5233 else if (t
== VT_DOUBLE
)
5234 vtop
->c
.d
= -1.0 * 0.0;
5236 vtop
->c
.ld
= -1.0 * 0.0;
5244 goto tok_identifier
;
5246 /* allow to take the address of a label */
5247 if (tok
< TOK_UIDENT
)
5248 expect("label identifier");
5249 s
= label_find(tok
);
5251 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5253 if (s
->r
== LABEL_DECLARED
)
5254 s
->r
= LABEL_FORWARD
;
5257 s
->type
.t
= VT_VOID
;
5258 mk_pointer(&s
->type
);
5259 s
->type
.t
|= VT_STATIC
;
5261 vpushsym(&s
->type
, s
);
5267 CType controlling_type
;
5268 int has_default
= 0;
5271 TokenString
*str
= NULL
;
5272 int saved_const_wanted
= const_wanted
;
5277 expr_type(&controlling_type
, expr_eq
);
5278 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5279 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5280 mk_pointer(&controlling_type
);
5281 const_wanted
= saved_const_wanted
;
5285 if (tok
== TOK_DEFAULT
) {
5287 tcc_error("too many 'default'");
5293 AttributeDef ad_tmp
;
5298 parse_btype(&cur_type
, &ad_tmp
);
5301 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5302 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5304 tcc_error("type match twice");
5314 skip_or_save_block(&str
);
5316 skip_or_save_block(NULL
);
5323 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5324 tcc_error("type '%s' does not match any association", buf
);
5326 begin_macro(str
, 1);
5335 // special qnan , snan and infinity values
5340 vtop
->type
.t
= VT_FLOAT
;
5345 goto special_math_val
;
5348 goto special_math_val
;
5355 expect("identifier");
5357 if (!s
|| IS_ASM_SYM(s
)) {
5358 const char *name
= get_tok_str(t
, NULL
);
5360 tcc_error("'%s' undeclared", name
);
5361 /* for simple function calls, we tolerate undeclared
5362 external reference to int() function */
5363 if (tcc_state
->warn_implicit_function_declaration
5364 #ifdef TCC_TARGET_PE
5365 /* people must be warned about using undeclared WINAPI functions
5366 (which usually start with uppercase letter) */
5367 || (name
[0] >= 'A' && name
[0] <= 'Z')
5370 tcc_warning("implicit declaration of function '%s'", name
);
5371 s
= external_global_sym(t
, &func_old_type
);
5375 /* A symbol that has a register is a local register variable,
5376 which starts out as VT_LOCAL value. */
5377 if ((r
& VT_VALMASK
) < VT_CONST
)
5378 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5380 vset(&s
->type
, r
, s
->c
);
5381 /* Point to s as backpointer (even without r&VT_SYM).
5382 Will be used by at least the x86 inline asm parser for
5388 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5389 vtop
->c
.i
= s
->enum_val
;
5394 /* post operations */
5396 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5399 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5400 int qualifiers
, cumofs
= 0;
5402 if (tok
== TOK_ARROW
)
5404 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5407 /* expect pointer on structure */
5408 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5409 expect("struct or union");
5410 if (tok
== TOK_CDOUBLE
)
5411 expect("field name");
5413 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5414 expect("field name");
5415 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5417 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5418 /* add field offset to pointer */
5419 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5420 vpushi(cumofs
+ s
->c
);
5422 /* change type to field type, and set to lvalue */
5423 vtop
->type
= s
->type
;
5424 vtop
->type
.t
|= qualifiers
;
5425 /* an array is never an lvalue */
5426 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5427 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5428 #ifdef CONFIG_TCC_BCHECK
5429 /* if bound checking, the referenced pointer must be checked */
5430 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5431 vtop
->r
|= VT_MUSTBOUND
;
5435 } else if (tok
== '[') {
5441 } else if (tok
== '(') {
5444 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5447 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5448 /* pointer test (no array accepted) */
5449 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5450 vtop
->type
= *pointed_type(&vtop
->type
);
5451 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5455 expect("function pointer");
5458 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5460 /* get return type */
5463 sa
= s
->next
; /* first parameter */
5464 nb_args
= regsize
= 0;
5466 /* compute first implicit argument if a structure is returned */
5467 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5468 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5469 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5470 &ret_align
, ®size
);
5472 /* get some space for the returned structure */
5473 size
= type_size(&s
->type
, &align
);
5474 #ifdef TCC_TARGET_ARM64
5475 /* On arm64, a small struct is return in registers.
5476 It is much easier to write it to memory if we know
5477 that we are allowed to write some extra bytes, so
5478 round the allocated space up to a power of 2: */
5480 while (size
& (size
- 1))
5481 size
= (size
| (size
- 1)) + 1;
5483 loc
= (loc
- size
) & -align
;
5485 ret
.r
= VT_LOCAL
| VT_LVAL
;
5486 /* pass it as 'int' to avoid structure arg passing
5488 vseti(VT_LOCAL
, loc
);
5498 /* return in register */
5499 if (is_float(ret
.type
.t
)) {
5500 ret
.r
= reg_fret(ret
.type
.t
);
5501 #ifdef TCC_TARGET_X86_64
5502 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5506 #ifndef TCC_TARGET_ARM64
5507 #ifndef TCC_TARGET_RISCV64
5508 #ifdef TCC_TARGET_X86_64
5509 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5511 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5523 gfunc_param_typed(s
, sa
);
5533 tcc_error("too few arguments to function");
5535 gfunc_call(nb_args
);
5538 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5539 vsetc(&ret
.type
, r
, &ret
.c
);
5540 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5543 /* handle packed struct return */
5544 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5547 size
= type_size(&s
->type
, &align
);
5548 /* We're writing whole regs often, make sure there's enough
5549 space. Assume register size is power of 2. */
5550 if (regsize
> align
)
5552 loc
= (loc
- size
) & -align
;
5556 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5560 if (--ret_nregs
== 0)
5564 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5566 if (s
->f
.func_noreturn
)
5574 ST_FUNC
void expr_prod(void)
5579 while (tok
== '*' || tok
== '/' || tok
== '%') {
5587 ST_FUNC
void expr_sum(void)
5592 while (tok
== '+' || tok
== '-') {
5600 static void expr_shift(void)
5605 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5613 static void expr_cmp(void)
5618 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5619 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5627 static void expr_cmpeq(void)
5632 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5640 static void expr_and(void)
5643 while (tok
== '&') {
5650 static void expr_xor(void)
5653 while (tok
== '^') {
5660 static void expr_or(void)
5663 while (tok
== '|') {
5670 static int condition_3way(void);
5672 static void expr_landor(void(*e_fn
)(void), int e_op
, int i
)
5674 int t
= 0, cc
= 1, f
= 0, c
;
5676 c
= f
? i
: condition_3way();
5678 save_regs(1), cc
= 0;
5679 } else if (c
!= i
) {
5680 nocode_wanted
++, f
= 1;
5702 static void expr_land(void)
5705 if (tok
== TOK_LAND
)
5706 expr_landor(expr_or
, TOK_LAND
, 1);
5709 static void expr_lor(void)
5713 expr_landor(expr_land
, TOK_LOR
, 0);
5716 /* Assuming vtop is a value used in a conditional context
5717 (i.e. compared with zero) return 0 if it's false, 1 if
5718 true and -1 if it can't be statically determined. */
5719 static int condition_3way(void)
5722 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5723 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5725 gen_cast_s(VT_BOOL
);
5732 static int is_cond_bool(SValue
*sv
)
5734 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
5735 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
5736 return (unsigned)sv
->c
.i
< 2;
5737 if (sv
->r
== VT_CMP
)
5742 static void expr_cond(void)
5744 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5746 CType type
, type1
, type2
;
5752 c
= condition_3way();
5753 g
= (tok
== ':' && gnu_ext
);
5763 /* needed to avoid having different registers saved in
5766 if (is_float(vtop
->type
.t
)) {
5768 #ifdef TCC_TARGET_X86_64
5769 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5772 #elif defined TCC_TARGET_RISCV64
5773 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
)
5783 ncw_prev
= nocode_wanted
;
5790 if (c
< 0 && vtop
->r
== VT_CMP
) {
5796 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5797 mk_pointer(&vtop
->type
);
5799 sv
= *vtop
; /* save value to handle it later */
5800 vtop
--; /* no vpop so that FP stack is not flushed */
5810 nocode_wanted
= ncw_prev
;
5816 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
5817 if (sv
.r
== VT_CMP
) {
5828 nocode_wanted
= ncw_prev
;
5829 // tcc_warning("two conditions expr_cond");
5833 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5834 mk_pointer(&vtop
->type
);
5837 bt1
= t1
& VT_BTYPE
;
5839 bt2
= t2
& VT_BTYPE
;
5842 /* cast operands to correct type according to ISOC rules */
5843 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5844 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5845 } else if (is_float(bt1
) || is_float(bt2
)) {
5846 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5847 type
.t
= VT_LDOUBLE
;
5849 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5854 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5855 /* cast to biggest op */
5856 type
.t
= VT_LLONG
| VT_LONG
;
5857 if (bt1
== VT_LLONG
)
5859 if (bt2
== VT_LLONG
)
5861 /* convert to unsigned if it does not fit in a long long */
5862 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5863 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5864 type
.t
|= VT_UNSIGNED
;
5865 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5866 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5867 /* If one is a null ptr constant the result type
5869 if (is_null_pointer (vtop
)) type
= type1
;
5870 else if (is_null_pointer (&sv
)) type
= type2
;
5871 else if (bt1
!= bt2
)
5872 tcc_error("incompatible types in conditional expressions");
5874 CType
*pt1
= pointed_type(&type1
);
5875 CType
*pt2
= pointed_type(&type2
);
5876 int pbt1
= pt1
->t
& VT_BTYPE
;
5877 int pbt2
= pt2
->t
& VT_BTYPE
;
5878 int newquals
, copied
= 0;
5879 /* pointers to void get preferred, otherwise the
5880 pointed to types minus qualifs should be compatible */
5881 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5882 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5883 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5884 tcc_warning("pointer type mismatch in conditional expression\n");
5886 /* combine qualifs */
5887 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
5888 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
5891 /* copy the pointer target symbol */
5892 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5895 pointed_type(&type
)->t
|= newquals
;
5897 /* pointers to incomplete arrays get converted to
5898 pointers to completed ones if possible */
5899 if (pt1
->t
& VT_ARRAY
5900 && pt2
->t
& VT_ARRAY
5901 && pointed_type(&type
)->ref
->c
< 0
5902 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
5905 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5907 pointed_type(&type
)->ref
=
5908 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
5909 0, pointed_type(&type
)->ref
->c
);
5910 pointed_type(&type
)->ref
->c
=
5911 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
5914 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5915 /* XXX: test structure compatibility */
5916 type
= bt1
== VT_STRUCT
? type1
: type2
;
5918 /* integer operations */
5919 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5920 /* convert to unsigned if it does not fit in an integer */
5921 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5922 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5923 type
.t
|= VT_UNSIGNED
;
5925 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5926 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5927 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5929 /* now we convert second operand */
5933 mk_pointer(&vtop
->type
);
5935 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5940 if (is_float(type
.t
)) {
5942 #ifdef TCC_TARGET_X86_64
5943 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5946 #elif defined TCC_TARGET_RISCV64
5947 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
)
5950 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5951 /* for long longs, we use fixed registers to avoid having
5952 to handle a complicated move */
5962 nocode_wanted
= ncw_prev
;
5964 /* this is horrible, but we must also convert first
5970 mk_pointer(&vtop
->type
);
5972 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5978 move_reg(r2
, r1
, type
.t
);
5989 static void expr_eq(void)
5995 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5996 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5997 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
6012 ST_FUNC
void gexpr(void)
6023 /* parse a constant expression and return value in vtop. */
6024 static void expr_const1(void)
6033 /* parse an integer constant and return its value. */
6034 static inline int64_t expr_const64(void)
6038 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6039 expect("constant expression");
6045 /* parse an integer constant and return its value.
6046 Complain if it doesn't fit 32bit (signed or unsigned). */
6047 ST_FUNC
int expr_const(void)
6050 int64_t wc
= expr_const64();
6052 if (c
!= wc
&& (unsigned)c
!= wc
)
6053 tcc_error("constant exceeds 32 bit");
6057 /* ------------------------------------------------------------------------- */
6058 /* return from function */
6060 #ifndef TCC_TARGET_ARM64
6061 #ifndef TCC_TARGET_RISCV64
6062 static void gfunc_return(CType
*func_type
)
6064 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6065 CType type
, ret_type
;
6066 int ret_align
, ret_nregs
, regsize
;
6067 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6068 &ret_align
, ®size
);
6069 if (0 == ret_nregs
) {
6070 /* if returning structure, must copy it to implicit
6071 first pointer arg location */
6074 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6077 /* copy structure value to pointer */
6080 /* returning structure packed into registers */
6081 int r
, size
, addr
, align
;
6082 size
= type_size(func_type
,&align
);
6083 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6084 (vtop
->c
.i
& (ret_align
-1)))
6085 && (align
& (ret_align
-1))) {
6086 loc
= (loc
- size
) & -ret_align
;
6089 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6093 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6095 vtop
->type
= ret_type
;
6096 if (is_float(ret_type
.t
))
6097 r
= rc_fret(ret_type
.t
);
6108 if (--ret_nregs
== 0)
6110 /* We assume that when a structure is returned in multiple
6111 registers, their classes are consecutive values of the
6114 vtop
->c
.i
+= regsize
;
6118 } else if (is_float(func_type
->t
)) {
6119 gv(rc_fret(func_type
->t
));
6123 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6128 static void check_func_return(void)
6130 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6132 if (!strcmp (funcname
, "main")
6133 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6134 /* main returns 0 by default */
6136 gen_assign_cast(&func_vt
);
6137 gfunc_return(&func_vt
);
6139 tcc_warning("function might return no value: '%s'", funcname
);
6143 /* ------------------------------------------------------------------------- */
6146 static int case_cmp(const void *pa
, const void *pb
)
6148 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6149 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6150 return a
< b
? -1 : a
> b
;
/* Emit a conditional test on vtop (inverted: jump when false) and
   resolve the resulting forward jump to the known address 'a'.
   't' is the chain of prior jumps to combine, as used by gvtst(). */
static void gtst_addr(int t, int a)
{
    int jmp = gvtst(0, t);
    gsym_addr(jmp, a);
}
6158 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6162 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6179 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6181 gcase(base
, len
/2, bsym
);
6185 base
+= e
; len
-= e
;
6195 if (p
->v1
== p
->v2
) {
6197 gtst_addr(0, p
->sym
);
6207 gtst_addr(0, p
->sym
);
6211 *bsym
= gjmp(*bsym
);
6214 /* ------------------------------------------------------------------------- */
6215 /* __attribute__((cleanup(fn))) */
6217 static void try_call_scope_cleanup(Sym
*stop
)
6219 Sym
*cls
= cur_scope
->cl
.s
;
6221 for (; cls
!= stop
; cls
= cls
->ncl
) {
6222 Sym
*fs
= cls
->next
;
6223 Sym
*vs
= cls
->prev_tok
;
6225 vpushsym(&fs
->type
, fs
);
6226 vset(&vs
->type
, vs
->r
, vs
->c
);
6228 mk_pointer(&vtop
->type
);
6234 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6239 if (!cur_scope
->cl
.s
)
6242 /* search NCA of both cleanup chains given parents and initial depth */
6243 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6244 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6246 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6248 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6251 try_call_scope_cleanup(cc
);
6254 /* call 'func' for each __attribute__((cleanup(func))) */
6255 static void block_cleanup(struct scope
*o
)
6259 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6260 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6265 try_call_scope_cleanup(o
->cl
.s
);
6266 pcl
->jnext
= gjmp(0);
6268 goto remove_pending
;
6278 try_call_scope_cleanup(o
->cl
.s
);
6281 /* ------------------------------------------------------------------------- */
6284 static void vla_restore(int loc
)
6287 gen_vla_sp_restore(loc
);
6290 static void vla_leave(struct scope
*o
)
6292 if (o
->vla
.num
< cur_scope
->vla
.num
)
6293 vla_restore(o
->vla
.loc
);
6296 /* ------------------------------------------------------------------------- */
6299 void new_scope(struct scope
*o
)
6301 /* copy and link previous scope */
6303 o
->prev
= cur_scope
;
6306 /* record local declaration stack position */
6307 o
->lstk
= local_stack
;
6308 o
->llstk
= local_label_stack
;
6313 void prev_scope(struct scope
*o
, int is_expr
)
6317 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6318 block_cleanup(o
->prev
);
6320 /* pop locally defined labels */
6321 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6323 /* In the is_expr case (a statement expression is finished here),
6324 vtop might refer to symbols on the local_stack. Either via the
6325 type or via vtop->sym. We can't pop those nor any that in turn
6326 might be referred to. To make it easier we don't roll back
6327 any symbols in that case; some upper level call to block() will
6328 do that. We do have to remove such symbols from the lookup
6329 tables, though. sym_pop will do that. */
6331 /* pop locally defined symbols */
6332 sym_pop(&local_stack
, o
->lstk
, is_expr
);
6334 cur_scope
= o
->prev
;
6338 /* leave a scope via break/continue(/goto) */
6339 void leave_scope(struct scope
*o
)
6343 try_call_scope_cleanup(o
->cl
.s
);
6347 /* ------------------------------------------------------------------------- */
6348 /* call block from 'for do while' loops */
6350 static void lblock(int *bsym
, int *csym
)
6352 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6353 int *b
= co
->bsym
, *c
= co
->csym
;
6367 static void block(int is_expr
)
6369 int a
, b
, c
, d
, e
, t
;
6373 /* default return value is (void) */
6375 vtop
->type
.t
= VT_VOID
;
6387 if (tok
== TOK_ELSE
) {
6392 gsym(d
); /* patch else jmp */
6397 } else if (t
== TOK_WHILE
) {
6409 } else if (t
== '{') {
6413 /* handle local labels declarations */
6414 while (tok
== TOK_LABEL
) {
6417 if (tok
< TOK_UIDENT
)
6418 expect("label identifier");
6419 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6421 } while (tok
== ',');
6425 while (tok
!= '}') {
6434 prev_scope(&o
, is_expr
);
6436 if (0 == local_scope
&& !nocode_wanted
)
6437 check_func_return();
6440 } else if (t
== TOK_RETURN
) {
6442 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6444 gexpr(), gen_assign_cast(&func_vt
);
6445 leave_scope(root_scope
);
6447 gfunc_return(&func_vt
);
6451 tcc_warning("'return' with no value.");
6453 /* jump unless last stmt in top-level block */
6454 if (tok
!= '}' || local_scope
!= 1)
6458 } else if (t
== TOK_BREAK
) {
6460 if (!cur_scope
->bsym
)
6461 tcc_error("cannot break");
6462 if (!cur_switch
|| cur_scope
->bsym
!= cur_switch
->bsym
)
6463 leave_scope(loop_scope
);
6465 leave_scope(cur_switch
->scope
);
6466 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6469 } else if (t
== TOK_CONTINUE
) {
6471 if (!cur_scope
->csym
)
6472 tcc_error("cannot continue");
6473 leave_scope(loop_scope
);
6474 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6477 } else if (t
== TOK_FOR
) {
6483 /* c99 for-loop init decl? */
6484 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6485 /* no, regular for-loop init expr */
6513 } else if (t
== TOK_DO
) {
6527 } else if (t
== TOK_SWITCH
) {
6528 struct switch_t
*saved
, sw
;
6535 sw
.scope
= cur_scope
;
6543 switchval
= *vtop
--;
6546 b
= gjmp(0); /* jump to first case */
6548 a
= gjmp(a
); /* add implicit break */
6552 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6553 for (b
= 1; b
< sw
.n
; b
++)
6554 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6555 tcc_error("duplicate case value");
6557 /* Our switch table sorting is signed, so the compared
6558 value needs to be as well when it's 64bit. */
6559 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6560 switchval
.type
.t
&= ~VT_UNSIGNED
;
6563 d
= 0, gcase(sw
.p
, sw
.n
, &d
);
6566 gsym_addr(d
, sw
.def_sym
);
6572 dynarray_reset(&sw
.p
, &sw
.n
);
6575 } else if (t
== TOK_CASE
) {
6576 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6579 cr
->v1
= cr
->v2
= expr_const64();
6580 if (gnu_ext
&& tok
== TOK_DOTS
) {
6582 cr
->v2
= expr_const64();
6583 if (cr
->v2
< cr
->v1
)
6584 tcc_warning("empty case range");
6587 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6590 goto block_after_label
;
6592 } else if (t
== TOK_DEFAULT
) {
6595 if (cur_switch
->def_sym
)
6596 tcc_error("too many 'default'");
6597 cur_switch
->def_sym
= gind();
6600 goto block_after_label
;
6602 } else if (t
== TOK_GOTO
) {
6603 vla_restore(root_scope
->vla
.loc
);
6604 if (tok
== '*' && gnu_ext
) {
6608 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6612 } else if (tok
>= TOK_UIDENT
) {
6613 s
= label_find(tok
);
6614 /* put forward definition if needed */
6616 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6617 else if (s
->r
== LABEL_DECLARED
)
6618 s
->r
= LABEL_FORWARD
;
6620 if (s
->r
& LABEL_FORWARD
) {
6621 /* start new goto chain for cleanups, linked via label->next */
6622 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
6623 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
6624 pending_gotos
->prev_tok
= s
;
6625 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
6626 pending_gotos
->next
= s
;
6628 s
->jnext
= gjmp(s
->jnext
);
6630 try_call_cleanup_goto(s
->cleanupstate
);
6631 gjmp_addr(s
->jnext
);
6636 expect("label identifier");
6640 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
6644 if (tok
== ':' && t
>= TOK_UIDENT
) {
6649 if (s
->r
== LABEL_DEFINED
)
6650 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6651 s
->r
= LABEL_DEFINED
;
6653 Sym
*pcl
; /* pending cleanup goto */
6654 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
6656 sym_pop(&s
->next
, NULL
, 0);
6660 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
6663 s
->cleanupstate
= cur_scope
->cl
.s
;
6666 vla_restore(cur_scope
->vla
.loc
);
6667 /* we accept this, but it is a mistake */
6669 tcc_warning("deprecated use of label at end of compound statement");
6675 /* expression case */
6691 /* This skips over a stream of tokens containing balanced {} and ()
6692 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6693 with a '{'). If STR then allocates and stores the skipped tokens
6694 in *STR. This doesn't check if () and {} are nested correctly,
6695 i.e. "({)}" is accepted. */
6696 static void skip_or_save_block(TokenString
**str
)
6698 int braces
= tok
== '{';
6701 *str
= tok_str_alloc();
6703 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6705 if (tok
== TOK_EOF
) {
6706 if (str
|| level
> 0)
6707 tcc_error("unexpected end of file");
6712 tok_str_add_tok(*str
);
6715 if (t
== '{' || t
== '(') {
6717 } else if (t
== '}' || t
== ')') {
6719 if (level
== 0 && braces
&& t
== '}')
6724 tok_str_add(*str
, -1);
6725 tok_str_add(*str
, 0);
6729 #define EXPR_CONST 1
6732 static void parse_init_elem(int expr_type
)
6734 int saved_global_expr
;
6737 /* compound literals must be allocated globally in this case */
6738 saved_global_expr
= global_expr
;
6741 global_expr
= saved_global_expr
;
6742 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6743 (compound literals). */
6744 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6745 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6746 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6747 #ifdef TCC_TARGET_PE
6748 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6751 tcc_error("initializer element is not constant");
6759 /* put zeros for variable based init */
6760 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6763 /* nothing to do because globals are already set to zero */
6765 vpush_global_sym(&func_old_type
, TOK_memset
);
6767 #ifdef TCC_TARGET_ARM
6779 #define DIF_SIZE_ONLY 2
6780 #define DIF_HAVE_ELEM 4
6782 /* t is the array or struct type. c is the array or struct
6783 address. cur_field is the pointer to the current
6784 field, for arrays the 'c' member contains the current start
6785 index. 'flags' is as in decl_initializer.
6786 'al' contains the already initialized length of the
6787 current container (starting at c). This returns the new length of that. */
6788 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6789 Sym
**cur_field
, int flags
, int al
)
6792 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6793 unsigned long corig
= c
;
6798 if (flags
& DIF_HAVE_ELEM
)
6801 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
6808 /* NOTE: we only support ranges for last designator */
6809 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6811 if (!(type
->t
& VT_ARRAY
))
6812 expect("array type");
6814 index
= index_last
= expr_const();
6815 if (tok
== TOK_DOTS
&& gnu_ext
) {
6817 index_last
= expr_const();
6821 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6823 tcc_error("invalid index");
6825 (*cur_field
)->c
= index_last
;
6826 type
= pointed_type(type
);
6827 elem_size
= type_size(type
, &align
);
6828 c
+= index
* elem_size
;
6829 nb_elems
= index_last
- index
+ 1;
6836 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6837 expect("struct/union type");
6839 f
= find_field(type
, l
, &cumofs
);
6852 } else if (!gnu_ext
) {
6857 if (type
->t
& VT_ARRAY
) {
6858 index
= (*cur_field
)->c
;
6859 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6860 tcc_error("index too large");
6861 type
= pointed_type(type
);
6862 c
+= index
* type_size(type
, &align
);
6865 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6866 *cur_field
= f
= f
->next
;
6868 tcc_error("too many field init");
6873 /* must put zero in holes (note that doing it that way
6874 ensures that it even works with designators) */
6875 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
6876 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6877 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
6879 /* XXX: make it more general */
6880 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
6881 unsigned long c_end
;
6886 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6887 for (i
= 1; i
< nb_elems
; i
++) {
6888 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6893 } else if (!NODATA_WANTED
) {
6894 c_end
= c
+ nb_elems
* elem_size
;
6895 if (c_end
> sec
->data_allocated
)
6896 section_realloc(sec
, c_end
);
6897 src
= sec
->data
+ c
;
6899 for(i
= 1; i
< nb_elems
; i
++) {
6901 memcpy(dst
, src
, elem_size
);
6905 c
+= nb_elems
* type_size(type
, &align
);
6911 /* store a value or an expression directly in global data or in local array */
6912 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6919 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6923 /* XXX: not portable */
6924 /* XXX: generate error if incorrect relocation */
6925 gen_assign_cast(&dtype
);
6926 bt
= type
->t
& VT_BTYPE
;
6928 if ((vtop
->r
& VT_SYM
)
6931 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6932 || (type
->t
& VT_BITFIELD
))
6933 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6935 tcc_error("initializer element is not computable at load time");
6937 if (NODATA_WANTED
) {
6942 size
= type_size(type
, &align
);
6943 section_reserve(sec
, c
+ size
);
6944 ptr
= sec
->data
+ c
;
6946 /* XXX: make code faster ? */
6947 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6948 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6949 /* XXX This rejects compound literals like
6950 '(void *){ptr}'. The problem is that '&sym' is
6951 represented the same way, which would be ruled out
6952 by the SYM_FIRST_ANOM check above, but also '"string"'
6953 in 'char *p = "string"' is represented the same
6954 with the type being VT_PTR and the symbol being an
6955 anonymous one. That is, there's no difference in vtop
6956 between '(void *){x}' and '&(void *){x}'. Ignore
6957 pointer typed entities here. Hopefully no real code
6958 will every use compound literals with scalar type. */
6959 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6960 /* These come from compound literals, memcpy stuff over. */
6964 esym
= elfsym(vtop
->sym
);
6965 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6966 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6968 /* We need to copy over all memory contents, and that
6969 includes relocations. Use the fact that relocs are
6970 created it order, so look from the end of relocs
6971 until we hit one before the copied region. */
6972 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6973 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6974 while (num_relocs
--) {
6976 if (rel
->r_offset
>= esym
->st_value
+ size
)
6978 if (rel
->r_offset
< esym
->st_value
)
6980 /* Note: if the same fields are initialized multiple
6981 times (possible with designators) then we possibly
6982 add multiple relocations for the same offset here.
6983 That would lead to wrong code, the last reloc needs
6984 to win. We clean this up later after the whole
6985 initializer is parsed. */
6986 put_elf_reloca(symtab_section
, sec
,
6987 c
+ rel
->r_offset
- esym
->st_value
,
6988 ELFW(R_TYPE
)(rel
->r_info
),
6989 ELFW(R_SYM
)(rel
->r_info
),
6999 if (type
->t
& VT_BITFIELD
) {
7000 int bit_pos
, bit_size
, bits
, n
;
7001 unsigned char *p
, v
, m
;
7002 bit_pos
= BIT_POS(vtop
->type
.t
);
7003 bit_size
= BIT_SIZE(vtop
->type
.t
);
7004 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7005 bit_pos
&= 7, bits
= 0;
7010 v
= vtop
->c
.i
>> bits
<< bit_pos
;
7011 m
= ((1 << n
) - 1) << bit_pos
;
7012 *p
= (*p
& ~m
) | (v
& m
);
7013 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7017 /* XXX: when cross-compiling we assume that each type has the
7018 same representation on host and target, which is likely to
7019 be wrong in the case of long double */
7021 vtop
->c
.i
= vtop
->c
.i
!= 0;
7023 *(char *)ptr
|= vtop
->c
.i
;
7026 *(short *)ptr
|= vtop
->c
.i
;
7029 *(float*)ptr
= vtop
->c
.f
;
7032 *(double *)ptr
= vtop
->c
.d
;
7035 #if defined TCC_IS_NATIVE_387
7036 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7037 memcpy(ptr
, &vtop
->c
.ld
, 10);
7039 else if (sizeof (long double) == sizeof (double))
7040 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7042 else if (vtop
->c
.ld
== 0.0)
7046 if (sizeof(long double) == LDOUBLE_SIZE
)
7047 *(long double*)ptr
= vtop
->c
.ld
;
7048 else if (sizeof(double) == LDOUBLE_SIZE
)
7049 *(double *)ptr
= (double)vtop
->c
.ld
;
7051 tcc_error("can't cross compile long double constants");
7055 *(long long *)ptr
|= vtop
->c
.i
;
7062 addr_t val
= vtop
->c
.i
;
7064 if (vtop
->r
& VT_SYM
)
7065 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7067 *(addr_t
*)ptr
|= val
;
7069 if (vtop
->r
& VT_SYM
)
7070 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7071 *(addr_t
*)ptr
|= val
;
7077 int val
= vtop
->c
.i
;
7079 if (vtop
->r
& VT_SYM
)
7080 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7084 if (vtop
->r
& VT_SYM
)
7085 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7094 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7101 /* 't' contains the type and storage info. 'c' is the offset of the
7102 object in section 'sec'. If 'sec' is NULL, it means stack based
7103 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7104 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7105 size only evaluation is wanted (only for arrays). */
7106 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
7109 int len
, n
, no_oblock
, nb
, i
;
7115 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7116 /* In case of strings we have special handling for arrays, so
7117 don't consume them as initializer value (which would commit them
7118 to some anonymous symbol). */
7119 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7120 !(flags
& DIF_SIZE_ONLY
)) {
7121 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7122 flags
|= DIF_HAVE_ELEM
;
7125 if ((flags
& DIF_HAVE_ELEM
) &&
7126 !(type
->t
& VT_ARRAY
) &&
7127 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7128 The source type might have VT_CONSTANT set, which is
7129 of course assignable to non-const elements. */
7130 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7131 init_putv(type
, sec
, c
);
7132 } else if (type
->t
& VT_ARRAY
) {
7135 t1
= pointed_type(type
);
7136 size1
= type_size(t1
, &align1
);
7139 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7142 tcc_error("character array initializer must be a literal,"
7143 " optionally enclosed in braces");
7148 /* only parse strings here if correct type (otherwise: handle
7149 them as ((w)char *) expressions */
7150 if ((tok
== TOK_LSTR
&&
7151 #ifdef TCC_TARGET_PE
7152 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7154 (t1
->t
& VT_BTYPE
) == VT_INT
7156 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7158 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7161 /* compute maximum number of chars wanted */
7163 cstr_len
= tokc
.str
.size
;
7165 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
7168 if (n
>= 0 && nb
> (n
- len
))
7170 if (!(flags
& DIF_SIZE_ONLY
)) {
7172 tcc_warning("initializer-string for array is too long");
7173 /* in order to go faster for common case (char
7174 string in global variable, we handle it
7176 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
7178 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
7182 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
7184 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
7186 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
7193 /* only add trailing zero if enough storage (no
7194 warning in this case since it is standard) */
7195 if (n
< 0 || len
< n
) {
7196 if (!(flags
& DIF_SIZE_ONLY
)) {
7198 init_putv(t1
, sec
, c
+ (len
* size1
));
7209 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7210 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7211 flags
&= ~DIF_HAVE_ELEM
;
7212 if (type
->t
& VT_ARRAY
) {
7214 /* special test for multi dimensional arrays (may not
7215 be strictly correct if designators are used at the
7217 if (no_oblock
&& len
>= n
*size1
)
7220 if (s
->type
.t
== VT_UNION
)
7224 if (no_oblock
&& f
== NULL
)
7233 /* put zeros at the end */
7234 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7235 init_putz(sec
, c
+ len
, n
*size1
- len
);
7238 /* patch type size if needed, which happens only for array types */
7240 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7241 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7244 if ((flags
& DIF_FIRST
) || tok
== '{') {
7252 } else if (tok
== '{') {
7253 if (flags
& DIF_HAVE_ELEM
)
7256 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7258 } else if ((flags
& DIF_SIZE_ONLY
)) {
7259 /* If we supported only ISO C we wouldn't have to accept calling
7260 this on anything than an array if DIF_SIZE_ONLY (and even then
7261 only on the outermost level, so no recursion would be needed),
7262 because initializing a flex array member isn't supported.
7263 But GNU C supports it, so we need to recurse even into
7264 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7265 /* just skip expression */
7266 skip_or_save_block(NULL
);
7268 if (!(flags
& DIF_HAVE_ELEM
)) {
7269 /* This should happen only when we haven't parsed
7270 the init element above for fear of committing a
7271 string constant to memory too early. */
7272 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7273 expect("string constant");
7274 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7276 init_putv(type
, sec
, c
);
7280 /* parse an initializer for type 't' if 'has_init' is non zero, and
7281 allocate space in local or global data space ('r' is either
7282 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7283 variable 'v' of scope 'scope' is declared before initializers
7284 are parsed. If 'v' is zero, then a reference to the new object
7285 is put in the value stack. If 'has_init' is 2, a special parsing
7286 is done to handle string constants. */
7287 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7288 int has_init
, int v
, int scope
)
7290 int size
, align
, addr
;
7291 TokenString
*init_str
= NULL
;
7294 Sym
*flexible_array
;
7296 int saved_nocode_wanted
= nocode_wanted
;
7297 #ifdef CONFIG_TCC_BCHECK
7301 /* Always allocate static or global variables */
7302 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7303 nocode_wanted
|= 0x80000000;
7305 #ifdef CONFIG_TCC_BCHECK
7306 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7309 flexible_array
= NULL
;
7310 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7311 Sym
*field
= type
->ref
->next
;
7314 field
= field
->next
;
7315 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7316 flexible_array
= field
;
7320 size
= type_size(type
, &align
);
7321 /* If unknown size, we must evaluate it before
7322 evaluating initializers because
7323 initializers can generate global data too
7324 (e.g. string pointers or ISOC99 compound
7325 literals). It also simplifies local
7326 initializers handling */
7327 if (size
< 0 || (flexible_array
&& has_init
)) {
7329 tcc_error("unknown type size");
7330 /* get all init string */
7331 if (has_init
== 2) {
7332 init_str
= tok_str_alloc();
7333 /* only get strings */
7334 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7335 tok_str_add_tok(init_str
);
7338 tok_str_add(init_str
, -1);
7339 tok_str_add(init_str
, 0);
7341 skip_or_save_block(&init_str
);
7346 begin_macro(init_str
, 1);
7348 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7349 /* prepare second initializer parsing */
7350 macro_ptr
= init_str
->str
;
7353 /* if still unknown size, error */
7354 size
= type_size(type
, &align
);
7356 tcc_error("unknown type size");
7358 /* If there's a flex member and it was used in the initializer
7360 if (flexible_array
&&
7361 flexible_array
->type
.ref
->c
> 0)
7362 size
+= flexible_array
->type
.ref
->c
7363 * pointed_size(&flexible_array
->type
);
7364 /* take into account specified alignment if bigger */
7365 if (ad
->a
.aligned
) {
7366 int speca
= 1 << (ad
->a
.aligned
- 1);
7369 } else if (ad
->a
.packed
) {
7373 if (!v
&& NODATA_WANTED
)
7374 size
= 0, align
= 1;
7376 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7378 #ifdef CONFIG_TCC_BCHECK
7379 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7383 loc
= (loc
- size
) & -align
;
7385 #ifdef CONFIG_TCC_BCHECK
7386 /* handles bounds */
7387 /* XXX: currently, since we do only one pass, we cannot track
7388 '&' operators, so we add only arrays */
7389 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7391 /* add padding between regions */
7393 /* then add local bound info */
7394 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7395 bounds_ptr
[0] = addr
;
7396 bounds_ptr
[1] = size
;
7400 /* local variable */
7401 #ifdef CONFIG_TCC_ASM
7402 if (ad
->asm_label
) {
7403 int reg
= asm_parse_regvar(ad
->asm_label
);
7405 r
= (r
& ~VT_VALMASK
) | reg
;
7408 sym
= sym_push(v
, type
, r
, addr
);
7409 if (ad
->cleanup_func
) {
7410 Sym
*cls
= sym_push2(&all_cleanups
,
7411 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7412 cls
->prev_tok
= sym
;
7413 cls
->next
= ad
->cleanup_func
;
7414 cls
->ncl
= cur_scope
->cl
.s
;
7415 cur_scope
->cl
.s
= cls
;
7420 /* push local reference */
7421 vset(type
, r
, addr
);
7424 if (v
&& scope
== VT_CONST
) {
7425 /* see if the symbol was already defined */
7428 patch_storage(sym
, ad
, type
);
7429 /* we accept several definitions of the same global variable. */
7430 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7435 /* allocate symbol in corresponding section */
7440 else if (tcc_state
->nocommon
)
7445 addr
= section_add(sec
, size
, align
);
7446 #ifdef CONFIG_TCC_BCHECK
7447 /* add padding if bound check */
7449 section_add(sec
, 1, 1);
7452 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7453 sec
= common_section
;
7458 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7459 patch_storage(sym
, ad
, NULL
);
7461 /* update symbol definition */
7462 put_extern_sym(sym
, sec
, addr
, size
);
7464 /* push global reference */
7465 vpush_ref(type
, sec
, addr
, size
);
7470 #ifdef CONFIG_TCC_BCHECK
7471 /* handles bounds now because the symbol must be defined
7472 before for the relocation */
7476 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7477 /* then add global bound info */
7478 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7479 bounds_ptr
[0] = 0; /* relocated */
7480 bounds_ptr
[1] = size
;
7485 if (type
->t
& VT_VLA
) {
7491 /* save current stack pointer */
7492 if (root_scope
->vla
.loc
== 0) {
7493 struct scope
*v
= cur_scope
;
7494 gen_vla_sp_save(loc
-= PTR_SIZE
);
7495 do v
->vla
.loc
= loc
; while ((v
= v
->prev
));
7498 vla_runtime_type_size(type
, &a
);
7499 gen_vla_alloc(type
, a
);
7500 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7501 /* on _WIN64, because of the function args scratch area, the
7502 result of alloca differs from RSP and is returned in RAX. */
7503 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7505 gen_vla_sp_save(addr
);
7506 cur_scope
->vla
.loc
= addr
;
7507 cur_scope
->vla
.num
++;
7509 } else if (has_init
) {
7510 size_t oldreloc_offset
= 0;
7511 if (sec
&& sec
->reloc
)
7512 oldreloc_offset
= sec
->reloc
->data_offset
;
7513 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
7514 if (sec
&& sec
->reloc
)
7515 squeeze_multi_relocs(sec
, oldreloc_offset
);
7516 /* patch flexible array member size back to -1, */
7517 /* for possible subsequent similar declarations */
7519 flexible_array
->type
.ref
->c
= -1;
7523 /* restore parse state if needed */
7529 nocode_wanted
= saved_nocode_wanted
;
7532 /* parse a function defined by symbol 'sym' and generate its code in
7533 'cur_text_section' */
7534 static void gen_function(Sym
*sym
)
7536 /* Initialize VLA state */
7537 struct scope f
= { 0 };
7538 cur_scope
= root_scope
= &f
;
7541 ind
= cur_text_section
->data_offset
;
7542 if (sym
->a
.aligned
) {
7543 size_t newoff
= section_add(cur_text_section
, 0,
7544 1 << (sym
->a
.aligned
- 1));
7545 gen_fill_nops(newoff
- ind
);
7547 /* NOTE: we patch the symbol size later */
7548 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7550 funcname
= get_tok_str(sym
->v
, NULL
);
7553 /* put debug symbol */
7554 tcc_debug_funcstart(tcc_state
, sym
);
7555 /* push a dummy symbol to enable local sym storage */
7556 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7557 local_scope
= 1; /* for function parameters */
7558 gfunc_prolog(&sym
->type
);
7561 clear_temp_local_var_list();
7566 cur_text_section
->data_offset
= ind
;
7567 /* reset local stack */
7568 sym_pop(&local_stack
, NULL
, 0);
7570 label_pop(&global_label_stack
, NULL
, 0);
7571 sym_pop(&all_cleanups
, NULL
, 0);
7572 /* patch symbol size */
7573 elfsym(sym
)->st_size
= ind
- func_ind
;
7574 /* end of function */
7575 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7576 /* It's better to crash than to generate wrong code */
7577 cur_text_section
= NULL
;
7578 funcname
= ""; /* for safety */
7579 func_vt
.t
= VT_VOID
; /* for safety */
7580 func_var
= 0; /* for safety */
7581 ind
= 0; /* for safety */
7582 nocode_wanted
= 0x80000000;
7586 static void gen_inline_functions(TCCState
*s
)
7589 int inline_generated
, i
;
7590 struct InlineFunc
*fn
;
7592 tcc_open_bf(s
, ":inline:", 0);
7593 /* iterate while inline function are referenced */
7595 inline_generated
= 0;
7596 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7597 fn
= s
->inline_fns
[i
];
7599 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
7600 /* the function was used or forced (and then not internal):
7601 generate its code and convert it to a normal function */
7604 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7605 begin_macro(fn
->func_str
, 1);
7607 cur_text_section
= text_section
;
7611 inline_generated
= 1;
7614 } while (inline_generated
);
7618 ST_FUNC
void free_inline_functions(TCCState
*s
)
7621 /* free tokens of unused inline functions */
7622 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7623 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7625 tok_str_free(fn
->func_str
);
7627 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7630 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7631 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7632 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7637 AttributeDef ad
, adbase
;
7640 if (tok
== TOK_STATIC_ASSERT
) {
7648 tcc_error("%s", get_tok_str(tok
, &tokc
));
7654 if (!parse_btype(&btype
, &adbase
)) {
7655 if (is_for_loop_init
)
7657 /* skip redundant ';' if not in old parameter decl scope */
7658 if (tok
== ';' && l
!= VT_CMP
) {
7664 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7665 /* global asm block */
7669 if (tok
>= TOK_UIDENT
) {
7670 /* special test for old K&R protos without explicit int
7671 type. Only accepted when defining global data */
7675 expect("declaration");
7680 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7681 int v
= btype
.ref
->v
;
7682 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7683 tcc_warning("unnamed struct/union that defines no instances");
7687 if (IS_ENUM(btype
.t
)) {
7692 while (1) { /* iterate thru each declaration */
7694 /* If the base type itself was an array type of unspecified
7695 size (like in 'typedef int arr[]; arr x = {1};') then
7696 we will overwrite the unknown size by the real one for
7697 this decl. We need to unshare the ref symbol holding
7699 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7700 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7703 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
7707 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7708 printf("type = '%s'\n", buf
);
7711 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7712 /* if old style function prototype, we accept a
7715 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7716 decl0(VT_CMP
, 0, sym
);
7717 /* always compile 'extern inline' */
7718 if (type
.t
& VT_EXTERN
)
7719 type
.t
&= ~VT_INLINE
;
7722 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7723 ad
.asm_label
= asm_label_instr();
7724 /* parse one last attribute list, after asm label */
7725 parse_attribute(&ad
);
7727 /* gcc does not allow __asm__("label") with function definition,
7734 #ifdef TCC_TARGET_PE
7735 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7736 if (type
.t
& VT_STATIC
)
7737 tcc_error("cannot have dll linkage with static");
7738 if (type
.t
& VT_TYPEDEF
) {
7739 tcc_warning("'%s' attribute ignored for typedef",
7740 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
7741 (ad
.a
.dllexport
= 0, "dllexport"));
7742 } else if (ad
.a
.dllimport
) {
7743 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7746 type
.t
|= VT_EXTERN
;
7752 tcc_error("cannot use local functions");
7753 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7754 expect("function definition");
7756 /* reject abstract declarators in function definition
7757 make old style params without decl have int type */
7759 while ((sym
= sym
->next
) != NULL
) {
7760 if (!(sym
->v
& ~SYM_FIELD
))
7761 expect("identifier");
7762 if (sym
->type
.t
== VT_VOID
)
7763 sym
->type
= int_type
;
7766 /* put function symbol */
7767 type
.t
&= ~VT_EXTERN
;
7768 sym
= external_sym(v
, &type
, 0, &ad
);
7769 /* static inline functions are just recorded as a kind
7770 of macro. Their code will be emitted at the end of
7771 the compilation unit only if they are used */
7772 if (sym
->type
.t
& VT_INLINE
) {
7773 struct InlineFunc
*fn
;
7774 const char *filename
;
7776 filename
= file
? file
->filename
: "";
7777 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7778 strcpy(fn
->filename
, filename
);
7780 skip_or_save_block(&fn
->func_str
);
7781 dynarray_add(&tcc_state
->inline_fns
,
7782 &tcc_state
->nb_inline_fns
, fn
);
7784 /* compute text section */
7785 cur_text_section
= ad
.section
;
7786 if (!cur_text_section
)
7787 cur_text_section
= text_section
;
7793 /* find parameter in function parameter list */
7794 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7795 if ((sym
->v
& ~SYM_FIELD
) == v
)
7797 tcc_error("declaration for parameter '%s' but no such parameter",
7798 get_tok_str(v
, NULL
));
7800 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7801 tcc_error("storage class specified for '%s'",
7802 get_tok_str(v
, NULL
));
7803 if (sym
->type
.t
!= VT_VOID
)
7804 tcc_error("redefinition of parameter '%s'",
7805 get_tok_str(v
, NULL
));
7806 convert_parameter_type(&type
);
7808 } else if (type
.t
& VT_TYPEDEF
) {
7809 /* save typedefed type */
7810 /* XXX: test storage specifiers ? */
7812 if (sym
&& sym
->sym_scope
== local_scope
) {
7813 if (!is_compatible_types(&sym
->type
, &type
)
7814 || !(sym
->type
.t
& VT_TYPEDEF
))
7815 tcc_error("incompatible redefinition of '%s'",
7816 get_tok_str(v
, NULL
));
7819 sym
= sym_push(v
, &type
, 0, 0);
7823 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7824 && !(type
.t
& VT_EXTERN
)) {
7825 tcc_error("declaration of void object");
7828 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7829 /* external function definition */
7830 /* specific case for func_call attribute */
7832 } else if (!(type
.t
& VT_ARRAY
)) {
7833 /* not lvalue if array */
7834 r
|= lvalue_type(type
.t
);
7836 has_init
= (tok
== '=');
7837 if (has_init
&& (type
.t
& VT_VLA
))
7838 tcc_error("variable length array cannot be initialized");
7839 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
7840 || (type
.t
& VT_BTYPE
) == VT_FUNC
7841 /* as with GCC, uninitialized global arrays with no size
7842 are considered extern: */
7843 || ((type
.t
& VT_ARRAY
) && !has_init
7844 && l
== VT_CONST
&& type
.ref
->c
< 0)
7846 /* external variable or function */
7847 type
.t
|= VT_EXTERN
;
7848 sym
= external_sym(v
, &type
, r
, &ad
);
7849 if (ad
.alias_target
) {
7852 alias_target
= sym_find(ad
.alias_target
);
7853 esym
= elfsym(alias_target
);
7855 tcc_error("unsupported forward __alias__ attribute");
7856 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7859 if (type
.t
& VT_STATIC
)
7865 else if (l
== VT_CONST
)
7866 /* uninitialized global variables may be overridden */
7867 type
.t
|= VT_EXTERN
;
7868 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7872 if (is_for_loop_init
)
7884 static void decl(int l
)
7889 /* ------------------------------------------------------------------------- */
7892 /* ------------------------------------------------------------------------- */