2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
28 anon_sym: anonymous symbol index
30 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
32 ST_DATA Sym
*sym_free_first
;
33 ST_DATA
void **sym_pools
;
34 ST_DATA
int nb_sym_pools
;
36 ST_DATA Sym
*global_stack
;
37 ST_DATA Sym
*local_stack
;
38 ST_DATA Sym
*define_stack
;
39 ST_DATA Sym
*global_label_stack
;
40 ST_DATA Sym
*local_label_stack
;
42 static Sym
*all_cleanups
, *pending_gotos
;
43 static int local_scope
;
45 static int in_generic
;
46 static int section_sym
;
48 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
50 ST_DATA
int const_wanted
; /* true if constant wanted */
51 ST_DATA
int nocode_wanted
; /* no code generation wanted */
52 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
53 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 /* Automagical code suppression ----> */
56 #define CODE_OFF() (nocode_wanted |= 0x20000000)
57 #define CODE_ON() (nocode_wanted &= ~0x20000000)
59 /* Clear 'nocode_wanted' at label if it was used */
60 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
61 static int gind(void) { CODE_ON(); return ind
; }
63 /* Set 'nocode_wanted' after unconditional jumps */
/* Emit an unconditional jump to address 't'; code following an
   unconditional jump is unreachable, so suppress generation. */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit a forward jump chained onto 't' and suppress code generation
   for the unreachable code that follows; returns the new chain head. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
67 /* These are #undef'd at the end of this file */
68 #define gjmp_addr gjmp_addr_acs
72 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
73 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
74 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
76 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
77 ST_DATA
const char *funcname
;
80 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
82 ST_DATA
struct switch_t
{
86 } **p
; int n
; /* list of case ranges */
87 int def_sym
; /* default symbol */
90 } *cur_switch
; /* current switch */
92 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
93 /*list of temporary local variables on the stack in current function. */
94 ST_DATA
struct temp_local_variable
{
95 int location
; //offset on stack. Svalue.c.i
98 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
99 short nb_temp_local_vars
;
101 static struct scope
{
103 struct { int loc
, num
; } vla
;
104 struct { Sym
*s
; int n
; } cl
;
107 } *cur_scope
, *loop_scope
, *root_scope
;
109 /* ------------------------------------------------------------------------- */
111 static void gen_cast(CType
*type
);
112 static void gen_cast_s(int t
);
113 static inline CType
*pointed_type(CType
*type
);
114 static int is_compatible_types(CType
*type1
, CType
*type2
);
115 static int parse_btype(CType
*type
, AttributeDef
*ad
);
116 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
117 static void parse_expr_type(CType
*type
);
118 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
119 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
120 static void block(int is_expr
);
121 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
122 static void decl(int l
);
123 static int decl0(int l
, int is_for_loop_init
, Sym
*);
124 static void expr_eq(void);
125 static void vla_runtime_type_size(CType
*type
, int *a
);
126 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
127 static inline int64_t expr_const64(void);
128 static void vpush64(int ty
, unsigned long long v
);
129 static void vpush(CType
*type
);
130 static int gvtst(int inv
, int t
);
131 static void gen_inline_functions(TCCState
*s
);
132 static void skip_or_save_block(TokenString
**str
);
133 static void gv_dup(void);
134 static int get_temp_local_var(int size
,int align
);
135 static void clear_temp_local_var_list();
137 ST_INLN
int is_float(int t
)
141 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
144 /* we use our own 'finite' function to avoid potential problems with
145 non standard math libs */
146 /* XXX: endianness dependent */
147 ST_FUNC
int ieee_finite(double d
)
150 memcpy(p
, &d
, sizeof(double));
151 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
154 /* compiling intel long double natively */
155 #if (defined __i386__ || defined __x86_64__) \
156 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
157 # define TCC_IS_NATIVE_387
160 ST_FUNC
void test_lvalue(void)
162 if (!(vtop
->r
& VT_LVAL
))
166 ST_FUNC
void check_vstack(void)
169 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
172 /* ------------------------------------------------------------------------- */
173 /* vstack debugging aid */
176 void pv (const char *lbl
, int a
, int b
)
179 for (i
= a
; i
< a
+ b
; ++i
) {
180 SValue
*p
= &vtop
[-i
];
181 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
182 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
187 /* ------------------------------------------------------------------------- */
188 /* start of translation unit info */
189 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
194 /* file info: full path + filename */
195 section_sym
= put_elf_sym(symtab_section
, 0, 0,
196 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
197 text_section
->sh_num
, NULL
);
198 getcwd(buf
, sizeof(buf
));
200 normalize_slashes(buf
);
202 pstrcat(buf
, sizeof(buf
), "/");
203 put_stabs_r(buf
, N_SO
, 0, 0,
204 text_section
->data_offset
, text_section
, section_sym
);
205 put_stabs_r(file
->filename
, N_SO
, 0, 0,
206 text_section
->data_offset
, text_section
, section_sym
);
211 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
212 symbols can be safely used */
213 put_elf_sym(symtab_section
, 0, 0,
214 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
215 SHN_ABS
, file
->filename
);
218 /* put end of translation unit info */
219 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
223 put_stabs_r(NULL
, N_SO
, 0, 0,
224 text_section
->data_offset
, text_section
, section_sym
);
228 /* generate line number info */
229 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
233 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
234 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
236 last_line_num
= file
->line_num
;
240 /* put function symbol */
241 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
249 /* XXX: we put here a dummy type */
250 snprintf(buf
, sizeof(buf
), "%s:%c1",
251 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
252 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
253 cur_text_section
, sym
->c
);
254 /* //gr gdb wants a line at the function */
255 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
261 /* put function size */
262 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
266 put_stabn(N_FUN
, 0, 0, size
);
269 /* ------------------------------------------------------------------------- */
270 ST_FUNC
int tccgen_compile(TCCState
*s1
)
272 cur_text_section
= NULL
;
274 anon_sym
= SYM_FIRST_ANOM
;
277 nocode_wanted
= 0x80000000;
280 /* define some often used types */
282 char_pointer_type
.t
= VT_BYTE
;
283 mk_pointer(&char_pointer_type
);
285 size_type
.t
= VT_INT
| VT_UNSIGNED
;
286 ptrdiff_type
.t
= VT_INT
;
288 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
289 ptrdiff_type
.t
= VT_LLONG
;
291 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
292 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
294 func_old_type
.t
= VT_FUNC
;
295 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
296 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
297 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
301 #ifdef TCC_TARGET_ARM
306 printf("%s: **** new file\n", file
->filename
);
309 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
312 gen_inline_functions(s1
);
314 /* end of translation unit info */
319 /* ------------------------------------------------------------------------- */
320 ST_FUNC ElfSym
*elfsym(Sym
*s
)
324 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
327 /* apply storage attributes to Elf symbol */
328 ST_FUNC
void update_storage(Sym
*sym
)
331 int sym_bind
, old_sym_bind
;
337 if (sym
->a
.visibility
)
338 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
341 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
342 sym_bind
= STB_LOCAL
;
343 else if (sym
->a
.weak
)
346 sym_bind
= STB_GLOBAL
;
347 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
348 if (sym_bind
!= old_sym_bind
) {
349 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
353 if (sym
->a
.dllimport
)
354 esym
->st_other
|= ST_PE_IMPORT
;
355 if (sym
->a
.dllexport
)
356 esym
->st_other
|= ST_PE_EXPORT
;
360 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
361 get_tok_str(sym
->v
, NULL
),
362 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
370 /* ------------------------------------------------------------------------- */
371 /* update sym->c so that it points to an external symbol in section
372 'section' with value 'value' */
374 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
375 addr_t value
, unsigned long size
,
376 int can_add_underscore
)
378 int sym_type
, sym_bind
, info
, other
, t
;
382 #ifdef CONFIG_TCC_BCHECK
387 name
= get_tok_str(sym
->v
, NULL
);
388 #ifdef CONFIG_TCC_BCHECK
389 if (tcc_state
->do_bounds_check
) {
390 /* XXX: avoid doing that for statics ? */
391 /* if bound checking is activated, we change some function
392 names by adding the "__bound" prefix */
395 /* XXX: we rely only on malloc hooks */
408 strcpy(buf
, "__bound_");
416 if ((t
& VT_BTYPE
) == VT_FUNC
) {
418 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
419 sym_type
= STT_NOTYPE
;
421 sym_type
= STT_OBJECT
;
423 if (t
& (VT_STATIC
| VT_INLINE
))
424 sym_bind
= STB_LOCAL
;
426 sym_bind
= STB_GLOBAL
;
429 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
430 Sym
*ref
= sym
->type
.ref
;
431 if (ref
->a
.nodecorate
) {
432 can_add_underscore
= 0;
434 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
435 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
437 other
|= ST_PE_STDCALL
;
438 can_add_underscore
= 0;
442 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
444 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
448 name
= get_tok_str(sym
->asm_label
, NULL
);
449 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
450 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
453 esym
->st_value
= value
;
454 esym
->st_size
= size
;
455 esym
->st_shndx
= sh_num
;
460 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
461 addr_t value
, unsigned long size
)
463 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
464 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
467 /* add a new relocation entry to symbol 'sym' in section 's' */
468 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
473 if (nocode_wanted
&& s
== cur_text_section
)
478 put_extern_sym(sym
, NULL
, 0, 0);
482 /* now we can add ELF relocation info */
483 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
487 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
489 greloca(s
, sym
, offset
, type
, 0);
493 /* ------------------------------------------------------------------------- */
494 /* symbol allocator */
495 static Sym
*__sym_malloc(void)
497 Sym
*sym_pool
, *sym
, *last_sym
;
500 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
501 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
503 last_sym
= sym_free_first
;
505 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
506 sym
->next
= last_sym
;
510 sym_free_first
= last_sym
;
514 static inline Sym
*sym_malloc(void)
518 sym
= sym_free_first
;
520 sym
= __sym_malloc();
521 sym_free_first
= sym
->next
;
524 sym
= tcc_malloc(sizeof(Sym
));
529 ST_INLN
void sym_free(Sym
*sym
)
532 sym
->next
= sym_free_first
;
533 sym_free_first
= sym
;
539 /* push, without hashing */
540 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
545 memset(s
, 0, sizeof *s
);
555 /* find a symbol and return its associated structure. 's' is the top
556 of the symbol stack */
557 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
569 /* structure lookup */
570 ST_INLN Sym
*struct_find(int v
)
573 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
575 return table_ident
[v
]->sym_struct
;
578 /* find an identifier */
579 ST_INLN Sym
*sym_find(int v
)
582 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
584 return table_ident
[v
]->sym_identifier
;
587 static int sym_scope(Sym
*s
)
589 if (IS_ENUM_VAL (s
->type
.t
))
590 return s
->type
.ref
->sym_scope
;
595 /* push a given symbol on the symbol stack */
596 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
605 s
= sym_push2(ps
, v
, type
->t
, c
);
606 s
->type
.ref
= type
->ref
;
608 /* don't record fields or anonymous symbols */
610 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
611 /* record symbol in token array */
612 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
614 ps
= &ts
->sym_struct
;
616 ps
= &ts
->sym_identifier
;
619 s
->sym_scope
= local_scope
;
620 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
621 tcc_error("redeclaration of '%s'",
622 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
627 /* push a global identifier */
628 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
631 s
= sym_push2(&global_stack
, v
, t
, c
);
632 s
->r
= VT_CONST
| VT_SYM
;
633 /* don't record anonymous symbol */
634 if (v
< SYM_FIRST_ANOM
) {
635 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
636 /* modify the top most local identifier, so that sym_identifier will
637 point to 's' when popped; happens when called from inline asm */
638 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
639 ps
= &(*ps
)->prev_tok
;
646 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
647 pop them yet from the list, but do remove them from the token array. */
648 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
658 /* remove symbol in token array */
660 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
661 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
663 ps
= &ts
->sym_struct
;
665 ps
= &ts
->sym_identifier
;
676 /* ------------------------------------------------------------------------- */
677 static void vcheck_cmp(void)
679 /* cannot let cpu flags if other instruction are generated. Also
680 avoid leaving VT_JMP anywhere except on the top of the stack
681 because it would complicate the code generator.
683 Don't do this when nocode_wanted. vtop might come from
684 !nocode_wanted regions (see 88_codeopt.c) and transforming
685 it to a register without actually generating code is wrong
686 as their value might still be used for real. All values
687 we push under nocode_wanted will eventually be popped
688 again, so that the VT_CMP/VT_JMP value will be in vtop
689 when code is unsuppressed again. */
691 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
695 static void vsetc(CType
*type
, int r
, CValue
*vc
)
697 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
698 tcc_error("memory full (vstack)");
708 ST_FUNC
void vswap(void)
718 /* pop stack value */
719 ST_FUNC
void vpop(void)
722 v
= vtop
->r
& VT_VALMASK
;
723 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
724 /* for x86, we need to pop the FP stack */
726 o(0xd8dd); /* fstp %st(0) */
730 /* need to put correct jump if && or || without test */
737 /* push constant of type "type" with useless value */
738 ST_FUNC
void vpush(CType
*type
)
740 vset(type
, VT_CONST
, 0);
743 /* push integer constant */
744 ST_FUNC
void vpushi(int v
)
748 vsetc(&int_type
, VT_CONST
, &cval
);
751 /* push a pointer sized constant */
752 static void vpushs(addr_t v
)
756 vsetc(&size_type
, VT_CONST
, &cval
);
759 /* push arbitrary 64bit constant */
760 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
767 vsetc(&ctype
, VT_CONST
, &cval
);
770 /* push long long constant */
771 static inline void vpushll(long long v
)
773 vpush64(VT_LLONG
, v
);
776 ST_FUNC
void vset(CType
*type
, int r
, int v
)
781 vsetc(type
, r
, &cval
);
784 static void vseti(int r
, int v
)
792 ST_FUNC
void vpushv(SValue
*v
)
794 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
795 tcc_error("memory full (vstack)");
800 static void vdup(void)
805 /* rotate n first stack elements to the bottom
806 I1 ... In -> I2 ... In I1 [top is right]
808 ST_FUNC
void vrotb(int n
)
820 /* rotate the n elements before entry e towards the top
821 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
823 ST_FUNC
void vrote(SValue
*e
, int n
)
830 for(i
= 0;i
< n
- 1; i
++)
835 /* rotate n first stack elements to the top
836 I1 ... In -> In I1 ... I(n-1) [top is right]
838 ST_FUNC
void vrott(int n
)
843 /* ------------------------------------------------------------------------- */
844 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
846 /* called from generators to set the result from relational ops */
847 ST_FUNC
void vset_VT_CMP(int op
)
855 /* called once before asking generators to load VT_CMP to a register */
856 static void vset_VT_JMP(void)
858 int op
= vtop
->cmp_op
;
859 if (vtop
->jtrue
|| vtop
->jfalse
) {
860 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
861 int inv
= op
& (op
< 2); /* small optimization */
862 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
864 /* otherwise convert flags (rsp. 0/1) to register */
866 if (op
< 2) /* doesn't seem to happen */
871 /* Set CPU Flags, doesn't yet jump */
872 static void gvtst_set(int inv
, int t
)
875 if (vtop
->r
!= VT_CMP
) {
878 if (vtop
->r
== VT_CMP
) /* must be VT_CONST otherwise */
880 else if (vtop
->r
== VT_CONST
)
881 vset_VT_CMP(vtop
->c
.i
!= 0);
885 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
886 *p
= gjmp_append(*p
, t
);
889 /* Generate value test
891 * Generate a test for any value (jump, comparison and integers) */
892 static int gvtst(int inv
, int t
)
898 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
903 /* jump to the wanted target */
905 t
= gjmp_cond(op
^ inv
, t
);
908 /* resolve complementary jumps to here */
915 /* ------------------------------------------------------------------------- */
916 /* push a symbol value of TYPE */
917 static inline void vpushsym(CType
*type
, Sym
*sym
)
921 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
925 /* Return a static symbol pointing to a section */
926 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
932 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
933 sym
->type
.t
|= VT_STATIC
;
934 put_extern_sym(sym
, sec
, offset
, size
);
938 /* push a reference to a section offset by adding a dummy symbol */
939 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
941 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
944 /* define a new external reference to a symbol 'v' of type 'u' */
945 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
951 /* push forward reference */
952 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
953 s
->type
.ref
= type
->ref
;
954 } else if (IS_ASM_SYM(s
)) {
955 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
956 s
->type
.ref
= type
->ref
;
962 /* Merge symbol attributes. */
963 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
965 if (sa1
->aligned
&& !sa
->aligned
)
966 sa
->aligned
= sa1
->aligned
;
967 sa
->packed
|= sa1
->packed
;
968 sa
->weak
|= sa1
->weak
;
969 if (sa1
->visibility
!= STV_DEFAULT
) {
970 int vis
= sa
->visibility
;
971 if (vis
== STV_DEFAULT
972 || vis
> sa1
->visibility
)
973 vis
= sa1
->visibility
;
974 sa
->visibility
= vis
;
976 sa
->dllexport
|= sa1
->dllexport
;
977 sa
->nodecorate
|= sa1
->nodecorate
;
978 sa
->dllimport
|= sa1
->dllimport
;
981 /* Merge function attributes. */
982 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
984 if (fa1
->func_call
&& !fa
->func_call
)
985 fa
->func_call
= fa1
->func_call
;
986 if (fa1
->func_type
&& !fa
->func_type
)
987 fa
->func_type
= fa1
->func_type
;
988 if (fa1
->func_args
&& !fa
->func_args
)
989 fa
->func_args
= fa1
->func_args
;
992 /* Merge attributes. */
993 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
995 merge_symattr(&ad
->a
, &ad1
->a
);
996 merge_funcattr(&ad
->f
, &ad1
->f
);
999 ad
->section
= ad1
->section
;
1000 if (ad1
->alias_target
)
1001 ad
->alias_target
= ad1
->alias_target
;
1003 ad
->asm_label
= ad1
->asm_label
;
1005 ad
->attr_mode
= ad1
->attr_mode
;
1008 /* Merge some type attributes. */
1009 static void patch_type(Sym
*sym
, CType
*type
)
1011 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1012 if (!(sym
->type
.t
& VT_EXTERN
))
1013 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1014 sym
->type
.t
&= ~VT_EXTERN
;
1017 if (IS_ASM_SYM(sym
)) {
1018 /* stay static if both are static */
1019 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1020 sym
->type
.ref
= type
->ref
;
1023 if (!is_compatible_types(&sym
->type
, type
)) {
1024 tcc_error("incompatible types for redefinition of '%s'",
1025 get_tok_str(sym
->v
, NULL
));
1027 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1028 int static_proto
= sym
->type
.t
& VT_STATIC
;
1029 /* warn if static follows non-static function declaration */
1030 if ((type
->t
& VT_STATIC
) && !static_proto
1031 /* XXX this test for inline shouldn't be here. Until we
1032 implement gnu-inline mode again it silences a warning for
1033 mingw caused by our workarounds. */
1034 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1035 tcc_warning("static storage ignored for redefinition of '%s'",
1036 get_tok_str(sym
->v
, NULL
));
1038 /* set 'inline' if both agree or if one has static */
1039 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1040 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1041 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1042 static_proto
|= VT_INLINE
;
1045 if (0 == (type
->t
& VT_EXTERN
)) {
1046 /* put complete type, use static from prototype */
1047 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1048 sym
->type
.ref
= type
->ref
;
1050 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1053 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1054 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1055 sym
->type
.ref
= type
->ref
;
1059 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1060 /* set array size if it was omitted in extern declaration */
1061 sym
->type
.ref
->c
= type
->ref
->c
;
1063 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1064 tcc_warning("storage mismatch for redefinition of '%s'",
1065 get_tok_str(sym
->v
, NULL
));
1069 /* Merge some storage attributes. */
1070 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1073 patch_type(sym
, type
);
1075 #ifdef TCC_TARGET_PE
1076 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1077 tcc_error("incompatible dll linkage for redefinition of '%s'",
1078 get_tok_str(sym
->v
, NULL
));
1080 merge_symattr(&sym
->a
, &ad
->a
);
1082 sym
->asm_label
= ad
->asm_label
;
1083 update_storage(sym
);
1086 /* copy sym to other stack */
1087 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1090 s
= sym_malloc(), *s
= *s0
;
1091 s
->prev
= *ps
, *ps
= s
;
1092 if (s
->v
< SYM_FIRST_ANOM
) {
1093 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1094 s
->prev_tok
= *ps
, *ps
= s
;
1099 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1100 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1102 int bt
= s
->type
.t
& VT_BTYPE
;
1103 if (bt
== VT_FUNC
|| bt
== VT_PTR
) {
1104 Sym
**sp
= &s
->type
.ref
;
1105 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1106 Sym
*s2
= sym_copy(s
, ps
);
1107 sp
= &(*sp
= s2
)->next
;
1108 sym_copy_ref(s2
, ps
);
1113 /* define a new external reference to a symbol 'v' */
1114 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1118 /* look for global symbol */
1120 while (s
&& s
->sym_scope
)
1124 /* push forward reference */
1125 s
= global_identifier_push(v
, type
->t
, 0);
1128 s
->asm_label
= ad
->asm_label
;
1129 s
->type
.ref
= type
->ref
;
1130 /* copy type to the global stack */
1132 sym_copy_ref(s
, &global_stack
);
1134 patch_storage(s
, ad
, type
);
1136 /* push variables on local_stack if any */
1137 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1138 s
= sym_copy(s
, &local_stack
);
1142 /* push a reference to global symbol v */
1143 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1145 vpushsym(type
, external_global_sym(v
, type
));
1148 /* save registers up to (vtop - n) stack entry */
1149 ST_FUNC
void save_regs(int n
)
1152 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1156 /* save r to the memory stack, and mark it as being free */
1157 ST_FUNC
void save_reg(int r
)
1159 save_reg_upstack(r
, 0);
1162 /* save r to the memory stack, and mark it as being free,
1163 if seen up to (vtop - n) stack entry */
1164 ST_FUNC
void save_reg_upstack(int r
, int n
)
1166 int l
, saved
, size
, align
;
1170 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1175 /* modify all stack values */
1178 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1179 if ((p
->r
& VT_VALMASK
) == r
|| (p
->r2
& VT_VALMASK
) == r
) {
1180 /* must save value on stack if not already done */
1182 /* NOTE: must reload 'r' because r might be equal to r2 */
1183 r
= p
->r
& VT_VALMASK
;
1184 /* store register in the stack */
1186 if ((p
->r
& VT_LVAL
) ||
1187 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
1189 type
= &char_pointer_type
;
1193 size
= type_size(type
, &align
);
1194 l
=get_temp_local_var(size
,align
);
1195 sv
.type
.t
= type
->t
;
1196 sv
.r
= VT_LOCAL
| VT_LVAL
;
1199 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1200 /* x86 specific: need to pop fp register ST0 if saved */
1201 if (r
== TREG_ST0
) {
1202 o(0xd8dd); /* fstp %st(0) */
1205 /* special long long case */
1206 if ((p
->r2
& VT_VALMASK
) < VT_CONST
) {
1212 /* mark that stack entry as being saved on the stack */
1213 if (p
->r
& VT_LVAL
) {
1214 /* also clear the bounded flag because the
1215 relocation address of the function was stored in
1217 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1219 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1227 #ifdef TCC_TARGET_ARM
1228 /* find a register of class 'rc2' with at most one reference on stack.
1229 * If none, call get_reg(rc) */
1230 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1235 for(r
=0;r
<NB_REGS
;r
++) {
1236 if (reg_classes
[r
] & rc2
) {
1239 for(p
= vstack
; p
<= vtop
; p
++) {
1240 if ((p
->r
& VT_VALMASK
) == r
||
1241 (p
->r2
& VT_VALMASK
) == r
)
1252 /* find a free register of class 'rc'. If none, save one register */
1253 ST_FUNC
int get_reg(int rc
)
1258 /* find a free register */
1259 for(r
=0;r
<NB_REGS
;r
++) {
1260 if (reg_classes
[r
] & rc
) {
1263 for(p
=vstack
;p
<=vtop
;p
++) {
1264 if ((p
->r
& VT_VALMASK
) == r
||
1265 (p
->r2
& VT_VALMASK
) == r
)
1273 /* no register left : free the first one on the stack (VERY
1274 IMPORTANT to start from the bottom to ensure that we don't
1275 spill registers used in gen_opi()) */
1276 for(p
=vstack
;p
<=vtop
;p
++) {
1277 /* look at second register (if long long) */
1278 r
= p
->r2
& VT_VALMASK
;
1279 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1281 r
= p
->r
& VT_VALMASK
;
1282 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1288 /* Should never comes here */
1292 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1293 static int get_temp_local_var(int size
,int align
){
1295 struct temp_local_variable
*temp_var
;
1302 for(i
=0;i
<nb_temp_local_vars
;i
++){
1303 temp_var
=&arr_temp_local_vars
[i
];
1304 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1307 /*check if temp_var is free*/
1309 for(p
=vstack
;p
<=vtop
;p
++) {
1311 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1312 if(p
->c
.i
==temp_var
->location
){
1319 found_var
=temp_var
->location
;
1325 loc
= (loc
- size
) & -align
;
1326 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1327 temp_var
=&arr_temp_local_vars
[i
];
1328 temp_var
->location
=loc
;
1329 temp_var
->size
=size
;
1330 temp_var
->align
=align
;
1331 nb_temp_local_vars
++;
1338 static void clear_temp_local_var_list(){
1339 nb_temp_local_vars
=0;
1342 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1344 static void move_reg(int r
, int s
, int t
)
1358 /* get address of vtop (vtop MUST BE an lvalue) */
1359 ST_FUNC
void gaddrof(void)
1361 vtop
->r
&= ~VT_LVAL
;
1362 /* tricky: if saved lvalue, then we can go back to lvalue */
1363 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1364 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1369 #ifdef CONFIG_TCC_BCHECK
1370 /* generate lvalue bound code */
1371 static void gbound(void)
1376 vtop
->r
&= ~VT_MUSTBOUND
;
1377 /* if lvalue, then use checking code before dereferencing */
1378 if (vtop
->r
& VT_LVAL
) {
1379 /* if not VT_BOUNDED value, then make one */
1380 if (!(vtop
->r
& VT_BOUNDED
)) {
1381 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1382 /* must save type because we must set it to int to get pointer */
1384 vtop
->type
.t
= VT_PTR
;
1387 gen_bounded_ptr_add();
1388 vtop
->r
|= lval_type
;
1391 /* then check for dereferencing */
1392 gen_bounded_ptr_deref();
1397 static void incr_bf_adr(int o
)
1399 vtop
->type
= char_pointer_type
;
1403 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1404 | (VT_BYTE
|VT_UNSIGNED
);
1405 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1406 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1409 /* single-byte load mode for packed or otherwise unaligned bitfields */
1410 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1413 save_reg_upstack(vtop
->r
, 1);
1414 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1415 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1424 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1426 vpushi((1 << n
) - 1), gen_op('&');
1429 vpushi(bits
), gen_op(TOK_SHL
);
1432 bits
+= n
, bit_size
-= n
, o
= 1;
1435 if (!(type
->t
& VT_UNSIGNED
)) {
1436 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1437 vpushi(n
), gen_op(TOK_SHL
);
1438 vpushi(n
), gen_op(TOK_SAR
);
1442 /* single-byte store mode for packed or otherwise unaligned bitfields */
1443 static void store_packed_bf(int bit_pos
, int bit_size
)
1445 int bits
, n
, o
, m
, c
;
1447 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1449 save_reg_upstack(vtop
->r
, 1);
1450 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1452 incr_bf_adr(o
); // X B
1454 c
? vdup() : gv_dup(); // B V X
1457 vpushi(bits
), gen_op(TOK_SHR
);
1459 vpushi(bit_pos
), gen_op(TOK_SHL
);
1464 m
= ((1 << n
) - 1) << bit_pos
;
1465 vpushi(m
), gen_op('&'); // X B V1
1466 vpushv(vtop
-1); // X B V1 B
1467 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1468 gen_op('&'); // X B V1 B1
1469 gen_op('|'); // X B V2
1471 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1472 vstore(), vpop(); // X B
1473 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1478 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1481 if (0 == sv
->type
.ref
)
1483 t
= sv
->type
.ref
->auxtype
;
1484 if (t
!= -1 && t
!= VT_STRUCT
) {
1485 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1486 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1491 /* store vtop a register belonging to class 'rc'. lvalues are
1492 converted to values. Cannot be used if cannot be converted to
1493 register value (such as structures). */
1494 ST_FUNC
int gv(int rc
)
1496 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1498 /* NOTE: get_reg can modify vstack[] */
1499 if (vtop
->type
.t
& VT_BITFIELD
) {
1502 bit_pos
= BIT_POS(vtop
->type
.t
);
1503 bit_size
= BIT_SIZE(vtop
->type
.t
);
1504 /* remove bit field info to avoid loops */
1505 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1508 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1509 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1510 type
.t
|= VT_UNSIGNED
;
1512 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1514 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1519 if (r
== VT_STRUCT
) {
1520 load_packed_bf(&type
, bit_pos
, bit_size
);
1522 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1523 /* cast to int to propagate signedness in following ops */
1525 /* generate shifts */
1526 vpushi(bits
- (bit_pos
+ bit_size
));
1528 vpushi(bits
- bit_size
);
1529 /* NOTE: transformed to SHR if unsigned */
1534 if (is_float(vtop
->type
.t
) &&
1535 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1536 unsigned long offset
;
1537 /* CPUs usually cannot use float constants, so we store them
1538 generically in data segment */
1539 size
= type_size(&vtop
->type
, &align
);
1541 size
= 0, align
= 1;
1542 offset
= section_add(data_section
, size
, align
);
1543 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1545 init_putv(&vtop
->type
, data_section
, offset
);
1548 #ifdef CONFIG_TCC_BCHECK
1549 if (vtop
->r
& VT_MUSTBOUND
)
1552 #ifdef TCC_TARGET_RISCV64
1554 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
&& rc
== RC_FLOAT
)
1558 r
= vtop
->r
& VT_VALMASK
;
1559 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1560 #ifndef TCC_TARGET_ARM64
1561 #ifndef TCC_TARGET_RISCV64 /* XXX: remove the whole LRET/QRET class */
1564 #ifdef TCC_TARGET_X86_64
1565 else if (rc
== RC_FRET
)
1570 /* need to reload if:
1572 - lvalue (need to dereference pointer)
1573 - already a register, but not in the right class */
1575 || (vtop
->r
& VT_LVAL
)
1576 || !(reg_classes
[r
] & rc
)
1577 #ifdef TCC_TARGET_RISCV64
1578 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& (vtop
->r2
>= NB_REGS
|| !(reg_classes
[vtop
->r2
] & rc2
)))
1579 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
&& (vtop
->r2
>= NB_REGS
|| !(reg_classes
[vtop
->r2
] & rc2
)))
1581 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1582 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1584 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1589 #ifdef TCC_TARGET_RISCV64
1590 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
)) {
1591 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= VT_LLONG
;
1593 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1594 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1596 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1597 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1598 unsigned long long ll
;
1600 int r2
, original_type
;
1601 original_type
= vtop
->type
.t
;
1602 /* two register type load : expand to two words
1605 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1608 vtop
->c
.i
= ll
; /* first word */
1610 vtop
->r
= r
; /* save register value */
1611 vpushi(ll
>> 32); /* second word */
1614 if (vtop
->r
& VT_LVAL
) {
1615 /* We do not want to modifier the long long
1616 pointer here, so the safest (and less
1617 efficient) is to save all the other registers
1618 in the stack. XXX: totally inefficient. */
1622 /* lvalue_save: save only if used further down the stack */
1623 save_reg_upstack(vtop
->r
, 1);
1625 /* load from memory */
1626 vtop
->type
.t
= load_type
;
1629 vtop
[-1].r
= r
; /* save register value */
1630 /* increment pointer to get second word */
1631 vtop
->type
.t
= addr_type
;
1636 vtop
->type
.t
= load_type
;
1638 /* move registers */
1641 vtop
[-1].r
= r
; /* save register value */
1642 vtop
->r
= vtop
[-1].r2
;
1644 /* Allocate second register. Here we rely on the fact that
1645 get_reg() tries first to free r2 of an SValue. */
1649 /* write second register */
1651 vtop
->type
.t
= original_type
;
1652 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1654 /* lvalue of scalar type : need to use lvalue type
1655 because of possible cast */
1658 /* compute memory access type */
1659 if (vtop
->r
& VT_LVAL_BYTE
)
1661 else if (vtop
->r
& VT_LVAL_SHORT
)
1663 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1667 /* restore wanted type */
1670 if (vtop
->r
== VT_CMP
)
1672 /* one register type load */
1677 #ifdef TCC_TARGET_C67
1678 /* uses register pairs for doubles */
1679 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1686 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1687 ST_FUNC
void gv2(int rc1
, int rc2
)
1689 /* generate more generic register first. But VT_JMP or VT_CMP
1690 values must be generated first in all cases to avoid possible
1692 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1697 /* test if reload is needed for first register */
1698 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1708 /* test if reload is needed for first register */
1709 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1715 #ifndef TCC_TARGET_ARM64
1716 /* wrapper around RC_FRET to return a register by type */
1717 static int rc_fret(int t
)
1719 #ifdef TCC_TARGET_X86_64
1720 if (t
== VT_LDOUBLE
) {
1723 #elif defined TCC_TARGET_RISCV64
1724 if (t
== VT_LDOUBLE
)
1731 /* wrapper around REG_FRET to return a register by type */
1732 static int reg_fret(int t
)
1734 #ifdef TCC_TARGET_X86_64
1735 if (t
== VT_LDOUBLE
) {
1738 #elif defined TCC_TARGET_RISCV64
1739 if (t
== VT_LDOUBLE
)
1746 /* expand 64bit on stack in two ints */
1747 ST_FUNC
void lexpand(void)
1750 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1751 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1752 if (v
== VT_CONST
) {
1755 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1761 vtop
[0].r
= vtop
[-1].r2
;
1762 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1764 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1769 /* build a long long from two ints */
1770 static void lbuild(int t
)
1772 gv2(RC_INT
, RC_INT
);
1773 vtop
[-1].r2
= vtop
[0].r
;
1774 vtop
[-1].type
.t
= t
;
1779 /* convert stack entry to register and duplicate its value in another
1781 static void gv_dup(void)
1788 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1789 if (t
& VT_BITFIELD
) {
1799 /* stack: H L L1 H1 */
1809 /* duplicate value */
1814 #ifdef TCC_TARGET_X86_64
1815 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1818 #elif defined TCC_TARGET_RISCV64
1819 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
1828 load(r1
, &sv
); /* move r to r1 */
1830 /* duplicates value */
1837 /* generate CPU independent (unsigned) long long operations */
1838 static void gen_opl(int op
)
1840 int t
, a
, b
, op1
, c
, i
;
1842 unsigned short reg_iret
= REG_IRET
;
1843 unsigned short reg_lret
= REG_LRET
;
1849 func
= TOK___divdi3
;
1852 func
= TOK___udivdi3
;
1855 func
= TOK___moddi3
;
1858 func
= TOK___umoddi3
;
1865 /* call generic long long function */
1866 vpush_global_sym(&func_old_type
, func
);
1871 vtop
->r2
= reg_lret
;
1879 //pv("gen_opl A",0,2);
1885 /* stack: L1 H1 L2 H2 */
1890 vtop
[-2] = vtop
[-3];
1893 /* stack: H1 H2 L1 L2 */
1894 //pv("gen_opl B",0,4);
1900 /* stack: H1 H2 L1 L2 ML MH */
1903 /* stack: ML MH H1 H2 L1 L2 */
1907 /* stack: ML MH H1 L2 H2 L1 */
1912 /* stack: ML MH M1 M2 */
1915 } else if (op
== '+' || op
== '-') {
1916 /* XXX: add non carry method too (for MIPS or alpha) */
1922 /* stack: H1 H2 (L1 op L2) */
1925 gen_op(op1
+ 1); /* TOK_xxxC2 */
1928 /* stack: H1 H2 (L1 op L2) */
1931 /* stack: (L1 op L2) H1 H2 */
1933 /* stack: (L1 op L2) (H1 op H2) */
1941 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1942 t
= vtop
[-1].type
.t
;
1946 /* stack: L H shift */
1948 /* constant: simpler */
1949 /* NOTE: all comments are for SHL. the other cases are
1950 done by swapping words */
1961 if (op
!= TOK_SAR
) {
1994 /* XXX: should provide a faster fallback on x86 ? */
1997 func
= TOK___ashrdi3
;
2000 func
= TOK___lshrdi3
;
2003 func
= TOK___ashldi3
;
2009 /* compare operations */
2015 /* stack: L1 H1 L2 H2 */
2017 vtop
[-1] = vtop
[-2];
2019 /* stack: L1 L2 H1 H2 */
2023 /* when values are equal, we need to compare low words. since
2024 the jump is inverted, we invert the test too. */
2027 else if (op1
== TOK_GT
)
2029 else if (op1
== TOK_ULT
)
2031 else if (op1
== TOK_UGT
)
2041 /* generate non equal test */
2043 vset_VT_CMP(TOK_NE
);
2047 /* compare low. Always unsigned */
2051 else if (op1
== TOK_LE
)
2053 else if (op1
== TOK_GT
)
2055 else if (op1
== TOK_GE
)
2058 #if 0//def TCC_TARGET_I386
2059 if (op
== TOK_NE
) { gsym(b
); break; }
2060 if (op
== TOK_EQ
) { gsym(a
); break; }
/* Signed 64-bit division on values carried in uint64_t, with C99
   semantics (truncation toward zero). Operands are negated to their
   magnitudes, divided unsigned, and the quotient is negated back when
   the operand signs differ — this avoids UB from INT64_MIN / -1 and
   from signed overflow during negation. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? -a : a;  /* |a| (two's complement negate is safe on unsigned) */
    uint64_t ub = (b >> 63) ? -b : b;  /* |b| */
    uint64_t q = ua / ub;
    if ((a ^ b) >> 63)                 /* signs differ -> negative quotient */
        q = -q;
    return q;
}
/* Signed '<' on 64-bit values carried in uint64_t. XOR-ing the sign
   bit into both operands maps the signed order onto the unsigned
   order, so a plain unsigned compare gives the signed result without
   any implementation-defined conversion. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign = (uint64_t)1 << 63;
    return (a ^ sign) < (b ^ sign);
}
2080 /* handle integer constant optimizations and various machine
2082 static void gen_opic(int op
)
2084 SValue
*v1
= vtop
- 1;
2086 int t1
= v1
->type
.t
& VT_BTYPE
;
2087 int t2
= v2
->type
.t
& VT_BTYPE
;
2088 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2089 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2090 uint64_t l1
= c1
? v1
->c
.i
: 0;
2091 uint64_t l2
= c2
? v2
->c
.i
: 0;
2092 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2094 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2095 l1
= ((uint32_t)l1
|
2096 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2097 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2098 l2
= ((uint32_t)l2
|
2099 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2103 case '+': l1
+= l2
; break;
2104 case '-': l1
-= l2
; break;
2105 case '&': l1
&= l2
; break;
2106 case '^': l1
^= l2
; break;
2107 case '|': l1
|= l2
; break;
2108 case '*': l1
*= l2
; break;
2115 /* if division by zero, generate explicit division */
2118 tcc_error("division by zero in constant");
2122 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2123 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2124 case TOK_UDIV
: l1
= l1
/ l2
; break;
2125 case TOK_UMOD
: l1
= l1
% l2
; break;
2128 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2129 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2131 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2134 case TOK_ULT
: l1
= l1
< l2
; break;
2135 case TOK_UGE
: l1
= l1
>= l2
; break;
2136 case TOK_EQ
: l1
= l1
== l2
; break;
2137 case TOK_NE
: l1
= l1
!= l2
; break;
2138 case TOK_ULE
: l1
= l1
<= l2
; break;
2139 case TOK_UGT
: l1
= l1
> l2
; break;
2140 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2141 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2142 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2143 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2145 case TOK_LAND
: l1
= l1
&& l2
; break;
2146 case TOK_LOR
: l1
= l1
|| l2
; break;
2150 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2151 l1
= ((uint32_t)l1
|
2152 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2156 /* if commutative ops, put c2 as constant */
2157 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2158 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2160 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2161 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2163 if (!const_wanted
&&
2165 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2166 (l1
== -1 && op
== TOK_SAR
))) {
2167 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2169 } else if (!const_wanted
&&
2170 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2172 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2173 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2174 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2179 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2182 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2183 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2186 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2187 /* filter out NOP operations like x*1, x-0, x&-1... */
2189 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2190 /* try to use shifts instead of muls or divs */
2191 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2200 else if (op
== TOK_PDIV
)
2206 } else if (c2
&& (op
== '+' || op
== '-') &&
2207 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2208 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2209 /* symbol + constant case */
2213 /* The backends can't always deal with addends to symbols
2214 larger than +-1<<31. Don't construct such. */
2221 /* call low level op generator */
2222 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2223 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2231 /* generate a floating point operation with constant propagation */
2232 static void gen_opif(int op
)
2236 #if defined _MSC_VER && defined __x86_64__
2237 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2244 /* currently, we cannot do computations with forward symbols */
2245 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2246 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2248 if (v1
->type
.t
== VT_FLOAT
) {
2251 } else if (v1
->type
.t
== VT_DOUBLE
) {
2259 /* NOTE: we only do constant propagation if finite number (not
2260 NaN or infinity) (ANSI spec) */
2261 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2265 case '+': f1
+= f2
; break;
2266 case '-': f1
-= f2
; break;
2267 case '*': f1
*= f2
; break;
2270 /* If not in initializer we need to potentially generate
2271 FP exceptions at runtime, otherwise we want to fold. */
2277 /* XXX: also handles tests ? */
2281 /* XXX: overflow test ? */
2282 if (v1
->type
.t
== VT_FLOAT
) {
2284 } else if (v1
->type
.t
== VT_DOUBLE
) {
2296 static int pointed_size(CType
*type
)
2299 return type_size(pointed_type(type
), &align
);
2302 static void vla_runtime_pointed_size(CType
*type
)
2305 vla_runtime_type_size(pointed_type(type
), &align
);
2308 static inline int is_null_pointer(SValue
*p
)
2310 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2312 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2313 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2314 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2315 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2316 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2317 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2320 static inline int is_integer_btype(int bt
)
2322 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2323 bt
== VT_INT
|| bt
== VT_LLONG
);
2326 /* check types for comparison or subtraction of pointers */
2327 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2329 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2332 /* null pointers are accepted for all comparisons as gcc */
2333 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2337 bt1
= type1
->t
& VT_BTYPE
;
2338 bt2
= type2
->t
& VT_BTYPE
;
2339 /* accept comparison between pointer and integer with a warning */
2340 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2341 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2342 tcc_warning("comparison between pointer and integer");
2346 /* both must be pointers or implicit function pointers */
2347 if (bt1
== VT_PTR
) {
2348 type1
= pointed_type(type1
);
2349 } else if (bt1
!= VT_FUNC
)
2350 goto invalid_operands
;
2352 if (bt2
== VT_PTR
) {
2353 type2
= pointed_type(type2
);
2354 } else if (bt2
!= VT_FUNC
) {
2356 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2358 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2359 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2363 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2364 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2365 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2366 /* gcc-like error if '-' is used */
2368 goto invalid_operands
;
2370 tcc_warning("comparison of distinct pointer types lacks a cast");
2374 /* generic gen_op: handles types problems */
2375 ST_FUNC
void gen_op(int op
)
2377 int u
, t1
, t2
, bt1
, bt2
, t
;
2381 t1
= vtop
[-1].type
.t
;
2382 t2
= vtop
[0].type
.t
;
2383 bt1
= t1
& VT_BTYPE
;
2384 bt2
= t2
& VT_BTYPE
;
2386 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2387 tcc_error("operation on a struct");
2388 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2389 if (bt2
== VT_FUNC
) {
2390 mk_pointer(&vtop
->type
);
2393 if (bt1
== VT_FUNC
) {
2395 mk_pointer(&vtop
->type
);
2400 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2401 /* at least one operand is a pointer */
2402 /* relational op: must be both pointers */
2403 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2404 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2405 /* pointers are handled are unsigned */
2407 t
= VT_LLONG
| VT_UNSIGNED
;
2409 t
= VT_INT
| VT_UNSIGNED
;
2413 /* if both pointers, then it must be the '-' op */
2414 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2416 tcc_error("cannot use pointers here");
2417 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2418 /* XXX: check that types are compatible */
2419 if (vtop
[-1].type
.t
& VT_VLA
) {
2420 vla_runtime_pointed_size(&vtop
[-1].type
);
2422 vpushi(pointed_size(&vtop
[-1].type
));
2426 vtop
->type
.t
= ptrdiff_type
.t
;
2430 /* exactly one pointer : must be '+' or '-'. */
2431 if (op
!= '-' && op
!= '+')
2432 tcc_error("cannot use pointers here");
2433 /* Put pointer as first operand */
2434 if (bt2
== VT_PTR
) {
2436 t
= t1
, t1
= t2
, t2
= t
;
2439 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2440 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2443 type1
= vtop
[-1].type
;
2444 type1
.t
&= ~VT_ARRAY
;
2445 if (vtop
[-1].type
.t
& VT_VLA
)
2446 vla_runtime_pointed_size(&vtop
[-1].type
);
2448 u
= pointed_size(&vtop
[-1].type
);
2450 tcc_error("unknown array element size");
2454 /* XXX: cast to int ? (long long case) */
2460 /* #ifdef CONFIG_TCC_BCHECK
2461 The main reason to removing this code:
2468 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2469 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2471 When this code is on. then the output looks like
2473 v+(i-j) = 0xbff84000
2475 /* if evaluating constant expression, no code should be
2476 generated, so no bound check */
2477 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2478 /* if bounded pointers, we generate a special code to
2485 gen_bounded_ptr_add();
2491 /* put again type if gen_opic() swaped operands */
2494 } else if (is_float(bt1
) || is_float(bt2
)) {
2495 /* compute bigger type and do implicit casts */
2496 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2498 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2503 /* floats can only be used for a few operations */
2504 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2505 (op
< TOK_ULT
|| op
> TOK_GT
))
2506 tcc_error("invalid operands for binary operation");
2508 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2509 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2510 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2512 t
|= (VT_LONG
& t1
);
2514 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2515 /* cast to biggest op */
2516 t
= VT_LLONG
| VT_LONG
;
2517 if (bt1
== VT_LLONG
)
2519 if (bt2
== VT_LLONG
)
2521 /* convert to unsigned if it does not fit in a long long */
2522 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2523 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2527 /* integer operations */
2528 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2529 /* convert to unsigned if it does not fit in an integer */
2530 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2531 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2534 /* XXX: currently, some unsigned operations are explicit, so
2535 we modify them here */
2536 if (t
& VT_UNSIGNED
) {
2543 else if (op
== TOK_LT
)
2545 else if (op
== TOK_GT
)
2547 else if (op
== TOK_LE
)
2549 else if (op
== TOK_GE
)
2557 /* special case for shifts and long long: we keep the shift as
2559 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2566 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2567 /* relational op: the result is an int */
2568 vtop
->type
.t
= VT_INT
;
2573 // Make sure that we have converted to an rvalue:
2574 if (vtop
->r
& VT_LVAL
)
2575 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2578 #ifndef TCC_TARGET_ARM
2579 /* generic itof for unsigned long long case */
2580 static void gen_cvt_itof1(int t
)
2582 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2585 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2586 (VT_LLONG
| VT_UNSIGNED
)) {
2589 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2590 #if LDOUBLE_SIZE != 8
2591 else if (t
== VT_LDOUBLE
)
2592 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2595 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2599 vtop
->r
= reg_fret(t
);
2607 /* generic ftoi for unsigned long long case */
2608 static void gen_cvt_ftoi1(int t
)
2610 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2615 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2616 /* not handled natively */
2617 st
= vtop
->type
.t
& VT_BTYPE
;
2619 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2620 #if LDOUBLE_SIZE != 8
2621 else if (st
== VT_LDOUBLE
)
2622 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2625 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2631 vtop
->r2
= REG_LRET
;
2639 /* force char or short cast */
2640 static void force_charshort_cast(int t
)
2644 /* cannot cast static initializers */
2645 if (STATIC_DATA_WANTED
)
2649 /* XXX: add optimization if lvalue : just change type and offset */
2654 if (t
& VT_UNSIGNED
) {
2655 vpushi((1 << bits
) - 1);
2658 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2664 /* result must be signed or the SAR is converted to an SHL
2665 This was not the case when "t" was a signed short
2666 and the last value on the stack was an unsigned int */
2667 vtop
->type
.t
&= ~VT_UNSIGNED
;
2673 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2674 static void gen_cast_s(int t
)
2682 static void gen_cast(CType
*type
)
2684 int sbt
, dbt
, sf
, df
, c
, p
;
2686 /* special delayed cast for char/short */
2687 /* XXX: in some cases (multiple cascaded casts), it may still
2689 if (vtop
->r
& VT_MUSTCAST
) {
2690 vtop
->r
&= ~VT_MUSTCAST
;
2691 force_charshort_cast(vtop
->type
.t
);
2694 /* bitfields first get cast to ints */
2695 if (vtop
->type
.t
& VT_BITFIELD
) {
2699 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2700 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2705 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2706 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2707 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2708 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
2711 /* constant case: we can do it now */
2712 /* XXX: in ISOC, cannot do it if error in convert */
2713 if (sbt
== VT_FLOAT
)
2714 vtop
->c
.ld
= vtop
->c
.f
;
2715 else if (sbt
== VT_DOUBLE
)
2716 vtop
->c
.ld
= vtop
->c
.d
;
2719 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2720 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2721 vtop
->c
.ld
= vtop
->c
.i
;
2723 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2725 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2726 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2728 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2731 if (dbt
== VT_FLOAT
)
2732 vtop
->c
.f
= (float)vtop
->c
.ld
;
2733 else if (dbt
== VT_DOUBLE
)
2734 vtop
->c
.d
= (double)vtop
->c
.ld
;
2735 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2736 vtop
->c
.i
= vtop
->c
.ld
;
2737 } else if (sf
&& dbt
== VT_BOOL
) {
2738 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2741 vtop
->c
.i
= vtop
->c
.ld
;
2742 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2744 else if (sbt
& VT_UNSIGNED
)
2745 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2747 else if (sbt
== VT_PTR
)
2750 else if (sbt
!= VT_LLONG
)
2751 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2752 -(vtop
->c
.i
& 0x80000000));
2754 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2756 else if (dbt
== VT_BOOL
)
2757 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2759 else if (dbt
== VT_PTR
)
2762 else if (dbt
!= VT_LLONG
) {
2763 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2764 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2767 if (!(dbt
& VT_UNSIGNED
))
2768 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2771 } else if (p
&& dbt
== VT_BOOL
) {
2775 /* non constant case: generate code */
2777 /* convert from fp to fp */
2780 /* convert int to fp */
2783 /* convert fp to int */
2784 if (dbt
== VT_BOOL
) {
2788 /* we handle char/short/etc... with generic code */
2789 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2790 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2794 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2795 /* additional cast for char/short... */
2801 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2802 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2803 /* scalar to long long */
2804 /* machine independent conversion */
2806 /* generate high word */
2807 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2811 if (sbt
== VT_PTR
) {
2812 /* cast from pointer to int before we apply
2813 shift operation, which pointers don't support*/
2820 /* patch second register */
2821 vtop
[-1].r2
= vtop
->r
;
2825 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2826 (dbt
& VT_BTYPE
) == VT_PTR
||
2827 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2828 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2829 (sbt
& VT_BTYPE
) != VT_PTR
&&
2830 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2831 /* need to convert from 32bit to 64bit */
2833 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2834 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_RISCV64)
2836 #elif defined(TCC_TARGET_X86_64)
2838 /* x86_64 specific: movslq */
2840 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2844 } else if (sbt
& VT_UNSIGNED
) {
2845 #if defined(TCC_TARGET_RISCV64)
2846 /* RISC-V keeps 32bit vals in registers sign-extended.
2847 So here we need a zero-extension. */
2848 vtop
->type
.t
= VT_LLONG
;
2857 } else if (dbt
== VT_BOOL
) {
2858 /* scalar to bool */
2861 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2862 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2863 if (sbt
== VT_PTR
) {
2864 vtop
->type
.t
= VT_INT
;
2865 tcc_warning("nonportable conversion from pointer to char/short");
2867 force_charshort_cast(dbt
);
2868 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2870 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2872 /* from long long: just take low order word */
2876 if (dbt
& VT_UNSIGNED
) {
2877 /* XXX some architectures (e.g. risc-v) would like it
2878 better for this merely being a 32-to-64 sign or zero-
2881 vtop
->type
.t
|= VT_UNSIGNED
;
2887 /* if lvalue and single word type, nothing to do because
2888 the lvalue already contains the real type size (see
2889 VT_LVAL_xxx constants) */
2892 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2893 /* if we are casting between pointer types,
2894 we must update the VT_LVAL_xxx size */
2895 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2896 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2899 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
2902 /* return type size as known at compile time. Put alignment at 'a' */
2903 ST_FUNC
int type_size(CType
*type
, int *a
)
2908 bt
= type
->t
& VT_BTYPE
;
2909 if (bt
== VT_STRUCT
) {
2914 } else if (bt
== VT_PTR
) {
2915 if (type
->t
& VT_ARRAY
) {
2919 ts
= type_size(&s
->type
, a
);
2921 if (ts
< 0 && s
->c
< 0)
2929 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
2930 return -1; /* incomplete enum */
2931 } else if (bt
== VT_LDOUBLE
) {
2933 return LDOUBLE_SIZE
;
2934 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2935 #ifdef TCC_TARGET_I386
2936 #ifdef TCC_TARGET_PE
2941 #elif defined(TCC_TARGET_ARM)
2951 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2954 } else if (bt
== VT_SHORT
) {
2957 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2961 /* char, void, function, _Bool */
2967 /* push type size as known at runtime time on top of value stack. Put
2969 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2971 if (type
->t
& VT_VLA
) {
2972 type_size(&type
->ref
->type
, a
);
2973 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2975 vpushi(type_size(type
, a
));
2979 /* return the pointed type of t */
2980 static inline CType
*pointed_type(CType
*type
)
2982 return &type
->ref
->type
;
2985 /* modify type so that its it is a pointer to type. */
2986 ST_FUNC
void mk_pointer(CType
*type
)
2989 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2990 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2994 /* compare function types. OLD functions match any new functions */
2995 static int is_compatible_func(CType
*type1
, CType
*type2
)
3001 if (s1
->f
.func_call
!= s2
->f
.func_call
)
3003 if (s1
->f
.func_type
!= s2
->f
.func_type
3004 && s1
->f
.func_type
!= FUNC_OLD
3005 && s2
->f
.func_type
!= FUNC_OLD
)
3007 /* we should check the function return type for FUNC_OLD too
3008 but that causes problems with the internally used support
3009 functions such as TOK_memmove */
3010 if (s1
->f
.func_type
== FUNC_OLD
&& !s1
->next
)
3012 if (s2
->f
.func_type
== FUNC_OLD
&& !s2
->next
)
3015 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
3026 /* return true if type1 and type2 are the same. If unqualified is
3027 true, qualifiers on the types are ignored.
3029 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3033 t1
= type1
->t
& VT_TYPE
;
3034 t2
= type2
->t
& VT_TYPE
;
3036 /* strip qualifiers before comparing */
3037 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3038 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3041 /* Default Vs explicit signedness only matters for char */
3042 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3046 /* XXX: bitfields ? */
3051 && !(type1
->ref
->c
< 0
3052 || type2
->ref
->c
< 0
3053 || type1
->ref
->c
== type2
->ref
->c
))
3056 /* test more complicated cases */
3057 bt1
= t1
& VT_BTYPE
;
3058 if (bt1
== VT_PTR
) {
3059 type1
= pointed_type(type1
);
3060 type2
= pointed_type(type2
);
3061 return is_compatible_types(type1
, type2
);
3062 } else if (bt1
== VT_STRUCT
) {
3063 return (type1
->ref
== type2
->ref
);
3064 } else if (bt1
== VT_FUNC
) {
3065 return is_compatible_func(type1
, type2
);
3066 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
3067 return type1
->ref
== type2
->ref
;
3073 /* return true if type1 and type2 are exactly the same (including
3076 static int is_compatible_types(CType
*type1
, CType
*type2
)
3078 return compare_types(type1
,type2
,0);
3081 /* return true if type1 and type2 are the same (ignoring qualifiers).
3083 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3085 return compare_types(type1
,type2
,1);
3088 /* print a type. If 'varstr' is not NULL, then the variable is also
3089 printed in the type */
3091 /* XXX: add array and function pointers */
3092 static void type_to_str(char *buf
, int buf_size
,
3093 CType
*type
, const char *varstr
)
3105 pstrcat(buf
, buf_size
, "extern ");
3107 pstrcat(buf
, buf_size
, "static ");
3109 pstrcat(buf
, buf_size
, "typedef ");
3111 pstrcat(buf
, buf_size
, "inline ");
3112 if (t
& VT_VOLATILE
)
3113 pstrcat(buf
, buf_size
, "volatile ");
3114 if (t
& VT_CONSTANT
)
3115 pstrcat(buf
, buf_size
, "const ");
3117 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
3118 || ((t
& VT_UNSIGNED
)
3119 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
3122 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
3124 buf_size
-= strlen(buf
);
3159 tstr
= "long double";
3161 pstrcat(buf
, buf_size
, tstr
);
3168 pstrcat(buf
, buf_size
, tstr
);
3169 v
= type
->ref
->v
& ~SYM_STRUCT
;
3170 if (v
>= SYM_FIRST_ANOM
)
3171 pstrcat(buf
, buf_size
, "<anonymous>");
3173 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3178 if (varstr
&& '*' == *varstr
) {
3179 pstrcat(buf1
, sizeof(buf1
), "(");
3180 pstrcat(buf1
, sizeof(buf1
), varstr
);
3181 pstrcat(buf1
, sizeof(buf1
), ")");
3183 pstrcat(buf1
, buf_size
, "(");
3185 while (sa
!= NULL
) {
3187 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3188 pstrcat(buf1
, sizeof(buf1
), buf2
);
3191 pstrcat(buf1
, sizeof(buf1
), ", ");
3193 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3194 pstrcat(buf1
, sizeof(buf1
), ", ...");
3195 pstrcat(buf1
, sizeof(buf1
), ")");
3196 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3201 if (varstr
&& '*' == *varstr
)
3202 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3204 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3205 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3208 pstrcpy(buf1
, sizeof(buf1
), "*");
3209 if (t
& VT_CONSTANT
)
3210 pstrcat(buf1
, buf_size
, "const ");
3211 if (t
& VT_VOLATILE
)
3212 pstrcat(buf1
, buf_size
, "volatile ");
3214 pstrcat(buf1
, sizeof(buf1
), varstr
);
3215 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3219 pstrcat(buf
, buf_size
, " ");
3220 pstrcat(buf
, buf_size
, varstr
);
3225 /* verify type compatibility to store vtop in 'dt' type, and generate
3227 static void gen_assign_cast(CType
*dt
)
3229 CType
*st
, *type1
, *type2
;
3230 char buf1
[256], buf2
[256];
3231 int dbt
, sbt
, qualwarn
, lvl
;
3233 st
= &vtop
->type
; /* source type */
3234 dbt
= dt
->t
& VT_BTYPE
;
3235 sbt
= st
->t
& VT_BTYPE
;
3236 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3237 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3238 ; /* It is Ok if both are void */
3240 tcc_error("cannot cast from/to void");
3242 if (dt
->t
& VT_CONSTANT
)
3243 tcc_warning("assignment of read-only location");
3246 /* special cases for pointers */
3247 /* '0' can also be a pointer */
3248 if (is_null_pointer(vtop
))
3250 /* accept implicit pointer to integer cast with warning */
3251 if (is_integer_btype(sbt
)) {
3252 tcc_warning("assignment makes pointer from integer without a cast");
3255 type1
= pointed_type(dt
);
3257 type2
= pointed_type(st
);
3258 else if (sbt
== VT_FUNC
)
3259 type2
= st
; /* a function is implicitly a function pointer */
3262 if (is_compatible_types(type1
, type2
))
3264 for (qualwarn
= lvl
= 0;; ++lvl
) {
3265 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3266 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3268 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3269 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3270 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3272 type1
= pointed_type(type1
);
3273 type2
= pointed_type(type2
);
3275 if (!is_compatible_unqualified_types(type1
, type2
)) {
3276 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3277 /* void * can match anything */
3278 } else if (dbt
== sbt
3279 && is_integer_btype(sbt
& VT_BTYPE
)
3280 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3281 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3282 /* Like GCC don't warn by default for merely changes
3283 in pointer target signedness. Do warn for different
3284 base types, though, in particular for unsigned enums
3285 and signed int targets. */
3287 tcc_warning("assignment from incompatible pointer type");
3292 tcc_warning("assignment discards qualifiers from pointer target type");
3298 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3299 tcc_warning("assignment makes integer from pointer without a cast");
3300 } else if (sbt
== VT_STRUCT
) {
3301 goto case_VT_STRUCT
;
3303 /* XXX: more tests */
3307 if (!is_compatible_unqualified_types(dt
, st
)) {
3309 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3310 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3311 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3318 /* store vtop in lvalue pushed on stack */
3319 ST_FUNC
void vstore(void)
3321 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3323 ft
= vtop
[-1].type
.t
;
3324 sbt
= vtop
->type
.t
& VT_BTYPE
;
3325 dbt
= ft
& VT_BTYPE
;
3326 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3327 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3328 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3329 /* optimize char/short casts */
3330 delayed_cast
= VT_MUSTCAST
;
3331 vtop
->type
.t
= ft
& VT_TYPE
;
3332 /* XXX: factorize */
3333 if (ft
& VT_CONSTANT
)
3334 tcc_warning("assignment of read-only location");
3337 if (!(ft
& VT_BITFIELD
))
3338 gen_assign_cast(&vtop
[-1].type
);
3341 if (sbt
== VT_STRUCT
) {
3342 /* if structure, only generate pointer */
3343 /* structure assignment : generate memcpy */
3344 /* XXX: optimize if small size */
3345 size
= type_size(&vtop
->type
, &align
);
3349 vtop
->type
.t
= VT_PTR
;
3352 /* address of memcpy() */
3355 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3356 else if(!(align
& 3))
3357 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3360 /* Use memmove, rather than memcpy, as dest and src may be same: */
3361 vpush_global_sym(&func_old_type
, TOK_memmove
);
3366 vtop
->type
.t
= VT_PTR
;
3372 /* leave source on stack */
3373 } else if (ft
& VT_BITFIELD
) {
3374 /* bitfield store handling */
3376 /* save lvalue as expression result (example: s.b = s.a = n;) */
3377 vdup(), vtop
[-1] = vtop
[-2];
3379 bit_pos
= BIT_POS(ft
);
3380 bit_size
= BIT_SIZE(ft
);
3381 /* remove bit field info to avoid loops */
3382 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3384 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3385 gen_cast(&vtop
[-1].type
);
3386 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3389 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3390 if (r
== VT_STRUCT
) {
3391 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3392 store_packed_bf(bit_pos
, bit_size
);
3394 unsigned long long mask
= (1ULL << bit_size
) - 1;
3395 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3397 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3400 vpushi((unsigned)mask
);
3407 /* duplicate destination */
3410 /* load destination, mask and or with source */
3411 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3412 vpushll(~(mask
<< bit_pos
));
3414 vpushi(~((unsigned)mask
<< bit_pos
));
3419 /* ... and discard */
3422 } else if (dbt
== VT_VOID
) {
3425 #ifdef CONFIG_TCC_BCHECK
3426 /* bound check case */
3427 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3436 #ifdef TCC_TARGET_X86_64
3437 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3439 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3442 #elif defined TCC_TARGET_RISCV64
3443 if (dbt
== VT_LDOUBLE
)
3447 r
= gv(rc
); /* generate value */
3448 /* if lvalue was saved on stack, must read it */
3449 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3451 t
= get_reg(RC_INT
);
3457 sv
.r
= VT_LOCAL
| VT_LVAL
;
3458 sv
.c
.i
= vtop
[-1].c
.i
;
3460 vtop
[-1].r
= t
| VT_LVAL
;
3462 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3463 #ifdef TCC_TARGET_RISCV64
3464 if (dbt
== VT_QLONG
|| dbt
== VT_LDOUBLE
) {
3465 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= VT_LLONG
;
3467 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3468 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3470 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3471 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3473 vtop
[-1].type
.t
= load_type
;
3476 /* convert to int to increment easily */
3477 vtop
->type
.t
= addr_type
;
3483 vtop
[-1].type
.t
= load_type
;
3484 /* XXX: it works because r2 is spilled last ! */
3485 store(vtop
->r2
, vtop
- 1);
3491 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3492 vtop
->r
|= delayed_cast
;
3496 /* post defines POST/PRE add. c is the token ++ or -- */
3497 ST_FUNC
void inc(int post
, int c
)
3500 vdup(); /* save lvalue */
3502 gv_dup(); /* duplicate value */
3507 vpushi(c
- TOK_MID
);
3509 vstore(); /* store value */
3511 vpop(); /* if post op, return saved value */
3514 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3516 /* read the string */
3520 while (tok
== TOK_STR
) {
3521 /* XXX: add \0 handling too ? */
3522 cstr_cat(astr
, tokc
.str
.data
, -1);
3525 cstr_ccat(astr
, '\0');
3528 /* If I is >= 1 and a power of two, returns log2(i)+1.
3529 If I is 0 returns 0. */
3530 static int exact_log2p1(int i
)
3535 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3546 /* Parse __attribute__((...)) GNUC extension. */
3547 static void parse_attribute(AttributeDef
*ad
)
3553 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3558 while (tok
!= ')') {
3559 if (tok
< TOK_IDENT
)
3560 expect("attribute name");
3572 tcc_warning("implicit declaration of function '%s'",
3573 get_tok_str(tok
, &tokc
));
3574 s
= external_global_sym(tok
, &func_old_type
);
3576 ad
->cleanup_func
= s
;
3581 case TOK_CONSTRUCTOR1
:
3582 case TOK_CONSTRUCTOR2
:
3583 ad
->a
.constructor
= 1;
3585 case TOK_DESTRUCTOR1
:
3586 case TOK_DESTRUCTOR2
:
3587 ad
->a
.destructor
= 1;
3592 parse_mult_str(&astr
, "section name");
3593 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3600 parse_mult_str(&astr
, "alias(\"target\")");
3601 ad
->alias_target
= /* save string as token, for later */
3602 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3606 case TOK_VISIBILITY1
:
3607 case TOK_VISIBILITY2
:
3609 parse_mult_str(&astr
,
3610 "visibility(\"default|hidden|internal|protected\")");
3611 if (!strcmp (astr
.data
, "default"))
3612 ad
->a
.visibility
= STV_DEFAULT
;
3613 else if (!strcmp (astr
.data
, "hidden"))
3614 ad
->a
.visibility
= STV_HIDDEN
;
3615 else if (!strcmp (astr
.data
, "internal"))
3616 ad
->a
.visibility
= STV_INTERNAL
;
3617 else if (!strcmp (astr
.data
, "protected"))
3618 ad
->a
.visibility
= STV_PROTECTED
;
3620 expect("visibility(\"default|hidden|internal|protected\")");
3629 if (n
<= 0 || (n
& (n
- 1)) != 0)
3630 tcc_error("alignment must be a positive power of two");
3635 ad
->a
.aligned
= exact_log2p1(n
);
3636 if (n
!= 1 << (ad
->a
.aligned
- 1))
3637 tcc_error("alignment of %d is larger than implemented", n
);
3649 /* currently, no need to handle it because tcc does not
3650 track unused objects */
3654 ad
->f
.func_noreturn
= 1;
3659 ad
->f
.func_call
= FUNC_CDECL
;
3664 ad
->f
.func_call
= FUNC_STDCALL
;
3666 #ifdef TCC_TARGET_I386
3676 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3682 ad
->f
.func_call
= FUNC_FASTCALLW
;
3689 ad
->attr_mode
= VT_LLONG
+ 1;
3692 ad
->attr_mode
= VT_BYTE
+ 1;
3695 ad
->attr_mode
= VT_SHORT
+ 1;
3699 ad
->attr_mode
= VT_INT
+ 1;
3702 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3709 ad
->a
.dllexport
= 1;
3711 case TOK_NODECORATE
:
3712 ad
->a
.nodecorate
= 1;
3715 ad
->a
.dllimport
= 1;
3718 if (tcc_state
->warn_unsupported
)
3719 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3720 /* skip parameters */
3722 int parenthesis
= 0;
3726 else if (tok
== ')')
3729 } while (parenthesis
&& tok
!= -1);
3742 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3746 while ((s
= s
->next
) != NULL
) {
3747 if ((s
->v
& SYM_FIELD
) &&
3748 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3749 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3750 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
3762 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3764 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3765 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3766 int pcc
= !tcc_state
->ms_bitfields
;
3767 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3774 prevbt
= VT_STRUCT
; /* make it never match */
3779 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3780 if (f
->type
.t
& VT_BITFIELD
)
3781 bit_size
= BIT_SIZE(f
->type
.t
);
3784 size
= type_size(&f
->type
, &align
);
3785 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3788 if (pcc
&& bit_size
== 0) {
3789 /* in pcc mode, packing does not affect zero-width bitfields */
3792 /* in pcc mode, attribute packed overrides if set. */
3793 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3796 /* pragma pack overrides align if lesser and packs bitfields always */
3799 if (pragma_pack
< align
)
3800 align
= pragma_pack
;
3801 /* in pcc mode pragma pack also overrides individual align */
3802 if (pcc
&& pragma_pack
< a
)
3806 /* some individual align was specified */
3810 if (type
->ref
->type
.t
== VT_UNION
) {
3811 if (pcc
&& bit_size
>= 0)
3812 size
= (bit_size
+ 7) >> 3;
3817 } else if (bit_size
< 0) {
3819 c
+= (bit_pos
+ 7) >> 3;
3820 c
= (c
+ align
- 1) & -align
;
3829 /* A bit-field. Layout is more complicated. There are two
3830 options: PCC (GCC) compatible and MS compatible */
3832 /* In PCC layout a bit-field is placed adjacent to the
3833 preceding bit-fields, except if:
3835 - an individual alignment was given
3836 - it would overflow its base type container and
3837 there is no packing */
3838 if (bit_size
== 0) {
3840 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3842 } else if (f
->a
.aligned
) {
3844 } else if (!packed
) {
3846 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3847 if (ofs
> size
/ align
)
3851 /* in pcc mode, long long bitfields have type int if they fit */
3852 if (size
== 8 && bit_size
<= 32)
3853 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3855 while (bit_pos
>= align
* 8)
3856 c
+= align
, bit_pos
-= align
* 8;
3859 /* In PCC layout named bit-fields influence the alignment
3860 of the containing struct using the base types alignment,
3861 except for packed fields (which here have correct align). */
3862 if (f
->v
& SYM_FIRST_ANOM
3863 // && bit_size // ??? gcc on ARM/rpi does that
3868 bt
= f
->type
.t
& VT_BTYPE
;
3869 if ((bit_pos
+ bit_size
> size
* 8)
3870 || (bit_size
> 0) == (bt
!= prevbt
)
3872 c
= (c
+ align
- 1) & -align
;
3875 /* In MS bitfield mode a bit-field run always uses
3876 at least as many bits as the underlying type.
3877 To start a new run it's also required that this
3878 or the last bit-field had non-zero width. */
3879 if (bit_size
|| prev_bit_size
)
3882 /* In MS layout the records alignment is normally
3883 influenced by the field, except for a zero-width
3884 field at the start of a run (but by further zero-width
3885 fields it is again). */
3886 if (bit_size
== 0 && prevbt
!= bt
)
3889 prev_bit_size
= bit_size
;
3892 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3893 | (bit_pos
<< VT_STRUCT_SHIFT
);
3894 bit_pos
+= bit_size
;
3896 if (align
> maxalign
)
3900 printf("set field %s offset %-2d size %-2d align %-2d",
3901 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3902 if (f
->type
.t
& VT_BITFIELD
) {
3903 printf(" pos %-2d bits %-2d",
3916 c
+= (bit_pos
+ 7) >> 3;
3918 /* store size and alignment */
3919 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3923 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3924 /* can happen if individual align for some member was given. In
3925 this case MSVC ignores maxalign when aligning the size */
3930 c
= (c
+ a
- 1) & -a
;
3934 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3937 /* check whether we can access bitfields by their type */
3938 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3942 if (0 == (f
->type
.t
& VT_BITFIELD
))
3946 bit_size
= BIT_SIZE(f
->type
.t
);
3949 bit_pos
= BIT_POS(f
->type
.t
);
3950 size
= type_size(&f
->type
, &align
);
3951 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3954 /* try to access the field using a different type */
3955 c0
= -1, s
= align
= 1;
3957 px
= f
->c
* 8 + bit_pos
;
3958 cx
= (px
>> 3) & -align
;
3959 px
= px
- (cx
<< 3);
3962 s
= (px
+ bit_size
+ 7) >> 3;
3972 s
= type_size(&t
, &align
);
3976 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3977 /* update offset and bit position */
3980 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3981 | (bit_pos
<< VT_STRUCT_SHIFT
);
3985 printf("FIX field %s offset %-2d size %-2d align %-2d "
3986 "pos %-2d bits %-2d\n",
3987 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3988 cx
, s
, align
, px
, bit_size
);
3991 /* fall back to load/store single-byte wise */
3992 f
->auxtype
= VT_STRUCT
;
3994 printf("FIX field %s : load byte-wise\n",
3995 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4001 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4002 static void struct_decl(CType
*type
, int u
)
4004 int v
, c
, size
, align
, flexible
;
4005 int bit_size
, bsize
, bt
;
4007 AttributeDef ad
, ad1
;
4010 memset(&ad
, 0, sizeof ad
);
4012 parse_attribute(&ad
);
4016 /* struct already defined ? return it */
4018 expect("struct/union/enum name");
4020 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4023 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4025 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4030 /* Record the original enum/struct/union token. */
4031 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4033 /* we put an undefined size for struct/union */
4034 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4035 s
->r
= 0; /* default alignment is zero as gcc */
4037 type
->t
= s
->type
.t
;
4043 tcc_error("struct/union/enum already defined");
4045 /* cannot be empty */
4046 /* non empty enums are not allowed */
4049 long long ll
= 0, pl
= 0, nl
= 0;
4052 /* enum symbols have static storage */
4053 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4057 expect("identifier");
4059 if (ss
&& !local_stack
)
4060 tcc_error("redefinition of enumerator '%s'",
4061 get_tok_str(v
, NULL
));
4065 ll
= expr_const64();
4067 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4069 *ps
= ss
, ps
= &ss
->next
;
4078 /* NOTE: we accept a trailing comma */
4083 /* set integral type of the enum */
4086 if (pl
!= (unsigned)pl
)
4087 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4089 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4090 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4091 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4093 /* set type for enum members */
4094 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4096 if (ll
== (int)ll
) /* default is int if it fits */
4098 if (t
.t
& VT_UNSIGNED
) {
4099 ss
->type
.t
|= VT_UNSIGNED
;
4100 if (ll
== (unsigned)ll
)
4103 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4104 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4109 while (tok
!= '}') {
4110 if (!parse_btype(&btype
, &ad1
)) {
4116 tcc_error("flexible array member '%s' not at the end of struct",
4117 get_tok_str(v
, NULL
));
4123 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4125 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4126 expect("identifier");
4128 int v
= btype
.ref
->v
;
4129 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4130 if (tcc_state
->ms_extensions
== 0)
4131 expect("identifier");
4135 if (type_size(&type1
, &align
) < 0) {
4136 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4139 tcc_error("field '%s' has incomplete type",
4140 get_tok_str(v
, NULL
));
4142 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4143 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4144 (type1
.t
& VT_STORAGE
))
4145 tcc_error("invalid type for '%s'",
4146 get_tok_str(v
, NULL
));
4150 bit_size
= expr_const();
4151 /* XXX: handle v = 0 case for messages */
4153 tcc_error("negative width in bit-field '%s'",
4154 get_tok_str(v
, NULL
));
4155 if (v
&& bit_size
== 0)
4156 tcc_error("zero width for bit-field '%s'",
4157 get_tok_str(v
, NULL
));
4158 parse_attribute(&ad1
);
4160 size
= type_size(&type1
, &align
);
4161 if (bit_size
>= 0) {
4162 bt
= type1
.t
& VT_BTYPE
;
4168 tcc_error("bitfields must have scalar type");
4170 if (bit_size
> bsize
) {
4171 tcc_error("width of '%s' exceeds its type",
4172 get_tok_str(v
, NULL
));
4173 } else if (bit_size
== bsize
4174 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4175 /* no need for bit fields */
4177 } else if (bit_size
== 64) {
4178 tcc_error("field width 64 not implemented");
4180 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4182 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4185 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4186 /* Remember we've seen a real field to check
4187 for placement of flexible array member. */
4190 /* If member is a struct or bit-field, enforce
4191 placing into the struct (as anonymous). */
4193 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4198 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4203 if (tok
== ';' || tok
== TOK_EOF
)
4210 parse_attribute(&ad
);
4211 struct_layout(type
, &ad
);
4216 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4218 merge_symattr(&ad
->a
, &s
->a
);
4219 merge_funcattr(&ad
->f
, &s
->f
);
4222 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4223 are added to the element type, copied because it could be a typedef. */
4224 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4226 while (type
->t
& VT_ARRAY
) {
4227 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4228 type
= &type
->ref
->type
;
4230 type
->t
|= qualifiers
;
4233 /* return 0 if no type declaration. otherwise, return the basic type
4236 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4238 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4242 memset(ad
, 0, sizeof(AttributeDef
));
4252 /* currently, we really ignore extension */
4262 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4263 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4264 tmbt
: tcc_error("too many basic types");
4267 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4272 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4289 memset(&ad1
, 0, sizeof(AttributeDef
));
4290 if (parse_btype(&type1
, &ad1
)) {
4291 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4293 n
= 1 << (ad1
.a
.aligned
- 1);
4295 type_size(&type1
, &n
);
4298 if (n
<= 0 || (n
& (n
- 1)) != 0)
4299 tcc_error("alignment must be a positive power of two");
4302 ad
->a
.aligned
= exact_log2p1(n
);
4306 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4307 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4308 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4309 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4316 #ifdef TCC_TARGET_ARM64
4318 /* GCC's __uint128_t appears in some Linux header files. Make it a
4319 synonym for long double to get the size and alignment right. */
4330 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4331 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4339 struct_decl(&type1
, VT_ENUM
);
4342 type
->ref
= type1
.ref
;
4345 struct_decl(&type1
, VT_STRUCT
);
4348 struct_decl(&type1
, VT_UNION
);
4351 /* type modifiers */
4356 parse_btype_qualify(type
, VT_CONSTANT
);
4364 parse_btype_qualify(type
, VT_VOLATILE
);
4371 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4372 tcc_error("signed and unsigned modifier");
4385 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4386 tcc_error("signed and unsigned modifier");
4387 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4403 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4404 tcc_error("multiple storage classes");
4416 ad
->f
.func_noreturn
= 1;
4418 /* GNUC attribute */
4419 case TOK_ATTRIBUTE1
:
4420 case TOK_ATTRIBUTE2
:
4421 parse_attribute(ad
);
4422 if (ad
->attr_mode
) {
4423 u
= ad
->attr_mode
-1;
4424 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4432 parse_expr_type(&type1
);
4433 /* remove all storage modifiers except typedef */
4434 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4436 sym_to_attr(ad
, type1
.ref
);
4442 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4446 if (tok
== ':' && !in_generic
) {
4447 /* ignore if it's a label */
4452 t
&= ~(VT_BTYPE
|VT_LONG
);
4453 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4454 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4455 type
->ref
= s
->type
.ref
;
4457 parse_btype_qualify(type
, t
);
4459 /* get attributes from typedef */
4468 if (tcc_state
->char_is_unsigned
) {
4469 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4472 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4473 bt
= t
& (VT_BTYPE
|VT_LONG
);
4475 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4476 #ifdef TCC_TARGET_PE
4477 if (bt
== VT_LDOUBLE
)
4478 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4484 /* convert a function parameter type (array to pointer and function to
4485 function pointer) */
4486 static inline void convert_parameter_type(CType
*pt
)
4488 /* remove const and volatile qualifiers (XXX: const could be used
4489 to indicate a const function parameter */
4490 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4491 /* array must be transformed to pointer according to ANSI C */
4493 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
/* NOTE(review): reads one or more adjacent string literals into 'astr'
   via parse_mult_str(). This extraction is lossy — the original lines
   between the signature and the call (presumably the '(' handling) are
   not visible here; confirm against the upstream source. */
4498 ST_FUNC
void parse_asm_str(CString
*astr
)
4501 parse_mult_str(astr
, "string constant");
4504 /* Parse an asm label and return the token */
4505 static int asm_label_instr(void)
4511 parse_asm_str(&astr
);
4514 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4516 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4521 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4523 int n
, l
, t1
, arg_size
, align
, unused_align
;
4524 Sym
**plast
, *s
, *first
;
4529 /* function type, or recursive declarator (return if so) */
4531 if (td
&& !(td
& TYPE_ABSTRACT
))
4535 else if (parse_btype(&pt
, &ad1
))
4538 merge_attr (ad
, &ad1
);
4547 /* read param name and compute offset */
4548 if (l
!= FUNC_OLD
) {
4549 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4551 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4552 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4553 tcc_error("parameter declared as void");
4557 expect("identifier");
4558 pt
.t
= VT_VOID
; /* invalid type */
4562 convert_parameter_type(&pt
);
4563 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4564 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4570 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4575 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4576 tcc_error("invalid type");
4579 /* if no parameters, then old type prototype */
4582 /* NOTE: const is ignored in returned type as it has a special
4583 meaning in gcc / C++ */
4584 type
->t
&= ~VT_CONSTANT
;
4585 /* some ancient pre-K&R C allows a function to return an array
4586 and the array brackets to be put after the arguments, such
4587 that "int c()[]" means something like "int[] c()" */
4590 skip(']'); /* only handle simple "[]" */
4593 /* we push a anonymous symbol which will contain the function prototype */
4594 ad
->f
.func_args
= arg_size
;
4595 ad
->f
.func_type
= l
;
4596 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4602 } else if (tok
== '[') {
4603 int saved_nocode_wanted
= nocode_wanted
;
4604 /* array definition */
4607 /* XXX The optional type-quals and static should only be accepted
4608 in parameter decls. The '*' as well, and then even only
4609 in prototypes (not function defs). */
4611 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4626 if (!local_stack
|| (storage
& VT_STATIC
))
4627 vpushi(expr_const());
4629 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4630 length must always be evaluated, even under nocode_wanted,
4631 so that its size slot is initialized (e.g. under sizeof
4636 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4639 tcc_error("invalid array size");
4641 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4642 tcc_error("size of variable length array should be an integer");
4648 /* parse next post type */
4649 post_type(type
, ad
, storage
, 0);
4651 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4652 tcc_error("declaration of an array of functions");
4653 if ((type
->t
& VT_BTYPE
) == VT_VOID
4654 || type_size(type
, &unused_align
) < 0)
4655 tcc_error("declaration of an array of incomplete type elements");
4657 t1
|= type
->t
& VT_VLA
;
4661 tcc_error("need explicit inner array size in VLAs");
4662 loc
-= type_size(&int_type
, &align
);
4666 vla_runtime_type_size(type
, &align
);
4668 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4674 nocode_wanted
= saved_nocode_wanted
;
4676 /* we push an anonymous symbol which will contain the array
4678 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4679 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4685 /* Parse a type declarator (except basic type), and return the type
4686 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4687 expected. 'type' should contain the basic type. 'ad' is the
4688 attribute definition of the basic type. It can be modified by
4689 type_decl(). If this (possibly abstract) declarator is a pointer chain
4690 it returns the innermost pointed to type (equals *type, but is a different
4691 pointer), otherwise returns type itself, that's used for recursive calls. */
4692 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4695 int qualifiers
, storage
;
4697 /* recursive type, remove storage bits first, apply them later again */
4698 storage
= type
->t
& VT_STORAGE
;
4699 type
->t
&= ~VT_STORAGE
;
4702 while (tok
== '*') {
4710 qualifiers
|= VT_CONSTANT
;
4715 qualifiers
|= VT_VOLATILE
;
4721 /* XXX: clarify attribute handling */
4722 case TOK_ATTRIBUTE1
:
4723 case TOK_ATTRIBUTE2
:
4724 parse_attribute(ad
);
4728 type
->t
|= qualifiers
;
4730 /* innermost pointed to type is the one for the first derivation */
4731 ret
= pointed_type(type
);
4735 /* This is possibly a parameter type list for abstract declarators
4736 ('int ()'), use post_type for testing this. */
4737 if (!post_type(type
, ad
, 0, td
)) {
4738 /* It's not, so it's a nested declarator, and the post operations
4739 apply to the innermost pointed to type (if any). */
4740 /* XXX: this is not correct to modify 'ad' at this point, but
4741 the syntax is not clear */
4742 parse_attribute(ad
);
4743 post
= type_decl(type
, ad
, v
, td
);
4747 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4748 /* type identifier */
4753 if (!(td
& TYPE_ABSTRACT
))
4754 expect("identifier");
4757 post_type(post
, ad
, storage
, 0);
4758 parse_attribute(ad
);
4763 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4764 ST_FUNC
int lvalue_type(int t
)
4769 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4771 else if (bt
== VT_SHORT
)
4775 if (t
& VT_UNSIGNED
)
4776 r
|= VT_LVAL_UNSIGNED
;
4780 /* indirection with full error checking and bound check */
4781 ST_FUNC
void indir(void)
4783 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4784 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4788 if (vtop
->r
& VT_LVAL
)
4790 vtop
->type
= *pointed_type(&vtop
->type
);
4791 /* Arrays and functions are never lvalues */
4792 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4793 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4794 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4795 /* if bound checking, the referenced pointer must be checked */
4796 #ifdef CONFIG_TCC_BCHECK
4797 if (tcc_state
->do_bounds_check
)
4798 vtop
->r
|= VT_MUSTBOUND
;
4803 /* pass a parameter to a function and do type checking and casting */
4804 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4809 func_type
= func
->f
.func_type
;
4810 if (func_type
== FUNC_OLD
||
4811 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4812 /* default casting : only need to convert float to double */
4813 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4814 gen_cast_s(VT_DOUBLE
);
4815 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4816 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4817 type
.ref
= vtop
->type
.ref
;
4820 } else if (arg
== NULL
) {
4821 tcc_error("too many arguments to function");
4824 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4825 gen_assign_cast(&type
);
4829 /* parse an expression and return its type without any side effect. */
4830 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4839 /* parse an expression of the form '(type)' or '(expr)' and return its
4841 static void parse_expr_type(CType
*type
)
4847 if (parse_btype(type
, &ad
)) {
4848 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4850 expr_type(type
, gexpr
);
4855 static void parse_type(CType
*type
)
4860 if (!parse_btype(type
, &ad
)) {
4863 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4866 static void parse_builtin_params(int nc
, const char *args
)
4873 while ((c
= *args
++)) {
4877 case 'e': expr_eq(); continue;
4878 case 't': parse_type(&t
); vpush(&t
); continue;
4879 default: tcc_error("internal error"); break;
4887 ST_FUNC
void unary(void)
4889 int n
, t
, align
, size
, r
, sizeof_caller
;
4894 sizeof_caller
= in_sizeof
;
4897 /* XXX: GCC 2.95.3 does not generate a table although it should be
4905 #ifdef TCC_TARGET_PE
4906 t
= VT_SHORT
|VT_UNSIGNED
;
4914 vsetc(&type
, VT_CONST
, &tokc
);
4918 t
= VT_INT
| VT_UNSIGNED
;
4924 t
= VT_LLONG
| VT_UNSIGNED
;
4936 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4939 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4941 case TOK___FUNCTION__
:
4943 goto tok_identifier
;
4949 /* special function name identifier */
4950 len
= strlen(funcname
) + 1;
4951 /* generate char[len] type */
4956 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4957 if (!NODATA_WANTED
) {
4958 ptr
= section_ptr_add(data_section
, len
);
4959 memcpy(ptr
, funcname
, len
);
4965 #ifdef TCC_TARGET_PE
4966 t
= VT_SHORT
| VT_UNSIGNED
;
4972 /* string parsing */
4974 if (tcc_state
->char_is_unsigned
)
4975 t
= VT_BYTE
| VT_UNSIGNED
;
4977 if (tcc_state
->warn_write_strings
)
4982 memset(&ad
, 0, sizeof(AttributeDef
));
4983 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4988 if (parse_btype(&type
, &ad
)) {
4989 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4991 /* check ISOC99 compound literal */
4993 /* data is allocated locally by default */
4998 /* all except arrays are lvalues */
4999 if (!(type
.t
& VT_ARRAY
))
5000 r
|= lvalue_type(type
.t
);
5001 memset(&ad
, 0, sizeof(AttributeDef
));
5002 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5004 if (sizeof_caller
) {
5011 } else if (tok
== '{') {
5012 int saved_nocode_wanted
= nocode_wanted
;
5014 tcc_error("expected constant");
5015 /* save all registers */
5017 /* statement expression : we do not accept break/continue
5018 inside as GCC does. We do retain the nocode_wanted state,
5019 as statement expressions can't ever be entered from the
5020 outside, so any reactivation of code emission (from labels
5021 or loop heads) can be disabled again after the end of it. */
5023 nocode_wanted
= saved_nocode_wanted
;
5038 /* functions names must be treated as function pointers,
5039 except for unary '&' and sizeof. Since we consider that
5040 functions are not lvalues, we only have to handle it
5041 there and in function calls. */
5042 /* arrays can also be used although they are not lvalues */
5043 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5044 !(vtop
->type
.t
& VT_ARRAY
))
5046 mk_pointer(&vtop
->type
);
5052 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5053 gen_cast_s(VT_BOOL
);
5054 vtop
->c
.i
= !vtop
->c
.i
;
5055 } else if (vtop
->r
== VT_CMP
) {
5057 n
= vtop
->jfalse
, vtop
->jfalse
= vtop
->jtrue
, vtop
->jtrue
= n
;
5072 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5073 tcc_error("pointer not accepted for unary plus");
5074 /* In order to force cast, we add zero, except for floating point
5075 where we really need a noop (otherwise -0.0 will be transformed
5077 if (!is_float(vtop
->type
.t
)) {
5089 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5091 if (vtop
[1].r
& VT_SYM
)
5092 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5093 size
= type_size(&type
, &align
);
5094 if (s
&& s
->a
.aligned
)
5095 align
= 1 << (s
->a
.aligned
- 1);
5096 if (t
== TOK_SIZEOF
) {
5097 if (!(type
.t
& VT_VLA
)) {
5099 tcc_error("sizeof applied to an incomplete type");
5102 vla_runtime_type_size(&type
, &align
);
5107 vtop
->type
.t
|= VT_UNSIGNED
;
5110 case TOK_builtin_expect
:
5111 /* __builtin_expect is a no-op for now */
5112 parse_builtin_params(0, "ee");
5115 case TOK_builtin_types_compatible_p
:
5116 parse_builtin_params(0, "tt");
5117 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5118 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5119 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5123 case TOK_builtin_choose_expr
:
5150 case TOK_builtin_constant_p
:
5151 parse_builtin_params(1, "e");
5152 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5156 case TOK_builtin_frame_address
:
5157 case TOK_builtin_return_address
:
5163 if (tok
!= TOK_CINT
) {
5164 tcc_error("%s only takes positive integers",
5165 tok1
== TOK_builtin_return_address
?
5166 "__builtin_return_address" :
5167 "__builtin_frame_address");
5169 level
= (uint32_t)tokc
.i
;
5174 vset(&type
, VT_LOCAL
, 0); /* local frame */
5176 mk_pointer(&vtop
->type
);
5177 indir(); /* -> parent frame */
5179 if (tok1
== TOK_builtin_return_address
) {
5180 // assume return address is just above frame pointer on stack
5183 mk_pointer(&vtop
->type
);
5188 #ifdef TCC_TARGET_RISCV64
5189 case TOK_builtin_va_start
:
5190 parse_builtin_params(0, "ee");
5191 r
= vtop
->r
& VT_VALMASK
;
5195 tcc_error("__builtin_va_start expects a local variable");
5200 #ifdef TCC_TARGET_X86_64
5201 #ifdef TCC_TARGET_PE
5202 case TOK_builtin_va_start
:
5203 parse_builtin_params(0, "ee");
5204 r
= vtop
->r
& VT_VALMASK
;
5208 tcc_error("__builtin_va_start expects a local variable");
5210 vtop
->type
= char_pointer_type
;
5215 case TOK_builtin_va_arg_types
:
5216 parse_builtin_params(0, "t");
5217 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5224 #ifdef TCC_TARGET_ARM64
5225 case TOK___va_start
: {
5226 parse_builtin_params(0, "ee");
5230 vtop
->type
.t
= VT_VOID
;
5233 case TOK___va_arg
: {
5234 parse_builtin_params(0, "et");
5242 case TOK___arm64_clear_cache
: {
5243 parse_builtin_params(0, "ee");
5246 vtop
->type
.t
= VT_VOID
;
5250 /* pre operations */
5261 t
= vtop
->type
.t
& VT_BTYPE
;
5263 /* In IEEE negate(x) isn't subtract(0,x), but rather
5267 vtop
->c
.f
= -1.0 * 0.0;
5268 else if (t
== VT_DOUBLE
)
5269 vtop
->c
.d
= -1.0 * 0.0;
5271 vtop
->c
.ld
= -1.0 * 0.0;
5279 goto tok_identifier
;
5281 /* allow to take the address of a label */
5282 if (tok
< TOK_UIDENT
)
5283 expect("label identifier");
5284 s
= label_find(tok
);
5286 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5288 if (s
->r
== LABEL_DECLARED
)
5289 s
->r
= LABEL_FORWARD
;
5292 s
->type
.t
= VT_VOID
;
5293 mk_pointer(&s
->type
);
5294 s
->type
.t
|= VT_STATIC
;
5296 vpushsym(&s
->type
, s
);
5302 CType controlling_type
;
5303 int has_default
= 0;
5306 TokenString
*str
= NULL
;
5307 int saved_const_wanted
= const_wanted
;
5312 expr_type(&controlling_type
, expr_eq
);
5313 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5314 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5315 mk_pointer(&controlling_type
);
5316 const_wanted
= saved_const_wanted
;
5320 if (tok
== TOK_DEFAULT
) {
5322 tcc_error("too many 'default'");
5328 AttributeDef ad_tmp
;
5333 parse_btype(&cur_type
, &ad_tmp
);
5336 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5337 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5339 tcc_error("type match twice");
5349 skip_or_save_block(&str
);
5351 skip_or_save_block(NULL
);
5358 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5359 tcc_error("type '%s' does not match any association", buf
);
5361 begin_macro(str
, 1);
5370 // special qnan , snan and infinity values
5375 vtop
->type
.t
= VT_FLOAT
;
5380 goto special_math_val
;
5383 goto special_math_val
;
5390 expect("identifier");
5392 if (!s
|| IS_ASM_SYM(s
)) {
5393 const char *name
= get_tok_str(t
, NULL
);
5395 tcc_error("'%s' undeclared", name
);
5396 /* for simple function calls, we tolerate undeclared
5397 external reference to int() function */
5398 if (tcc_state
->warn_implicit_function_declaration
5399 #ifdef TCC_TARGET_PE
5400 /* people must be warned about using undeclared WINAPI functions
5401 (which usually start with uppercase letter) */
5402 || (name
[0] >= 'A' && name
[0] <= 'Z')
5405 tcc_warning("implicit declaration of function '%s'", name
);
5406 s
= external_global_sym(t
, &func_old_type
);
5410 /* A symbol that has a register is a local register variable,
5411 which starts out as VT_LOCAL value. */
5412 if ((r
& VT_VALMASK
) < VT_CONST
)
5413 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5415 vset(&s
->type
, r
, s
->c
);
5416 /* Point to s as backpointer (even without r&VT_SYM).
5417 Will be used by at least the x86 inline asm parser for
5423 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5424 vtop
->c
.i
= s
->enum_val
;
5429 /* post operations */
5431 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5434 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5435 int qualifiers
, cumofs
= 0;
5437 if (tok
== TOK_ARROW
)
5439 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5442 /* expect pointer on structure */
5443 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5444 expect("struct or union");
5445 if (tok
== TOK_CDOUBLE
)
5446 expect("field name");
5448 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5449 expect("field name");
5450 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5452 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5453 /* add field offset to pointer */
5454 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5455 vpushi(cumofs
+ s
->c
);
5457 /* change type to field type, and set to lvalue */
5458 vtop
->type
= s
->type
;
5459 vtop
->type
.t
|= qualifiers
;
5460 /* an array is never an lvalue */
5461 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5462 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5463 #ifdef CONFIG_TCC_BCHECK
5464 /* if bound checking, the referenced pointer must be checked */
5465 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5466 vtop
->r
|= VT_MUSTBOUND
;
5470 } else if (tok
== '[') {
5476 } else if (tok
== '(') {
5479 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5482 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5483 /* pointer test (no array accepted) */
5484 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5485 vtop
->type
= *pointed_type(&vtop
->type
);
5486 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5490 expect("function pointer");
5493 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5495 /* get return type */
5498 sa
= s
->next
; /* first parameter */
5499 nb_args
= regsize
= 0;
5501 /* compute first implicit argument if a structure is returned */
5502 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5503 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5504 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5505 &ret_align
, ®size
);
5506 if (ret_nregs
<= 0) {
5507 /* get some space for the returned structure */
5508 size
= type_size(&s
->type
, &align
);
5509 #ifdef TCC_TARGET_ARM64
5510 /* On arm64, a small struct is return in registers.
5511 It is much easier to write it to memory if we know
5512 that we are allowed to write some extra bytes, so
5513 round the allocated space up to a power of 2: */
5515 while (size
& (size
- 1))
5516 size
= (size
| (size
- 1)) + 1;
5518 loc
= (loc
- size
) & -align
;
5520 ret
.r
= VT_LOCAL
| VT_LVAL
;
5521 /* pass it as 'int' to avoid structure arg passing
5523 vseti(VT_LOCAL
, loc
);
5535 if (ret_nregs
> 0) {
5536 /* return in register */
5537 if (is_float(ret
.type
.t
)) {
5538 ret
.r
= reg_fret(ret
.type
.t
);
5539 #ifdef TCC_TARGET_X86_64
5540 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5542 #elif defined TCC_TARGET_RISCV64
5543 if ((ret
.type
.t
& VT_BTYPE
) == VT_LDOUBLE
)
5547 #ifndef TCC_TARGET_ARM64
5548 #ifndef TCC_TARGET_RISCV64
5549 #ifdef TCC_TARGET_X86_64
5550 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5552 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5564 gfunc_param_typed(s
, sa
);
5574 tcc_error("too few arguments to function");
5576 gfunc_call(nb_args
);
5578 if (ret_nregs
< 0) {
5579 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
5580 #ifdef TCC_TARGET_RISCV64
5581 arch_transfer_ret_regs(1);
5585 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5586 vsetc(&ret
.type
, r
, &ret
.c
);
5587 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5590 /* handle packed struct return */
5591 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5594 size
= type_size(&s
->type
, &align
);
5595 /* We're writing whole regs often, make sure there's enough
5596 space. Assume register size is power of 2. */
5597 if (regsize
> align
)
5599 loc
= (loc
- size
) & -align
;
5603 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5607 if (--ret_nregs
== 0)
5611 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5614 if (s
->f
.func_noreturn
)
5622 ST_FUNC
void expr_prod(void)
5627 while (tok
== '*' || tok
== '/' || tok
== '%') {
5635 ST_FUNC
void expr_sum(void)
5640 while (tok
== '+' || tok
== '-') {
5648 static void expr_shift(void)
5653 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5661 static void expr_cmp(void)
5666 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5667 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5675 static void expr_cmpeq(void)
5680 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5688 static void expr_and(void)
5691 while (tok
== '&') {
5698 static void expr_xor(void)
5701 while (tok
== '^') {
5708 static void expr_or(void)
5711 while (tok
== '|') {
5718 static int condition_3way(void);
5720 static void expr_landor(void(*e_fn
)(void), int e_op
, int i
)
5722 int t
= 0, cc
= 1, f
= 0, c
;
5724 c
= f
? i
: condition_3way();
5726 save_regs(1), cc
= 0;
5727 } else if (c
!= i
) {
5728 nocode_wanted
++, f
= 1;
5750 static void expr_land(void)
5753 if (tok
== TOK_LAND
)
5754 expr_landor(expr_or
, TOK_LAND
, 1);
5757 static void expr_lor(void)
5761 expr_landor(expr_land
, TOK_LOR
, 0);
5764 /* Assuming vtop is a value used in a conditional context
5765 (i.e. compared with zero) return 0 if it's false, 1 if
5766 true and -1 if it can't be statically determined. */
5767 static int condition_3way(void)
5770 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5771 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5773 gen_cast_s(VT_BOOL
);
5780 static int is_cond_bool(SValue
*sv
)
5782 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
5783 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
5784 return (unsigned)sv
->c
.i
< 2;
5785 if (sv
->r
== VT_CMP
)
5790 static void expr_cond(void)
5792 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5794 CType type
, type1
, type2
;
5800 c
= condition_3way();
5801 g
= (tok
== ':' && gnu_ext
);
5811 /* needed to avoid having different registers saved in
5818 ncw_prev
= nocode_wanted
;
5825 if (c
< 0 && vtop
->r
== VT_CMP
) {
5831 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5832 mk_pointer(&vtop
->type
);
5834 sv
= *vtop
; /* save value to handle it later */
5835 vtop
--; /* no vpop so that FP stack is not flushed */
5845 nocode_wanted
= ncw_prev
;
5851 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
5852 if (sv
.r
== VT_CMP
) {
5863 nocode_wanted
= ncw_prev
;
5864 // tcc_warning("two conditions expr_cond");
5868 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5869 mk_pointer(&vtop
->type
);
5872 bt1
= t1
& VT_BTYPE
;
5874 bt2
= t2
& VT_BTYPE
;
5877 /* cast operands to correct type according to ISOC rules */
5878 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5879 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5880 } else if (is_float(bt1
) || is_float(bt2
)) {
5881 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5882 type
.t
= VT_LDOUBLE
;
5884 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5889 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5890 /* cast to biggest op */
5891 type
.t
= VT_LLONG
| VT_LONG
;
5892 if (bt1
== VT_LLONG
)
5894 if (bt2
== VT_LLONG
)
5896 /* convert to unsigned if it does not fit in a long long */
5897 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5898 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5899 type
.t
|= VT_UNSIGNED
;
5900 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5901 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5902 /* If one is a null ptr constant the result type
5904 if (is_null_pointer (vtop
)) type
= type1
;
5905 else if (is_null_pointer (&sv
)) type
= type2
;
5906 else if (bt1
!= bt2
)
5907 tcc_error("incompatible types in conditional expressions");
5909 CType
*pt1
= pointed_type(&type1
);
5910 CType
*pt2
= pointed_type(&type2
);
5911 int pbt1
= pt1
->t
& VT_BTYPE
;
5912 int pbt2
= pt2
->t
& VT_BTYPE
;
5913 int newquals
, copied
= 0;
5914 /* pointers to void get preferred, otherwise the
5915 pointed to types minus qualifs should be compatible */
5916 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5917 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5918 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5919 tcc_warning("pointer type mismatch in conditional expression\n");
5921 /* combine qualifs */
5922 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
5923 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
5926 /* copy the pointer target symbol */
5927 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5930 pointed_type(&type
)->t
|= newquals
;
5932 /* pointers to incomplete arrays get converted to
5933 pointers to completed ones if possible */
5934 if (pt1
->t
& VT_ARRAY
5935 && pt2
->t
& VT_ARRAY
5936 && pointed_type(&type
)->ref
->c
< 0
5937 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
5940 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5942 pointed_type(&type
)->ref
=
5943 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
5944 0, pointed_type(&type
)->ref
->c
);
5945 pointed_type(&type
)->ref
->c
=
5946 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
5949 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5950 /* XXX: test structure compatibility */
5951 type
= bt1
== VT_STRUCT
? type1
: type2
;
5953 /* integer operations */
5954 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5955 /* convert to unsigned if it does not fit in an integer */
5956 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5957 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5958 type
.t
|= VT_UNSIGNED
;
5960 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5961 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5962 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5964 /* now we convert second operand */
5968 mk_pointer(&vtop
->type
);
5970 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5975 if (is_float(type
.t
)) {
5977 #ifdef TCC_TARGET_X86_64
5978 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5981 #elif defined TCC_TARGET_RISCV64
5982 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
)
5985 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5986 /* for long longs, we use fixed registers to avoid having
5987 to handle a complicated move */
5997 nocode_wanted
= ncw_prev
;
5999 /* this is horrible, but we must also convert first
6005 mk_pointer(&vtop
->type
);
6007 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6013 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6024 static void expr_eq(void)
6030 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
6031 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
6032 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
6047 ST_FUNC
void gexpr(void)
6058 /* parse a constant expression and return value in vtop. */
6059 static void expr_const1(void)
6068 /* parse an integer constant and return its value. */
6069 static inline int64_t expr_const64(void)
6073 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6074 expect("constant expression");
6080 /* parse an integer constant and return its value.
6081 Complain if it doesn't fit 32bit (signed or unsigned). */
6082 ST_FUNC
int expr_const(void)
6085 int64_t wc
= expr_const64();
6087 if (c
!= wc
&& (unsigned)c
!= wc
)
6088 tcc_error("constant exceeds 32 bit");
6092 /* ------------------------------------------------------------------------- */
6093 /* return from function */
6095 #ifndef TCC_TARGET_ARM64
6096 static void gfunc_return(CType
*func_type
)
6098 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6099 CType type
, ret_type
;
6100 int ret_align
, ret_nregs
, regsize
;
6101 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6102 &ret_align
, ®size
);
6103 if (ret_nregs
< 0) {
6104 #ifdef TCC_TARGET_RISCV64
6105 arch_transfer_ret_regs(0);
6107 } else if (0 == ret_nregs
) {
6108 /* if returning structure, must copy it to implicit
6109 first pointer arg location */
6112 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6115 /* copy structure value to pointer */
6118 /* returning structure packed into registers */
6119 int r
, size
, addr
, align
;
6120 size
= type_size(func_type
,&align
);
6121 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6122 (vtop
->c
.i
& (ret_align
-1)))
6123 && (align
& (ret_align
-1))) {
6124 loc
= (loc
- size
) & -ret_align
;
6127 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6131 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6133 vtop
->type
= ret_type
;
6134 if (is_float(ret_type
.t
))
6135 r
= rc_fret(ret_type
.t
);
6146 if (--ret_nregs
== 0)
6148 /* We assume that when a structure is returned in multiple
6149 registers, their classes are consecutive values of the
6152 vtop
->c
.i
+= regsize
;
6156 } else if (is_float(func_type
->t
)) {
6157 gv(rc_fret(func_type
->t
));
6161 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6165 static void check_func_return(void)
6167 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6169 if (!strcmp (funcname
, "main")
6170 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6171 /* main returns 0 by default */
6173 gen_assign_cast(&func_vt
);
6174 gfunc_return(&func_vt
);
6176 tcc_warning("function might return no value: '%s'", funcname
);
6180 /* ------------------------------------------------------------------------- */
6183 static int case_cmp(const void *pa
, const void *pb
)
6185 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6186 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6187 return a
< b
? -1 : a
> b
;
/* Emit a conditional test of the value on top of the value stack and
   resolve the resulting forward jump (chained onto 't') so it targets
   code address 'a'. */
static void gtst_addr(int t, int a)
{
    int jmp = gvtst(0, t);
    gsym_addr(jmp, a);
}
6195 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6199 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6216 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6218 gcase(base
, len
/2, bsym
);
6222 base
+= e
; len
-= e
;
6232 if (p
->v1
== p
->v2
) {
6234 gtst_addr(0, p
->sym
);
6244 gtst_addr(0, p
->sym
);
6248 *bsym
= gjmp(*bsym
);
6251 /* ------------------------------------------------------------------------- */
6252 /* __attribute__((cleanup(fn))) */
6254 static void try_call_scope_cleanup(Sym
*stop
)
6256 Sym
*cls
= cur_scope
->cl
.s
;
6258 for (; cls
!= stop
; cls
= cls
->ncl
) {
6259 Sym
*fs
= cls
->next
;
6260 Sym
*vs
= cls
->prev_tok
;
6262 vpushsym(&fs
->type
, fs
);
6263 vset(&vs
->type
, vs
->r
, vs
->c
);
6265 mk_pointer(&vtop
->type
);
6271 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6276 if (!cur_scope
->cl
.s
)
6279 /* search NCA of both cleanup chains given parents and initial depth */
6280 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6281 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6283 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6285 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6288 try_call_scope_cleanup(cc
);
6291 /* call 'func' for each __attribute__((cleanup(func))) */
6292 static void block_cleanup(struct scope
*o
)
6296 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6297 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6302 try_call_scope_cleanup(o
->cl
.s
);
6303 pcl
->jnext
= gjmp(0);
6305 goto remove_pending
;
6315 try_call_scope_cleanup(o
->cl
.s
);
6318 /* ------------------------------------------------------------------------- */
6321 static void vla_restore(int loc
)
6324 gen_vla_sp_restore(loc
);
6327 static void vla_leave(struct scope
*o
)
6329 if (o
->vla
.num
< cur_scope
->vla
.num
)
6330 vla_restore(o
->vla
.loc
);
6333 /* ------------------------------------------------------------------------- */
6336 void new_scope(struct scope
*o
)
6338 /* copy and link previous scope */
6340 o
->prev
= cur_scope
;
6343 /* record local declaration stack position */
6344 o
->lstk
= local_stack
;
6345 o
->llstk
= local_label_stack
;
6350 void prev_scope(struct scope
*o
, int is_expr
)
6354 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6355 block_cleanup(o
->prev
);
6357 /* pop locally defined labels */
6358 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6360 /* In the is_expr case (a statement expression is finished here),
6361 vtop might refer to symbols on the local_stack. Either via the
6362 type or via vtop->sym. We can't pop those nor any that in turn
6363 might be referred to. To make it easier we don't roll back
6364 any symbols in that case; some upper level call to block() will
6365 do that. We do have to remove such symbols from the lookup
6366 tables, though. sym_pop will do that. */
6368 /* pop locally defined symbols */
6369 sym_pop(&local_stack
, o
->lstk
, is_expr
);
6371 cur_scope
= o
->prev
;
6375 /* leave a scope via break/continue(/goto) */
6376 void leave_scope(struct scope
*o
)
6380 try_call_scope_cleanup(o
->cl
.s
);
6384 /* ------------------------------------------------------------------------- */
6385 /* call block from 'for do while' loops */
6387 static void lblock(int *bsym
, int *csym
)
6389 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6390 int *b
= co
->bsym
, *c
= co
->csym
;
6404 static void block(int is_expr
)
6406 int a
, b
, c
, d
, e
, t
;
6410 /* default return value is (void) */
6412 vtop
->type
.t
= VT_VOID
;
6424 if (tok
== TOK_ELSE
) {
6429 gsym(d
); /* patch else jmp */
6434 } else if (t
== TOK_WHILE
) {
6446 } else if (t
== '{') {
6450 /* handle local labels declarations */
6451 while (tok
== TOK_LABEL
) {
6454 if (tok
< TOK_UIDENT
)
6455 expect("label identifier");
6456 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6458 } while (tok
== ',');
6462 while (tok
!= '}') {
6471 prev_scope(&o
, is_expr
);
6473 if (0 == local_scope
&& !nocode_wanted
)
6474 check_func_return();
6477 } else if (t
== TOK_RETURN
) {
6479 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6481 gexpr(), gen_assign_cast(&func_vt
);
6482 leave_scope(root_scope
);
6484 gfunc_return(&func_vt
);
6488 tcc_warning("'return' with no value.");
6490 /* jump unless last stmt in top-level block */
6491 if (tok
!= '}' || local_scope
!= 1)
6495 } else if (t
== TOK_BREAK
) {
6497 if (!cur_scope
->bsym
)
6498 tcc_error("cannot break");
6499 if (!cur_switch
|| cur_scope
->bsym
!= cur_switch
->bsym
)
6500 leave_scope(loop_scope
);
6502 leave_scope(cur_switch
->scope
);
6503 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6506 } else if (t
== TOK_CONTINUE
) {
6508 if (!cur_scope
->csym
)
6509 tcc_error("cannot continue");
6510 leave_scope(loop_scope
);
6511 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6514 } else if (t
== TOK_FOR
) {
6520 /* c99 for-loop init decl? */
6521 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6522 /* no, regular for-loop init expr */
6550 } else if (t
== TOK_DO
) {
6564 } else if (t
== TOK_SWITCH
) {
6565 struct switch_t
*saved
, sw
;
6572 sw
.scope
= cur_scope
;
6580 switchval
= *vtop
--;
6583 b
= gjmp(0); /* jump to first case */
6585 a
= gjmp(a
); /* add implicit break */
6589 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6590 for (b
= 1; b
< sw
.n
; b
++)
6591 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6592 tcc_error("duplicate case value");
6594 /* Our switch table sorting is signed, so the compared
6595 value needs to be as well when it's 64bit. */
6596 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6597 switchval
.type
.t
&= ~VT_UNSIGNED
;
6600 d
= 0, gcase(sw
.p
, sw
.n
, &d
);
6603 gsym_addr(d
, sw
.def_sym
);
6609 dynarray_reset(&sw
.p
, &sw
.n
);
6612 } else if (t
== TOK_CASE
) {
6613 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6616 cr
->v1
= cr
->v2
= expr_const64();
6617 if (gnu_ext
&& tok
== TOK_DOTS
) {
6619 cr
->v2
= expr_const64();
6620 if (cr
->v2
< cr
->v1
)
6621 tcc_warning("empty case range");
6624 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6627 goto block_after_label
;
6629 } else if (t
== TOK_DEFAULT
) {
6632 if (cur_switch
->def_sym
)
6633 tcc_error("too many 'default'");
6634 cur_switch
->def_sym
= gind();
6637 goto block_after_label
;
6639 } else if (t
== TOK_GOTO
) {
6640 vla_restore(root_scope
->vla
.loc
);
6641 if (tok
== '*' && gnu_ext
) {
6645 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6649 } else if (tok
>= TOK_UIDENT
) {
6650 s
= label_find(tok
);
6651 /* put forward definition if needed */
6653 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6654 else if (s
->r
== LABEL_DECLARED
)
6655 s
->r
= LABEL_FORWARD
;
6657 if (s
->r
& LABEL_FORWARD
) {
6658 /* start new goto chain for cleanups, linked via label->next */
6659 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
6660 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
6661 pending_gotos
->prev_tok
= s
;
6662 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
6663 pending_gotos
->next
= s
;
6665 s
->jnext
= gjmp(s
->jnext
);
6667 try_call_cleanup_goto(s
->cleanupstate
);
6668 gjmp_addr(s
->jnext
);
6673 expect("label identifier");
6677 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
6681 if (tok
== ':' && t
>= TOK_UIDENT
) {
6686 if (s
->r
== LABEL_DEFINED
)
6687 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6688 s
->r
= LABEL_DEFINED
;
6690 Sym
*pcl
; /* pending cleanup goto */
6691 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
6693 sym_pop(&s
->next
, NULL
, 0);
6697 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
6700 s
->cleanupstate
= cur_scope
->cl
.s
;
6703 vla_restore(cur_scope
->vla
.loc
);
6704 /* we accept this, but it is a mistake */
6706 tcc_warning("deprecated use of label at end of compound statement");
6712 /* expression case */
6728 /* This skips over a stream of tokens containing balanced {} and ()
6729 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6730 with a '{'). If STR then allocates and stores the skipped tokens
6731 in *STR. This doesn't check if () and {} are nested correctly,
6732 i.e. "({)}" is accepted. */
6733 static void skip_or_save_block(TokenString
**str
)
6735 int braces
= tok
== '{';
6738 *str
= tok_str_alloc();
6740 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6742 if (tok
== TOK_EOF
) {
6743 if (str
|| level
> 0)
6744 tcc_error("unexpected end of file");
6749 tok_str_add_tok(*str
);
6752 if (t
== '{' || t
== '(') {
6754 } else if (t
== '}' || t
== ')') {
6756 if (level
== 0 && braces
&& t
== '}')
6761 tok_str_add(*str
, -1);
6762 tok_str_add(*str
, 0);
6766 #define EXPR_CONST 1
6769 static void parse_init_elem(int expr_type
)
6771 int saved_global_expr
;
6774 /* compound literals must be allocated globally in this case */
6775 saved_global_expr
= global_expr
;
6778 global_expr
= saved_global_expr
;
6779 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6780 (compound literals). */
6781 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6782 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6783 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6784 #ifdef TCC_TARGET_PE
6785 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6788 tcc_error("initializer element is not constant");
6796 /* put zeros for variable based init */
6797 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6800 /* nothing to do because globals are already set to zero */
6802 vpush_global_sym(&func_old_type
, TOK_memset
);
6804 #ifdef TCC_TARGET_ARM
6816 #define DIF_SIZE_ONLY 2
6817 #define DIF_HAVE_ELEM 4
6819 /* t is the array or struct type. c is the array or struct
6820 address. cur_field is the pointer to the current
6821 field, for arrays the 'c' member contains the current start
6822 index. 'flags' is as in decl_initializer.
6823 'al' contains the already initialized length of the
6824 current container (starting at c). This returns the new length of that. */
6825 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6826 Sym
**cur_field
, int flags
, int al
)
6829 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6830 unsigned long corig
= c
;
6835 if (flags
& DIF_HAVE_ELEM
)
6838 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
6845 /* NOTE: we only support ranges for last designator */
6846 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6848 if (!(type
->t
& VT_ARRAY
))
6849 expect("array type");
6851 index
= index_last
= expr_const();
6852 if (tok
== TOK_DOTS
&& gnu_ext
) {
6854 index_last
= expr_const();
6858 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6860 tcc_error("invalid index");
6862 (*cur_field
)->c
= index_last
;
6863 type
= pointed_type(type
);
6864 elem_size
= type_size(type
, &align
);
6865 c
+= index
* elem_size
;
6866 nb_elems
= index_last
- index
+ 1;
6873 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6874 expect("struct/union type");
6876 f
= find_field(type
, l
, &cumofs
);
6889 } else if (!gnu_ext
) {
6894 if (type
->t
& VT_ARRAY
) {
6895 index
= (*cur_field
)->c
;
6896 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6897 tcc_error("index too large");
6898 type
= pointed_type(type
);
6899 c
+= index
* type_size(type
, &align
);
6902 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6903 *cur_field
= f
= f
->next
;
6905 tcc_error("too many field init");
6910 /* must put zero in holes (note that doing it that way
6911 ensures that it even works with designators) */
6912 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
6913 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6914 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
6916 /* XXX: make it more general */
6917 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
6918 unsigned long c_end
;
6923 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6924 for (i
= 1; i
< nb_elems
; i
++) {
6925 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6930 } else if (!NODATA_WANTED
) {
6931 c_end
= c
+ nb_elems
* elem_size
;
6932 if (c_end
> sec
->data_allocated
)
6933 section_realloc(sec
, c_end
);
6934 src
= sec
->data
+ c
;
6936 for(i
= 1; i
< nb_elems
; i
++) {
6938 memcpy(dst
, src
, elem_size
);
6942 c
+= nb_elems
* type_size(type
, &align
);
6948 /* store a value or an expression directly in global data or in local array */
6949 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6956 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6960 /* XXX: not portable */
6961 /* XXX: generate error if incorrect relocation */
6962 gen_assign_cast(&dtype
);
6963 bt
= type
->t
& VT_BTYPE
;
6965 if ((vtop
->r
& VT_SYM
)
6968 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6969 || (type
->t
& VT_BITFIELD
))
6970 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6972 tcc_error("initializer element is not computable at load time");
6974 if (NODATA_WANTED
) {
6979 size
= type_size(type
, &align
);
6980 section_reserve(sec
, c
+ size
);
6981 ptr
= sec
->data
+ c
;
6983 /* XXX: make code faster ? */
6984 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6985 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6986 /* XXX This rejects compound literals like
6987 '(void *){ptr}'. The problem is that '&sym' is
6988 represented the same way, which would be ruled out
6989 by the SYM_FIRST_ANOM check above, but also '"string"'
6990 in 'char *p = "string"' is represented the same
6991 with the type being VT_PTR and the symbol being an
6992 anonymous one. That is, there's no difference in vtop
6993 between '(void *){x}' and '&(void *){x}'. Ignore
6994 pointer typed entities here. Hopefully no real code
6995 will every use compound literals with scalar type. */
6996 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6997 /* These come from compound literals, memcpy stuff over. */
7001 esym
= elfsym(vtop
->sym
);
7002 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7003 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
7005 /* We need to copy over all memory contents, and that
7006 includes relocations. Use the fact that relocs are
7007 created it order, so look from the end of relocs
7008 until we hit one before the copied region. */
7009 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
7010 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
7011 while (num_relocs
--) {
7013 if (rel
->r_offset
>= esym
->st_value
+ size
)
7015 if (rel
->r_offset
< esym
->st_value
)
7017 /* Note: if the same fields are initialized multiple
7018 times (possible with designators) then we possibly
7019 add multiple relocations for the same offset here.
7020 That would lead to wrong code, the last reloc needs
7021 to win. We clean this up later after the whole
7022 initializer is parsed. */
7023 put_elf_reloca(symtab_section
, sec
,
7024 c
+ rel
->r_offset
- esym
->st_value
,
7025 ELFW(R_TYPE
)(rel
->r_info
),
7026 ELFW(R_SYM
)(rel
->r_info
),
7036 if (type
->t
& VT_BITFIELD
) {
7037 int bit_pos
, bit_size
, bits
, n
;
7038 unsigned char *p
, v
, m
;
7039 bit_pos
= BIT_POS(vtop
->type
.t
);
7040 bit_size
= BIT_SIZE(vtop
->type
.t
);
7041 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7042 bit_pos
&= 7, bits
= 0;
7047 v
= vtop
->c
.i
>> bits
<< bit_pos
;
7048 m
= ((1 << n
) - 1) << bit_pos
;
7049 *p
= (*p
& ~m
) | (v
& m
);
7050 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7054 /* XXX: when cross-compiling we assume that each type has the
7055 same representation on host and target, which is likely to
7056 be wrong in the case of long double */
7058 vtop
->c
.i
= vtop
->c
.i
!= 0;
7060 *(char *)ptr
|= vtop
->c
.i
;
7063 *(short *)ptr
|= vtop
->c
.i
;
7066 *(float*)ptr
= vtop
->c
.f
;
7069 *(double *)ptr
= vtop
->c
.d
;
7072 #if defined TCC_IS_NATIVE_387
7073 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7074 memcpy(ptr
, &vtop
->c
.ld
, 10);
7076 else if (sizeof (long double) == sizeof (double))
7077 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7079 else if (vtop
->c
.ld
== 0.0)
7083 if (sizeof(long double) == LDOUBLE_SIZE
)
7084 *(long double*)ptr
= vtop
->c
.ld
;
7085 else if (sizeof(double) == LDOUBLE_SIZE
)
7086 *(double *)ptr
= (double)vtop
->c
.ld
;
7088 tcc_error("can't cross compile long double constants");
7092 *(long long *)ptr
|= vtop
->c
.i
;
7099 addr_t val
= vtop
->c
.i
;
7101 if (vtop
->r
& VT_SYM
)
7102 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7104 *(addr_t
*)ptr
|= val
;
7106 if (vtop
->r
& VT_SYM
)
7107 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7108 *(addr_t
*)ptr
|= val
;
7114 int val
= vtop
->c
.i
;
7116 if (vtop
->r
& VT_SYM
)
7117 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7121 if (vtop
->r
& VT_SYM
)
7122 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7131 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7138 /* 't' contains the type and storage info. 'c' is the offset of the
7139 object in section 'sec'. If 'sec' is NULL, it means stack based
7140 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7141 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7142 size only evaluation is wanted (only for arrays). */
7143 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
7146 int len
, n
, no_oblock
, nb
, i
;
7152 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7153 /* In case of strings we have special handling for arrays, so
7154 don't consume them as initializer value (which would commit them
7155 to some anonymous symbol). */
7156 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7157 !(flags
& DIF_SIZE_ONLY
)) {
7158 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7159 flags
|= DIF_HAVE_ELEM
;
7162 if ((flags
& DIF_HAVE_ELEM
) &&
7163 !(type
->t
& VT_ARRAY
) &&
7164 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7165 The source type might have VT_CONSTANT set, which is
7166 of course assignable to non-const elements. */
7167 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7168 init_putv(type
, sec
, c
);
7169 } else if (type
->t
& VT_ARRAY
) {
7172 t1
= pointed_type(type
);
7173 size1
= type_size(t1
, &align1
);
7176 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7179 tcc_error("character array initializer must be a literal,"
7180 " optionally enclosed in braces");
7185 /* only parse strings here if correct type (otherwise: handle
7186 them as ((w)char *) expressions */
7187 if ((tok
== TOK_LSTR
&&
7188 #ifdef TCC_TARGET_PE
7189 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7191 (t1
->t
& VT_BTYPE
) == VT_INT
7193 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7195 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7198 /* compute maximum number of chars wanted */
7200 cstr_len
= tokc
.str
.size
;
7202 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
7205 if (n
>= 0 && nb
> (n
- len
))
7207 if (!(flags
& DIF_SIZE_ONLY
)) {
7209 tcc_warning("initializer-string for array is too long");
7210 /* in order to go faster for common case (char
7211 string in global variable, we handle it
7213 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
7215 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
7219 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
7221 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
7223 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
7230 /* only add trailing zero if enough storage (no
7231 warning in this case since it is standard) */
7232 if (n
< 0 || len
< n
) {
7233 if (!(flags
& DIF_SIZE_ONLY
)) {
7235 init_putv(t1
, sec
, c
+ (len
* size1
));
7246 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7247 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7248 flags
&= ~DIF_HAVE_ELEM
;
7249 if (type
->t
& VT_ARRAY
) {
7251 /* special test for multi dimensional arrays (may not
7252 be strictly correct if designators are used at the
7254 if (no_oblock
&& len
>= n
*size1
)
7257 if (s
->type
.t
== VT_UNION
)
7261 if (no_oblock
&& f
== NULL
)
7270 /* put zeros at the end */
7271 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7272 init_putz(sec
, c
+ len
, n
*size1
- len
);
7275 /* patch type size if needed, which happens only for array types */
7277 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7278 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7281 if ((flags
& DIF_FIRST
) || tok
== '{') {
7289 } else if (tok
== '{') {
7290 if (flags
& DIF_HAVE_ELEM
)
7293 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7295 } else if ((flags
& DIF_SIZE_ONLY
)) {
7296 /* If we supported only ISO C we wouldn't have to accept calling
7297 this on anything than an array if DIF_SIZE_ONLY (and even then
7298 only on the outermost level, so no recursion would be needed),
7299 because initializing a flex array member isn't supported.
7300 But GNU C supports it, so we need to recurse even into
7301 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7302 /* just skip expression */
7303 skip_or_save_block(NULL
);
7305 if (!(flags
& DIF_HAVE_ELEM
)) {
7306 /* This should happen only when we haven't parsed
7307 the init element above for fear of committing a
7308 string constant to memory too early. */
7309 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7310 expect("string constant");
7311 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7313 init_putv(type
, sec
, c
);
7317 /* parse an initializer for type 't' if 'has_init' is non zero, and
7318 allocate space in local or global data space ('r' is either
7319 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7320 variable 'v' of scope 'scope' is declared before initializers
7321 are parsed. If 'v' is zero, then a reference to the new object
7322 is put in the value stack. If 'has_init' is 2, a special parsing
7323 is done to handle string constants. */
7324 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7325 int has_init
, int v
, int scope
)
7327 int size
, align
, addr
;
7328 TokenString
*init_str
= NULL
;
7331 Sym
*flexible_array
;
7333 int saved_nocode_wanted
= nocode_wanted
;
7334 #ifdef CONFIG_TCC_BCHECK
7338 /* Always allocate static or global variables */
7339 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7340 nocode_wanted
|= 0x80000000;
7342 #ifdef CONFIG_TCC_BCHECK
7343 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7346 flexible_array
= NULL
;
7347 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7348 Sym
*field
= type
->ref
->next
;
7351 field
= field
->next
;
7352 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7353 flexible_array
= field
;
7357 size
= type_size(type
, &align
);
7358 /* If unknown size, we must evaluate it before
7359 evaluating initializers because
7360 initializers can generate global data too
7361 (e.g. string pointers or ISOC99 compound
7362 literals). It also simplifies local
7363 initializers handling */
7364 if (size
< 0 || (flexible_array
&& has_init
)) {
7366 tcc_error("unknown type size");
7367 /* get all init string */
7368 if (has_init
== 2) {
7369 init_str
= tok_str_alloc();
7370 /* only get strings */
7371 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7372 tok_str_add_tok(init_str
);
7375 tok_str_add(init_str
, -1);
7376 tok_str_add(init_str
, 0);
7378 skip_or_save_block(&init_str
);
7383 begin_macro(init_str
, 1);
7385 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7386 /* prepare second initializer parsing */
7387 macro_ptr
= init_str
->str
;
7390 /* if still unknown size, error */
7391 size
= type_size(type
, &align
);
7393 tcc_error("unknown type size");
7395 /* If there's a flex member and it was used in the initializer
7397 if (flexible_array
&&
7398 flexible_array
->type
.ref
->c
> 0)
7399 size
+= flexible_array
->type
.ref
->c
7400 * pointed_size(&flexible_array
->type
);
7401 /* take into account specified alignment if bigger */
7402 if (ad
->a
.aligned
) {
7403 int speca
= 1 << (ad
->a
.aligned
- 1);
7406 } else if (ad
->a
.packed
) {
7410 if (!v
&& NODATA_WANTED
)
7411 size
= 0, align
= 1;
7413 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7415 #ifdef CONFIG_TCC_BCHECK
7416 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7420 loc
= (loc
- size
) & -align
;
7422 #ifdef CONFIG_TCC_BCHECK
7423 /* handles bounds */
7424 /* XXX: currently, since we do only one pass, we cannot track
7425 '&' operators, so we add only arrays */
7426 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7428 /* add padding between regions */
7430 /* then add local bound info */
7431 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7432 bounds_ptr
[0] = addr
;
7433 bounds_ptr
[1] = size
;
7437 /* local variable */
7438 #ifdef CONFIG_TCC_ASM
7439 if (ad
->asm_label
) {
7440 int reg
= asm_parse_regvar(ad
->asm_label
);
7442 r
= (r
& ~VT_VALMASK
) | reg
;
7445 sym
= sym_push(v
, type
, r
, addr
);
7446 if (ad
->cleanup_func
) {
7447 Sym
*cls
= sym_push2(&all_cleanups
,
7448 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7449 cls
->prev_tok
= sym
;
7450 cls
->next
= ad
->cleanup_func
;
7451 cls
->ncl
= cur_scope
->cl
.s
;
7452 cur_scope
->cl
.s
= cls
;
7457 /* push local reference */
7458 vset(type
, r
, addr
);
7461 if (v
&& scope
== VT_CONST
) {
7462 /* see if the symbol was already defined */
7465 patch_storage(sym
, ad
, type
);
7466 /* we accept several definitions of the same global variable. */
7467 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7472 /* allocate symbol in corresponding section */
7477 else if (tcc_state
->nocommon
)
7482 addr
= section_add(sec
, size
, align
);
7483 #ifdef CONFIG_TCC_BCHECK
7484 /* add padding if bound check */
7486 section_add(sec
, 1, 1);
7489 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7490 sec
= common_section
;
7495 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7496 patch_storage(sym
, ad
, NULL
);
7498 /* update symbol definition */
7499 put_extern_sym(sym
, sec
, addr
, size
);
7501 /* push global reference */
7502 vpush_ref(type
, sec
, addr
, size
);
7507 #ifdef CONFIG_TCC_BCHECK
7508 /* handles bounds now because the symbol must be defined
7509 before for the relocation */
7513 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7514 /* then add global bound info */
7515 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7516 bounds_ptr
[0] = 0; /* relocated */
7517 bounds_ptr
[1] = size
;
7522 if (type
->t
& VT_VLA
) {
7528 /* save current stack pointer */
7529 if (root_scope
->vla
.loc
== 0) {
7530 struct scope
*v
= cur_scope
;
7531 gen_vla_sp_save(loc
-= PTR_SIZE
);
7532 do v
->vla
.loc
= loc
; while ((v
= v
->prev
));
7535 vla_runtime_type_size(type
, &a
);
7536 gen_vla_alloc(type
, a
);
7537 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7538 /* on _WIN64, because of the function args scratch area, the
7539 result of alloca differs from RSP and is returned in RAX. */
7540 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7542 gen_vla_sp_save(addr
);
7543 cur_scope
->vla
.loc
= addr
;
7544 cur_scope
->vla
.num
++;
7546 } else if (has_init
) {
7547 size_t oldreloc_offset
= 0;
7548 if (sec
&& sec
->reloc
)
7549 oldreloc_offset
= sec
->reloc
->data_offset
;
7550 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
7551 if (sec
&& sec
->reloc
)
7552 squeeze_multi_relocs(sec
, oldreloc_offset
);
7553 /* patch flexible array member size back to -1, */
7554 /* for possible subsequent similar declarations */
7556 flexible_array
->type
.ref
->c
= -1;
7560 /* restore parse state if needed */
7566 nocode_wanted
= saved_nocode_wanted
;
7569 /* parse a function defined by symbol 'sym' and generate its code in
7570 'cur_text_section' */
7571 static void gen_function(Sym
*sym
, AttributeDef
*ad
)
7573 /* Initialize VLA state */
7574 struct scope f
= { 0 };
7575 cur_scope
= root_scope
= &f
;
7578 ind
= cur_text_section
->data_offset
;
7579 if (sym
->a
.aligned
) {
7580 size_t newoff
= section_add(cur_text_section
, 0,
7581 1 << (sym
->a
.aligned
- 1));
7582 gen_fill_nops(newoff
- ind
);
7584 /* NOTE: we patch the symbol size later */
7585 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7587 if (ad
&& ad
->a
.constructor
) {
7588 add_init_array (tcc_state
, sym
);
7590 if (ad
&& ad
->a
.destructor
) {
7591 add_fini_array (tcc_state
, sym
);
7594 funcname
= get_tok_str(sym
->v
, NULL
);
7597 /* put debug symbol */
7598 tcc_debug_funcstart(tcc_state
, sym
);
7599 /* push a dummy symbol to enable local sym storage */
7600 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7601 local_scope
= 1; /* for function parameters */
7602 gfunc_prolog(&sym
->type
);
7605 clear_temp_local_var_list();
7610 cur_text_section
->data_offset
= ind
;
7611 /* reset local stack */
7612 sym_pop(&local_stack
, NULL
, 0);
7614 label_pop(&global_label_stack
, NULL
, 0);
7615 sym_pop(&all_cleanups
, NULL
, 0);
7616 /* patch symbol size */
7617 elfsym(sym
)->st_size
= ind
- func_ind
;
7618 /* end of function */
7619 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7620 /* It's better to crash than to generate wrong code */
7621 cur_text_section
= NULL
;
7622 funcname
= ""; /* for safety */
7623 func_vt
.t
= VT_VOID
; /* for safety */
7624 func_var
= 0; /* for safety */
7625 ind
= 0; /* for safety */
7626 nocode_wanted
= 0x80000000;
7630 static void gen_inline_functions(TCCState
*s
)
7633 int inline_generated
, i
;
7634 struct InlineFunc
*fn
;
7636 tcc_open_bf(s
, ":inline:", 0);
7637 /* iterate while inline function are referenced */
7639 inline_generated
= 0;
7640 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7641 fn
= s
->inline_fns
[i
];
7643 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
7644 /* the function was used or forced (and then not internal):
7645 generate its code and convert it to a normal function */
7648 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7649 begin_macro(fn
->func_str
, 1);
7651 cur_text_section
= text_section
;
7652 gen_function(sym
, NULL
);
7655 inline_generated
= 1;
7658 } while (inline_generated
);
7662 ST_FUNC
void free_inline_functions(TCCState
*s
)
7665 /* free tokens of unused inline functions */
7666 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7667 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7669 tok_str_free(fn
->func_str
);
7671 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7674 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7675 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7676 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7681 AttributeDef ad
, adbase
;
7684 if (tok
== TOK_STATIC_ASSERT
) {
7692 tcc_error("%s", get_tok_str(tok
, &tokc
));
7698 if (!parse_btype(&btype
, &adbase
)) {
7699 if (is_for_loop_init
)
7701 /* skip redundant ';' if not in old parameter decl scope */
7702 if (tok
== ';' && l
!= VT_CMP
) {
7708 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7709 /* global asm block */
7713 if (tok
>= TOK_UIDENT
) {
7714 /* special test for old K&R protos without explicit int
7715 type. Only accepted when defining global data */
7719 expect("declaration");
7724 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7725 int v
= btype
.ref
->v
;
7726 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7727 tcc_warning("unnamed struct/union that defines no instances");
7731 if (IS_ENUM(btype
.t
)) {
7736 while (1) { /* iterate thru each declaration */
7738 /* If the base type itself was an array type of unspecified
7739 size (like in 'typedef int arr[]; arr x = {1};') then
7740 we will overwrite the unknown size by the real one for
7741 this decl. We need to unshare the ref symbol holding
7743 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7744 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7747 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
7751 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7752 printf("type = '%s'\n", buf
);
7755 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7756 /* if old style function prototype, we accept a
7759 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7760 decl0(VT_CMP
, 0, sym
);
7761 /* always compile 'extern inline' */
7762 if (type
.t
& VT_EXTERN
)
7763 type
.t
&= ~VT_INLINE
;
7766 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7767 ad
.asm_label
= asm_label_instr();
7768 /* parse one last attribute list, after asm label */
7769 parse_attribute(&ad
);
7771 /* gcc does not allow __asm__("label") with function definition,
7778 #ifdef TCC_TARGET_PE
7779 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7780 if (type
.t
& VT_STATIC
)
7781 tcc_error("cannot have dll linkage with static");
7782 if (type
.t
& VT_TYPEDEF
) {
7783 tcc_warning("'%s' attribute ignored for typedef",
7784 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
7785 (ad
.a
.dllexport
= 0, "dllexport"));
7786 } else if (ad
.a
.dllimport
) {
7787 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7790 type
.t
|= VT_EXTERN
;
7796 tcc_error("cannot use local functions");
7797 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7798 expect("function definition");
7800 /* reject abstract declarators in function definition
7801 make old style params without decl have int type */
7803 while ((sym
= sym
->next
) != NULL
) {
7804 if (!(sym
->v
& ~SYM_FIELD
))
7805 expect("identifier");
7806 if (sym
->type
.t
== VT_VOID
)
7807 sym
->type
= int_type
;
7810 /* put function symbol */
7811 type
.t
&= ~VT_EXTERN
;
7812 sym
= external_sym(v
, &type
, 0, &ad
);
7813 /* static inline functions are just recorded as a kind
7814 of macro. Their code will be emitted at the end of
7815 the compilation unit only if they are used */
7816 if (sym
->type
.t
& VT_INLINE
) {
7817 struct InlineFunc
*fn
;
7818 const char *filename
;
7820 filename
= file
? file
->filename
: "";
7821 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7822 strcpy(fn
->filename
, filename
);
7824 skip_or_save_block(&fn
->func_str
);
7825 dynarray_add(&tcc_state
->inline_fns
,
7826 &tcc_state
->nb_inline_fns
, fn
);
7828 /* compute text section */
7829 cur_text_section
= ad
.section
;
7830 if (!cur_text_section
)
7831 cur_text_section
= text_section
;
7832 gen_function(sym
, &ad
);
7837 /* find parameter in function parameter list */
7838 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7839 if ((sym
->v
& ~SYM_FIELD
) == v
)
7841 tcc_error("declaration for parameter '%s' but no such parameter",
7842 get_tok_str(v
, NULL
));
7844 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7845 tcc_error("storage class specified for '%s'",
7846 get_tok_str(v
, NULL
));
7847 if (sym
->type
.t
!= VT_VOID
)
7848 tcc_error("redefinition of parameter '%s'",
7849 get_tok_str(v
, NULL
));
7850 convert_parameter_type(&type
);
7852 } else if (type
.t
& VT_TYPEDEF
) {
7853 /* save typedefed type */
7854 /* XXX: test storage specifiers ? */
7856 if (sym
&& sym
->sym_scope
== local_scope
) {
7857 if (!is_compatible_types(&sym
->type
, &type
)
7858 || !(sym
->type
.t
& VT_TYPEDEF
))
7859 tcc_error("incompatible redefinition of '%s'",
7860 get_tok_str(v
, NULL
));
7863 sym
= sym_push(v
, &type
, 0, 0);
7867 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7868 && !(type
.t
& VT_EXTERN
)) {
7869 tcc_error("declaration of void object");
7872 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7873 /* external function definition */
7874 /* specific case for func_call attribute */
7876 } else if (!(type
.t
& VT_ARRAY
)) {
7877 /* not lvalue if array */
7878 r
|= lvalue_type(type
.t
);
7880 has_init
= (tok
== '=');
7881 if (has_init
&& (type
.t
& VT_VLA
))
7882 tcc_error("variable length array cannot be initialized");
7883 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
7884 || (type
.t
& VT_BTYPE
) == VT_FUNC
7885 /* as with GCC, uninitialized global arrays with no size
7886 are considered extern: */
7887 || ((type
.t
& VT_ARRAY
) && !has_init
7888 && l
== VT_CONST
&& type
.ref
->c
< 0)
7890 /* external variable or function */
7891 type
.t
|= VT_EXTERN
;
7892 sym
= external_sym(v
, &type
, r
, &ad
);
7893 if (ad
.alias_target
) {
7896 alias_target
= sym_find(ad
.alias_target
);
7897 esym
= elfsym(alias_target
);
7899 tcc_error("unsupported forward __alias__ attribute");
7900 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7903 if (type
.t
& VT_STATIC
)
7909 else if (l
== VT_CONST
)
7910 /* uninitialized global variables may be overridden */
7911 type
.t
|= VT_EXTERN
;
7912 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7916 if (is_for_loop_init
)
7928 static void decl(int l
)
7933 /* ------------------------------------------------------------------------- */
7936 /* ------------------------------------------------------------------------- */