2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
28 anon_sym: anonymous symbol index
/* code/symbol generation counters (see the header comment above for
   loc / ind / anon_sym; the comment line for 'rsym' is not visible in
   this extract -- NOTE(review): presumably the return-label jump chain,
   confirm against the full file). */
ST_DATA int rsym,     /* NOTE(review): see note above */
            anon_sym, /* anonymous symbol index */
            ind,      /* output code index */
            loc;      /* local variable index */
ST_DATA Sym *sym_free_first; /* head of the free list of Sym objects
                                (maintained by sym_malloc/sym_free) */
ST_DATA void **sym_pools;    /* dynarray of SYM_POOL_NB-sized Sym pools
                                allocated by __sym_malloc */
ST_DATA int nb_sym_pools;    /* number of entries in sym_pools */
/* symbol stacks; NOTE(review): the per-stack roles below are inferred from
   the names and from sym_push/external_sym usage visible in this file --
   confirm against the full source. */
ST_DATA Sym *global_stack;       /* file-scope symbols (see sym_push, external_sym) */
ST_DATA Sym *local_stack;        /* block-scope symbols (see sym_push, external_sym) */
ST_DATA Sym *define_stack;       /* preprocessor definitions */
ST_DATA Sym *global_label_stack; /* labels with function scope */
ST_DATA Sym *local_label_stack;  /* local (__label__) labels */
static Sym *all_cleanups, *pending_gotos; /* NOTE(review): uses are not visible
                                             in this extract -- presumably
                                             cleanup handlers and unresolved
                                             gotos; confirm in full file */
static int local_scope;  /* current scope nesting level; recorded into
                            s->sym_scope by sym_push */
static int in_generic;   /* NOTE(review): use not visible here -- presumably
                            set while parsing a _Generic selection */
static int section_sym;  /* ELF symbol index of the text section, created in
                            tcc_debug_start and referenced by the stabs output */
/* value stack; overflow is checked against VSTACK_SIZE in vsetc/vpushv,
   and check_vstack reports a leak when vtop != pvtop */
ST_DATA SValue __vstack[1+VSTACK_SIZE], *vtop, *pvtop;

ST_DATA int const_wanted;  /* true if constant wanted */
ST_DATA int nocode_wanted; /* no code generation wanted */
#define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
#define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */

/* Automagical code suppression ----> */
/* CODE_OFF/CODE_ON toggle one dedicated bit, so suppression composes with
   the other flag bits kept in nocode_wanted */
#define CODE_OFF() (nocode_wanted |= 0x20000000)
#define CODE_ON() (nocode_wanted &= ~0x20000000)
59 /* Clear 'nocode_wanted' at label if it was used */
60 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
61 static int gind(void) { CODE_ON(); return ind
; }
63 /* Set 'nocode_wanted' after unconditional jumps */
/* Emit an unconditional jump to address 't'; the fall-through that
   follows is unreachable, so suppress code generation. */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit a forward jump chained onto 't' and suppress code generation for
   the unreachable fall-through.  Returns the updated jump chain. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
/* These are #undef'd at the end of this file */
/* NOTE(review): the companion '#define gjmp gjmp_acs' line is not visible in
   this extract -- confirm it is present in the full file. */
#define gjmp_addr gjmp_addr_acs
ST_DATA int global_expr;  /* true if compound literals must be allocated
                             globally (used during initializers parsing) */
ST_DATA CType func_vt;    /* current function return type (used by return
                             instruction) */
ST_DATA int func_var;     /* true if current function is variadic (used by
                             return instruction) */
ST_DATA int last_line_num, last_ind, func_ind; /* debug last line number and pc */
ST_DATA const char *funcname; /* name of the function being compiled
                                 (emitted by tcc_debug_funcstart) */

/* commonly used types; initialized in tccgen_compile */
ST_DATA CType char_pointer_type, func_old_type, int_type, size_type, ptrdiff_type;
82 ST_DATA
struct switch_t
{
86 } **p
; int n
; /* list of case ranges */
87 int def_sym
; /* default symbol */
90 } *cur_switch
; /* current switch */
#define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4 /* capacity of arr_temp_local_vars */
/*list of temporary local variables on the stack in current function. */
94 ST_DATA
struct temp_local_variable
{
95 int location
; //offset on stack. Svalue.c.i
98 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
99 short nb_temp_local_vars
;
101 static struct scope
{
103 struct { int loc
, num
; } vla
;
104 struct { Sym
*s
; int n
; } cl
;
107 } *cur_scope
, *loop_scope
, *root_scope
;
109 /* ------------------------------------------------------------------------- */
/* forward declarations */
static void gen_cast(CType *type);
static void gen_cast_s(int t);
static inline CType *pointed_type(CType *type);
static int is_compatible_types(CType *type1, CType *type2);
static int parse_btype(CType *type, AttributeDef *ad);
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
static void parse_expr_type(CType *type);
static void init_putv(CType *type, Section *sec, unsigned long c);
static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
static void block(int is_expr);
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
                                   int has_init, int v, int scope);
static void decl(int l);
static int decl0(int l, int is_for_loop_init, Sym *);
static void expr_eq(void);
static void vla_runtime_type_size(CType *type, int *a);
static int is_compatible_unqualified_types(CType *type1, CType *type2);
static inline int64_t expr_const64(void);
static void vpush64(int ty, unsigned long long v);
static void vpush(CType *type);
static int gvtst(int inv, int t);
static void gen_inline_functions(TCCState *s);
static void skip_or_save_block(TokenString **str);
static void gv_dup(void);
static int get_temp_local_var(int size, int align);
/* NOTE(review): '()' below is an old-style unprototyped declaration; the
   definition also uses '()' -- consider '(void)' in a follow-up change. */
static void clear_temp_local_var_list();
137 ST_INLN
int is_float(int t
)
141 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
144 /* we use our own 'finite' function to avoid potential problems with
145 non standard math libs */
146 /* XXX: endianness dependent */
147 ST_FUNC
int ieee_finite(double d
)
150 memcpy(p
, &d
, sizeof(double));
151 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
154 /* compiling intel long double natively */
155 #if (defined __i386__ || defined __x86_64__) \
156 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
157 # define TCC_IS_NATIVE_387
160 ST_FUNC
void test_lvalue(void)
162 if (!(vtop
->r
& VT_LVAL
))
166 ST_FUNC
void check_vstack(void)
169 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
172 /* ------------------------------------------------------------------------- */
173 /* vstack debugging aid */
176 void pv (const char *lbl
, int a
, int b
)
179 for (i
= a
; i
< a
+ b
; ++i
) {
180 SValue
*p
= &vtop
[-i
];
181 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
182 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
187 /* ------------------------------------------------------------------------- */
188 /* start of translation unit info */
189 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
194 /* file info: full path + filename */
195 section_sym
= put_elf_sym(symtab_section
, 0, 0,
196 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
197 text_section
->sh_num
, NULL
);
198 getcwd(buf
, sizeof(buf
));
200 normalize_slashes(buf
);
202 pstrcat(buf
, sizeof(buf
), "/");
203 put_stabs_r(buf
, N_SO
, 0, 0,
204 text_section
->data_offset
, text_section
, section_sym
);
205 put_stabs_r(file
->filename
, N_SO
, 0, 0,
206 text_section
->data_offset
, text_section
, section_sym
);
211 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
212 symbols can be safely used */
213 put_elf_sym(symtab_section
, 0, 0,
214 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
215 SHN_ABS
, file
->filename
);
218 /* put end of translation unit info */
219 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
223 put_stabs_r(NULL
, N_SO
, 0, 0,
224 text_section
->data_offset
, text_section
, section_sym
);
228 /* generate line number info */
229 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
233 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
234 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
236 last_line_num
= file
->line_num
;
240 /* put function symbol */
241 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
249 /* XXX: we put here a dummy type */
250 snprintf(buf
, sizeof(buf
), "%s:%c1",
251 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
252 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
253 cur_text_section
, sym
->c
);
254 /* //gr gdb wants a line at the function */
255 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
261 /* put function size */
262 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
266 put_stabn(N_FUN
, 0, 0, size
);
269 /* ------------------------------------------------------------------------- */
270 ST_FUNC
int tccgen_compile(TCCState
*s1
)
272 cur_text_section
= NULL
;
274 anon_sym
= SYM_FIRST_ANOM
;
277 nocode_wanted
= 0x80000000;
280 /* define some often used types */
282 char_pointer_type
.t
= VT_BYTE
;
283 mk_pointer(&char_pointer_type
);
285 size_type
.t
= VT_INT
| VT_UNSIGNED
;
286 ptrdiff_type
.t
= VT_INT
;
288 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
289 ptrdiff_type
.t
= VT_LLONG
;
291 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
292 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
294 func_old_type
.t
= VT_FUNC
;
295 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
296 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
297 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
301 #ifdef TCC_TARGET_ARM
306 printf("%s: **** new file\n", file
->filename
);
309 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
312 gen_inline_functions(s1
);
314 /* end of translation unit info */
319 /* ------------------------------------------------------------------------- */
320 ST_FUNC ElfSym
*elfsym(Sym
*s
)
324 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
327 /* apply storage attributes to Elf symbol */
328 ST_FUNC
void update_storage(Sym
*sym
)
331 int sym_bind
, old_sym_bind
;
337 if (sym
->a
.visibility
)
338 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
341 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
342 sym_bind
= STB_LOCAL
;
343 else if (sym
->a
.weak
)
346 sym_bind
= STB_GLOBAL
;
347 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
348 if (sym_bind
!= old_sym_bind
) {
349 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
353 if (sym
->a
.dllimport
)
354 esym
->st_other
|= ST_PE_IMPORT
;
355 if (sym
->a
.dllexport
)
356 esym
->st_other
|= ST_PE_EXPORT
;
360 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
361 get_tok_str(sym
->v
, NULL
),
362 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
370 /* ------------------------------------------------------------------------- */
371 /* update sym->c so that it points to an external symbol in section
372 'section' with value 'value' */
374 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
375 addr_t value
, unsigned long size
,
376 int can_add_underscore
)
378 int sym_type
, sym_bind
, info
, other
, t
;
382 #ifdef CONFIG_TCC_BCHECK
387 name
= get_tok_str(sym
->v
, NULL
);
388 #ifdef CONFIG_TCC_BCHECK
389 if (tcc_state
->do_bounds_check
) {
390 /* XXX: avoid doing that for statics ? */
391 /* if bound checking is activated, we change some function
392 names by adding the "__bound" prefix */
395 /* XXX: we rely only on malloc hooks */
408 strcpy(buf
, "__bound_");
416 if ((t
& VT_BTYPE
) == VT_FUNC
) {
418 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
419 sym_type
= STT_NOTYPE
;
421 sym_type
= STT_OBJECT
;
423 if (t
& (VT_STATIC
| VT_INLINE
))
424 sym_bind
= STB_LOCAL
;
426 sym_bind
= STB_GLOBAL
;
429 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
430 Sym
*ref
= sym
->type
.ref
;
431 if (ref
->a
.nodecorate
) {
432 can_add_underscore
= 0;
434 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
435 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
437 other
|= ST_PE_STDCALL
;
438 can_add_underscore
= 0;
442 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
444 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
448 name
= get_tok_str(sym
->asm_label
, NULL
);
449 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
450 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
453 esym
->st_value
= value
;
454 esym
->st_size
= size
;
455 esym
->st_shndx
= sh_num
;
460 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
461 addr_t value
, unsigned long size
)
463 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
464 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
467 /* add a new relocation entry to symbol 'sym' in section 's' */
468 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
473 if (nocode_wanted
&& s
== cur_text_section
)
478 put_extern_sym(sym
, NULL
, 0, 0);
482 /* now we can add ELF relocation info */
483 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
487 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
489 greloca(s
, sym
, offset
, type
, 0);
493 /* ------------------------------------------------------------------------- */
494 /* symbol allocator */
495 static Sym
*__sym_malloc(void)
497 Sym
*sym_pool
, *sym
, *last_sym
;
500 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
501 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
503 last_sym
= sym_free_first
;
505 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
506 sym
->next
= last_sym
;
510 sym_free_first
= last_sym
;
514 static inline Sym
*sym_malloc(void)
518 sym
= sym_free_first
;
520 sym
= __sym_malloc();
521 sym_free_first
= sym
->next
;
524 sym
= tcc_malloc(sizeof(Sym
));
529 ST_INLN
void sym_free(Sym
*sym
)
532 sym
->next
= sym_free_first
;
533 sym_free_first
= sym
;
539 /* push, without hashing */
540 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
545 memset(s
, 0, sizeof *s
);
555 /* find a symbol and return its associated structure. 's' is the top
556 of the symbol stack */
557 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
569 /* structure lookup */
570 ST_INLN Sym
*struct_find(int v
)
573 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
575 return table_ident
[v
]->sym_struct
;
578 /* find an identifier */
579 ST_INLN Sym
*sym_find(int v
)
582 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
584 return table_ident
[v
]->sym_identifier
;
587 static int sym_scope(Sym
*s
)
589 if (IS_ENUM_VAL (s
->type
.t
))
590 return s
->type
.ref
->sym_scope
;
595 /* push a given symbol on the symbol stack */
596 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
605 s
= sym_push2(ps
, v
, type
->t
, c
);
606 s
->type
.ref
= type
->ref
;
608 /* don't record fields or anonymous symbols */
610 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
611 /* record symbol in token array */
612 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
614 ps
= &ts
->sym_struct
;
616 ps
= &ts
->sym_identifier
;
619 s
->sym_scope
= local_scope
;
620 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
621 tcc_error("redeclaration of '%s'",
622 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
627 /* push a global identifier */
628 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
631 s
= sym_push2(&global_stack
, v
, t
, c
);
632 s
->r
= VT_CONST
| VT_SYM
;
633 /* don't record anonymous symbol */
634 if (v
< SYM_FIRST_ANOM
) {
635 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
636 /* modify the top most local identifier, so that sym_identifier will
637 point to 's' when popped; happens when called from inline asm */
638 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
639 ps
= &(*ps
)->prev_tok
;
646 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
647 pop them yet from the list, but do remove them from the token array. */
648 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
658 /* remove symbol in token array */
660 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
661 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
663 ps
= &ts
->sym_struct
;
665 ps
= &ts
->sym_identifier
;
676 /* ------------------------------------------------------------------------- */
677 static void vcheck_cmp(void)
679 /* cannot let cpu flags if other instruction are generated. Also
680 avoid leaving VT_JMP anywhere except on the top of the stack
681 because it would complicate the code generator.
683 Don't do this when nocode_wanted. vtop might come from
684 !nocode_wanted regions (see 88_codeopt.c) and transforming
685 it to a register without actually generating code is wrong
686 as their value might still be used for real. All values
687 we push under nocode_wanted will eventually be popped
688 again, so that the VT_CMP/VT_JMP value will be in vtop
689 when code is unsuppressed again. */
691 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
695 static void vsetc(CType
*type
, int r
, CValue
*vc
)
697 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
698 tcc_error("memory full (vstack)");
708 ST_FUNC
void vswap(void)
718 /* pop stack value */
719 ST_FUNC
void vpop(void)
722 v
= vtop
->r
& VT_VALMASK
;
723 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
724 /* for x86, we need to pop the FP stack */
726 o(0xd8dd); /* fstp %st(0) */
730 /* need to put correct jump if && or || without test */
737 /* push constant of type "type" with useless value */
738 ST_FUNC
void vpush(CType
*type
)
740 vset(type
, VT_CONST
, 0);
743 /* push integer constant */
744 ST_FUNC
void vpushi(int v
)
748 vsetc(&int_type
, VT_CONST
, &cval
);
751 /* push a pointer sized constant */
752 static void vpushs(addr_t v
)
756 vsetc(&size_type
, VT_CONST
, &cval
);
759 /* push arbitrary 64bit constant */
760 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
767 vsetc(&ctype
, VT_CONST
, &cval
);
770 /* push long long constant */
771 static inline void vpushll(long long v
)
773 vpush64(VT_LLONG
, v
);
776 ST_FUNC
void vset(CType
*type
, int r
, int v
)
781 vsetc(type
, r
, &cval
);
784 static void vseti(int r
, int v
)
792 ST_FUNC
void vpushv(SValue
*v
)
794 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
795 tcc_error("memory full (vstack)");
800 static void vdup(void)
805 /* rotate n first stack elements to the bottom
806 I1 ... In -> I2 ... In I1 [top is right]
808 ST_FUNC
void vrotb(int n
)
820 /* rotate the n elements before entry e towards the top
821 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
823 ST_FUNC
void vrote(SValue
*e
, int n
)
830 for(i
= 0;i
< n
- 1; i
++)
835 /* rotate n first stack elements to the top
836 I1 ... In -> In I1 ... I(n-1) [top is right]
838 ST_FUNC
void vrott(int n
)
843 /* ------------------------------------------------------------------------- */
844 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
846 /* called from generators to set the result from relational ops */
847 ST_FUNC
void vset_VT_CMP(int op
)
855 /* called once before asking generators to load VT_CMP to a register */
856 static void vset_VT_JMP(void)
858 int op
= vtop
->cmp_op
;
859 if (vtop
->jtrue
|| vtop
->jfalse
) {
860 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
861 int inv
= op
& (op
< 2); /* small optimization */
862 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
864 /* otherwise convert flags (rsp. 0/1) to register */
866 if (op
< 2) /* doesn't seem to happen */
871 /* Set CPU Flags, doesn't yet jump */
872 static void gvtst_set(int inv
, int t
)
875 if (vtop
->r
!= VT_CMP
) {
878 if (vtop
->r
== VT_CMP
) /* must be VT_CONST otherwise */
880 else if (vtop
->r
== VT_CONST
)
881 vset_VT_CMP(vtop
->c
.i
!= 0);
885 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
886 *p
= gjmp_append(*p
, t
);
889 /* Generate value test
891 * Generate a test for any value (jump, comparison and integers) */
892 static int gvtst(int inv
, int t
)
898 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
903 /* jump to the wanted target */
905 t
= gjmp_cond(op
^ inv
, t
);
908 /* resolve complementary jumps to here */
915 /* ------------------------------------------------------------------------- */
916 /* push a symbol value of TYPE */
917 static inline void vpushsym(CType
*type
, Sym
*sym
)
921 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
925 /* Return a static symbol pointing to a section */
926 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
932 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
933 sym
->type
.t
|= VT_STATIC
;
934 put_extern_sym(sym
, sec
, offset
, size
);
938 /* push a reference to a section offset by adding a dummy symbol */
939 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
941 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
944 /* define a new external reference to a symbol 'v' of type 'u' */
945 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
951 /* push forward reference */
952 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
953 s
->type
.ref
= type
->ref
;
954 } else if (IS_ASM_SYM(s
)) {
955 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
956 s
->type
.ref
= type
->ref
;
962 /* Merge symbol attributes. */
963 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
965 if (sa1
->aligned
&& !sa
->aligned
)
966 sa
->aligned
= sa1
->aligned
;
967 sa
->packed
|= sa1
->packed
;
968 sa
->weak
|= sa1
->weak
;
969 if (sa1
->visibility
!= STV_DEFAULT
) {
970 int vis
= sa
->visibility
;
971 if (vis
== STV_DEFAULT
972 || vis
> sa1
->visibility
)
973 vis
= sa1
->visibility
;
974 sa
->visibility
= vis
;
976 sa
->dllexport
|= sa1
->dllexport
;
977 sa
->nodecorate
|= sa1
->nodecorate
;
978 sa
->dllimport
|= sa1
->dllimport
;
981 /* Merge function attributes. */
982 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
984 if (fa1
->func_call
&& !fa
->func_call
)
985 fa
->func_call
= fa1
->func_call
;
986 if (fa1
->func_type
&& !fa
->func_type
)
987 fa
->func_type
= fa1
->func_type
;
988 if (fa1
->func_args
&& !fa
->func_args
)
989 fa
->func_args
= fa1
->func_args
;
992 /* Merge attributes. */
993 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
995 merge_symattr(&ad
->a
, &ad1
->a
);
996 merge_funcattr(&ad
->f
, &ad1
->f
);
999 ad
->section
= ad1
->section
;
1000 if (ad1
->alias_target
)
1001 ad
->alias_target
= ad1
->alias_target
;
1003 ad
->asm_label
= ad1
->asm_label
;
1005 ad
->attr_mode
= ad1
->attr_mode
;
1008 /* Merge some type attributes. */
1009 static void patch_type(Sym
*sym
, CType
*type
)
1011 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1012 if (!(sym
->type
.t
& VT_EXTERN
))
1013 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1014 sym
->type
.t
&= ~VT_EXTERN
;
1017 if (IS_ASM_SYM(sym
)) {
1018 /* stay static if both are static */
1019 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1020 sym
->type
.ref
= type
->ref
;
1023 if (!is_compatible_types(&sym
->type
, type
)) {
1024 tcc_error("incompatible types for redefinition of '%s'",
1025 get_tok_str(sym
->v
, NULL
));
1027 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1028 int static_proto
= sym
->type
.t
& VT_STATIC
;
1029 /* warn if static follows non-static function declaration */
1030 if ((type
->t
& VT_STATIC
) && !static_proto
1031 /* XXX this test for inline shouldn't be here. Until we
1032 implement gnu-inline mode again it silences a warning for
1033 mingw caused by our workarounds. */
1034 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1035 tcc_warning("static storage ignored for redefinition of '%s'",
1036 get_tok_str(sym
->v
, NULL
));
1038 /* set 'inline' if both agree or if one has static */
1039 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1040 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1041 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1042 static_proto
|= VT_INLINE
;
1045 if (0 == (type
->t
& VT_EXTERN
)) {
1046 /* put complete type, use static from prototype */
1047 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1048 sym
->type
.ref
= type
->ref
;
1050 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1053 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1054 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1055 sym
->type
.ref
= type
->ref
;
1059 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1060 /* set array size if it was omitted in extern declaration */
1061 sym
->type
.ref
->c
= type
->ref
->c
;
1063 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1064 tcc_warning("storage mismatch for redefinition of '%s'",
1065 get_tok_str(sym
->v
, NULL
));
1069 /* Merge some storage attributes. */
1070 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1073 patch_type(sym
, type
);
1075 #ifdef TCC_TARGET_PE
1076 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1077 tcc_error("incompatible dll linkage for redefinition of '%s'",
1078 get_tok_str(sym
->v
, NULL
));
1080 merge_symattr(&sym
->a
, &ad
->a
);
1082 sym
->asm_label
= ad
->asm_label
;
1083 update_storage(sym
);
1086 /* copy sym to other stack */
1087 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1090 s
= sym_malloc(), *s
= *s0
;
1091 s
->prev
= *ps
, *ps
= s
;
1092 if (s
->v
< SYM_FIRST_ANOM
) {
1093 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1094 s
->prev_tok
= *ps
, *ps
= s
;
1099 /* copy a list of syms */
1100 static void sym_copy_ref(Sym
*s0
, Sym
**ps
)
1102 Sym
*s
, **sp
= &s0
->type
.ref
;
1103 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
)
1104 sp
= &(*sp
= sym_copy(s
, ps
))->next
;
1107 /* define a new external reference to a symbol 'v' */
1108 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1112 /* look for global symbol */
1114 while (s
&& s
->sym_scope
)
1118 /* push forward reference */
1119 s
= global_identifier_push(v
, type
->t
, 0);
1122 s
->asm_label
= ad
->asm_label
;
1123 s
->type
.ref
= type
->ref
;
1124 bt
= s
->type
.t
& (VT_BTYPE
|VT_ARRAY
);
1125 /* copy type to the global stack also */
1126 if (local_scope
&& (bt
== VT_FUNC
|| (bt
& VT_ARRAY
)))
1127 sym_copy_ref(s
, &global_stack
);
1129 patch_storage(s
, ad
, type
);
1130 bt
= s
->type
.t
& VT_BTYPE
;
1132 /* push variables to local scope if any */
1133 if (local_stack
&& bt
!= VT_FUNC
)
1134 s
= sym_copy(s
, &local_stack
);
1138 /* push a reference to global symbol v */
1139 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1141 vpushsym(type
, external_global_sym(v
, type
));
1144 /* save registers up to (vtop - n) stack entry */
1145 ST_FUNC
void save_regs(int n
)
1148 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1152 /* save r to the memory stack, and mark it as being free */
1153 ST_FUNC
void save_reg(int r
)
1155 save_reg_upstack(r
, 0);
1158 /* save r to the memory stack, and mark it as being free,
1159 if seen up to (vtop - n) stack entry */
1160 ST_FUNC
void save_reg_upstack(int r
, int n
)
1162 int l
, saved
, size
, align
;
1166 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1171 /* modify all stack values */
1174 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1175 if ((p
->r
& VT_VALMASK
) == r
|| (p
->r2
& VT_VALMASK
) == r
) {
1176 /* must save value on stack if not already done */
1178 /* NOTE: must reload 'r' because r might be equal to r2 */
1179 r
= p
->r
& VT_VALMASK
;
1180 /* store register in the stack */
1182 if ((p
->r
& VT_LVAL
) ||
1183 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
1185 type
= &char_pointer_type
;
1189 size
= type_size(type
, &align
);
1190 l
=get_temp_local_var(size
,align
);
1191 sv
.type
.t
= type
->t
;
1192 sv
.r
= VT_LOCAL
| VT_LVAL
;
1195 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1196 /* x86 specific: need to pop fp register ST0 if saved */
1197 if (r
== TREG_ST0
) {
1198 o(0xd8dd); /* fstp %st(0) */
1201 /* special long long case */
1202 if ((p
->r2
& VT_VALMASK
) < VT_CONST
) {
1208 /* mark that stack entry as being saved on the stack */
1209 if (p
->r
& VT_LVAL
) {
1210 /* also clear the bounded flag because the
1211 relocation address of the function was stored in
1213 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1215 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1223 #ifdef TCC_TARGET_ARM
1224 /* find a register of class 'rc2' with at most one reference on stack.
1225 * If none, call get_reg(rc) */
1226 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1231 for(r
=0;r
<NB_REGS
;r
++) {
1232 if (reg_classes
[r
] & rc2
) {
1235 for(p
= vstack
; p
<= vtop
; p
++) {
1236 if ((p
->r
& VT_VALMASK
) == r
||
1237 (p
->r2
& VT_VALMASK
) == r
)
1248 /* find a free register of class 'rc'. If none, save one register */
1249 ST_FUNC
int get_reg(int rc
)
1254 /* find a free register */
1255 for(r
=0;r
<NB_REGS
;r
++) {
1256 if (reg_classes
[r
] & rc
) {
1259 for(p
=vstack
;p
<=vtop
;p
++) {
1260 if ((p
->r
& VT_VALMASK
) == r
||
1261 (p
->r2
& VT_VALMASK
) == r
)
1269 /* no register left : free the first one on the stack (VERY
1270 IMPORTANT to start from the bottom to ensure that we don't
1271 spill registers used in gen_opi()) */
1272 for(p
=vstack
;p
<=vtop
;p
++) {
1273 /* look at second register (if long long) */
1274 r
= p
->r2
& VT_VALMASK
;
1275 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1277 r
= p
->r
& VT_VALMASK
;
1278 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1284 /* Should never comes here */
1288 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1289 static int get_temp_local_var(int size
,int align
){
1291 struct temp_local_variable
*temp_var
;
1298 for(i
=0;i
<nb_temp_local_vars
;i
++){
1299 temp_var
=&arr_temp_local_vars
[i
];
1300 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1303 /*check if temp_var is free*/
1305 for(p
=vstack
;p
<=vtop
;p
++) {
1307 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1308 if(p
->c
.i
==temp_var
->location
){
1315 found_var
=temp_var
->location
;
1321 loc
= (loc
- size
) & -align
;
1322 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1323 temp_var
=&arr_temp_local_vars
[i
];
1324 temp_var
->location
=loc
;
1325 temp_var
->size
=size
;
1326 temp_var
->align
=align
;
1327 nb_temp_local_vars
++;
1334 static void clear_temp_local_var_list(){
1335 nb_temp_local_vars
=0;
1338 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1340 static void move_reg(int r
, int s
, int t
)
1354 /* get address of vtop (vtop MUST BE an lvalue) */
1355 ST_FUNC
void gaddrof(void)
1357 vtop
->r
&= ~VT_LVAL
;
1358 /* tricky: if saved lvalue, then we can go back to lvalue */
1359 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1360 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1365 #ifdef CONFIG_TCC_BCHECK
1366 /* generate lvalue bound code */
1367 static void gbound(void)
1372 vtop
->r
&= ~VT_MUSTBOUND
;
1373 /* if lvalue, then use checking code before dereferencing */
1374 if (vtop
->r
& VT_LVAL
) {
1375 /* if not VT_BOUNDED value, then make one */
1376 if (!(vtop
->r
& VT_BOUNDED
)) {
1377 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1378 /* must save type because we must set it to int to get pointer */
1380 vtop
->type
.t
= VT_PTR
;
1383 gen_bounded_ptr_add();
1384 vtop
->r
|= lval_type
;
1387 /* then check for dereferencing */
1388 gen_bounded_ptr_deref();
1393 static void incr_bf_adr(int o
)
1395 vtop
->type
= char_pointer_type
;
1399 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1400 | (VT_BYTE
|VT_UNSIGNED
);
1401 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1402 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1405 /* single-byte load mode for packed or otherwise unaligned bitfields */
1406 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1409 save_reg_upstack(vtop
->r
, 1);
1410 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1411 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1420 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1422 vpushi((1 << n
) - 1), gen_op('&');
1425 vpushi(bits
), gen_op(TOK_SHL
);
1428 bits
+= n
, bit_size
-= n
, o
= 1;
1431 if (!(type
->t
& VT_UNSIGNED
)) {
1432 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1433 vpushi(n
), gen_op(TOK_SHL
);
1434 vpushi(n
), gen_op(TOK_SAR
);
1438 /* single-byte store mode for packed or otherwise unaligned bitfields */
1439 static void store_packed_bf(int bit_pos
, int bit_size
)
1441 int bits
, n
, o
, m
, c
;
1443 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1445 save_reg_upstack(vtop
->r
, 1);
1446 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1448 incr_bf_adr(o
); // X B
1450 c
? vdup() : gv_dup(); // B V X
1453 vpushi(bits
), gen_op(TOK_SHR
);
1455 vpushi(bit_pos
), gen_op(TOK_SHL
);
1460 m
= ((1 << n
) - 1) << bit_pos
;
1461 vpushi(m
), gen_op('&'); // X B V1
1462 vpushv(vtop
-1); // X B V1 B
1463 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1464 gen_op('&'); // X B V1 B1
1465 gen_op('|'); // X B V2
1467 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1468 vstore(), vpop(); // X B
1469 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1474 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1477 if (0 == sv
->type
.ref
)
1479 t
= sv
->type
.ref
->auxtype
;
1480 if (t
!= -1 && t
!= VT_STRUCT
) {
1481 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1482 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1487 /* store vtop a register belonging to class 'rc'. lvalues are
1488 converted to values. Cannot be used if cannot be converted to
1489 register value (such as structures). */
1490 ST_FUNC
int gv(int rc
)
1492 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1494 /* NOTE: get_reg can modify vstack[] */
1495 if (vtop
->type
.t
& VT_BITFIELD
) {
1498 bit_pos
= BIT_POS(vtop
->type
.t
);
1499 bit_size
= BIT_SIZE(vtop
->type
.t
);
1500 /* remove bit field info to avoid loops */
1501 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1504 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1505 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1506 type
.t
|= VT_UNSIGNED
;
1508 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1510 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1515 if (r
== VT_STRUCT
) {
1516 load_packed_bf(&type
, bit_pos
, bit_size
);
1518 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1519 /* cast to int to propagate signedness in following ops */
1521 /* generate shifts */
1522 vpushi(bits
- (bit_pos
+ bit_size
));
1524 vpushi(bits
- bit_size
);
1525 /* NOTE: transformed to SHR if unsigned */
1530 if (is_float(vtop
->type
.t
) &&
1531 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1532 unsigned long offset
;
1533 /* CPUs usually cannot use float constants, so we store them
1534 generically in data segment */
1535 size
= type_size(&vtop
->type
, &align
);
1537 size
= 0, align
= 1;
1538 offset
= section_add(data_section
, size
, align
);
1539 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1541 init_putv(&vtop
->type
, data_section
, offset
);
1544 #ifdef CONFIG_TCC_BCHECK
1545 if (vtop
->r
& VT_MUSTBOUND
)
1548 #ifdef TCC_TARGET_RISCV64
1550 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
&& rc
== RC_FLOAT
)
1554 r
= vtop
->r
& VT_VALMASK
;
1555 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1556 #ifndef TCC_TARGET_ARM64
1557 #ifndef TCC_TARGET_RISCV64 /* XXX: remove the whole LRET/QRET class */
1560 #ifdef TCC_TARGET_X86_64
1561 else if (rc
== RC_FRET
)
1566 /* need to reload if:
1568 - lvalue (need to dereference pointer)
1569 - already a register, but not in the right class */
1571 || (vtop
->r
& VT_LVAL
)
1572 || !(reg_classes
[r
] & rc
)
1573 #ifdef TCC_TARGET_RISCV64
1574 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& (vtop
->r2
>= NB_REGS
|| !(reg_classes
[vtop
->r2
] & rc2
)))
1575 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
&& (vtop
->r2
>= NB_REGS
|| !(reg_classes
[vtop
->r2
] & rc2
)))
1577 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1578 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1580 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1585 #ifdef TCC_TARGET_RISCV64
1586 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
)) {
1587 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= VT_LLONG
;
1589 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1590 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1592 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1593 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1594 unsigned long long ll
;
1596 int r2
, original_type
;
1597 original_type
= vtop
->type
.t
;
1598 /* two register type load : expand to two words
1601 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1604 vtop
->c
.i
= ll
; /* first word */
1606 vtop
->r
= r
; /* save register value */
1607 vpushi(ll
>> 32); /* second word */
1610 if (vtop
->r
& VT_LVAL
) {
1611 /* We do not want to modifier the long long
1612 pointer here, so the safest (and less
1613 efficient) is to save all the other registers
1614 in the stack. XXX: totally inefficient. */
1618 /* lvalue_save: save only if used further down the stack */
1619 save_reg_upstack(vtop
->r
, 1);
1621 /* load from memory */
1622 vtop
->type
.t
= load_type
;
1625 vtop
[-1].r
= r
; /* save register value */
1626 /* increment pointer to get second word */
1627 vtop
->type
.t
= addr_type
;
1632 vtop
->type
.t
= load_type
;
1634 /* move registers */
1637 vtop
[-1].r
= r
; /* save register value */
1638 vtop
->r
= vtop
[-1].r2
;
1640 /* Allocate second register. Here we rely on the fact that
1641 get_reg() tries first to free r2 of an SValue. */
1645 /* write second register */
1647 vtop
->type
.t
= original_type
;
1648 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1650 /* lvalue of scalar type : need to use lvalue type
1651 because of possible cast */
1654 /* compute memory access type */
1655 if (vtop
->r
& VT_LVAL_BYTE
)
1657 else if (vtop
->r
& VT_LVAL_SHORT
)
1659 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1663 /* restore wanted type */
1666 if (vtop
->r
== VT_CMP
)
1668 /* one register type load */
1673 #ifdef TCC_TARGET_C67
1674 /* uses register pairs for doubles */
1675 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1682 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1683 ST_FUNC
void gv2(int rc1
, int rc2
)
1685 /* generate more generic register first. But VT_JMP or VT_CMP
1686 values must be generated first in all cases to avoid possible
1688 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1693 /* test if reload is needed for first register */
1694 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1704 /* test if reload is needed for first register */
1705 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1711 #ifndef TCC_TARGET_ARM64
1712 /* wrapper around RC_FRET to return a register by type */
1713 static int rc_fret(int t
)
1715 #ifdef TCC_TARGET_X86_64
1716 if (t
== VT_LDOUBLE
) {
1719 #elif defined TCC_TARGET_RISCV64
1720 if (t
== VT_LDOUBLE
)
1727 /* wrapper around REG_FRET to return a register by type */
1728 static int reg_fret(int t
)
1730 #ifdef TCC_TARGET_X86_64
1731 if (t
== VT_LDOUBLE
) {
1734 #elif defined TCC_TARGET_RISCV64
1735 if (t
== VT_LDOUBLE
)
1742 /* expand 64bit on stack in two ints */
1743 ST_FUNC
void lexpand(void)
1746 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1747 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1748 if (v
== VT_CONST
) {
1751 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1757 vtop
[0].r
= vtop
[-1].r2
;
1758 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1760 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1765 /* build a long long from two ints */
1766 static void lbuild(int t
)
1768 gv2(RC_INT
, RC_INT
);
1769 vtop
[-1].r2
= vtop
[0].r
;
1770 vtop
[-1].type
.t
= t
;
1775 /* convert stack entry to register and duplicate its value in another
1777 static void gv_dup(void)
1784 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1785 if (t
& VT_BITFIELD
) {
1795 /* stack: H L L1 H1 */
1805 /* duplicate value */
1810 #ifdef TCC_TARGET_X86_64
1811 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1814 #elif defined TCC_TARGET_RISCV64
1815 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
1824 load(r1
, &sv
); /* move r to r1 */
1826 /* duplicates value */
1833 /* generate CPU independent (unsigned) long long operations */
1834 static void gen_opl(int op
)
1836 int t
, a
, b
, op1
, c
, i
;
1838 unsigned short reg_iret
= REG_IRET
;
1839 unsigned short reg_lret
= REG_LRET
;
1845 func
= TOK___divdi3
;
1848 func
= TOK___udivdi3
;
1851 func
= TOK___moddi3
;
1854 func
= TOK___umoddi3
;
1861 /* call generic long long function */
1862 vpush_global_sym(&func_old_type
, func
);
1867 vtop
->r2
= reg_lret
;
1875 //pv("gen_opl A",0,2);
1881 /* stack: L1 H1 L2 H2 */
1886 vtop
[-2] = vtop
[-3];
1889 /* stack: H1 H2 L1 L2 */
1890 //pv("gen_opl B",0,4);
1896 /* stack: H1 H2 L1 L2 ML MH */
1899 /* stack: ML MH H1 H2 L1 L2 */
1903 /* stack: ML MH H1 L2 H2 L1 */
1908 /* stack: ML MH M1 M2 */
1911 } else if (op
== '+' || op
== '-') {
1912 /* XXX: add non carry method too (for MIPS or alpha) */
1918 /* stack: H1 H2 (L1 op L2) */
1921 gen_op(op1
+ 1); /* TOK_xxxC2 */
1924 /* stack: H1 H2 (L1 op L2) */
1927 /* stack: (L1 op L2) H1 H2 */
1929 /* stack: (L1 op L2) (H1 op H2) */
1937 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1938 t
= vtop
[-1].type
.t
;
1942 /* stack: L H shift */
1944 /* constant: simpler */
1945 /* NOTE: all comments are for SHL. the other cases are
1946 done by swapping words */
1957 if (op
!= TOK_SAR
) {
1990 /* XXX: should provide a faster fallback on x86 ? */
1993 func
= TOK___ashrdi3
;
1996 func
= TOK___lshrdi3
;
1999 func
= TOK___ashldi3
;
2005 /* compare operations */
2011 /* stack: L1 H1 L2 H2 */
2013 vtop
[-1] = vtop
[-2];
2015 /* stack: L1 L2 H1 H2 */
2019 /* when values are equal, we need to compare low words. since
2020 the jump is inverted, we invert the test too. */
2023 else if (op1
== TOK_GT
)
2025 else if (op1
== TOK_ULT
)
2027 else if (op1
== TOK_UGT
)
2037 /* generate non equal test */
2039 vset_VT_CMP(TOK_NE
);
2043 /* compare low. Always unsigned */
2047 else if (op1
== TOK_LE
)
2049 else if (op1
== TOK_GT
)
2051 else if (op1
== TOK_GE
)
2054 #if 0//def TCC_TARGET_I386
2055 if (op
== TOK_NE
) { gsym(b
); break; }
2056 if (op
== TOK_EQ
) { gsym(a
); break; }
2065 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
2067 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
2068 return (a
^ b
) >> 63 ? -x
: x
;
2071 static int gen_opic_lt(uint64_t a
, uint64_t b
)
2073 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
2076 /* handle integer constant optimizations and various machine
2078 static void gen_opic(int op
)
2080 SValue
*v1
= vtop
- 1;
2082 int t1
= v1
->type
.t
& VT_BTYPE
;
2083 int t2
= v2
->type
.t
& VT_BTYPE
;
2084 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2085 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2086 uint64_t l1
= c1
? v1
->c
.i
: 0;
2087 uint64_t l2
= c2
? v2
->c
.i
: 0;
2088 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2090 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2091 l1
= ((uint32_t)l1
|
2092 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2093 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2094 l2
= ((uint32_t)l2
|
2095 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2099 case '+': l1
+= l2
; break;
2100 case '-': l1
-= l2
; break;
2101 case '&': l1
&= l2
; break;
2102 case '^': l1
^= l2
; break;
2103 case '|': l1
|= l2
; break;
2104 case '*': l1
*= l2
; break;
2111 /* if division by zero, generate explicit division */
2114 tcc_error("division by zero in constant");
2118 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2119 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2120 case TOK_UDIV
: l1
= l1
/ l2
; break;
2121 case TOK_UMOD
: l1
= l1
% l2
; break;
2124 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2125 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2127 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2130 case TOK_ULT
: l1
= l1
< l2
; break;
2131 case TOK_UGE
: l1
= l1
>= l2
; break;
2132 case TOK_EQ
: l1
= l1
== l2
; break;
2133 case TOK_NE
: l1
= l1
!= l2
; break;
2134 case TOK_ULE
: l1
= l1
<= l2
; break;
2135 case TOK_UGT
: l1
= l1
> l2
; break;
2136 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2137 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2138 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2139 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2141 case TOK_LAND
: l1
= l1
&& l2
; break;
2142 case TOK_LOR
: l1
= l1
|| l2
; break;
2146 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2147 l1
= ((uint32_t)l1
|
2148 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2152 /* if commutative ops, put c2 as constant */
2153 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2154 op
== '|' || op
== '*')) {
2156 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2157 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2159 if (!const_wanted
&&
2161 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2162 (l1
== -1 && op
== TOK_SAR
))) {
2163 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2165 } else if (!const_wanted
&&
2166 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2168 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2169 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2170 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2175 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2178 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2179 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2182 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2183 /* filter out NOP operations like x*1, x-0, x&-1... */
2185 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2186 /* try to use shifts instead of muls or divs */
2187 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2196 else if (op
== TOK_PDIV
)
2202 } else if (c2
&& (op
== '+' || op
== '-') &&
2203 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2204 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2205 /* symbol + constant case */
2209 /* The backends can't always deal with addends to symbols
2210 larger than +-1<<31. Don't construct such. */
2217 /* call low level op generator */
2218 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2219 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2227 /* generate a floating point operation with constant propagation */
2228 static void gen_opif(int op
)
2232 #if defined _MSC_VER && defined __x86_64__
2233 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2240 /* currently, we cannot do computations with forward symbols */
2241 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2242 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2244 if (v1
->type
.t
== VT_FLOAT
) {
2247 } else if (v1
->type
.t
== VT_DOUBLE
) {
2255 /* NOTE: we only do constant propagation if finite number (not
2256 NaN or infinity) (ANSI spec) */
2257 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2261 case '+': f1
+= f2
; break;
2262 case '-': f1
-= f2
; break;
2263 case '*': f1
*= f2
; break;
2266 /* If not in initializer we need to potentially generate
2267 FP exceptions at runtime, otherwise we want to fold. */
2273 /* XXX: also handles tests ? */
2277 /* XXX: overflow test ? */
2278 if (v1
->type
.t
== VT_FLOAT
) {
2280 } else if (v1
->type
.t
== VT_DOUBLE
) {
2292 static int pointed_size(CType
*type
)
2295 return type_size(pointed_type(type
), &align
);
2298 static void vla_runtime_pointed_size(CType
*type
)
2301 vla_runtime_type_size(pointed_type(type
), &align
);
2304 static inline int is_null_pointer(SValue
*p
)
2306 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2308 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2309 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2310 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2311 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2312 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2313 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2316 static inline int is_integer_btype(int bt
)
2318 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2319 bt
== VT_INT
|| bt
== VT_LLONG
);
2322 /* check types for comparison or subtraction of pointers */
2323 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2325 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2328 /* null pointers are accepted for all comparisons as gcc */
2329 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2333 bt1
= type1
->t
& VT_BTYPE
;
2334 bt2
= type2
->t
& VT_BTYPE
;
2335 /* accept comparison between pointer and integer with a warning */
2336 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2337 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2338 tcc_warning("comparison between pointer and integer");
2342 /* both must be pointers or implicit function pointers */
2343 if (bt1
== VT_PTR
) {
2344 type1
= pointed_type(type1
);
2345 } else if (bt1
!= VT_FUNC
)
2346 goto invalid_operands
;
2348 if (bt2
== VT_PTR
) {
2349 type2
= pointed_type(type2
);
2350 } else if (bt2
!= VT_FUNC
) {
2352 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2354 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2355 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2359 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2360 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2361 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2362 /* gcc-like error if '-' is used */
2364 goto invalid_operands
;
2366 tcc_warning("comparison of distinct pointer types lacks a cast");
2370 /* generic gen_op: handles types problems */
2371 ST_FUNC
void gen_op(int op
)
2373 int u
, t1
, t2
, bt1
, bt2
, t
;
2377 t1
= vtop
[-1].type
.t
;
2378 t2
= vtop
[0].type
.t
;
2379 bt1
= t1
& VT_BTYPE
;
2380 bt2
= t2
& VT_BTYPE
;
2382 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2383 tcc_error("operation on a struct");
2384 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2385 if (bt2
== VT_FUNC
) {
2386 mk_pointer(&vtop
->type
);
2389 if (bt1
== VT_FUNC
) {
2391 mk_pointer(&vtop
->type
);
2396 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2397 /* at least one operand is a pointer */
2398 /* relational op: must be both pointers */
2399 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2400 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2401 /* pointers are handled are unsigned */
2403 t
= VT_LLONG
| VT_UNSIGNED
;
2405 t
= VT_INT
| VT_UNSIGNED
;
2409 /* if both pointers, then it must be the '-' op */
2410 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2412 tcc_error("cannot use pointers here");
2413 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2414 /* XXX: check that types are compatible */
2415 if (vtop
[-1].type
.t
& VT_VLA
) {
2416 vla_runtime_pointed_size(&vtop
[-1].type
);
2418 vpushi(pointed_size(&vtop
[-1].type
));
2422 vtop
->type
.t
= ptrdiff_type
.t
;
2426 /* exactly one pointer : must be '+' or '-'. */
2427 if (op
!= '-' && op
!= '+')
2428 tcc_error("cannot use pointers here");
2429 /* Put pointer as first operand */
2430 if (bt2
== VT_PTR
) {
2432 t
= t1
, t1
= t2
, t2
= t
;
2435 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2436 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2439 type1
= vtop
[-1].type
;
2440 type1
.t
&= ~VT_ARRAY
;
2441 if (vtop
[-1].type
.t
& VT_VLA
)
2442 vla_runtime_pointed_size(&vtop
[-1].type
);
2444 u
= pointed_size(&vtop
[-1].type
);
2446 tcc_error("unknown array element size");
2450 /* XXX: cast to int ? (long long case) */
2456 /* #ifdef CONFIG_TCC_BCHECK
2457 The main reason to removing this code:
2464 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2465 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2467 When this code is on. then the output looks like
2469 v+(i-j) = 0xbff84000
2471 /* if evaluating constant expression, no code should be
2472 generated, so no bound check */
2473 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2474 /* if bounded pointers, we generate a special code to
2481 gen_bounded_ptr_add();
2487 /* put again type if gen_opic() swaped operands */
2490 } else if (is_float(bt1
) || is_float(bt2
)) {
2491 /* compute bigger type and do implicit casts */
2492 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2494 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2499 /* floats can only be used for a few operations */
2500 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2501 (op
< TOK_ULT
|| op
> TOK_GT
))
2502 tcc_error("invalid operands for binary operation");
2504 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2505 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2506 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2508 t
|= (VT_LONG
& t1
);
2510 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2511 /* cast to biggest op */
2512 t
= VT_LLONG
| VT_LONG
;
2513 if (bt1
== VT_LLONG
)
2515 if (bt2
== VT_LLONG
)
2517 /* convert to unsigned if it does not fit in a long long */
2518 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2519 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2523 /* integer operations */
2524 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2525 /* convert to unsigned if it does not fit in an integer */
2526 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2527 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2530 /* XXX: currently, some unsigned operations are explicit, so
2531 we modify them here */
2532 if (t
& VT_UNSIGNED
) {
2539 else if (op
== TOK_LT
)
2541 else if (op
== TOK_GT
)
2543 else if (op
== TOK_LE
)
2545 else if (op
== TOK_GE
)
2553 /* special case for shifts and long long: we keep the shift as
2555 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2562 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2563 /* relational op: the result is an int */
2564 vtop
->type
.t
= VT_INT
;
2569 // Make sure that we have converted to an rvalue:
2570 if (vtop
->r
& VT_LVAL
)
2571 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2574 #ifndef TCC_TARGET_ARM
2575 /* generic itof for unsigned long long case */
2576 static void gen_cvt_itof1(int t
)
2578 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2581 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2582 (VT_LLONG
| VT_UNSIGNED
)) {
2585 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2586 #if LDOUBLE_SIZE != 8
2587 else if (t
== VT_LDOUBLE
)
2588 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2591 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2595 vtop
->r
= reg_fret(t
);
2603 /* generic ftoi for unsigned long long case */
2604 static void gen_cvt_ftoi1(int t
)
2606 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2611 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2612 /* not handled natively */
2613 st
= vtop
->type
.t
& VT_BTYPE
;
2615 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2616 #if LDOUBLE_SIZE != 8
2617 else if (st
== VT_LDOUBLE
)
2618 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2621 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2626 vtop
->r2
= REG_LRET
;
2633 /* force char or short cast */
2634 static void force_charshort_cast(int t
)
2638 /* cannot cast static initializers */
2639 if (STATIC_DATA_WANTED
)
2643 /* XXX: add optimization if lvalue : just change type and offset */
2648 if (t
& VT_UNSIGNED
) {
2649 vpushi((1 << bits
) - 1);
2652 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2658 /* result must be signed or the SAR is converted to an SHL
2659 This was not the case when "t" was a signed short
2660 and the last value on the stack was an unsigned int */
2661 vtop
->type
.t
&= ~VT_UNSIGNED
;
2667 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2668 static void gen_cast_s(int t
)
2676 static void gen_cast(CType
*type
)
2678 int sbt
, dbt
, sf
, df
, c
, p
;
2680 /* special delayed cast for char/short */
2681 /* XXX: in some cases (multiple cascaded casts), it may still
2683 if (vtop
->r
& VT_MUSTCAST
) {
2684 vtop
->r
&= ~VT_MUSTCAST
;
2685 force_charshort_cast(vtop
->type
.t
);
2688 /* bitfields first get cast to ints */
2689 if (vtop
->type
.t
& VT_BITFIELD
) {
2693 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2694 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2699 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2700 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2701 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2702 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
2705 /* constant case: we can do it now */
2706 /* XXX: in ISOC, cannot do it if error in convert */
2707 if (sbt
== VT_FLOAT
)
2708 vtop
->c
.ld
= vtop
->c
.f
;
2709 else if (sbt
== VT_DOUBLE
)
2710 vtop
->c
.ld
= vtop
->c
.d
;
2713 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2714 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2715 vtop
->c
.ld
= vtop
->c
.i
;
2717 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2719 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2720 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2722 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2725 if (dbt
== VT_FLOAT
)
2726 vtop
->c
.f
= (float)vtop
->c
.ld
;
2727 else if (dbt
== VT_DOUBLE
)
2728 vtop
->c
.d
= (double)vtop
->c
.ld
;
2729 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2730 vtop
->c
.i
= vtop
->c
.ld
;
2731 } else if (sf
&& dbt
== VT_BOOL
) {
2732 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2735 vtop
->c
.i
= vtop
->c
.ld
;
2736 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2738 else if (sbt
& VT_UNSIGNED
)
2739 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2741 else if (sbt
== VT_PTR
)
2744 else if (sbt
!= VT_LLONG
)
2745 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2746 -(vtop
->c
.i
& 0x80000000));
2748 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2750 else if (dbt
== VT_BOOL
)
2751 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2753 else if (dbt
== VT_PTR
)
2756 else if (dbt
!= VT_LLONG
) {
2757 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2758 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2761 if (!(dbt
& VT_UNSIGNED
))
2762 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2765 } else if (p
&& dbt
== VT_BOOL
) {
2769 /* non constant case: generate code */
2771 /* convert from fp to fp */
2774 /* convert int to fp */
2777 /* convert fp to int */
2778 if (dbt
== VT_BOOL
) {
2782 /* we handle char/short/etc... with generic code */
2783 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2784 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2788 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2789 /* additional cast for char/short... */
2795 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2796 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2797 /* scalar to long long */
2798 /* machine independent conversion */
2800 /* generate high word */
2801 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2805 if (sbt
== VT_PTR
) {
2806 /* cast from pointer to int before we apply
2807 shift operation, which pointers don't support*/
2814 /* patch second register */
2815 vtop
[-1].r2
= vtop
->r
;
2819 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2820 (dbt
& VT_BTYPE
) == VT_PTR
||
2821 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2822 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2823 (sbt
& VT_BTYPE
) != VT_PTR
&&
2824 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2825 /* need to convert from 32bit to 64bit */
2827 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2828 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_RISCV64)
2830 #elif defined(TCC_TARGET_X86_64)
2832 /* x86_64 specific: movslq */
2834 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2838 } else if (sbt
& VT_UNSIGNED
) {
2839 #if defined(TCC_TARGET_RISCV64)
2840 /* RISC-V keeps 32bit vals in registers sign-extended.
2841 So here we need a zero-extension. */
2842 vtop
->type
.t
= VT_LLONG
;
2851 } else if (dbt
== VT_BOOL
) {
2852 /* scalar to bool */
2855 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2856 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2857 if (sbt
== VT_PTR
) {
2858 vtop
->type
.t
= VT_INT
;
2859 tcc_warning("nonportable conversion from pointer to char/short");
2861 force_charshort_cast(dbt
);
2862 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2864 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2866 /* from long long: just take low order word */
2870 if (dbt
& VT_UNSIGNED
) {
2871 /* XXX some architectures (e.g. risc-v) would like it
2872 better for this merely being a 32-to-64 sign or zero-
2875 vtop
->type
.t
|= VT_UNSIGNED
;
2881 /* if lvalue and single word type, nothing to do because
2882 the lvalue already contains the real type size (see
2883 VT_LVAL_xxx constants) */
2886 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2887 /* if we are casting between pointer types,
2888 we must update the VT_LVAL_xxx size */
2889 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2890 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2893 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
2896 /* return type size as known at compile time. Put alignment at 'a' */
2897 ST_FUNC
int type_size(CType
*type
, int *a
)
2902 bt
= type
->t
& VT_BTYPE
;
2903 if (bt
== VT_STRUCT
) {
2908 } else if (bt
== VT_PTR
) {
2909 if (type
->t
& VT_ARRAY
) {
2913 ts
= type_size(&s
->type
, a
);
2915 if (ts
< 0 && s
->c
< 0)
2923 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
2924 return -1; /* incomplete enum */
2925 } else if (bt
== VT_LDOUBLE
) {
2927 return LDOUBLE_SIZE
;
2928 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2929 #ifdef TCC_TARGET_I386
2930 #ifdef TCC_TARGET_PE
2935 #elif defined(TCC_TARGET_ARM)
2945 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2948 } else if (bt
== VT_SHORT
) {
2951 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2955 /* char, void, function, _Bool */
2961 /* push type size as known at runtime time on top of value stack. Put
2963 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2965 if (type
->t
& VT_VLA
) {
2966 type_size(&type
->ref
->type
, a
);
2967 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2969 vpushi(type_size(type
, a
));
2973 /* return the pointed type of t */
2974 static inline CType
*pointed_type(CType
*type
)
2976 return &type
->ref
->type
;
2979 /* modify type so that its it is a pointer to type. */
2980 ST_FUNC
void mk_pointer(CType
*type
)
2983 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2984 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2988 /* compare function types. OLD functions match any new functions */
2989 static int is_compatible_func(CType
*type1
, CType
*type2
)
2995 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2997 if (s1
->f
.func_type
!= s2
->f
.func_type
2998 && s1
->f
.func_type
!= FUNC_OLD
2999 && s2
->f
.func_type
!= FUNC_OLD
)
3001 /* we should check the function return type for FUNC_OLD too
3002 but that causes problems with the internally used support
3003 functions such as TOK_memmove */
3004 if (s1
->f
.func_type
== FUNC_OLD
&& !s1
->next
)
3006 if (s2
->f
.func_type
== FUNC_OLD
&& !s2
->next
)
3009 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
3020 /* return true if type1 and type2 are the same. If unqualified is
3021 true, qualifiers on the types are ignored.
3023 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3027 t1
= type1
->t
& VT_TYPE
;
3028 t2
= type2
->t
& VT_TYPE
;
3030 /* strip qualifiers before comparing */
3031 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3032 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3035 /* Default Vs explicit signedness only matters for char */
3036 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3040 /* XXX: bitfields ? */
3045 && !(type1
->ref
->c
< 0
3046 || type2
->ref
->c
< 0
3047 || type1
->ref
->c
== type2
->ref
->c
))
3050 /* test more complicated cases */
3051 bt1
= t1
& VT_BTYPE
;
3052 if (bt1
== VT_PTR
) {
3053 type1
= pointed_type(type1
);
3054 type2
= pointed_type(type2
);
3055 return is_compatible_types(type1
, type2
);
3056 } else if (bt1
== VT_STRUCT
) {
3057 return (type1
->ref
== type2
->ref
);
3058 } else if (bt1
== VT_FUNC
) {
3059 return is_compatible_func(type1
, type2
);
3060 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
3061 return type1
->ref
== type2
->ref
;
3067 /* return true if type1 and type2 are exactly the same (including
3070 static int is_compatible_types(CType
*type1
, CType
*type2
)
3072 return compare_types(type1
,type2
,0);
3075 /* return true if type1 and type2 are the same (ignoring qualifiers).
3077 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3079 return compare_types(type1
,type2
,1);
/* NOTE(review): this extraction is fragmentary (interior source lines are
   missing), so only comments were added below.  Visible behavior: the
   function appends storage-class keywords, qualifiers, signedness, the
   base-type name, and (recursively) function parameter lists, array
   bounds and pointer decorations into 'buf' via pstrcat/snprintf. */
3082 /* print a type. If 'varstr' is not NULL, then the variable is also
3083 printed in the type */
3085 /* XXX: add array and function pointers */
3086 static void type_to_str(char *buf
, int buf_size
,
3087 CType
*type
, const char *varstr
)
3099 pstrcat(buf
, buf_size
, "extern ");
3101 pstrcat(buf
, buf_size
, "static ");
3103 pstrcat(buf
, buf_size
, "typedef ");
3105 pstrcat(buf
, buf_size
, "inline ");
3106 if (t
& VT_VOLATILE
)
3107 pstrcat(buf
, buf_size
, "volatile ");
3108 if (t
& VT_CONSTANT
)
3109 pstrcat(buf
, buf_size
, "const ");
3111 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
3112 || ((t
& VT_UNSIGNED
)
3113 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
3116 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
3118 buf_size
-= strlen(buf
);
3153 tstr
= "long double";
3155 pstrcat(buf
, buf_size
, tstr
);
3162 pstrcat(buf
, buf_size
, tstr
);
3163 v
= type
->ref
->v
& ~SYM_STRUCT
;
3164 if (v
>= SYM_FIRST_ANOM
)
3165 pstrcat(buf
, buf_size
, "<anonymous>");
3167 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3172 if (varstr
&& '*' == *varstr
) {
3173 pstrcat(buf1
, sizeof(buf1
), "(");
3174 pstrcat(buf1
, sizeof(buf1
), varstr
);
3175 pstrcat(buf1
, sizeof(buf1
), ")");
/* NOTE(review): the next call passes 'buf_size' as the size of 'buf1';
   sibling calls use sizeof(buf1) — looks like a size-argument slip,
   verify against upstream before changing. */
3177 pstrcat(buf1
, buf_size
, "(");
3179 while (sa
!= NULL
) {
3181 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3182 pstrcat(buf1
, sizeof(buf1
), buf2
);
3185 pstrcat(buf1
, sizeof(buf1
), ", ");
3187 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3188 pstrcat(buf1
, sizeof(buf1
), ", ...");
3189 pstrcat(buf1
, sizeof(buf1
), ")");
3190 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3195 if (varstr
&& '*' == *varstr
)
3196 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3198 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3199 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3202 pstrcpy(buf1
, sizeof(buf1
), "*");
3203 if (t
& VT_CONSTANT
)
/* NOTE(review): same buf1/buf_size mismatch on the next two pstrcat calls */
3204 pstrcat(buf1
, buf_size
, "const ");
3205 if (t
& VT_VOLATILE
)
3206 pstrcat(buf1
, buf_size
, "volatile ");
3208 pstrcat(buf1
, sizeof(buf1
), varstr
);
3209 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3213 pstrcat(buf
, buf_size
, " ");
3214 pstrcat(buf
, buf_size
, varstr
);
3219 /* verify type compatibility to store vtop in 'dt' type, and generate
3221 static void gen_assign_cast(CType
*dt
)
3223 CType
*st
, *type1
, *type2
;
3224 char buf1
[256], buf2
[256];
3225 int dbt
, sbt
, qualwarn
, lvl
;
3227 st
= &vtop
->type
; /* source type */
3228 dbt
= dt
->t
& VT_BTYPE
;
3229 sbt
= st
->t
& VT_BTYPE
;
3230 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3231 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3232 ; /* It is Ok if both are void */
3234 tcc_error("cannot cast from/to void");
3236 if (dt
->t
& VT_CONSTANT
)
3237 tcc_warning("assignment of read-only location");
3240 /* special cases for pointers */
3241 /* '0' can also be a pointer */
3242 if (is_null_pointer(vtop
))
3244 /* accept implicit pointer to integer cast with warning */
3245 if (is_integer_btype(sbt
)) {
3246 tcc_warning("assignment makes pointer from integer without a cast");
3249 type1
= pointed_type(dt
);
3251 type2
= pointed_type(st
);
3252 else if (sbt
== VT_FUNC
)
3253 type2
= st
; /* a function is implicitly a function pointer */
3256 if (is_compatible_types(type1
, type2
))
3258 for (qualwarn
= lvl
= 0;; ++lvl
) {
3259 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3260 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3262 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3263 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3264 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3266 type1
= pointed_type(type1
);
3267 type2
= pointed_type(type2
);
3269 if (!is_compatible_unqualified_types(type1
, type2
)) {
3270 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3271 /* void * can match anything */
3272 } else if (dbt
== sbt
3273 && is_integer_btype(sbt
& VT_BTYPE
)
3274 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3275 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3276 /* Like GCC don't warn by default for merely changes
3277 in pointer target signedness. Do warn for different
3278 base types, though, in particular for unsigned enums
3279 and signed int targets. */
3281 tcc_warning("assignment from incompatible pointer type");
3286 tcc_warning("assignment discards qualifiers from pointer target type");
3292 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3293 tcc_warning("assignment makes integer from pointer without a cast");
3294 } else if (sbt
== VT_STRUCT
) {
3295 goto case_VT_STRUCT
;
3297 /* XXX: more tests */
3301 if (!is_compatible_unqualified_types(dt
, st
)) {
3303 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3304 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3305 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
/* NOTE(review): fragmentary extraction (target-conditional code and many
   interior lines are missing); only comments added.  Visible behavior:
   stores the value in vtop into the lvalue at vtop[-1], with special
   paths for delayed char/short casts, struct assignment via memcpy or
   memmove, bitfield stores (masking/shifting or packed store), and a
   two-word second-register store on 64-bit targets. */
3312 /* store vtop in lvalue pushed on stack */
3313 ST_FUNC
void vstore(void)
3315 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3317 ft
= vtop
[-1].type
.t
;
3318 sbt
= vtop
->type
.t
& VT_BTYPE
;
3319 dbt
= ft
& VT_BTYPE
;
3320 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3321 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3322 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3323 /* optimize char/short casts */
3324 delayed_cast
= VT_MUSTCAST
;
3325 vtop
->type
.t
= ft
& VT_TYPE
;
3326 /* XXX: factorize */
3327 if (ft
& VT_CONSTANT
)
3328 tcc_warning("assignment of read-only location");
3331 if (!(ft
& VT_BITFIELD
))
3332 gen_assign_cast(&vtop
[-1].type
);
3335 if (sbt
== VT_STRUCT
) {
3336 /* if structure, only generate pointer */
3337 /* structure assignment : generate memcpy */
3338 /* XXX: optimize if small size */
3339 size
= type_size(&vtop
->type
, &align
);
3343 vtop
->type
.t
= VT_PTR
;
3346 /* address of memcpy() */
3349 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3350 else if(!(align
& 3))
3351 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3354 /* Use memmove, rather than memcpy, as dest and src may be same: */
3355 vpush_global_sym(&func_old_type
, TOK_memmove
);
3360 vtop
->type
.t
= VT_PTR
;
3366 /* leave source on stack */
3367 } else if (ft
& VT_BITFIELD
) {
3368 /* bitfield store handling */
3370 /* save lvalue as expression result (example: s.b = s.a = n;) */
3371 vdup(), vtop
[-1] = vtop
[-2];
3373 bit_pos
= BIT_POS(ft
);
3374 bit_size
= BIT_SIZE(ft
);
3375 /* remove bit field info to avoid loops */
3376 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3378 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3379 gen_cast(&vtop
[-1].type
);
3380 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3383 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3384 if (r
== VT_STRUCT
) {
3385 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3386 store_packed_bf(bit_pos
, bit_size
);
3388 unsigned long long mask
= (1ULL << bit_size
) - 1;
3389 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3391 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3394 vpushi((unsigned)mask
);
3401 /* duplicate destination */
3404 /* load destination, mask and or with source */
3405 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3406 vpushll(~(mask
<< bit_pos
));
3408 vpushi(~((unsigned)mask
<< bit_pos
));
3413 /* ... and discard */
3416 } else if (dbt
== VT_VOID
) {
3419 #ifdef CONFIG_TCC_BCHECK
3420 /* bound check case */
3421 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3430 #ifdef TCC_TARGET_X86_64
3431 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3433 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3436 #elif defined TCC_TARGET_RISCV64
3437 if (dbt
== VT_LDOUBLE
)
3441 r
= gv(rc
); /* generate value */
3442 /* if lvalue was saved on stack, must read it */
3443 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3445 t
= get_reg(RC_INT
);
3451 sv
.r
= VT_LOCAL
| VT_LVAL
;
3452 sv
.c
.i
= vtop
[-1].c
.i
;
3454 vtop
[-1].r
= t
| VT_LVAL
;
3456 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3457 #ifdef TCC_TARGET_RISCV64
3458 if (dbt
== VT_QLONG
|| dbt
== VT_LDOUBLE
) {
3459 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= VT_LLONG
;
3461 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3462 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3464 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3465 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3467 vtop
[-1].type
.t
= load_type
;
3470 /* convert to int to increment easily */
3471 vtop
->type
.t
= addr_type
;
3477 vtop
[-1].type
.t
= load_type
;
3478 /* XXX: it works because r2 is spilled last ! */
3479 store(vtop
->r2
, vtop
- 1);
3485 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3486 vtop
->r
|= delayed_cast
;
3490 /* post defines POST/PRE add. c is the token ++ or -- */
3491 ST_FUNC
void inc(int post
, int c
)
3494 vdup(); /* save lvalue */
3496 gv_dup(); /* duplicate value */
3501 vpushi(c
- TOK_MID
);
3503 vstore(); /* store value */
3505 vpop(); /* if post op, return saved value */
3508 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3510 /* read the string */
3514 while (tok
== TOK_STR
) {
3515 /* XXX: add \0 handling too ? */
3516 cstr_cat(astr
, tokc
.str
.data
, -1);
3519 cstr_ccat(astr
, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.  (For non-powers of two it returns the position
   of the highest set bit, 1-based.) */
static int exact_log2p1(int i)
{
    int ret;

    if (!i)
        return 0;
    /* scan down 8, 4, 2, 1 bits at a time */
    for (ret = 1; i >= 1 << 8; ret += 8)
        i >>= 8;
    if (i >= 1 << 4)
        ret += 4, i >>= 4;
    if (i >= 1 << 2)
        ret += 2, i >>= 2;
    if (i >= 2)
        ret++;
    return ret;
}
/* NOTE(review): fragmentary extraction (the switch scaffolding and many
   attribute cases are missing); only comments added.  Visible behavior:
   parses __attribute__((...)) clauses into *ad: cleanup, section, alias,
   visibility, aligned, noreturn, calling conventions, __mode__,
   dllexport/dllimport/nodecorate, and skips unknown attributes with a
   warning. */
3540 /* Parse __attribute__((...)) GNUC extension. */
3541 static void parse_attribute(AttributeDef
*ad
)
3547 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3552 while (tok
!= ')') {
3553 if (tok
< TOK_IDENT
)
3554 expect("attribute name");
3566 tcc_warning("implicit declaration of function '%s'",
3567 get_tok_str(tok
, &tokc
));
3568 s
= external_global_sym(tok
, &func_old_type
);
3570 ad
->cleanup_func
= s
;
3578 parse_mult_str(&astr
, "section name");
3579 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3586 parse_mult_str(&astr
, "alias(\"target\")");
3587 ad
->alias_target
= /* save string as token, for later */
3588 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3592 case TOK_VISIBILITY1
:
3593 case TOK_VISIBILITY2
:
3595 parse_mult_str(&astr
,
3596 "visibility(\"default|hidden|internal|protected\")");
3597 if (!strcmp (astr
.data
, "default"))
3598 ad
->a
.visibility
= STV_DEFAULT
;
3599 else if (!strcmp (astr
.data
, "hidden"))
3600 ad
->a
.visibility
= STV_HIDDEN
;
3601 else if (!strcmp (astr
.data
, "internal"))
3602 ad
->a
.visibility
= STV_INTERNAL
;
3603 else if (!strcmp (astr
.data
, "protected"))
3604 ad
->a
.visibility
= STV_PROTECTED
;
3606 expect("visibility(\"default|hidden|internal|protected\")");
3615 if (n
<= 0 || (n
& (n
- 1)) != 0)
3616 tcc_error("alignment must be a positive power of two");
3621 ad
->a
.aligned
= exact_log2p1(n
);
3622 if (n
!= 1 << (ad
->a
.aligned
- 1))
3623 tcc_error("alignment of %d is larger than implemented", n
);
3635 /* currently, no need to handle it because tcc does not
3636 track unused objects */
3640 ad
->f
.func_noreturn
= 1;
3645 ad
->f
.func_call
= FUNC_CDECL
;
3650 ad
->f
.func_call
= FUNC_STDCALL
;
3652 #ifdef TCC_TARGET_I386
3662 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3668 ad
->f
.func_call
= FUNC_FASTCALLW
;
3675 ad
->attr_mode
= VT_LLONG
+ 1;
3678 ad
->attr_mode
= VT_BYTE
+ 1;
3681 ad
->attr_mode
= VT_SHORT
+ 1;
3685 ad
->attr_mode
= VT_INT
+ 1;
3688 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3695 ad
->a
.dllexport
= 1;
3697 case TOK_NODECORATE
:
3698 ad
->a
.nodecorate
= 1;
3701 ad
->a
.dllimport
= 1;
3704 if (tcc_state
->warn_unsupported
)
3705 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3706 /* skip parameters */
3708 int parenthesis
= 0;
3712 else if (tok
== ')')
3715 } while (parenthesis
&& tok
!= -1);
3728 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3732 while ((s
= s
->next
) != NULL
) {
3733 if ((s
->v
& SYM_FIELD
) &&
3734 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3735 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3736 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
/* NOTE(review): fragmentary extraction (much of the layout logic is
   missing); only comments added.  Visible behavior: lays out the fields
   of a struct/union, handling PCC (GCC-compatible) vs MS bitfield modes,
   attribute packed/aligned, #pragma pack, computes each field's offset
   and bit position, the aggregate size/alignment, and finally verifies
   bitfields can be accessed through their declared type, falling back to
   byte-wise access (auxtype = VT_STRUCT) otherwise. */
3748 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3750 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3751 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3752 int pcc
= !tcc_state
->ms_bitfields
;
3753 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3760 prevbt
= VT_STRUCT
; /* make it never match */
3765 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3766 if (f
->type
.t
& VT_BITFIELD
)
3767 bit_size
= BIT_SIZE(f
->type
.t
);
3770 size
= type_size(&f
->type
, &align
);
3771 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3774 if (pcc
&& bit_size
== 0) {
3775 /* in pcc mode, packing does not affect zero-width bitfields */
3778 /* in pcc mode, attribute packed overrides if set. */
3779 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3782 /* pragma pack overrides align if lesser and packs bitfields always */
3785 if (pragma_pack
< align
)
3786 align
= pragma_pack
;
3787 /* in pcc mode pragma pack also overrides individual align */
3788 if (pcc
&& pragma_pack
< a
)
3792 /* some individual align was specified */
3796 if (type
->ref
->type
.t
== VT_UNION
) {
3797 if (pcc
&& bit_size
>= 0)
3798 size
= (bit_size
+ 7) >> 3;
3803 } else if (bit_size
< 0) {
3805 c
+= (bit_pos
+ 7) >> 3;
3806 c
= (c
+ align
- 1) & -align
;
3815 /* A bit-field. Layout is more complicated. There are two
3816 options: PCC (GCC) compatible and MS compatible */
3818 /* In PCC layout a bit-field is placed adjacent to the
3819 preceding bit-fields, except if:
3821 - an individual alignment was given
3822 - it would overflow its base type container and
3823 there is no packing */
3824 if (bit_size
== 0) {
3826 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3828 } else if (f
->a
.aligned
) {
3830 } else if (!packed
) {
3832 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3833 if (ofs
> size
/ align
)
3837 /* in pcc mode, long long bitfields have type int if they fit */
3838 if (size
== 8 && bit_size
<= 32)
3839 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3841 while (bit_pos
>= align
* 8)
3842 c
+= align
, bit_pos
-= align
* 8;
3845 /* In PCC layout named bit-fields influence the alignment
3846 of the containing struct using the base types alignment,
3847 except for packed fields (which here have correct align). */
3848 if (f
->v
& SYM_FIRST_ANOM
3849 // && bit_size // ??? gcc on ARM/rpi does that
3854 bt
= f
->type
.t
& VT_BTYPE
;
3855 if ((bit_pos
+ bit_size
> size
* 8)
3856 || (bit_size
> 0) == (bt
!= prevbt
)
3858 c
= (c
+ align
- 1) & -align
;
3861 /* In MS bitfield mode a bit-field run always uses
3862 at least as many bits as the underlying type.
3863 To start a new run it's also required that this
3864 or the last bit-field had non-zero width. */
3865 if (bit_size
|| prev_bit_size
)
3868 /* In MS layout the records alignment is normally
3869 influenced by the field, except for a zero-width
3870 field at the start of a run (but by further zero-width
3871 fields it is again). */
3872 if (bit_size
== 0 && prevbt
!= bt
)
3875 prev_bit_size
= bit_size
;
3878 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3879 | (bit_pos
<< VT_STRUCT_SHIFT
);
3880 bit_pos
+= bit_size
;
3882 if (align
> maxalign
)
3886 printf("set field %s offset %-2d size %-2d align %-2d",
3887 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3888 if (f
->type
.t
& VT_BITFIELD
) {
3889 printf(" pos %-2d bits %-2d",
3902 c
+= (bit_pos
+ 7) >> 3;
3904 /* store size and alignment */
3905 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3909 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3910 /* can happen if individual align for some member was given. In
3911 this case MSVC ignores maxalign when aligning the size */
3916 c
= (c
+ a
- 1) & -a
;
3920 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3923 /* check whether we can access bitfields by their type */
3924 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3928 if (0 == (f
->type
.t
& VT_BITFIELD
))
3932 bit_size
= BIT_SIZE(f
->type
.t
);
3935 bit_pos
= BIT_POS(f
->type
.t
);
3936 size
= type_size(&f
->type
, &align
);
3937 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3940 /* try to access the field using a different type */
3941 c0
= -1, s
= align
= 1;
3943 px
= f
->c
* 8 + bit_pos
;
3944 cx
= (px
>> 3) & -align
;
3945 px
= px
- (cx
<< 3);
3948 s
= (px
+ bit_size
+ 7) >> 3;
3958 s
= type_size(&t
, &align
);
3962 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3963 /* update offset and bit position */
3966 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3967 | (bit_pos
<< VT_STRUCT_SHIFT
);
3971 printf("FIX field %s offset %-2d size %-2d align %-2d "
3972 "pos %-2d bits %-2d\n",
3973 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3974 cx
, s
, align
, px
, bit_size
);
3977 /* fall back to load/store single-byte wise */
3978 f
->auxtype
= VT_STRUCT
;
3980 printf("FIX field %s : load byte-wise\n",
3981 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
/* NOTE(review): fragmentary extraction (interior lines missing); only
   comments added.  Visible behavior: parses an enum/struct/union
   declaration or reference: looks up/pushes the tag symbol, parses enum
   constants (choosing the smallest fitting integral type afterwards), or
   parses member declarations including bitfields and anonymous members,
   then calls struct_layout() to finalize offsets. */
3987 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3988 static void struct_decl(CType
*type
, int u
)
3990 int v
, c
, size
, align
, flexible
;
3991 int bit_size
, bsize
, bt
;
3993 AttributeDef ad
, ad1
;
3996 memset(&ad
, 0, sizeof ad
);
3998 parse_attribute(&ad
);
4002 /* struct already defined ? return it */
4004 expect("struct/union/enum name");
4006 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4009 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4011 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4016 /* Record the original enum/struct/union token. */
4017 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4019 /* we put an undefined size for struct/union */
4020 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4021 s
->r
= 0; /* default alignment is zero as gcc */
4023 type
->t
= s
->type
.t
;
4029 tcc_error("struct/union/enum already defined");
4031 /* cannot be empty */
4032 /* non empty enums are not allowed */
4035 long long ll
= 0, pl
= 0, nl
= 0;
4038 /* enum symbols have static storage */
4039 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4043 expect("identifier");
4045 if (ss
&& !local_stack
)
4046 tcc_error("redefinition of enumerator '%s'",
4047 get_tok_str(v
, NULL
));
4051 ll
= expr_const64();
4053 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4055 *ps
= ss
, ps
= &ss
->next
;
4064 /* NOTE: we accept a trailing comma */
4069 /* set integral type of the enum */
4072 if (pl
!= (unsigned)pl
)
4073 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4075 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4076 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4077 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4079 /* set type for enum members */
4080 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4082 if (ll
== (int)ll
) /* default is int if it fits */
4084 if (t
.t
& VT_UNSIGNED
) {
4085 ss
->type
.t
|= VT_UNSIGNED
;
4086 if (ll
== (unsigned)ll
)
4089 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4090 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4095 while (tok
!= '}') {
4096 if (!parse_btype(&btype
, &ad1
)) {
4102 tcc_error("flexible array member '%s' not at the end of struct",
4103 get_tok_str(v
, NULL
));
4109 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4111 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4112 expect("identifier");
4114 int v
= btype
.ref
->v
;
4115 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4116 if (tcc_state
->ms_extensions
== 0)
4117 expect("identifier");
4121 if (type_size(&type1
, &align
) < 0) {
4122 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4125 tcc_error("field '%s' has incomplete type",
4126 get_tok_str(v
, NULL
));
4128 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4129 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4130 (type1
.t
& VT_STORAGE
))
4131 tcc_error("invalid type for '%s'",
4132 get_tok_str(v
, NULL
));
4136 bit_size
= expr_const();
4137 /* XXX: handle v = 0 case for messages */
4139 tcc_error("negative width in bit-field '%s'",
4140 get_tok_str(v
, NULL
));
4141 if (v
&& bit_size
== 0)
4142 tcc_error("zero width for bit-field '%s'",
4143 get_tok_str(v
, NULL
));
4144 parse_attribute(&ad1
);
4146 size
= type_size(&type1
, &align
);
4147 if (bit_size
>= 0) {
4148 bt
= type1
.t
& VT_BTYPE
;
4154 tcc_error("bitfields must have scalar type");
4156 if (bit_size
> bsize
) {
4157 tcc_error("width of '%s' exceeds its type",
4158 get_tok_str(v
, NULL
));
4159 } else if (bit_size
== bsize
4160 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4161 /* no need for bit fields */
4163 } else if (bit_size
== 64) {
4164 tcc_error("field width 64 not implemented");
4166 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4168 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4171 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4172 /* Remember we've seen a real field to check
4173 for placement of flexible array member. */
4176 /* If member is a struct or bit-field, enforce
4177 placing into the struct (as anonymous). */
4179 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4184 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4189 if (tok
== ';' || tok
== TOK_EOF
)
4196 parse_attribute(&ad
);
4197 struct_layout(type
, &ad
);
4202 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4204 merge_symattr(&ad
->a
, &s
->a
);
4205 merge_funcattr(&ad
->f
, &s
->f
);
4208 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4209 are added to the element type, copied because it could be a typedef. */
4210 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4212 while (type
->t
& VT_ARRAY
) {
4213 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4214 type
= &type
->ref
->type
;
4216 type
->t
|= qualifiers
;
/* NOTE(review): fragmentary extraction (the token-dispatch scaffolding is
   missing); only comments added.  Visible behavior: parses the basic type
   and storage-class specifiers (short/long/signed/unsigned, long double,
   _Alignas, enum/struct/union, qualifiers, __attribute__, typeof,
   typedef names) into *type / *ad, returning nonzero when a type was
   actually found. */
4219 /* return 0 if no type declaration. otherwise, return the basic type
4222 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4224 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4228 memset(ad
, 0, sizeof(AttributeDef
));
4238 /* currently, we really ignore extension */
4248 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4249 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4250 tmbt
: tcc_error("too many basic types");
4253 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4258 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4275 memset(&ad1
, 0, sizeof(AttributeDef
));
4276 if (parse_btype(&type1
, &ad1
)) {
4277 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4279 n
= 1 << (ad1
.a
.aligned
- 1);
4281 type_size(&type1
, &n
);
4284 if (n
<= 0 || (n
& (n
- 1)) != 0)
4285 tcc_error("alignment must be a positive power of two");
4288 ad
->a
.aligned
= exact_log2p1(n
);
4292 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4293 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4294 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4295 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4302 #ifdef TCC_TARGET_ARM64
4304 /* GCC's __uint128_t appears in some Linux header files. Make it a
4305 synonym for long double to get the size and alignment right. */
4316 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4317 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4325 struct_decl(&type1
, VT_ENUM
);
4328 type
->ref
= type1
.ref
;
4331 struct_decl(&type1
, VT_STRUCT
);
4334 struct_decl(&type1
, VT_UNION
);
4337 /* type modifiers */
4342 parse_btype_qualify(type
, VT_CONSTANT
);
4350 parse_btype_qualify(type
, VT_VOLATILE
);
4357 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4358 tcc_error("signed and unsigned modifier");
4371 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4372 tcc_error("signed and unsigned modifier");
4373 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4389 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4390 tcc_error("multiple storage classes");
4401 /* currently, no need to handle it because tcc does not
4402 track unused objects */
4405 /* GNUC attribute */
4406 case TOK_ATTRIBUTE1
:
4407 case TOK_ATTRIBUTE2
:
4408 parse_attribute(ad
);
4409 if (ad
->attr_mode
) {
4410 u
= ad
->attr_mode
-1;
4411 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4419 parse_expr_type(&type1
);
4420 /* remove all storage modifiers except typedef */
4421 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4423 sym_to_attr(ad
, type1
.ref
);
4429 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4433 if (tok
== ':' && !in_generic
) {
4434 /* ignore if it's a label */
4439 t
&= ~(VT_BTYPE
|VT_LONG
);
4440 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4441 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4442 type
->ref
= s
->type
.ref
;
4444 parse_btype_qualify(type
, t
);
4446 /* get attributes from typedef */
4455 if (tcc_state
->char_is_unsigned
) {
4456 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4459 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4460 bt
= t
& (VT_BTYPE
|VT_LONG
);
4462 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4463 #ifdef TCC_TARGET_PE
4464 if (bt
== VT_LDOUBLE
)
4465 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4471 /* convert a function parameter type (array to pointer and function to
4472 function pointer) */
4473 static inline void convert_parameter_type(CType
*pt
)
4475 /* remove const and volatile qualifiers (XXX: const could be used
4476 to indicate a const function parameter */
4477 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4478 /* array must be transformed to pointer according to ANSI C */
4480 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4485 ST_FUNC
void parse_asm_str(CString
*astr
)
4488 parse_mult_str(astr
, "string constant");
4491 /* Parse an asm label and return the token */
4492 static int asm_label_instr(void)
4498 parse_asm_str(&astr
);
4501 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4503 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
/* NOTE(review): fragmentary extraction (interior lines missing); only
   comments added.  Visible behavior: parses the postfix part of a
   declarator — a function parameter list '(...)' (pushing an anonymous
   symbol carrying the prototype) or an array declarator '[...]'
   (constant-sized, incomplete or VLA, with runtime size evaluation for
   VLAs) — recursing for further postfixes. */
4508 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4510 int n
, l
, t1
, arg_size
, align
, unused_align
;
4511 Sym
**plast
, *s
, *first
;
4516 /* function type, or recursive declarator (return if so) */
4518 if (td
&& !(td
& TYPE_ABSTRACT
))
4522 else if (parse_btype(&pt
, &ad1
))
4525 merge_attr (ad
, &ad1
);
4534 /* read param name and compute offset */
4535 if (l
!= FUNC_OLD
) {
4536 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4538 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4539 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4540 tcc_error("parameter declared as void");
4544 expect("identifier");
4545 pt
.t
= VT_VOID
; /* invalid type */
4549 convert_parameter_type(&pt
);
4550 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4551 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4557 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4562 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4563 tcc_error("invalid type");
4566 /* if no parameters, then old type prototype */
4569 /* NOTE: const is ignored in returned type as it has a special
4570 meaning in gcc / C++ */
4571 type
->t
&= ~VT_CONSTANT
;
4572 /* some ancient pre-K&R C allows a function to return an array
4573 and the array brackets to be put after the arguments, such
4574 that "int c()[]" means something like "int[] c()" */
4577 skip(']'); /* only handle simple "[]" */
4580 /* we push a anonymous symbol which will contain the function prototype */
4581 ad
->f
.func_args
= arg_size
;
4582 ad
->f
.func_type
= l
;
4583 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4589 } else if (tok
== '[') {
4590 int saved_nocode_wanted
= nocode_wanted
;
4591 /* array definition */
4594 /* XXX The optional type-quals and static should only be accepted
4595 in parameter decls. The '*' as well, and then even only
4596 in prototypes (not function defs). */
4598 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4613 if (!local_stack
|| (storage
& VT_STATIC
))
4614 vpushi(expr_const());
4616 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4617 length must always be evaluated, even under nocode_wanted,
4618 so that its size slot is initialized (e.g. under sizeof
4623 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4626 tcc_error("invalid array size");
4628 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4629 tcc_error("size of variable length array should be an integer");
4635 /* parse next post type */
4636 post_type(type
, ad
, storage
, 0);
4638 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4639 tcc_error("declaration of an array of functions");
4640 if ((type
->t
& VT_BTYPE
) == VT_VOID
4641 || type_size(type
, &unused_align
) < 0)
4642 tcc_error("declaration of an array of incomplete type elements");
4644 t1
|= type
->t
& VT_VLA
;
4648 tcc_error("need explicit inner array size in VLAs");
4649 loc
-= type_size(&int_type
, &align
);
4653 vla_runtime_type_size(type
, &align
);
4655 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4661 nocode_wanted
= saved_nocode_wanted
;
4663 /* we push an anonymous symbol which will contain the array
4665 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4666 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4672 /* Parse a type declarator (except basic type), and return the type
4673 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4674 expected. 'type' should contain the basic type. 'ad' is the
4675 attribute definition of the basic type. It can be modified by
4676 type_decl(). If this (possibly abstract) declarator is a pointer chain
4677 it returns the innermost pointed to type (equals *type, but is a different
4678 pointer), otherwise returns type itself, that's used for recursive calls. */
4679 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4682 int qualifiers
, storage
;
4684 /* recursive type, remove storage bits first, apply them later again */
4685 storage
= type
->t
& VT_STORAGE
;
4686 type
->t
&= ~VT_STORAGE
;
4689 while (tok
== '*') {
4697 qualifiers
|= VT_CONSTANT
;
4702 qualifiers
|= VT_VOLATILE
;
4708 /* XXX: clarify attribute handling */
4709 case TOK_ATTRIBUTE1
:
4710 case TOK_ATTRIBUTE2
:
4711 parse_attribute(ad
);
4715 type
->t
|= qualifiers
;
4717 /* innermost pointed to type is the one for the first derivation */
4718 ret
= pointed_type(type
);
4722 /* This is possibly a parameter type list for abstract declarators
4723 ('int ()'), use post_type for testing this. */
4724 if (!post_type(type
, ad
, 0, td
)) {
4725 /* It's not, so it's a nested declarator, and the post operations
4726 apply to the innermost pointed to type (if any). */
4727 /* XXX: this is not correct to modify 'ad' at this point, but
4728 the syntax is not clear */
4729 parse_attribute(ad
);
4730 post
= type_decl(type
, ad
, v
, td
);
4734 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4735 /* type identifier */
4740 if (!(td
& TYPE_ABSTRACT
))
4741 expect("identifier");
4744 post_type(post
, ad
, storage
, 0);
4745 parse_attribute(ad
);
4750 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4751 ST_FUNC
int lvalue_type(int t
)
4756 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4758 else if (bt
== VT_SHORT
)
4762 if (t
& VT_UNSIGNED
)
4763 r
|= VT_LVAL_UNSIGNED
;
4767 /* indirection with full error checking and bound check */
4768 ST_FUNC
void indir(void)
4770 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4771 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4775 if (vtop
->r
& VT_LVAL
)
4777 vtop
->type
= *pointed_type(&vtop
->type
);
4778 /* Arrays and functions are never lvalues */
4779 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4780 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4781 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4782 /* if bound checking, the referenced pointer must be checked */
4783 #ifdef CONFIG_TCC_BCHECK
4784 if (tcc_state
->do_bounds_check
)
4785 vtop
->r
|= VT_MUSTBOUND
;
4790 /* pass a parameter to a function and do type checking and casting */
4791 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4796 func_type
= func
->f
.func_type
;
4797 if (func_type
== FUNC_OLD
||
4798 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4799 /* default casting : only need to convert float to double */
4800 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4801 gen_cast_s(VT_DOUBLE
);
4802 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4803 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4804 type
.ref
= vtop
->type
.ref
;
4807 } else if (arg
== NULL
) {
4808 tcc_error("too many arguments to function");
4811 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4812 gen_assign_cast(&type
);
4816 /* parse an expression and return its type without any side effect. */
4817 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4826 /* parse an expression of the form '(type)' or '(expr)' and return its
4828 static void parse_expr_type(CType
*type
)
4834 if (parse_btype(type
, &ad
)) {
4835 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4837 expr_type(type
, gexpr
);
4842 static void parse_type(CType
*type
)
4847 if (!parse_btype(type
, &ad
)) {
4850 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4853 static void parse_builtin_params(int nc
, const char *args
)
4860 while ((c
= *args
++)) {
4864 case 'e': expr_eq(); continue;
4865 case 't': parse_type(&t
); vpush(&t
); continue;
4866 default: tcc_error("internal error"); break;
4874 ST_FUNC
void unary(void)
4876 int n
, t
, align
, size
, r
, sizeof_caller
;
4881 sizeof_caller
= in_sizeof
;
4884 /* XXX: GCC 2.95.3 does not generate a table although it should be
4892 #ifdef TCC_TARGET_PE
4893 t
= VT_SHORT
|VT_UNSIGNED
;
4901 vsetc(&type
, VT_CONST
, &tokc
);
4905 t
= VT_INT
| VT_UNSIGNED
;
4911 t
= VT_LLONG
| VT_UNSIGNED
;
4923 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4926 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4928 case TOK___FUNCTION__
:
4930 goto tok_identifier
;
4936 /* special function name identifier */
4937 len
= strlen(funcname
) + 1;
4938 /* generate char[len] type */
4943 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4944 if (!NODATA_WANTED
) {
4945 ptr
= section_ptr_add(data_section
, len
);
4946 memcpy(ptr
, funcname
, len
);
4952 #ifdef TCC_TARGET_PE
4953 t
= VT_SHORT
| VT_UNSIGNED
;
4959 /* string parsing */
4961 if (tcc_state
->char_is_unsigned
)
4962 t
= VT_BYTE
| VT_UNSIGNED
;
4964 if (tcc_state
->warn_write_strings
)
4969 memset(&ad
, 0, sizeof(AttributeDef
));
4970 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4975 if (parse_btype(&type
, &ad
)) {
4976 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4978 /* check ISOC99 compound literal */
4980 /* data is allocated locally by default */
4985 /* all except arrays are lvalues */
4986 if (!(type
.t
& VT_ARRAY
))
4987 r
|= lvalue_type(type
.t
);
4988 memset(&ad
, 0, sizeof(AttributeDef
));
4989 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4991 if (sizeof_caller
) {
4998 } else if (tok
== '{') {
4999 int saved_nocode_wanted
= nocode_wanted
;
5001 tcc_error("expected constant");
5002 /* save all registers */
5004 /* statement expression : we do not accept break/continue
5005 inside as GCC does. We do retain the nocode_wanted state,
5006 as statement expressions can't ever be entered from the
5007 outside, so any reactivation of code emission (from labels
5008 or loop heads) can be disabled again after the end of it. */
5010 nocode_wanted
= saved_nocode_wanted
;
5025 /* functions names must be treated as function pointers,
5026 except for unary '&' and sizeof. Since we consider that
5027 functions are not lvalues, we only have to handle it
5028 there and in function calls. */
5029 /* arrays can also be used although they are not lvalues */
5030 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5031 !(vtop
->type
.t
& VT_ARRAY
))
5033 mk_pointer(&vtop
->type
);
5039 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5040 gen_cast_s(VT_BOOL
);
5041 vtop
->c
.i
= !vtop
->c
.i
;
5042 } else if (vtop
->r
== VT_CMP
) {
5044 n
= vtop
->jfalse
, vtop
->jfalse
= vtop
->jtrue
, vtop
->jtrue
= n
;
5059 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5060 tcc_error("pointer not accepted for unary plus");
5061 /* In order to force cast, we add zero, except for floating point
5062 where we really need an noop (otherwise -0.0 will be transformed
5064 if (!is_float(vtop
->type
.t
)) {
5076 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5078 if (vtop
[1].r
& VT_SYM
)
5079 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5080 size
= type_size(&type
, &align
);
5081 if (s
&& s
->a
.aligned
)
5082 align
= 1 << (s
->a
.aligned
- 1);
5083 if (t
== TOK_SIZEOF
) {
5084 if (!(type
.t
& VT_VLA
)) {
5086 tcc_error("sizeof applied to an incomplete type");
5089 vla_runtime_type_size(&type
, &align
);
5094 vtop
->type
.t
|= VT_UNSIGNED
;
5097 case TOK_builtin_expect
:
5098 /* __builtin_expect is a no-op for now */
5099 parse_builtin_params(0, "ee");
5102 case TOK_builtin_types_compatible_p
:
5103 parse_builtin_params(0, "tt");
5104 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5105 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5106 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5110 case TOK_builtin_choose_expr
:
5137 case TOK_builtin_constant_p
:
5138 parse_builtin_params(1, "e");
5139 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5143 case TOK_builtin_frame_address
:
5144 case TOK_builtin_return_address
:
5150 if (tok
!= TOK_CINT
) {
5151 tcc_error("%s only takes positive integers",
5152 tok1
== TOK_builtin_return_address
?
5153 "__builtin_return_address" :
5154 "__builtin_frame_address");
5156 level
= (uint32_t)tokc
.i
;
5161 vset(&type
, VT_LOCAL
, 0); /* local frame */
5163 mk_pointer(&vtop
->type
);
5164 indir(); /* -> parent frame */
5166 if (tok1
== TOK_builtin_return_address
) {
5167 // assume return address is just above frame pointer on stack
5170 mk_pointer(&vtop
->type
);
5175 #ifdef TCC_TARGET_RISCV64
5176 case TOK_builtin_va_start
:
5177 parse_builtin_params(0, "ee");
5178 r
= vtop
->r
& VT_VALMASK
;
5182 tcc_error("__builtin_va_start expects a local variable");
5187 #ifdef TCC_TARGET_X86_64
5188 #ifdef TCC_TARGET_PE
5189 case TOK_builtin_va_start
:
5190 parse_builtin_params(0, "ee");
5191 r
= vtop
->r
& VT_VALMASK
;
5195 tcc_error("__builtin_va_start expects a local variable");
5197 vtop
->type
= char_pointer_type
;
5202 case TOK_builtin_va_arg_types
:
5203 parse_builtin_params(0, "t");
5204 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5211 #ifdef TCC_TARGET_ARM64
5212 case TOK___va_start
: {
5213 parse_builtin_params(0, "ee");
5217 vtop
->type
.t
= VT_VOID
;
5220 case TOK___va_arg
: {
5221 parse_builtin_params(0, "et");
5229 case TOK___arm64_clear_cache
: {
5230 parse_builtin_params(0, "ee");
5233 vtop
->type
.t
= VT_VOID
;
5237 /* pre operations */
5248 t
= vtop
->type
.t
& VT_BTYPE
;
5250 /* In IEEE negate(x) isn't subtract(0,x), but rather
5254 vtop
->c
.f
= -1.0 * 0.0;
5255 else if (t
== VT_DOUBLE
)
5256 vtop
->c
.d
= -1.0 * 0.0;
5258 vtop
->c
.ld
= -1.0 * 0.0;
5266 goto tok_identifier
;
5268 /* allow to take the address of a label */
5269 if (tok
< TOK_UIDENT
)
5270 expect("label identifier");
5271 s
= label_find(tok
);
5273 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5275 if (s
->r
== LABEL_DECLARED
)
5276 s
->r
= LABEL_FORWARD
;
5279 s
->type
.t
= VT_VOID
;
5280 mk_pointer(&s
->type
);
5281 s
->type
.t
|= VT_STATIC
;
5283 vpushsym(&s
->type
, s
);
5289 CType controlling_type
;
5290 int has_default
= 0;
5293 TokenString
*str
= NULL
;
5294 int saved_const_wanted
= const_wanted
;
5299 expr_type(&controlling_type
, expr_eq
);
5300 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5301 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5302 mk_pointer(&controlling_type
);
5303 const_wanted
= saved_const_wanted
;
5307 if (tok
== TOK_DEFAULT
) {
5309 tcc_error("too many 'default'");
5315 AttributeDef ad_tmp
;
5320 parse_btype(&cur_type
, &ad_tmp
);
5323 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5324 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5326 tcc_error("type match twice");
5336 skip_or_save_block(&str
);
5338 skip_or_save_block(NULL
);
5345 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5346 tcc_error("type '%s' does not match any association", buf
);
5348 begin_macro(str
, 1);
5357 // special qnan , snan and infinity values
5362 vtop
->type
.t
= VT_FLOAT
;
5367 goto special_math_val
;
5370 goto special_math_val
;
5377 expect("identifier");
5379 if (!s
|| IS_ASM_SYM(s
)) {
5380 const char *name
= get_tok_str(t
, NULL
);
5382 tcc_error("'%s' undeclared", name
);
5383 /* for simple function calls, we tolerate undeclared
5384 external reference to int() function */
5385 if (tcc_state
->warn_implicit_function_declaration
5386 #ifdef TCC_TARGET_PE
5387 /* people must be warned about using undeclared WINAPI functions
5388 (which usually start with uppercase letter) */
5389 || (name
[0] >= 'A' && name
[0] <= 'Z')
5392 tcc_warning("implicit declaration of function '%s'", name
);
5393 s
= external_global_sym(t
, &func_old_type
);
5397 /* A symbol that has a register is a local register variable,
5398 which starts out as VT_LOCAL value. */
5399 if ((r
& VT_VALMASK
) < VT_CONST
)
5400 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5402 vset(&s
->type
, r
, s
->c
);
5403 /* Point to s as backpointer (even without r&VT_SYM).
5404 Will be used by at least the x86 inline asm parser for
5410 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5411 vtop
->c
.i
= s
->enum_val
;
5416 /* post operations */
5418 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5421 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5422 int qualifiers
, cumofs
= 0;
5424 if (tok
== TOK_ARROW
)
5426 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5429 /* expect pointer on structure */
5430 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5431 expect("struct or union");
5432 if (tok
== TOK_CDOUBLE
)
5433 expect("field name");
5435 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5436 expect("field name");
5437 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5439 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5440 /* add field offset to pointer */
5441 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5442 vpushi(cumofs
+ s
->c
);
5444 /* change type to field type, and set to lvalue */
5445 vtop
->type
= s
->type
;
5446 vtop
->type
.t
|= qualifiers
;
5447 /* an array is never an lvalue */
5448 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5449 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5450 #ifdef CONFIG_TCC_BCHECK
5451 /* if bound checking, the referenced pointer must be checked */
5452 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5453 vtop
->r
|= VT_MUSTBOUND
;
5457 } else if (tok
== '[') {
5463 } else if (tok
== '(') {
5466 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5469 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5470 /* pointer test (no array accepted) */
5471 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5472 vtop
->type
= *pointed_type(&vtop
->type
);
5473 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5477 expect("function pointer");
5480 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5482 /* get return type */
5485 sa
= s
->next
; /* first parameter */
5486 nb_args
= regsize
= 0;
5488 /* compute first implicit argument if a structure is returned */
5489 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5490 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5491 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5492 &ret_align
, ®size
);
5493 if (ret_nregs
<= 0) {
5494 /* get some space for the returned structure */
5495 size
= type_size(&s
->type
, &align
);
5496 #ifdef TCC_TARGET_ARM64
5497 /* On arm64, a small struct is return in registers.
5498 It is much easier to write it to memory if we know
5499 that we are allowed to write some extra bytes, so
5500 round the allocated space up to a power of 2: */
5502 while (size
& (size
- 1))
5503 size
= (size
| (size
- 1)) + 1;
5505 loc
= (loc
- size
) & -align
;
5507 ret
.r
= VT_LOCAL
| VT_LVAL
;
5508 /* pass it as 'int' to avoid structure arg passing
5510 vseti(VT_LOCAL
, loc
);
5522 if (ret_nregs
> 0) {
5523 /* return in register */
5524 if (is_float(ret
.type
.t
)) {
5525 ret
.r
= reg_fret(ret
.type
.t
);
5526 #ifdef TCC_TARGET_X86_64
5527 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5529 #elif defined TCC_TARGET_RISCV64
5530 if ((ret
.type
.t
& VT_BTYPE
) == VT_LDOUBLE
)
5534 #ifndef TCC_TARGET_ARM64
5535 #ifndef TCC_TARGET_RISCV64
5536 #ifdef TCC_TARGET_X86_64
5537 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5539 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5551 gfunc_param_typed(s
, sa
);
5561 tcc_error("too few arguments to function");
5563 gfunc_call(nb_args
);
5565 if (ret_nregs
< 0) {
5566 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
5567 #ifdef TCC_TARGET_RISCV64
5568 arch_transfer_ret_regs(1);
5572 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5573 vsetc(&ret
.type
, r
, &ret
.c
);
5574 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5577 /* handle packed struct return */
5578 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5581 size
= type_size(&s
->type
, &align
);
5582 /* We're writing whole regs often, make sure there's enough
5583 space. Assume register size is power of 2. */
5584 if (regsize
> align
)
5586 loc
= (loc
- size
) & -align
;
5590 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5594 if (--ret_nregs
== 0)
5598 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5601 if (s
->f
.func_noreturn
)
5609 ST_FUNC
void expr_prod(void)
5614 while (tok
== '*' || tok
== '/' || tok
== '%') {
5622 ST_FUNC
void expr_sum(void)
5627 while (tok
== '+' || tok
== '-') {
5635 static void expr_shift(void)
5640 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5648 static void expr_cmp(void)
5653 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5654 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5662 static void expr_cmpeq(void)
5667 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5675 static void expr_and(void)
5678 while (tok
== '&') {
5685 static void expr_xor(void)
5688 while (tok
== '^') {
5695 static void expr_or(void)
5698 while (tok
== '|') {
5705 static int condition_3way(void);
5707 static void expr_landor(void(*e_fn
)(void), int e_op
, int i
)
5709 int t
= 0, cc
= 1, f
= 0, c
;
5711 c
= f
? i
: condition_3way();
5713 save_regs(1), cc
= 0;
5714 } else if (c
!= i
) {
5715 nocode_wanted
++, f
= 1;
5737 static void expr_land(void)
5740 if (tok
== TOK_LAND
)
5741 expr_landor(expr_or
, TOK_LAND
, 1);
5744 static void expr_lor(void)
5748 expr_landor(expr_land
, TOK_LOR
, 0);
5751 /* Assuming vtop is a value used in a conditional context
5752 (i.e. compared with zero) return 0 if it's false, 1 if
5753 true and -1 if it can't be statically determined. */
5754 static int condition_3way(void)
5757 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5758 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5760 gen_cast_s(VT_BOOL
);
5767 static int is_cond_bool(SValue
*sv
)
5769 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
5770 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
5771 return (unsigned)sv
->c
.i
< 2;
5772 if (sv
->r
== VT_CMP
)
5777 static void expr_cond(void)
5779 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5781 CType type
, type1
, type2
;
5787 c
= condition_3way();
5788 g
= (tok
== ':' && gnu_ext
);
5798 /* needed to avoid having different registers saved in
5801 if (is_float(vtop
->type
.t
)) {
5803 #ifdef TCC_TARGET_X86_64
5804 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5807 #elif defined TCC_TARGET_RISCV64
5808 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
)
5818 ncw_prev
= nocode_wanted
;
5825 if (c
< 0 && vtop
->r
== VT_CMP
) {
5831 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5832 mk_pointer(&vtop
->type
);
5834 sv
= *vtop
; /* save value to handle it later */
5835 vtop
--; /* no vpop so that FP stack is not flushed */
5845 nocode_wanted
= ncw_prev
;
5851 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
5852 if (sv
.r
== VT_CMP
) {
5863 nocode_wanted
= ncw_prev
;
5864 // tcc_warning("two conditions expr_cond");
5868 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5869 mk_pointer(&vtop
->type
);
5872 bt1
= t1
& VT_BTYPE
;
5874 bt2
= t2
& VT_BTYPE
;
5877 /* cast operands to correct type according to ISOC rules */
5878 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5879 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5880 } else if (is_float(bt1
) || is_float(bt2
)) {
5881 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5882 type
.t
= VT_LDOUBLE
;
5884 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5889 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5890 /* cast to biggest op */
5891 type
.t
= VT_LLONG
| VT_LONG
;
5892 if (bt1
== VT_LLONG
)
5894 if (bt2
== VT_LLONG
)
5896 /* convert to unsigned if it does not fit in a long long */
5897 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5898 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5899 type
.t
|= VT_UNSIGNED
;
5900 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5901 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5902 /* If one is a null ptr constant the result type
5904 if (is_null_pointer (vtop
)) type
= type1
;
5905 else if (is_null_pointer (&sv
)) type
= type2
;
5906 else if (bt1
!= bt2
)
5907 tcc_error("incompatible types in conditional expressions");
5909 CType
*pt1
= pointed_type(&type1
);
5910 CType
*pt2
= pointed_type(&type2
);
5911 int pbt1
= pt1
->t
& VT_BTYPE
;
5912 int pbt2
= pt2
->t
& VT_BTYPE
;
5913 int newquals
, copied
= 0;
5914 /* pointers to void get preferred, otherwise the
5915 pointed to types minus qualifs should be compatible */
5916 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5917 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5918 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5919 tcc_warning("pointer type mismatch in conditional expression\n");
5921 /* combine qualifs */
5922 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
5923 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
5926 /* copy the pointer target symbol */
5927 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5930 pointed_type(&type
)->t
|= newquals
;
5932 /* pointers to incomplete arrays get converted to
5933 pointers to completed ones if possible */
5934 if (pt1
->t
& VT_ARRAY
5935 && pt2
->t
& VT_ARRAY
5936 && pointed_type(&type
)->ref
->c
< 0
5937 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
5940 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5942 pointed_type(&type
)->ref
=
5943 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
5944 0, pointed_type(&type
)->ref
->c
);
5945 pointed_type(&type
)->ref
->c
=
5946 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
5949 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5950 /* XXX: test structure compatibility */
5951 type
= bt1
== VT_STRUCT
? type1
: type2
;
5953 /* integer operations */
5954 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5955 /* convert to unsigned if it does not fit in an integer */
5956 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5957 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5958 type
.t
|= VT_UNSIGNED
;
5960 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5961 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5962 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5964 /* now we convert second operand */
5968 mk_pointer(&vtop
->type
);
5970 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5975 if (is_float(type
.t
)) {
5977 #ifdef TCC_TARGET_X86_64
5978 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5981 #elif defined TCC_TARGET_RISCV64
5982 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
)
5985 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5986 /* for long longs, we use fixed registers to avoid having
5987 to handle a complicated move */
5997 nocode_wanted
= ncw_prev
;
5999 /* this is horrible, but we must also convert first
6005 mk_pointer(&vtop
->type
);
6007 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6013 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6024 static void expr_eq(void)
6030 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
6031 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
6032 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
6047 ST_FUNC
void gexpr(void)
6058 /* parse a constant expression and return value in vtop. */
6059 static void expr_const1(void)
6068 /* parse an integer constant and return its value. */
6069 static inline int64_t expr_const64(void)
6073 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6074 expect("constant expression");
6080 /* parse an integer constant and return its value.
6081 Complain if it doesn't fit 32bit (signed or unsigned). */
6082 ST_FUNC
int expr_const(void)
6085 int64_t wc
= expr_const64();
6087 if (c
!= wc
&& (unsigned)c
!= wc
)
6088 tcc_error("constant exceeds 32 bit");
6092 /* ------------------------------------------------------------------------- */
6093 /* return from function */
6095 #ifndef TCC_TARGET_ARM64
6096 static void gfunc_return(CType
*func_type
)
6098 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6099 CType type
, ret_type
;
6100 int ret_align
, ret_nregs
, regsize
;
6101 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6102 &ret_align
, ®size
);
6103 if (ret_nregs
< 0) {
6104 #ifdef TCC_TARGET_RISCV64
6105 arch_transfer_ret_regs(0);
6107 } else if (0 == ret_nregs
) {
6108 /* if returning structure, must copy it to implicit
6109 first pointer arg location */
6112 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6115 /* copy structure value to pointer */
6118 /* returning structure packed into registers */
6119 int r
, size
, addr
, align
;
6120 size
= type_size(func_type
,&align
);
6121 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6122 (vtop
->c
.i
& (ret_align
-1)))
6123 && (align
& (ret_align
-1))) {
6124 loc
= (loc
- size
) & -ret_align
;
6127 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6131 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6133 vtop
->type
= ret_type
;
6134 if (is_float(ret_type
.t
))
6135 r
= rc_fret(ret_type
.t
);
6146 if (--ret_nregs
== 0)
6148 /* We assume that when a structure is returned in multiple
6149 registers, their classes are consecutive values of the
6152 vtop
->c
.i
+= regsize
;
6156 } else if (is_float(func_type
->t
)) {
6157 gv(rc_fret(func_type
->t
));
6161 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6165 static void check_func_return(void)
6167 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6169 if (!strcmp (funcname
, "main")
6170 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6171 /* main returns 0 by default */
6173 gen_assign_cast(&func_vt
);
6174 gfunc_return(&func_vt
);
6176 tcc_warning("function might return no value: '%s'", funcname
);
6180 /* ------------------------------------------------------------------------- */
6183 static int case_cmp(const void *pa
, const void *pb
)
6185 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6186 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6187 return a
< b
? -1 : a
> b
;
6190 static void gtst_addr(int t
, int a
)
6192 gsym_addr(gvtst(0, t
), a
);
6195 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6199 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6216 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6218 gcase(base
, len
/2, bsym
);
6222 base
+= e
; len
-= e
;
6232 if (p
->v1
== p
->v2
) {
6234 gtst_addr(0, p
->sym
);
6244 gtst_addr(0, p
->sym
);
6248 *bsym
= gjmp(*bsym
);
6251 /* ------------------------------------------------------------------------- */
6252 /* __attribute__((cleanup(fn))) */
6254 static void try_call_scope_cleanup(Sym
*stop
)
6256 Sym
*cls
= cur_scope
->cl
.s
;
6258 for (; cls
!= stop
; cls
= cls
->ncl
) {
6259 Sym
*fs
= cls
->next
;
6260 Sym
*vs
= cls
->prev_tok
;
6262 vpushsym(&fs
->type
, fs
);
6263 vset(&vs
->type
, vs
->r
, vs
->c
);
6265 mk_pointer(&vtop
->type
);
6271 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6276 if (!cur_scope
->cl
.s
)
6279 /* search NCA of both cleanup chains given parents and initial depth */
6280 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6281 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6283 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6285 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6288 try_call_scope_cleanup(cc
);
6291 /* call 'func' for each __attribute__((cleanup(func))) */
6292 static void block_cleanup(struct scope
*o
)
6296 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6297 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6302 try_call_scope_cleanup(o
->cl
.s
);
6303 pcl
->jnext
= gjmp(0);
6305 goto remove_pending
;
6315 try_call_scope_cleanup(o
->cl
.s
);
6318 /* ------------------------------------------------------------------------- */
6321 static void vla_restore(int loc
)
6324 gen_vla_sp_restore(loc
);
6327 static void vla_leave(struct scope
*o
)
6329 if (o
->vla
.num
< cur_scope
->vla
.num
)
6330 vla_restore(o
->vla
.loc
);
6333 /* ------------------------------------------------------------------------- */
6336 void new_scope(struct scope
*o
)
6338 /* copy and link previous scope */
6340 o
->prev
= cur_scope
;
6343 /* record local declaration stack position */
6344 o
->lstk
= local_stack
;
6345 o
->llstk
= local_label_stack
;
6350 void prev_scope(struct scope
*o
, int is_expr
)
6354 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6355 block_cleanup(o
->prev
);
6357 /* pop locally defined labels */
6358 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6360 /* In the is_expr case (a statement expression is finished here),
6361 vtop might refer to symbols on the local_stack. Either via the
6362 type or via vtop->sym. We can't pop those nor any that in turn
6363 might be referred to. To make it easier we don't roll back
6364 any symbols in that case; some upper level call to block() will
6365 do that. We do have to remove such symbols from the lookup
6366 tables, though. sym_pop will do that. */
6368 /* pop locally defined symbols */
6369 sym_pop(&local_stack
, o
->lstk
, is_expr
);
6371 cur_scope
= o
->prev
;
6375 /* leave a scope via break/continue(/goto) */
6376 void leave_scope(struct scope
*o
)
6380 try_call_scope_cleanup(o
->cl
.s
);
6384 /* ------------------------------------------------------------------------- */
6385 /* call block from 'for do while' loops */
6387 static void lblock(int *bsym
, int *csym
)
6389 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6390 int *b
= co
->bsym
, *c
= co
->csym
;
6404 static void block(int is_expr
)
6406 int a
, b
, c
, d
, e
, t
;
6410 /* default return value is (void) */
6412 vtop
->type
.t
= VT_VOID
;
6424 if (tok
== TOK_ELSE
) {
6429 gsym(d
); /* patch else jmp */
6434 } else if (t
== TOK_WHILE
) {
6446 } else if (t
== '{') {
6450 /* handle local labels declarations */
6451 while (tok
== TOK_LABEL
) {
6454 if (tok
< TOK_UIDENT
)
6455 expect("label identifier");
6456 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6458 } while (tok
== ',');
6462 while (tok
!= '}') {
6471 prev_scope(&o
, is_expr
);
6473 if (0 == local_scope
&& !nocode_wanted
)
6474 check_func_return();
6477 } else if (t
== TOK_RETURN
) {
6479 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6481 gexpr(), gen_assign_cast(&func_vt
);
6482 leave_scope(root_scope
);
6484 gfunc_return(&func_vt
);
6488 tcc_warning("'return' with no value.");
6490 /* jump unless last stmt in top-level block */
6491 if (tok
!= '}' || local_scope
!= 1)
6495 } else if (t
== TOK_BREAK
) {
6497 if (!cur_scope
->bsym
)
6498 tcc_error("cannot break");
6499 if (!cur_switch
|| cur_scope
->bsym
!= cur_switch
->bsym
)
6500 leave_scope(loop_scope
);
6502 leave_scope(cur_switch
->scope
);
6503 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6506 } else if (t
== TOK_CONTINUE
) {
6508 if (!cur_scope
->csym
)
6509 tcc_error("cannot continue");
6510 leave_scope(loop_scope
);
6511 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6514 } else if (t
== TOK_FOR
) {
6520 /* c99 for-loop init decl? */
6521 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6522 /* no, regular for-loop init expr */
6550 } else if (t
== TOK_DO
) {
6564 } else if (t
== TOK_SWITCH
) {
6565 struct switch_t
*saved
, sw
;
6572 sw
.scope
= cur_scope
;
6580 switchval
= *vtop
--;
6583 b
= gjmp(0); /* jump to first case */
6585 a
= gjmp(a
); /* add implicit break */
6589 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6590 for (b
= 1; b
< sw
.n
; b
++)
6591 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6592 tcc_error("duplicate case value");
6594 /* Our switch table sorting is signed, so the compared
6595 value needs to be as well when it's 64bit. */
6596 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6597 switchval
.type
.t
&= ~VT_UNSIGNED
;
6600 d
= 0, gcase(sw
.p
, sw
.n
, &d
);
6603 gsym_addr(d
, sw
.def_sym
);
6609 dynarray_reset(&sw
.p
, &sw
.n
);
6612 } else if (t
== TOK_CASE
) {
6613 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6616 cr
->v1
= cr
->v2
= expr_const64();
6617 if (gnu_ext
&& tok
== TOK_DOTS
) {
6619 cr
->v2
= expr_const64();
6620 if (cr
->v2
< cr
->v1
)
6621 tcc_warning("empty case range");
6624 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6627 goto block_after_label
;
6629 } else if (t
== TOK_DEFAULT
) {
6632 if (cur_switch
->def_sym
)
6633 tcc_error("too many 'default'");
6634 cur_switch
->def_sym
= gind();
6637 goto block_after_label
;
6639 } else if (t
== TOK_GOTO
) {
6640 vla_restore(root_scope
->vla
.loc
);
6641 if (tok
== '*' && gnu_ext
) {
6645 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6649 } else if (tok
>= TOK_UIDENT
) {
6650 s
= label_find(tok
);
6651 /* put forward definition if needed */
6653 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6654 else if (s
->r
== LABEL_DECLARED
)
6655 s
->r
= LABEL_FORWARD
;
6657 if (s
->r
& LABEL_FORWARD
) {
6658 /* start new goto chain for cleanups, linked via label->next */
6659 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
6660 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
6661 pending_gotos
->prev_tok
= s
;
6662 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
6663 pending_gotos
->next
= s
;
6665 s
->jnext
= gjmp(s
->jnext
);
6667 try_call_cleanup_goto(s
->cleanupstate
);
6668 gjmp_addr(s
->jnext
);
6673 expect("label identifier");
6677 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
6681 if (tok
== ':' && t
>= TOK_UIDENT
) {
6686 if (s
->r
== LABEL_DEFINED
)
6687 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6688 s
->r
= LABEL_DEFINED
;
6690 Sym
*pcl
; /* pending cleanup goto */
6691 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
6693 sym_pop(&s
->next
, NULL
, 0);
6697 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
6700 s
->cleanupstate
= cur_scope
->cl
.s
;
6703 vla_restore(cur_scope
->vla
.loc
);
6704 /* we accept this, but it is a mistake */
6706 tcc_warning("deprecated use of label at end of compound statement");
6712 /* expression case */
6728 /* This skips over a stream of tokens containing balanced {} and ()
6729 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6730 with a '{'). If STR then allocates and stores the skipped tokens
6731 in *STR. This doesn't check if () and {} are nested correctly,
6732 i.e. "({)}" is accepted. */
/* NOTE(review): this extract elides several original lines; the fragments
   below are preserved byte-for-byte. */
6733 static void skip_or_save_block(TokenString
**str
)
/* remember whether we entered on a '{' so the matching '}' terminates us */
6735 int braces
= tok
== '{';
6738 *str
= tok_str_alloc();
/* consume until an outer separator, tracking nesting via 'level' */
6740 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6742 if (tok
== TOK_EOF
) {
/* EOF inside a nested region (or while saving) is an error */
6743 if (str
|| level
> 0)
6744 tcc_error("unexpected end of file");
6749 tok_str_add_tok(*str
);
6752 if (t
== '{' || t
== '(') {
6754 } else if (t
== '}' || t
== ')') {
6756 if (level
== 0 && braces
&& t
== '}')
/* terminate the saved token string: -1 marker then 0 */
6761 tok_str_add(*str
, -1);
6762 tok_str_add(*str
, 0);
6766 #define EXPR_CONST 1
/* Parse one initializer element. In EXPR_CONST mode the resulting value on
   vtop must be computable at link time, otherwise an error is raised.
   NOTE(review): several original lines are elided in this extract. */
6769 static void parse_init_elem(int expr_type
)
6771 int saved_global_expr
;
6774 /* compound literals must be allocated globally in this case */
6775 saved_global_expr
= global_expr
;
6778 global_expr
= saved_global_expr
;
6779 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6780 (compound literals). */
6781 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6782 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6783 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6784 #ifdef TCC_TARGET_PE
/* dllimport symbols have no link-time address, so reject them too */
6785 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6788 tcc_error("initializer element is not constant");
6796 /* put zeros for variable based init */
/* For a non-NULL section nothing is emitted (static storage is already
   zeroed); for locals this generates a runtime memset call (below). */
6797 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6800 /* nothing to do because globals are already set to zero */
6802 vpush_global_sym(&func_old_type
, TOK_memset
);
6804 #ifdef TCC_TARGET_ARM
6816 #define DIF_SIZE_ONLY 2
6817 #define DIF_HAVE_ELEM 4
6819 /* t is the array or struct type. c is the array or struct
6820 address. cur_field is the pointer to the current
6821 field, for arrays the 'c' member contains the current start
6822 index. 'flags' is as in decl_initializer.
6823 'al' contains the already initialized length of the
6824 current container (starting at c). This returns the new length of that. */
/* NOTE(review): many original lines are elided in this extract; the
   fragments below are preserved byte-for-byte. */
6825 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6826 Sym
**cur_field
, int flags
, int al
)
6829 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6830 unsigned long corig
= c
;
6835 if (flags
& DIF_HAVE_ELEM
)
/* GNU extension: 'fieldname:' designator syntax */
6838 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
6845 /* NOTE: we only support ranges for last designator */
6846 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6848 if (!(type
->t
& VT_ARRAY
))
6849 expect("array type");
6851 index
= index_last
= expr_const();
/* GNU extension: '[first ... last]' range designator */
6852 if (tok
== TOK_DOTS
&& gnu_ext
) {
6854 index_last
= expr_const();
6858 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6860 tcc_error("invalid index");
6862 (*cur_field
)->c
= index_last
;
6863 type
= pointed_type(type
);
6864 elem_size
= type_size(type
, &align
);
6865 c
+= index
* elem_size
;
6866 nb_elems
= index_last
- index
+ 1;
6873 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6874 expect("struct/union type");
6876 f
= find_field(type
, l
, &cumofs
);
6889 } else if (!gnu_ext
) {
/* no designator: advance to the next array element / struct field */
6894 if (type
->t
& VT_ARRAY
) {
6895 index
= (*cur_field
)->c
;
6896 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6897 tcc_error("index too large");
6898 type
= pointed_type(type
);
6899 c
+= index
* type_size(type
, &align
);
/* skip anonymous bitfield padding members */
6902 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6903 *cur_field
= f
= f
->next
;
6905 tcc_error("too many field init");
6910 /* must put zero in holes (note that doing it that way
6911 ensures that it even works with designators) */
6912 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
6913 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6914 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
6916 /* XXX: make it more general */
/* replicate the single parsed element across the designated range */
6917 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
6918 unsigned long c_end
;
6923 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6924 for (i
= 1; i
< nb_elems
; i
++) {
6925 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
/* static storage: copy the element bytes directly in the section */
6930 } else if (!NODATA_WANTED
) {
6931 c_end
= c
+ nb_elems
* elem_size
;
6932 if (c_end
> sec
->data_allocated
)
6933 section_realloc(sec
, c_end
);
6934 src
= sec
->data
+ c
;
6936 for(i
= 1; i
< nb_elems
; i
++) {
6938 memcpy(dst
, src
, elem_size
);
6942 c
+= nb_elems
* type_size(type
, &align
);
6948 /* store a value or an expression directly in global data or in local array */
/* NOTE(review): many original lines are elided in this extract; the
   fragments below are preserved byte-for-byte. */
6949 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6956 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6960 /* XXX: not portable */
6961 /* XXX: generate error if incorrect relocation */
6962 gen_assign_cast(&dtype
);
6963 bt
= type
->t
& VT_BTYPE
;
/* symbol-relative values can only be stored in a pointer-sized slot */
6965 if ((vtop
->r
& VT_SYM
)
6968 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6969 || (type
->t
& VT_BITFIELD
))
6970 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6972 tcc_error("initializer element is not computable at load time");
6974 if (NODATA_WANTED
) {
6979 size
= type_size(type
, &align
);
6980 section_reserve(sec
, c
+ size
);
6981 ptr
= sec
->data
+ c
;
6983 /* XXX: make code faster ? */
6984 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6985 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6986 /* XXX This rejects compound literals like
6987 '(void *){ptr}'. The problem is that '&sym' is
6988 represented the same way, which would be ruled out
6989 by the SYM_FIRST_ANOM check above, but also '"string"'
6990 in 'char *p = "string"' is represented the same
6991 with the type being VT_PTR and the symbol being an
6992 anonymous one. That is, there's no difference in vtop
6993 between '(void *){x}' and '&(void *){x}'. Ignore
6994 pointer typed entities here. Hopefully no real code
6995 will ever use compound literals with scalar type. */
6996 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6997 /* These come from compound literals, memcpy stuff over. */
7001 esym
= elfsym(vtop
->sym
);
7002 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7003 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
7005 /* We need to copy over all memory contents, and that
7006 includes relocations. Use the fact that relocs are
7007 created in order, so look from the end of relocs
7008 until we hit one before the copied region. */
7009 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
7010 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
7011 while (num_relocs
--) {
7013 if (rel
->r_offset
>= esym
->st_value
+ size
)
7015 if (rel
->r_offset
< esym
->st_value
)
7017 /* Note: if the same fields are initialized multiple
7018 times (possible with designators) then we possibly
7019 add multiple relocations for the same offset here.
7020 That would lead to wrong code, the last reloc needs
7021 to win. We clean this up later after the whole
7022 initializer is parsed. */
7023 put_elf_reloca(symtab_section
, sec
,
7024 c
+ rel
->r_offset
- esym
->st_value
,
7025 ELFW(R_TYPE
)(rel
->r_info
),
7026 ELFW(R_SYM
)(rel
->r_info
),
/* bitfield store: merge value bits into the target bytes */
7036 if (type
->t
& VT_BITFIELD
) {
7037 int bit_pos
, bit_size
, bits
, n
;
7038 unsigned char *p
, v
, m
;
7039 bit_pos
= BIT_POS(vtop
->type
.t
);
7040 bit_size
= BIT_SIZE(vtop
->type
.t
);
7041 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7042 bit_pos
&= 7, bits
= 0;
7047 v
= vtop
->c
.i
>> bits
<< bit_pos
;
7048 m
= ((1 << n
) - 1) << bit_pos
;
7049 *p
= (*p
& ~m
) | (v
& m
);
7050 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7054 /* XXX: when cross-compiling we assume that each type has the
7055 same representation on host and target, which is likely to
7056 be wrong in the case of long double */
/* _Bool normalization: any nonzero value becomes 1 */
7058 vtop
->c
.i
= vtop
->c
.i
!= 0;
7060 *(char *)ptr
|= vtop
->c
.i
;
7063 *(short *)ptr
|= vtop
->c
.i
;
7066 *(float*)ptr
= vtop
->c
.f
;
7069 *(double *)ptr
= vtop
->c
.d
;
7072 #if defined TCC_IS_NATIVE_387
7073 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7074 memcpy(ptr
, &vtop
->c
.ld
, 10);
7076 else if (sizeof (long double) == sizeof (double))
7077 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7079 else if (vtop
->c
.ld
== 0.0)
7083 if (sizeof(long double) == LDOUBLE_SIZE
)
7084 *(long double*)ptr
= vtop
->c
.ld
;
7085 else if (sizeof(double) == LDOUBLE_SIZE
)
7086 *(double *)ptr
= (double)vtop
->c
.ld
;
7088 tcc_error("can't cross compile long double constants");
7092 *(long long *)ptr
|= vtop
->c
.i
;
/* pointer-sized store; emit a relocation when the value is symbol-relative */
7099 addr_t val
= vtop
->c
.i
;
7101 if (vtop
->r
& VT_SYM
)
7102 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7104 *(addr_t
*)ptr
|= val
;
7106 if (vtop
->r
& VT_SYM
)
7107 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7108 *(addr_t
*)ptr
|= val
;
7114 int val
= vtop
->c
.i
;
7116 if (vtop
->r
& VT_SYM
)
7117 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7121 if (vtop
->r
& VT_SYM
)
7122 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
/* local (stack) case: generate a normal runtime store instead */
7131 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7138 /* 't' contains the type and storage info. 'c' is the offset of the
7139 object in section 'sec'. If 'sec' is NULL, it means stack based
7140 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7141 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7142 size only evaluation is wanted (only for arrays). */
/* NOTE(review): many original lines are elided in this extract; the
   fragments below are preserved byte-for-byte. */
7143 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
7146 int len
, n
, no_oblock
, nb
, i
;
7152 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7153 /* In case of strings we have special handling for arrays, so
7154 don't consume them as initializer value (which would commit them
7155 to some anonymous symbol). */
7156 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7157 !(flags
& DIF_SIZE_ONLY
)) {
7158 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7159 flags
|= DIF_HAVE_ELEM
;
/* scalar (or compatible aggregate) element: store it directly */
7162 if ((flags
& DIF_HAVE_ELEM
) &&
7163 !(type
->t
& VT_ARRAY
) &&
7164 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7165 The source type might have VT_CONSTANT set, which is
7166 of course assignable to non-const elements. */
7167 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7168 init_putv(type
, sec
, c
);
7169 } else if (type
->t
& VT_ARRAY
) {
7172 t1
= pointed_type(type
);
7173 size1
= type_size(t1
, &align1
);
7176 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7179 tcc_error("character array initializer must be a literal,"
7180 " optionally enclosed in braces");
7185 /* only parse strings here if correct type (otherwise: handle
7186 them as ((w)char *) expressions */
7187 if ((tok
== TOK_LSTR
&&
7188 #ifdef TCC_TARGET_PE
7189 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7191 (t1
->t
& VT_BTYPE
) == VT_INT
7193 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7195 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7198 /* compute maximum number of chars wanted */
7200 cstr_len
= tokc
.str
.size
;
7202 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
7205 if (n
>= 0 && nb
> (n
- len
))
7207 if (!(flags
& DIF_SIZE_ONLY
)) {
7209 tcc_warning("initializer-string for array is too long");
7210 /* in order to go faster for common case (char
7211 string in global variable, we handle it
7213 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
7215 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
7219 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
7221 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
7223 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
7230 /* only add trailing zero if enough storage (no
7231 warning in this case since it is standard) */
7232 if (n
< 0 || len
< n
) {
7233 if (!(flags
& DIF_SIZE_ONLY
)) {
7235 init_putv(t1
, sec
, c
+ (len
* size1
));
/* brace/designator-driven element loop */
7246 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7247 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7248 flags
&= ~DIF_HAVE_ELEM
;
7249 if (type
->t
& VT_ARRAY
) {
7251 /* special test for multi dimensional arrays (may not
7252 be strictly correct if designators are used at the
7254 if (no_oblock
&& len
>= n
*size1
)
/* a union initializes only its first named member */
7257 if (s
->type
.t
== VT_UNION
)
7261 if (no_oblock
&& f
== NULL
)
7270 /* put zeros at the end */
7271 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7272 init_putz(sec
, c
+ len
, n
*size1
- len
);
7275 /* patch type size if needed, which happens only for array types */
7277 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7278 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7281 if ((flags
& DIF_FIRST
) || tok
== '{') {
7289 } else if (tok
== '{') {
7290 if (flags
& DIF_HAVE_ELEM
)
7293 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7295 } else if ((flags
& DIF_SIZE_ONLY
)) {
7296 /* If we supported only ISO C we wouldn't have to accept calling
7297 this on anything than an array if DIF_SIZE_ONLY (and even then
7298 only on the outermost level, so no recursion would be needed),
7299 because initializing a flex array member isn't supported.
7300 But GNU C supports it, so we need to recurse even into
7301 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7302 /* just skip expression */
7303 skip_or_save_block(NULL
);
7305 if (!(flags
& DIF_HAVE_ELEM
)) {
7306 /* This should happen only when we haven't parsed
7307 the init element above for fear of committing a
7308 string constant to memory too early. */
7309 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7310 expect("string constant");
7311 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7313 init_putv(type
, sec
, c
);
7317 /* parse an initializer for type 't' if 'has_init' is non zero, and
7318 allocate space in local or global data space ('r' is either
7319 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7320 variable 'v' of scope 'scope' is declared before initializers
7321 are parsed. If 'v' is zero, then a reference to the new object
7322 is put in the value stack. If 'has_init' is 2, a special parsing
7323 is done to handle string constants. */
/* NOTE(review): many original lines are elided in this extract; the
   fragments below are preserved byte-for-byte. */
7324 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7325 int has_init
, int v
, int scope
)
7327 int size
, align
, addr
;
7328 TokenString
*init_str
= NULL
;
7331 Sym
*flexible_array
;
7333 int saved_nocode_wanted
= nocode_wanted
;
7334 #ifdef CONFIG_TCC_BCHECK
7338 /* Always allocate static or global variables */
7339 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7340 nocode_wanted
|= 0x80000000;
7342 #ifdef CONFIG_TCC_BCHECK
7343 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
/* detect a trailing flexible array member (unsized array as last field) */
7346 flexible_array
= NULL
;
7347 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7348 Sym
*field
= type
->ref
->next
;
7351 field
= field
->next
;
7352 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7353 flexible_array
= field
;
7357 size
= type_size(type
, &align
);
7358 /* If unknown size, we must evaluate it before
7359 evaluating initializers because
7360 initializers can generate global data too
7361 (e.g. string pointers or ISOC99 compound
7362 literals). It also simplifies local
7363 initializers handling */
7364 if (size
< 0 || (flexible_array
&& has_init
)) {
7366 tcc_error("unknown type size");
7367 /* get all init string */
7368 if (has_init
== 2) {
7369 init_str
= tok_str_alloc();
7370 /* only get strings */
7371 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7372 tok_str_add_tok(init_str
);
7375 tok_str_add(init_str
, -1);
7376 tok_str_add(init_str
, 0);
7378 skip_or_save_block(&init_str
);
/* first pass over the saved tokens computes the size only */
7383 begin_macro(init_str
, 1);
7385 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7386 /* prepare second initializer parsing */
7387 macro_ptr
= init_str
->str
;
7390 /* if still unknown size, error */
7391 size
= type_size(type
, &align
);
7393 tcc_error("unknown type size");
7395 /* If there's a flex member and it was used in the initializer
7397 if (flexible_array
&&
7398 flexible_array
->type
.ref
->c
> 0)
7399 size
+= flexible_array
->type
.ref
->c
7400 * pointed_size(&flexible_array
->type
);
7401 /* take into account specified alignment if bigger */
7402 if (ad
->a
.aligned
) {
7403 int speca
= 1 << (ad
->a
.aligned
- 1);
7406 } else if (ad
->a
.packed
) {
7410 if (!v
&& NODATA_WANTED
)
7411 size
= 0, align
= 1;
/* stack-based (local) allocation path */
7413 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7415 #ifdef CONFIG_TCC_BCHECK
7416 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
/* grow the frame downward, aligned */
7420 loc
= (loc
- size
) & -align
;
7422 #ifdef CONFIG_TCC_BCHECK
7423 /* handles bounds */
7424 /* XXX: currently, since we do only one pass, we cannot track
7425 '&' operators, so we add only arrays */
7426 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7428 /* add padding between regions */
7430 /* then add local bound info */
7431 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7432 bounds_ptr
[0] = addr
;
7433 bounds_ptr
[1] = size
;
7437 /* local variable */
7438 #ifdef CONFIG_TCC_ASM
7439 if (ad
->asm_label
) {
7440 int reg
= asm_parse_regvar(ad
->asm_label
);
7442 r
= (r
& ~VT_VALMASK
) | reg
;
7445 sym
= sym_push(v
, type
, r
, addr
);
/* record cleanup attribute so the handler runs on scope exit */
7446 if (ad
->cleanup_func
) {
7447 Sym
*cls
= sym_push2(&all_cleanups
,
7448 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7449 cls
->prev_tok
= sym
;
7450 cls
->next
= ad
->cleanup_func
;
7451 cls
->ncl
= cur_scope
->cl
.s
;
7452 cur_scope
->cl
.s
= cls
;
7457 /* push local reference */
7458 vset(type
, r
, addr
);
7461 if (v
&& scope
== VT_CONST
) {
7462 /* see if the symbol was already defined */
7465 patch_storage(sym
, ad
, type
);
7466 /* we accept several definitions of the same global variable. */
7467 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7472 /* allocate symbol in corresponding section */
7477 else if (tcc_state
->nocommon
)
7482 addr
= section_add(sec
, size
, align
);
7483 #ifdef CONFIG_TCC_BCHECK
7484 /* add padding if bound check */
7486 section_add(sec
, 1, 1);
7489 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7490 sec
= common_section
;
7495 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7496 patch_storage(sym
, ad
, NULL
);
7498 /* update symbol definition */
7499 put_extern_sym(sym
, sec
, addr
, size
);
7501 /* push global reference */
7502 vpush_ref(type
, sec
, addr
, size
);
7507 #ifdef CONFIG_TCC_BCHECK
7508 /* handles bounds now because the symbol must be defined
7509 before for the relocation */
7513 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7514 /* then add global bound info */
7515 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7516 bounds_ptr
[0] = 0; /* relocated */
7517 bounds_ptr
[1] = size
;
/* variable length array: size is computed and allocated at runtime */
7522 if (type
->t
& VT_VLA
) {
7528 /* save current stack pointer */
7529 if (root_scope
->vla
.loc
== 0) {
7530 struct scope
*v
= cur_scope
;
7531 gen_vla_sp_save(loc
-= PTR_SIZE
);
7532 do v
->vla
.loc
= loc
; while ((v
= v
->prev
));
7535 vla_runtime_type_size(type
, &a
);
7536 gen_vla_alloc(type
, a
);
7537 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7538 /* on _WIN64, because of the function args scratch area, the
7539 result of alloca differs from RSP and is returned in RAX. */
7540 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7542 gen_vla_sp_save(addr
);
7543 cur_scope
->vla
.loc
= addr
;
7544 cur_scope
->vla
.num
++;
7546 } else if (has_init
) {
7547 size_t oldreloc_offset
= 0;
7548 if (sec
&& sec
->reloc
)
7549 oldreloc_offset
= sec
->reloc
->data_offset
;
7550 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
/* drop superseded relocations created by repeated designated inits */
7551 if (sec
&& sec
->reloc
)
7552 squeeze_multi_relocs(sec
, oldreloc_offset
);
7553 /* patch flexible array member size back to -1, */
7554 /* for possible subsequent similar declarations */
7556 flexible_array
->type
.ref
->c
= -1;
7560 /* restore parse state if needed */
7566 nocode_wanted
= saved_nocode_wanted
;
7569 /* parse a function defined by symbol 'sym' and generate its code in
7570 'cur_text_section' */
/* NOTE(review): several original lines are elided in this extract. */
7571 static void gen_function(Sym
*sym
)
7573 /* Initialize VLA state */
7574 struct scope f
= { 0 };
7575 cur_scope
= root_scope
= &f
;
7578 ind
= cur_text_section
->data_offset
;
/* honor __attribute__((aligned(n))) by nop-padding to the boundary */
7579 if (sym
->a
.aligned
) {
7580 size_t newoff
= section_add(cur_text_section
, 0,
7581 1 << (sym
->a
.aligned
- 1));
7582 gen_fill_nops(newoff
- ind
);
7584 /* NOTE: we patch the symbol size later */
7585 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7587 funcname
= get_tok_str(sym
->v
, NULL
);
7590 /* put debug symbol */
7591 tcc_debug_funcstart(tcc_state
, sym
);
7592 /* push a dummy symbol to enable local sym storage */
7593 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7594 local_scope
= 1; /* for function parameters */
7595 gfunc_prolog(&sym
->type
);
7598 clear_temp_local_var_list();
7603 cur_text_section
->data_offset
= ind
;
7604 /* reset local stack */
7605 sym_pop(&local_stack
, NULL
, 0);
7607 label_pop(&global_label_stack
, NULL
, 0);
7608 sym_pop(&all_cleanups
, NULL
, 0);
7609 /* patch symbol size */
7610 elfsym(sym
)->st_size
= ind
- func_ind
;
7611 /* end of function */
7612 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7613 /* It's better to crash than to generate wrong code */
7614 cur_text_section
= NULL
;
7615 funcname
= ""; /* for safety */
7616 func_vt
.t
= VT_VOID
; /* for safety */
7617 func_var
= 0; /* for safety */
7618 ind
= 0; /* for safety */
/* suppress code generation between functions */
7619 nocode_wanted
= 0x80000000;
/* Generate code for recorded static inline functions that turned out to be
   referenced (or forced non-inline), replaying their saved token streams.
   Repeats until a whole pass generates nothing new, since generating one
   inline function may reference another.
   NOTE(review): several original lines are elided in this extract. */
7623 static void gen_inline_functions(TCCState
*s
)
7626 int inline_generated
, i
;
7627 struct InlineFunc
*fn
;
7629 tcc_open_bf(s
, ":inline:", 0);
7630 /* iterate while inline functions are referenced */
7632 inline_generated
= 0;
7633 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7634 fn
= s
->inline_fns
[i
];
7636 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
7637 /* the function was used or forced (and then not internal):
7638 generate its code and convert it to a normal function */
/* restore the original filename for diagnostics/debug info */
7641 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7642 begin_macro(fn
->func_str
, 1);
7644 cur_text_section
= text_section
;
7648 inline_generated
= 1;
7651 } while (inline_generated
);
/* Release the saved token streams and the inline-function array itself.
   NOTE(review): a few original lines are elided in this extract. */
7655 ST_FUNC
void free_inline_functions(TCCState
*s
)
7658 /* free tokens of unused inline functions */
7659 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7660 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7662 tok_str_free(fn
->func_str
);
7664 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7667 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7668 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* NOTE(review): many original lines are elided in this extract; the
   fragments below are preserved byte-for-byte. */
7669 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7674 AttributeDef ad
, adbase
;
7677 if (tok
== TOK_STATIC_ASSERT
) {
7685 tcc_error("%s", get_tok_str(tok
, &tokc
));
/* no base type parsed: handle the non-declaration cases */
7691 if (!parse_btype(&btype
, &adbase
)) {
7692 if (is_for_loop_init
)
7694 /* skip redundant ';' if not in old parameter decl scope */
7695 if (tok
== ';' && l
!= VT_CMP
) {
7701 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7702 /* global asm block */
7706 if (tok
>= TOK_UIDENT
) {
7707 /* special test for old K&R protos without explicit int
7708 type. Only accepted when defining global data */
7712 expect("declaration");
7717 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7718 int v
= btype
.ref
->v
;
7719 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7720 tcc_warning("unnamed struct/union that defines no instances");
7724 if (IS_ENUM(btype
.t
)) {
7729 while (1) { /* iterate thru each declaration */
7731 /* If the base type itself was an array type of unspecified
7732 size (like in 'typedef int arr[]; arr x = {1};') then
7733 we will overwrite the unknown size by the real one for
7734 this decl. We need to unshare the ref symbol holding
7736 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7737 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7740 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed declarator */
7744 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7745 printf("type = '%s'\n", buf
);
7748 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7749 /* if old style function prototype, we accept a
7752 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7753 decl0(VT_CMP
, 0, sym
);
7754 /* always compile 'extern inline' */
7755 if (type
.t
& VT_EXTERN
)
7756 type
.t
&= ~VT_INLINE
;
7759 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7760 ad
.asm_label
= asm_label_instr();
7761 /* parse one last attribute list, after asm label */
7762 parse_attribute(&ad
);
7764 /* gcc does not allow __asm__("label") with function definition,
7771 #ifdef TCC_TARGET_PE
7772 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7773 if (type
.t
& VT_STATIC
)
7774 tcc_error("cannot have dll linkage with static");
7775 if (type
.t
& VT_TYPEDEF
) {
7776 tcc_warning("'%s' attribute ignored for typedef",
7777 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
7778 (ad
.a
.dllexport
= 0, "dllexport"));
7779 } else if (ad
.a
.dllimport
) {
7780 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7783 type
.t
|= VT_EXTERN
;
7789 tcc_error("cannot use local functions");
7790 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7791 expect("function definition");
7793 /* reject abstract declarators in function definition
7794 make old style params without decl have int type */
7796 while ((sym
= sym
->next
) != NULL
) {
7797 if (!(sym
->v
& ~SYM_FIELD
))
7798 expect("identifier");
7799 if (sym
->type
.t
== VT_VOID
)
7800 sym
->type
= int_type
;
7803 /* put function symbol */
7804 type
.t
&= ~VT_EXTERN
;
7805 sym
= external_sym(v
, &type
, 0, &ad
);
7806 /* static inline functions are just recorded as a kind
7807 of macro. Their code will be emitted at the end of
7808 the compilation unit only if they are used */
7809 if (sym
->type
.t
& VT_INLINE
) {
7810 struct InlineFunc
*fn
;
7811 const char *filename
;
7813 filename
= file
? file
->filename
: "";
/* struct has a flexible array member for the filename string */
7814 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7815 strcpy(fn
->filename
, filename
);
7817 skip_or_save_block(&fn
->func_str
);
7818 dynarray_add(&tcc_state
->inline_fns
,
7819 &tcc_state
->nb_inline_fns
, fn
);
7821 /* compute text section */
7822 cur_text_section
= ad
.section
;
7823 if (!cur_text_section
)
7824 cur_text_section
= text_section
;
7830 /* find parameter in function parameter list */
7831 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7832 if ((sym
->v
& ~SYM_FIELD
) == v
)
7834 tcc_error("declaration for parameter '%s' but no such parameter",
7835 get_tok_str(v
, NULL
));
7837 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7838 tcc_error("storage class specified for '%s'",
7839 get_tok_str(v
, NULL
));
7840 if (sym
->type
.t
!= VT_VOID
)
7841 tcc_error("redefinition of parameter '%s'",
7842 get_tok_str(v
, NULL
));
7843 convert_parameter_type(&type
);
7845 } else if (type
.t
& VT_TYPEDEF
) {
7846 /* save typedefed type */
7847 /* XXX: test storage specifiers ? */
7849 if (sym
&& sym
->sym_scope
== local_scope
) {
7850 if (!is_compatible_types(&sym
->type
, &type
)
7851 || !(sym
->type
.t
& VT_TYPEDEF
))
7852 tcc_error("incompatible redefinition of '%s'",
7853 get_tok_str(v
, NULL
));
7856 sym
= sym_push(v
, &type
, 0, 0);
7860 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7861 && !(type
.t
& VT_EXTERN
)) {
7862 tcc_error("declaration of void object");
7865 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7866 /* external function definition */
7867 /* specific case for func_call attribute */
7869 } else if (!(type
.t
& VT_ARRAY
)) {
7870 /* not lvalue if array */
7871 r
|= lvalue_type(type
.t
);
7873 has_init
= (tok
== '=');
7874 if (has_init
&& (type
.t
& VT_VLA
))
7875 tcc_error("variable length array cannot be initialized");
7876 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
7877 || (type
.t
& VT_BTYPE
) == VT_FUNC
7878 /* as with GCC, uninitialized global arrays with no size
7879 are considered extern: */
7880 || ((type
.t
& VT_ARRAY
) && !has_init
7881 && l
== VT_CONST
&& type
.ref
->c
< 0)
7883 /* external variable or function */
7884 type
.t
|= VT_EXTERN
;
7885 sym
= external_sym(v
, &type
, r
, &ad
);
7886 if (ad
.alias_target
) {
7889 alias_target
= sym_find(ad
.alias_target
);
7890 esym
= elfsym(alias_target
);
7892 tcc_error("unsupported forward __alias__ attribute");
7893 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7896 if (type
.t
& VT_STATIC
)
7902 else if (l
== VT_CONST
)
7903 /* uninitialized global variables may be overridden */
7904 type
.t
|= VT_EXTERN
;
7905 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7909 if (is_for_loop_init
)
/* NOTE(review): top-level declaration entry point; the body is elided in
   this extract — presumably it delegates to decl0(l, 0, NULL). Confirm
   against the full source before relying on this. */
7921 static void decl(int l
)
7926 /* ------------------------------------------------------------------------- */
7929 /* ------------------------------------------------------------------------- */