2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
28 anon_sym: anonymous symbol index
30 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
32 ST_DATA Sym
*sym_free_first
;
33 ST_DATA
void **sym_pools
;
34 ST_DATA
int nb_sym_pools
;
36 ST_DATA Sym
*global_stack
;
37 ST_DATA Sym
*local_stack
;
38 ST_DATA Sym
*define_stack
;
39 ST_DATA Sym
*global_label_stack
;
40 ST_DATA Sym
*local_label_stack
;
42 static Sym
*all_cleanups
, *pending_gotos
;
43 static int local_scope
;
45 static int in_generic
;
46 static int section_sym
;
48 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
50 ST_DATA
int const_wanted
; /* true if constant wanted */
51 ST_DATA
int nocode_wanted
; /* no code generation wanted */
52 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
53 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 /* Automagical code suppression ----> */
56 #define CODE_OFF() (nocode_wanted |= 0x20000000)
57 #define CODE_ON() (nocode_wanted &= ~0x20000000)
59 /* Clear 'nocode_wanted' at label if it was used */
60 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
61 static int gind(void) { CODE_ON(); return ind
; }
63 /* Set 'nocode_wanted' after unconditional jumps */
/* Emit an unconditional jump to known address 't', then suppress code
   generation — anything following an unconditional jump is unreachable
   until the next label (gsym/gind) turns it back on.
   NOTE: the jump must be emitted *before* CODE_OFF(). */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit a forward unconditional jump chained onto 't', suppress the code
   that follows, and return the updated jump chain for later gsym(). */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
67 /* These are #undef'd at the end of this file */
68 #define gjmp_addr gjmp_addr_acs
72 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
73 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
74 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
76 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
77 ST_DATA
const char *funcname
;
80 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
82 ST_DATA
struct switch_t
{
86 } **p
; int n
; /* list of case ranges */
87 int def_sym
; /* default symbol */
90 } *cur_switch
; /* current switch */
92 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
93 /*list of temporary local variables on the stack in current function. */
94 ST_DATA
struct temp_local_variable
{
95 int location
; //offset on stack. Svalue.c.i
98 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
99 short nb_temp_local_vars
;
101 static struct scope
{
103 struct { int loc
, num
; } vla
;
104 struct { Sym
*s
; int n
; } cl
;
107 } *cur_scope
, *loop_scope
, *root_scope
;
109 /* ------------------------------------------------------------------------- */
111 static void gen_cast(CType
*type
);
112 static void gen_cast_s(int t
);
113 static inline CType
*pointed_type(CType
*type
);
114 static int is_compatible_types(CType
*type1
, CType
*type2
);
115 static int parse_btype(CType
*type
, AttributeDef
*ad
);
116 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
117 static void parse_expr_type(CType
*type
);
118 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
119 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
120 static void block(int is_expr
);
121 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
122 static void decl(int l
);
123 static int decl0(int l
, int is_for_loop_init
, Sym
*);
124 static void expr_eq(void);
125 static void vla_runtime_type_size(CType
*type
, int *a
);
126 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
127 static inline int64_t expr_const64(void);
128 static void vpush64(int ty
, unsigned long long v
);
129 static void vpush(CType
*type
);
130 static int gvtst(int inv
, int t
);
131 static void gen_inline_functions(TCCState
*s
);
132 static void skip_or_save_block(TokenString
**str
);
133 static void gv_dup(void);
134 static int get_temp_local_var(int size
,int align
);
135 static void clear_temp_local_var_list();
137 ST_INLN
int is_float(int t
)
141 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
144 /* we use our own 'finite' function to avoid potential problems with
145 non standard math libs */
146 /* XXX: endianness dependent */
147 ST_FUNC
int ieee_finite(double d
)
150 memcpy(p
, &d
, sizeof(double));
151 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
154 /* compiling intel long double natively */
155 #if (defined __i386__ || defined __x86_64__) \
156 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
157 # define TCC_IS_NATIVE_387
160 ST_FUNC
void test_lvalue(void)
162 if (!(vtop
->r
& VT_LVAL
))
166 ST_FUNC
void check_vstack(void)
169 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
172 /* ------------------------------------------------------------------------- */
173 /* vstack debugging aid */
176 void pv (const char *lbl
, int a
, int b
)
179 for (i
= a
; i
< a
+ b
; ++i
) {
180 SValue
*p
= &vtop
[-i
];
181 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
182 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
187 /* ------------------------------------------------------------------------- */
188 /* start of translation unit info */
189 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
194 /* file info: full path + filename */
195 section_sym
= put_elf_sym(symtab_section
, 0, 0,
196 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
197 text_section
->sh_num
, NULL
);
198 getcwd(buf
, sizeof(buf
));
200 normalize_slashes(buf
);
202 pstrcat(buf
, sizeof(buf
), "/");
203 put_stabs_r(buf
, N_SO
, 0, 0,
204 text_section
->data_offset
, text_section
, section_sym
);
205 put_stabs_r(file
->filename
, N_SO
, 0, 0,
206 text_section
->data_offset
, text_section
, section_sym
);
211 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
212 symbols can be safely used */
213 put_elf_sym(symtab_section
, 0, 0,
214 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
215 SHN_ABS
, file
->filename
);
218 /* put end of translation unit info */
219 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
223 put_stabs_r(NULL
, N_SO
, 0, 0,
224 text_section
->data_offset
, text_section
, section_sym
);
228 /* generate line number info */
229 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
233 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
234 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
236 last_line_num
= file
->line_num
;
240 /* put function symbol */
241 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
249 /* XXX: we put here a dummy type */
250 snprintf(buf
, sizeof(buf
), "%s:%c1",
251 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
252 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
253 cur_text_section
, sym
->c
);
254 /* //gr gdb wants a line at the function */
255 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
261 /* put function size */
262 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
266 put_stabn(N_FUN
, 0, 0, size
);
269 /* ------------------------------------------------------------------------- */
270 ST_FUNC
int tccgen_compile(TCCState
*s1
)
272 cur_text_section
= NULL
;
274 anon_sym
= SYM_FIRST_ANOM
;
277 nocode_wanted
= 0x80000000;
280 /* define some often used types */
282 char_pointer_type
.t
= VT_BYTE
;
283 mk_pointer(&char_pointer_type
);
285 size_type
.t
= VT_INT
| VT_UNSIGNED
;
286 ptrdiff_type
.t
= VT_INT
;
288 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
289 ptrdiff_type
.t
= VT_LLONG
;
291 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
292 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
294 func_old_type
.t
= VT_FUNC
;
295 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
296 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
297 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
301 #ifdef TCC_TARGET_ARM
306 printf("%s: **** new file\n", file
->filename
);
309 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
312 gen_inline_functions(s1
);
314 /* end of translation unit info */
319 /* ------------------------------------------------------------------------- */
320 ST_FUNC ElfSym
*elfsym(Sym
*s
)
324 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
327 /* apply storage attributes to Elf symbol */
328 ST_FUNC
void update_storage(Sym
*sym
)
331 int sym_bind
, old_sym_bind
;
337 if (sym
->a
.visibility
)
338 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
341 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
342 sym_bind
= STB_LOCAL
;
343 else if (sym
->a
.weak
)
346 sym_bind
= STB_GLOBAL
;
347 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
348 if (sym_bind
!= old_sym_bind
) {
349 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
353 if (sym
->a
.dllimport
)
354 esym
->st_other
|= ST_PE_IMPORT
;
355 if (sym
->a
.dllexport
)
356 esym
->st_other
|= ST_PE_EXPORT
;
360 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
361 get_tok_str(sym
->v
, NULL
),
362 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
370 /* ------------------------------------------------------------------------- */
371 /* update sym->c so that it points to an external symbol in section
372 'section' with value 'value' */
374 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
375 addr_t value
, unsigned long size
,
376 int can_add_underscore
)
378 int sym_type
, sym_bind
, info
, other
, t
;
382 #ifdef CONFIG_TCC_BCHECK
387 name
= get_tok_str(sym
->v
, NULL
);
388 #ifdef CONFIG_TCC_BCHECK
389 if (tcc_state
->do_bounds_check
) {
390 /* XXX: avoid doing that for statics ? */
391 /* if bound checking is activated, we change some function
392 names by adding the "__bound" prefix */
395 /* XXX: we rely only on malloc hooks */
408 strcpy(buf
, "__bound_");
416 if ((t
& VT_BTYPE
) == VT_FUNC
) {
418 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
419 sym_type
= STT_NOTYPE
;
421 sym_type
= STT_OBJECT
;
423 if (t
& (VT_STATIC
| VT_INLINE
))
424 sym_bind
= STB_LOCAL
;
426 sym_bind
= STB_GLOBAL
;
429 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
430 Sym
*ref
= sym
->type
.ref
;
431 if (ref
->a
.nodecorate
) {
432 can_add_underscore
= 0;
434 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
435 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
437 other
|= ST_PE_STDCALL
;
438 can_add_underscore
= 0;
442 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
444 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
448 name
= get_tok_str(sym
->asm_label
, NULL
);
449 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
450 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
453 esym
->st_value
= value
;
454 esym
->st_size
= size
;
455 esym
->st_shndx
= sh_num
;
460 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
461 addr_t value
, unsigned long size
)
463 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
464 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
467 /* add a new relocation entry to symbol 'sym' in section 's' */
468 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
473 if (nocode_wanted
&& s
== cur_text_section
)
478 put_extern_sym(sym
, NULL
, 0, 0);
482 /* now we can add ELF relocation info */
483 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
487 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
489 greloca(s
, sym
, offset
, type
, 0);
493 /* ------------------------------------------------------------------------- */
494 /* symbol allocator */
495 static Sym
*__sym_malloc(void)
497 Sym
*sym_pool
, *sym
, *last_sym
;
500 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
501 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
503 last_sym
= sym_free_first
;
505 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
506 sym
->next
= last_sym
;
510 sym_free_first
= last_sym
;
514 static inline Sym
*sym_malloc(void)
518 sym
= sym_free_first
;
520 sym
= __sym_malloc();
521 sym_free_first
= sym
->next
;
524 sym
= tcc_malloc(sizeof(Sym
));
529 ST_INLN
void sym_free(Sym
*sym
)
532 sym
->next
= sym_free_first
;
533 sym_free_first
= sym
;
539 /* push, without hashing */
540 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
545 memset(s
, 0, sizeof *s
);
555 /* find a symbol and return its associated structure. 's' is the top
556 of the symbol stack */
557 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
569 /* structure lookup */
570 ST_INLN Sym
*struct_find(int v
)
573 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
575 return table_ident
[v
]->sym_struct
;
578 /* find an identifier */
579 ST_INLN Sym
*sym_find(int v
)
582 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
584 return table_ident
[v
]->sym_identifier
;
587 static int sym_scope(Sym
*s
)
589 if (IS_ENUM_VAL (s
->type
.t
))
590 return s
->type
.ref
->sym_scope
;
595 /* push a given symbol on the symbol stack */
596 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
605 s
= sym_push2(ps
, v
, type
->t
, c
);
606 s
->type
.ref
= type
->ref
;
608 /* don't record fields or anonymous symbols */
610 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
611 /* record symbol in token array */
612 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
614 ps
= &ts
->sym_struct
;
616 ps
= &ts
->sym_identifier
;
619 s
->sym_scope
= local_scope
;
620 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
621 tcc_error("redeclaration of '%s'",
622 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
627 /* push a global identifier */
628 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
631 s
= sym_push2(&global_stack
, v
, t
, c
);
632 s
->r
= VT_CONST
| VT_SYM
;
633 /* don't record anonymous symbol */
634 if (v
< SYM_FIRST_ANOM
) {
635 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
636 /* modify the top most local identifier, so that sym_identifier will
637 point to 's' when popped; happens when called from inline asm */
638 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
639 ps
= &(*ps
)->prev_tok
;
646 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
647 pop them yet from the list, but do remove them from the token array. */
648 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
658 /* remove symbol in token array */
660 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
661 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
663 ps
= &ts
->sym_struct
;
665 ps
= &ts
->sym_identifier
;
676 /* ------------------------------------------------------------------------- */
677 static void vcheck_cmp(void)
679 /* cannot let cpu flags if other instruction are generated. Also
680 avoid leaving VT_JMP anywhere except on the top of the stack
681 because it would complicate the code generator.
683 Don't do this when nocode_wanted. vtop might come from
684 !nocode_wanted regions (see 88_codeopt.c) and transforming
685 it to a register without actually generating code is wrong
686 as their value might still be used for real. All values
687 we push under nocode_wanted will eventually be popped
688 again, so that the VT_CMP/VT_JMP value will be in vtop
689 when code is unsuppressed again. */
691 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
695 static void vsetc(CType
*type
, int r
, CValue
*vc
)
697 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
698 tcc_error("memory full (vstack)");
708 ST_FUNC
void vswap(void)
718 /* pop stack value */
719 ST_FUNC
void vpop(void)
722 v
= vtop
->r
& VT_VALMASK
;
723 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
724 /* for x86, we need to pop the FP stack */
726 o(0xd8dd); /* fstp %st(0) */
730 /* need to put correct jump if && or || without test */
737 /* push constant of type "type" with useless value */
738 ST_FUNC
void vpush(CType
*type
)
740 vset(type
, VT_CONST
, 0);
743 /* push integer constant */
744 ST_FUNC
void vpushi(int v
)
748 vsetc(&int_type
, VT_CONST
, &cval
);
751 /* push a pointer sized constant */
752 static void vpushs(addr_t v
)
756 vsetc(&size_type
, VT_CONST
, &cval
);
759 /* push arbitrary 64bit constant */
760 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
767 vsetc(&ctype
, VT_CONST
, &cval
);
770 /* push long long constant */
771 static inline void vpushll(long long v
)
773 vpush64(VT_LLONG
, v
);
776 ST_FUNC
void vset(CType
*type
, int r
, int v
)
781 vsetc(type
, r
, &cval
);
784 static void vseti(int r
, int v
)
792 ST_FUNC
void vpushv(SValue
*v
)
794 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
795 tcc_error("memory full (vstack)");
800 static void vdup(void)
805 /* rotate n first stack elements to the bottom
806 I1 ... In -> I2 ... In I1 [top is right]
808 ST_FUNC
void vrotb(int n
)
820 /* rotate the n elements before entry e towards the top
821 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
823 ST_FUNC
void vrote(SValue
*e
, int n
)
830 for(i
= 0;i
< n
- 1; i
++)
835 /* rotate n first stack elements to the top
836 I1 ... In -> In I1 ... I(n-1) [top is right]
838 ST_FUNC
void vrott(int n
)
843 /* ------------------------------------------------------------------------- */
844 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
846 /* called from generators to set the result from relational ops */
847 ST_FUNC
void vset_VT_CMP(int op
)
855 /* called once before asking generators to load VT_CMP to a register */
856 static void vset_VT_JMP(void)
858 int op
= vtop
->cmp_op
;
859 if (vtop
->jtrue
|| vtop
->jfalse
) {
860 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
861 int inv
= op
& (op
< 2); /* small optimization */
862 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
864 /* otherwise convert flags (rsp. 0/1) to register */
866 if (op
< 2) /* doesn't seem to happen */
871 /* Set CPU Flags, doesn't yet jump */
872 static void gvtst_set(int inv
, int t
)
875 if (vtop
->r
!= VT_CMP
) {
878 if (vtop
->r
== VT_CMP
) /* must be VT_CONST otherwise */
880 else if (vtop
->r
== VT_CONST
)
881 vset_VT_CMP(vtop
->c
.i
!= 0);
885 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
886 *p
= gjmp_append(*p
, t
);
889 /* Generate value test
891 * Generate a test for any value (jump, comparison and integers) */
892 static int gvtst(int inv
, int t
)
898 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
903 /* jump to the wanted target */
905 t
= gjmp_cond(op
^ inv
, t
);
908 /* resolve complementary jumps to here */
915 /* ------------------------------------------------------------------------- */
916 /* push a symbol value of TYPE */
917 static inline void vpushsym(CType
*type
, Sym
*sym
)
921 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
925 /* Return a static symbol pointing to a section */
926 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
932 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
933 sym
->type
.t
|= VT_STATIC
;
934 put_extern_sym(sym
, sec
, offset
, size
);
938 /* push a reference to a section offset by adding a dummy symbol */
939 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
941 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
944 /* define a new external reference to a symbol 'v' of type 'u' */
945 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
951 /* push forward reference */
952 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
953 s
->type
.ref
= type
->ref
;
954 } else if (IS_ASM_SYM(s
)) {
955 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
956 s
->type
.ref
= type
->ref
;
962 /* Merge symbol attributes. */
963 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
965 if (sa1
->aligned
&& !sa
->aligned
)
966 sa
->aligned
= sa1
->aligned
;
967 sa
->packed
|= sa1
->packed
;
968 sa
->weak
|= sa1
->weak
;
969 if (sa1
->visibility
!= STV_DEFAULT
) {
970 int vis
= sa
->visibility
;
971 if (vis
== STV_DEFAULT
972 || vis
> sa1
->visibility
)
973 vis
= sa1
->visibility
;
974 sa
->visibility
= vis
;
976 sa
->dllexport
|= sa1
->dllexport
;
977 sa
->nodecorate
|= sa1
->nodecorate
;
978 sa
->dllimport
|= sa1
->dllimport
;
981 /* Merge function attributes. */
982 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
984 if (fa1
->func_call
&& !fa
->func_call
)
985 fa
->func_call
= fa1
->func_call
;
986 if (fa1
->func_type
&& !fa
->func_type
)
987 fa
->func_type
= fa1
->func_type
;
988 if (fa1
->func_args
&& !fa
->func_args
)
989 fa
->func_args
= fa1
->func_args
;
992 /* Merge attributes. */
993 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
995 merge_symattr(&ad
->a
, &ad1
->a
);
996 merge_funcattr(&ad
->f
, &ad1
->f
);
999 ad
->section
= ad1
->section
;
1000 if (ad1
->alias_target
)
1001 ad
->alias_target
= ad1
->alias_target
;
1003 ad
->asm_label
= ad1
->asm_label
;
1005 ad
->attr_mode
= ad1
->attr_mode
;
1008 /* Merge some type attributes. */
1009 static void patch_type(Sym
*sym
, CType
*type
)
1011 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1012 if (!(sym
->type
.t
& VT_EXTERN
))
1013 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1014 sym
->type
.t
&= ~VT_EXTERN
;
1017 if (IS_ASM_SYM(sym
)) {
1018 /* stay static if both are static */
1019 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1020 sym
->type
.ref
= type
->ref
;
1023 if (!is_compatible_types(&sym
->type
, type
)) {
1024 tcc_error("incompatible types for redefinition of '%s'",
1025 get_tok_str(sym
->v
, NULL
));
1027 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1028 int static_proto
= sym
->type
.t
& VT_STATIC
;
1029 /* warn if static follows non-static function declaration */
1030 if ((type
->t
& VT_STATIC
) && !static_proto
1031 /* XXX this test for inline shouldn't be here. Until we
1032 implement gnu-inline mode again it silences a warning for
1033 mingw caused by our workarounds. */
1034 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1035 tcc_warning("static storage ignored for redefinition of '%s'",
1036 get_tok_str(sym
->v
, NULL
));
1038 /* set 'inline' if both agree or if one has static */
1039 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1040 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1041 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1042 static_proto
|= VT_INLINE
;
1045 if (0 == (type
->t
& VT_EXTERN
)) {
1046 /* put complete type, use static from prototype */
1047 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1048 sym
->type
.ref
= type
->ref
;
1050 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1053 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1054 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1055 sym
->type
.ref
= type
->ref
;
1059 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1060 /* set array size if it was omitted in extern declaration */
1061 sym
->type
.ref
->c
= type
->ref
->c
;
1063 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1064 tcc_warning("storage mismatch for redefinition of '%s'",
1065 get_tok_str(sym
->v
, NULL
));
1069 /* Merge some storage attributes. */
1070 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1073 patch_type(sym
, type
);
1075 #ifdef TCC_TARGET_PE
1076 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1077 tcc_error("incompatible dll linkage for redefinition of '%s'",
1078 get_tok_str(sym
->v
, NULL
));
1080 merge_symattr(&sym
->a
, &ad
->a
);
1082 sym
->asm_label
= ad
->asm_label
;
1083 update_storage(sym
);
1086 /* copy sym to other stack */
1087 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1090 s
= sym_malloc(), *s
= *s0
;
1091 s
->prev
= *ps
, *ps
= s
;
1092 if (s
->v
< SYM_FIRST_ANOM
) {
1093 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1094 s
->prev_tok
= *ps
, *ps
= s
;
1099 /* copy a list of syms */
1100 static void sym_copy_ref(Sym
*s0
, Sym
**ps
)
1102 Sym
*s
, **sp
= &s0
->type
.ref
;
1103 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
)
1104 sp
= &(*sp
= sym_copy(s
, ps
))->next
;
1107 /* define a new external reference to a symbol 'v' */
1108 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1112 /* look for global symbol */
1114 while (s
&& s
->sym_scope
)
1118 /* push forward reference */
1119 s
= global_identifier_push(v
, type
->t
, 0);
1122 s
->asm_label
= ad
->asm_label
;
1123 s
->type
.ref
= type
->ref
;
1124 bt
= s
->type
.t
& (VT_BTYPE
|VT_ARRAY
);
1125 /* copy type to the global stack also */
1126 if (local_scope
&& (bt
== VT_FUNC
|| (bt
& VT_ARRAY
)))
1127 sym_copy_ref(s
, &global_stack
);
1129 patch_storage(s
, ad
, type
);
1130 bt
= s
->type
.t
& VT_BTYPE
;
1132 /* push variables to local scope if any */
1133 if (local_stack
&& bt
!= VT_FUNC
)
1134 s
= sym_copy(s
, &local_stack
);
1138 /* push a reference to global symbol v */
1139 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1141 vpushsym(type
, external_global_sym(v
, type
));
1144 /* save registers up to (vtop - n) stack entry */
1145 ST_FUNC
void save_regs(int n
)
1148 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1152 /* save r to the memory stack, and mark it as being free */
1153 ST_FUNC
void save_reg(int r
)
1155 save_reg_upstack(r
, 0);
1158 /* save r to the memory stack, and mark it as being free,
1159 if seen up to (vtop - n) stack entry */
1160 ST_FUNC
void save_reg_upstack(int r
, int n
)
1162 int l
, saved
, size
, align
;
1166 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1171 /* modify all stack values */
1174 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1175 if ((p
->r
& VT_VALMASK
) == r
||
1176 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
1177 /* must save value on stack if not already done */
1179 /* NOTE: must reload 'r' because r might be equal to r2 */
1180 r
= p
->r
& VT_VALMASK
;
1181 /* store register in the stack */
1183 if ((p
->r
& VT_LVAL
) ||
1184 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
1186 type
= &char_pointer_type
;
1190 size
= type_size(type
, &align
);
1191 l
=get_temp_local_var(size
,align
);
1192 sv
.type
.t
= type
->t
;
1193 sv
.r
= VT_LOCAL
| VT_LVAL
;
1196 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1197 /* x86 specific: need to pop fp register ST0 if saved */
1198 if (r
== TREG_ST0
) {
1199 o(0xd8dd); /* fstp %st(0) */
1203 /* special long long case */
1204 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
1211 /* mark that stack entry as being saved on the stack */
1212 if (p
->r
& VT_LVAL
) {
1213 /* also clear the bounded flag because the
1214 relocation address of the function was stored in
1216 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1218 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1226 #ifdef TCC_TARGET_ARM
1227 /* find a register of class 'rc2' with at most one reference on stack.
1228 * If none, call get_reg(rc) */
1229 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1234 for(r
=0;r
<NB_REGS
;r
++) {
1235 if (reg_classes
[r
] & rc2
) {
1238 for(p
= vstack
; p
<= vtop
; p
++) {
1239 if ((p
->r
& VT_VALMASK
) == r
||
1240 (p
->r2
& VT_VALMASK
) == r
)
1251 /* find a free register of class 'rc'. If none, save one register */
1252 ST_FUNC
int get_reg(int rc
)
1257 /* find a free register */
1258 for(r
=0;r
<NB_REGS
;r
++) {
1259 if (reg_classes
[r
] & rc
) {
1262 for(p
=vstack
;p
<=vtop
;p
++) {
1263 if ((p
->r
& VT_VALMASK
) == r
||
1264 (p
->r2
& VT_VALMASK
) == r
)
1272 /* no register left : free the first one on the stack (VERY
1273 IMPORTANT to start from the bottom to ensure that we don't
1274 spill registers used in gen_opi()) */
1275 for(p
=vstack
;p
<=vtop
;p
++) {
1276 /* look at second register (if long long) */
1277 r
= p
->r2
& VT_VALMASK
;
1278 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1280 r
= p
->r
& VT_VALMASK
;
1281 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1287 /* Should never comes here */
1291 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1292 static int get_temp_local_var(int size
,int align
){
1294 struct temp_local_variable
*temp_var
;
1301 for(i
=0;i
<nb_temp_local_vars
;i
++){
1302 temp_var
=&arr_temp_local_vars
[i
];
1303 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1306 /*check if temp_var is free*/
1308 for(p
=vstack
;p
<=vtop
;p
++) {
1310 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1311 if(p
->c
.i
==temp_var
->location
){
1318 found_var
=temp_var
->location
;
1324 loc
= (loc
- size
) & -align
;
1325 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1326 temp_var
=&arr_temp_local_vars
[i
];
1327 temp_var
->location
=loc
;
1328 temp_var
->size
=size
;
1329 temp_var
->align
=align
;
1330 nb_temp_local_vars
++;
1337 static void clear_temp_local_var_list(){
1338 nb_temp_local_vars
=0;
1341 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1343 static void move_reg(int r
, int s
, int t
)
1357 /* get address of vtop (vtop MUST BE an lvalue) */
1358 ST_FUNC
void gaddrof(void)
1360 vtop
->r
&= ~VT_LVAL
;
1361 /* tricky: if saved lvalue, then we can go back to lvalue */
1362 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1363 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1368 #ifdef CONFIG_TCC_BCHECK
1369 /* generate lvalue bound code */
1370 static void gbound(void)
1375 vtop
->r
&= ~VT_MUSTBOUND
;
1376 /* if lvalue, then use checking code before dereferencing */
1377 if (vtop
->r
& VT_LVAL
) {
1378 /* if not VT_BOUNDED value, then make one */
1379 if (!(vtop
->r
& VT_BOUNDED
)) {
1380 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1381 /* must save type because we must set it to int to get pointer */
1383 vtop
->type
.t
= VT_PTR
;
1386 gen_bounded_ptr_add();
1387 vtop
->r
|= lval_type
;
1390 /* then check for dereferencing */
1391 gen_bounded_ptr_deref();
1396 static void incr_bf_adr(int o
)
1398 vtop
->type
= char_pointer_type
;
1402 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1403 | (VT_BYTE
|VT_UNSIGNED
);
1404 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1405 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1408 /* single-byte load mode for packed or otherwise unaligned bitfields */
1409 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1412 save_reg_upstack(vtop
->r
, 1);
1413 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1414 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1423 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1425 vpushi((1 << n
) - 1), gen_op('&');
1428 vpushi(bits
), gen_op(TOK_SHL
);
1431 bits
+= n
, bit_size
-= n
, o
= 1;
1434 if (!(type
->t
& VT_UNSIGNED
)) {
1435 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1436 vpushi(n
), gen_op(TOK_SHL
);
1437 vpushi(n
), gen_op(TOK_SAR
);
1441 /* single-byte store mode for packed or otherwise unaligned bitfields */
1442 static void store_packed_bf(int bit_pos
, int bit_size
)
1444 int bits
, n
, o
, m
, c
;
1446 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1448 save_reg_upstack(vtop
->r
, 1);
1449 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1451 incr_bf_adr(o
); // X B
1453 c
? vdup() : gv_dup(); // B V X
1456 vpushi(bits
), gen_op(TOK_SHR
);
1458 vpushi(bit_pos
), gen_op(TOK_SHL
);
1463 m
= ((1 << n
) - 1) << bit_pos
;
1464 vpushi(m
), gen_op('&'); // X B V1
1465 vpushv(vtop
-1); // X B V1 B
1466 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1467 gen_op('&'); // X B V1 B1
1468 gen_op('|'); // X B V2
1470 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1471 vstore(), vpop(); // X B
1472 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1477 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1480 if (0 == sv
->type
.ref
)
1482 t
= sv
->type
.ref
->auxtype
;
1483 if (t
!= -1 && t
!= VT_STRUCT
) {
1484 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1485 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1490 /* store vtop a register belonging to class 'rc'. lvalues are
1491 converted to values. Cannot be used if cannot be converted to
1492 register value (such as structures). */
1493 ST_FUNC
int gv(int rc
)
1495 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1497 /* NOTE: get_reg can modify vstack[] */
1498 if (vtop
->type
.t
& VT_BITFIELD
) {
1501 bit_pos
= BIT_POS(vtop
->type
.t
);
1502 bit_size
= BIT_SIZE(vtop
->type
.t
);
1503 /* remove bit field info to avoid loops */
1504 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1507 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1508 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1509 type
.t
|= VT_UNSIGNED
;
1511 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1513 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1518 if (r
== VT_STRUCT
) {
1519 load_packed_bf(&type
, bit_pos
, bit_size
);
1521 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1522 /* cast to int to propagate signedness in following ops */
1524 /* generate shifts */
1525 vpushi(bits
- (bit_pos
+ bit_size
));
1527 vpushi(bits
- bit_size
);
1528 /* NOTE: transformed to SHR if unsigned */
1533 if (is_float(vtop
->type
.t
) &&
1534 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1535 unsigned long offset
;
1536 /* CPUs usually cannot use float constants, so we store them
1537 generically in data segment */
1538 size
= type_size(&vtop
->type
, &align
);
1540 size
= 0, align
= 1;
1541 offset
= section_add(data_section
, size
, align
);
1542 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1544 init_putv(&vtop
->type
, data_section
, offset
);
1547 #ifdef CONFIG_TCC_BCHECK
1548 if (vtop
->r
& VT_MUSTBOUND
)
1552 r
= vtop
->r
& VT_VALMASK
;
1553 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1554 #ifndef TCC_TARGET_ARM64
1555 #ifndef TCC_TARGET_RISCV64 /* XXX: remove the whole LRET/QRET class */
1558 #ifdef TCC_TARGET_X86_64
1559 else if (rc
== RC_FRET
)
1564 /* need to reload if:
1566 - lvalue (need to dereference pointer)
1567 - already a register, but not in the right class */
1569 || (vtop
->r
& VT_LVAL
)
1570 || !(reg_classes
[r
] & rc
)
1572 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1573 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1575 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1581 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1582 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1584 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1585 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1586 unsigned long long ll
;
1588 int r2
, original_type
;
1589 original_type
= vtop
->type
.t
;
1590 /* two register type load : expand to two words
1593 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1596 vtop
->c
.i
= ll
; /* first word */
1598 vtop
->r
= r
; /* save register value */
1599 vpushi(ll
>> 32); /* second word */
1602 if (vtop
->r
& VT_LVAL
) {
1603 /* We do not want to modifier the long long
1604 pointer here, so the safest (and less
1605 efficient) is to save all the other registers
1606 in the stack. XXX: totally inefficient. */
1610 /* lvalue_save: save only if used further down the stack */
1611 save_reg_upstack(vtop
->r
, 1);
1613 /* load from memory */
1614 vtop
->type
.t
= load_type
;
1617 vtop
[-1].r
= r
; /* save register value */
1618 /* increment pointer to get second word */
1619 vtop
->type
.t
= addr_type
;
1624 vtop
->type
.t
= load_type
;
1626 /* move registers */
1629 vtop
[-1].r
= r
; /* save register value */
1630 vtop
->r
= vtop
[-1].r2
;
1632 /* Allocate second register. Here we rely on the fact that
1633 get_reg() tries first to free r2 of an SValue. */
1637 /* write second register */
1639 vtop
->type
.t
= original_type
;
1640 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1642 /* lvalue of scalar type : need to use lvalue type
1643 because of possible cast */
1646 /* compute memory access type */
1647 if (vtop
->r
& VT_LVAL_BYTE
)
1649 else if (vtop
->r
& VT_LVAL_SHORT
)
1651 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1655 /* restore wanted type */
1658 if (vtop
->r
== VT_CMP
)
1660 /* one register type load */
1665 #ifdef TCC_TARGET_C67
1666 /* uses register pairs for doubles */
1667 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1674 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1675 ST_FUNC
void gv2(int rc1
, int rc2
)
1677 /* generate more generic register first. But VT_JMP or VT_CMP
1678 values must be generated first in all cases to avoid possible
1680 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1685 /* test if reload is needed for first register */
1686 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1696 /* test if reload is needed for first register */
1697 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1703 #ifndef TCC_TARGET_ARM64
1704 /* wrapper around RC_FRET to return a register by type */
1705 static int rc_fret(int t
)
1707 #ifdef TCC_TARGET_X86_64
1708 if (t
== VT_LDOUBLE
) {
1716 /* wrapper around REG_FRET to return a register by type */
1717 static int reg_fret(int t
)
1719 #ifdef TCC_TARGET_X86_64
1720 if (t
== VT_LDOUBLE
) {
1728 /* expand 64bit on stack in two ints */
1729 ST_FUNC
void lexpand(void)
1732 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1733 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1734 if (v
== VT_CONST
) {
1737 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1743 vtop
[0].r
= vtop
[-1].r2
;
1744 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1746 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1751 /* build a long long from two ints */
1752 static void lbuild(int t
)
1754 gv2(RC_INT
, RC_INT
);
1755 vtop
[-1].r2
= vtop
[0].r
;
1756 vtop
[-1].type
.t
= t
;
1761 /* convert stack entry to register and duplicate its value in another
1763 static void gv_dup(void)
1770 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1771 if (t
& VT_BITFIELD
) {
1781 /* stack: H L L1 H1 */
1791 /* duplicate value */
1796 #ifdef TCC_TARGET_X86_64
1797 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1807 load(r1
, &sv
); /* move r to r1 */
1809 /* duplicates value */
1816 /* generate CPU independent (unsigned) long long operations */
1817 static void gen_opl(int op
)
1819 int t
, a
, b
, op1
, c
, i
;
1821 unsigned short reg_iret
= REG_IRET
;
1822 unsigned short reg_lret
= REG_LRET
;
1828 func
= TOK___divdi3
;
1831 func
= TOK___udivdi3
;
1834 func
= TOK___moddi3
;
1837 func
= TOK___umoddi3
;
1844 /* call generic long long function */
1845 vpush_global_sym(&func_old_type
, func
);
1850 vtop
->r2
= reg_lret
;
1858 //pv("gen_opl A",0,2);
1864 /* stack: L1 H1 L2 H2 */
1869 vtop
[-2] = vtop
[-3];
1872 /* stack: H1 H2 L1 L2 */
1873 //pv("gen_opl B",0,4);
1879 /* stack: H1 H2 L1 L2 ML MH */
1882 /* stack: ML MH H1 H2 L1 L2 */
1886 /* stack: ML MH H1 L2 H2 L1 */
1891 /* stack: ML MH M1 M2 */
1894 } else if (op
== '+' || op
== '-') {
1895 /* XXX: add non carry method too (for MIPS or alpha) */
1901 /* stack: H1 H2 (L1 op L2) */
1904 gen_op(op1
+ 1); /* TOK_xxxC2 */
1907 /* stack: H1 H2 (L1 op L2) */
1910 /* stack: (L1 op L2) H1 H2 */
1912 /* stack: (L1 op L2) (H1 op H2) */
1920 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1921 t
= vtop
[-1].type
.t
;
1925 /* stack: L H shift */
1927 /* constant: simpler */
1928 /* NOTE: all comments are for SHL. the other cases are
1929 done by swapping words */
1940 if (op
!= TOK_SAR
) {
1973 /* XXX: should provide a faster fallback on x86 ? */
1976 func
= TOK___ashrdi3
;
1979 func
= TOK___lshrdi3
;
1982 func
= TOK___ashldi3
;
1988 /* compare operations */
1994 /* stack: L1 H1 L2 H2 */
1996 vtop
[-1] = vtop
[-2];
1998 /* stack: L1 L2 H1 H2 */
2002 /* when values are equal, we need to compare low words. since
2003 the jump is inverted, we invert the test too. */
2006 else if (op1
== TOK_GT
)
2008 else if (op1
== TOK_ULT
)
2010 else if (op1
== TOK_UGT
)
2020 /* generate non equal test */
2022 vset_VT_CMP(TOK_NE
);
2026 /* compare low. Always unsigned */
2030 else if (op1
== TOK_LE
)
2032 else if (op1
== TOK_GT
)
2034 else if (op1
== TOK_GE
)
2037 #if 0//def TCC_TARGET_I386
2038 if (op
== TOK_NE
) { gsym(b
); break; }
2039 if (op
== TOK_EQ
) { gsym(a
); break; }
2048 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
2050 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
2051 return (a
^ b
) >> 63 ? -x
: x
;
2054 static int gen_opic_lt(uint64_t a
, uint64_t b
)
2056 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
2059 /* handle integer constant optimizations and various machine
2061 static void gen_opic(int op
)
2063 SValue
*v1
= vtop
- 1;
2065 int t1
= v1
->type
.t
& VT_BTYPE
;
2066 int t2
= v2
->type
.t
& VT_BTYPE
;
2067 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2068 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2069 uint64_t l1
= c1
? v1
->c
.i
: 0;
2070 uint64_t l2
= c2
? v2
->c
.i
: 0;
2071 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2073 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2074 l1
= ((uint32_t)l1
|
2075 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2076 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2077 l2
= ((uint32_t)l2
|
2078 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2082 case '+': l1
+= l2
; break;
2083 case '-': l1
-= l2
; break;
2084 case '&': l1
&= l2
; break;
2085 case '^': l1
^= l2
; break;
2086 case '|': l1
|= l2
; break;
2087 case '*': l1
*= l2
; break;
2094 /* if division by zero, generate explicit division */
2097 tcc_error("division by zero in constant");
2101 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2102 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2103 case TOK_UDIV
: l1
= l1
/ l2
; break;
2104 case TOK_UMOD
: l1
= l1
% l2
; break;
2107 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2108 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2110 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2113 case TOK_ULT
: l1
= l1
< l2
; break;
2114 case TOK_UGE
: l1
= l1
>= l2
; break;
2115 case TOK_EQ
: l1
= l1
== l2
; break;
2116 case TOK_NE
: l1
= l1
!= l2
; break;
2117 case TOK_ULE
: l1
= l1
<= l2
; break;
2118 case TOK_UGT
: l1
= l1
> l2
; break;
2119 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2120 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2121 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2122 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2124 case TOK_LAND
: l1
= l1
&& l2
; break;
2125 case TOK_LOR
: l1
= l1
|| l2
; break;
2129 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2130 l1
= ((uint32_t)l1
|
2131 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2135 /* if commutative ops, put c2 as constant */
2136 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2137 op
== '|' || op
== '*')) {
2139 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2140 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2142 if (!const_wanted
&&
2144 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2145 (l1
== -1 && op
== TOK_SAR
))) {
2146 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2148 } else if (!const_wanted
&&
2149 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2151 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2152 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2153 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2158 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2161 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2162 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2165 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2166 /* filter out NOP operations like x*1, x-0, x&-1... */
2168 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2169 /* try to use shifts instead of muls or divs */
2170 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2179 else if (op
== TOK_PDIV
)
2185 } else if (c2
&& (op
== '+' || op
== '-') &&
2186 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2187 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2188 /* symbol + constant case */
2192 /* The backends can't always deal with addends to symbols
2193 larger than +-1<<31. Don't construct such. */
2200 /* call low level op generator */
2201 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2202 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2210 /* generate a floating point operation with constant propagation */
2211 static void gen_opif(int op
)
2215 #if defined _MSC_VER && defined __x86_64__
2216 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2223 /* currently, we cannot do computations with forward symbols */
2224 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2225 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2227 if (v1
->type
.t
== VT_FLOAT
) {
2230 } else if (v1
->type
.t
== VT_DOUBLE
) {
2238 /* NOTE: we only do constant propagation if finite number (not
2239 NaN or infinity) (ANSI spec) */
2240 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2244 case '+': f1
+= f2
; break;
2245 case '-': f1
-= f2
; break;
2246 case '*': f1
*= f2
; break;
2249 /* If not in initializer we need to potentially generate
2250 FP exceptions at runtime, otherwise we want to fold. */
2256 /* XXX: also handles tests ? */
2260 /* XXX: overflow test ? */
2261 if (v1
->type
.t
== VT_FLOAT
) {
2263 } else if (v1
->type
.t
== VT_DOUBLE
) {
2275 static int pointed_size(CType
*type
)
2278 return type_size(pointed_type(type
), &align
);
2281 static void vla_runtime_pointed_size(CType
*type
)
2284 vla_runtime_type_size(pointed_type(type
), &align
);
2287 static inline int is_null_pointer(SValue
*p
)
2289 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2291 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2292 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2293 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2294 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2295 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2296 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2299 static inline int is_integer_btype(int bt
)
2301 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2302 bt
== VT_INT
|| bt
== VT_LLONG
);
2305 /* check types for comparison or subtraction of pointers */
2306 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2308 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2311 /* null pointers are accepted for all comparisons as gcc */
2312 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2316 bt1
= type1
->t
& VT_BTYPE
;
2317 bt2
= type2
->t
& VT_BTYPE
;
2318 /* accept comparison between pointer and integer with a warning */
2319 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2320 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2321 tcc_warning("comparison between pointer and integer");
2325 /* both must be pointers or implicit function pointers */
2326 if (bt1
== VT_PTR
) {
2327 type1
= pointed_type(type1
);
2328 } else if (bt1
!= VT_FUNC
)
2329 goto invalid_operands
;
2331 if (bt2
== VT_PTR
) {
2332 type2
= pointed_type(type2
);
2333 } else if (bt2
!= VT_FUNC
) {
2335 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2337 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2338 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2342 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2343 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2344 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2345 /* gcc-like error if '-' is used */
2347 goto invalid_operands
;
2349 tcc_warning("comparison of distinct pointer types lacks a cast");
2353 /* generic gen_op: handles types problems */
2354 ST_FUNC
void gen_op(int op
)
2356 int u
, t1
, t2
, bt1
, bt2
, t
;
2360 t1
= vtop
[-1].type
.t
;
2361 t2
= vtop
[0].type
.t
;
2362 bt1
= t1
& VT_BTYPE
;
2363 bt2
= t2
& VT_BTYPE
;
2365 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2366 tcc_error("operation on a struct");
2367 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2368 if (bt2
== VT_FUNC
) {
2369 mk_pointer(&vtop
->type
);
2372 if (bt1
== VT_FUNC
) {
2374 mk_pointer(&vtop
->type
);
2379 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2380 /* at least one operand is a pointer */
2381 /* relational op: must be both pointers */
2382 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2383 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2384 /* pointers are handled are unsigned */
2386 t
= VT_LLONG
| VT_UNSIGNED
;
2388 t
= VT_INT
| VT_UNSIGNED
;
2392 /* if both pointers, then it must be the '-' op */
2393 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2395 tcc_error("cannot use pointers here");
2396 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2397 /* XXX: check that types are compatible */
2398 if (vtop
[-1].type
.t
& VT_VLA
) {
2399 vla_runtime_pointed_size(&vtop
[-1].type
);
2401 vpushi(pointed_size(&vtop
[-1].type
));
2405 vtop
->type
.t
= ptrdiff_type
.t
;
2409 /* exactly one pointer : must be '+' or '-'. */
2410 if (op
!= '-' && op
!= '+')
2411 tcc_error("cannot use pointers here");
2412 /* Put pointer as first operand */
2413 if (bt2
== VT_PTR
) {
2415 t
= t1
, t1
= t2
, t2
= t
;
2418 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2419 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2422 type1
= vtop
[-1].type
;
2423 type1
.t
&= ~VT_ARRAY
;
2424 if (vtop
[-1].type
.t
& VT_VLA
)
2425 vla_runtime_pointed_size(&vtop
[-1].type
);
2427 u
= pointed_size(&vtop
[-1].type
);
2429 tcc_error("unknown array element size");
2433 /* XXX: cast to int ? (long long case) */
2439 /* #ifdef CONFIG_TCC_BCHECK
2440 The main reason to removing this code:
2447 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2448 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2450 When this code is on. then the output looks like
2452 v+(i-j) = 0xbff84000
2454 /* if evaluating constant expression, no code should be
2455 generated, so no bound check */
2456 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2457 /* if bounded pointers, we generate a special code to
2464 gen_bounded_ptr_add();
2470 /* put again type if gen_opic() swaped operands */
2473 } else if (is_float(bt1
) || is_float(bt2
)) {
2474 /* compute bigger type and do implicit casts */
2475 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2477 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2482 /* floats can only be used for a few operations */
2483 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2484 (op
< TOK_ULT
|| op
> TOK_GT
))
2485 tcc_error("invalid operands for binary operation");
2487 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2488 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2489 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2491 t
|= (VT_LONG
& t1
);
2493 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2494 /* cast to biggest op */
2495 t
= VT_LLONG
| VT_LONG
;
2496 if (bt1
== VT_LLONG
)
2498 if (bt2
== VT_LLONG
)
2500 /* convert to unsigned if it does not fit in a long long */
2501 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2502 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2506 /* integer operations */
2507 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2508 /* convert to unsigned if it does not fit in an integer */
2509 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2510 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2513 /* XXX: currently, some unsigned operations are explicit, so
2514 we modify them here */
2515 if (t
& VT_UNSIGNED
) {
2522 else if (op
== TOK_LT
)
2524 else if (op
== TOK_GT
)
2526 else if (op
== TOK_LE
)
2528 else if (op
== TOK_GE
)
2536 /* special case for shifts and long long: we keep the shift as
2538 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2545 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2546 /* relational op: the result is an int */
2547 vtop
->type
.t
= VT_INT
;
2552 // Make sure that we have converted to an rvalue:
2553 if (vtop
->r
& VT_LVAL
)
2554 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2557 #ifndef TCC_TARGET_ARM
2558 /* generic itof for unsigned long long case */
2559 static void gen_cvt_itof1(int t
)
2561 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2564 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2565 (VT_LLONG
| VT_UNSIGNED
)) {
2568 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2569 #if LDOUBLE_SIZE != 8
2570 else if (t
== VT_LDOUBLE
)
2571 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2574 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2578 vtop
->r
= reg_fret(t
);
2586 /* generic ftoi for unsigned long long case */
2587 static void gen_cvt_ftoi1(int t
)
2589 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
2594 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2595 /* not handled natively */
2596 st
= vtop
->type
.t
& VT_BTYPE
;
2598 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2599 #if LDOUBLE_SIZE != 8
2600 else if (st
== VT_LDOUBLE
)
2601 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2604 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2609 vtop
->r2
= REG_LRET
;
2616 /* force char or short cast */
2617 static void force_charshort_cast(int t
)
2621 /* cannot cast static initializers */
2622 if (STATIC_DATA_WANTED
)
2626 /* XXX: add optimization if lvalue : just change type and offset */
2631 if (t
& VT_UNSIGNED
) {
2632 vpushi((1 << bits
) - 1);
2635 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2641 /* result must be signed or the SAR is converted to an SHL
2642 This was not the case when "t" was a signed short
2643 and the last value on the stack was an unsigned int */
2644 vtop
->type
.t
&= ~VT_UNSIGNED
;
2650 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2651 static void gen_cast_s(int t
)
2659 static void gen_cast(CType
*type
)
2661 int sbt
, dbt
, sf
, df
, c
, p
;
2663 /* special delayed cast for char/short */
2664 /* XXX: in some cases (multiple cascaded casts), it may still
2666 if (vtop
->r
& VT_MUSTCAST
) {
2667 vtop
->r
&= ~VT_MUSTCAST
;
2668 force_charshort_cast(vtop
->type
.t
);
2671 /* bitfields first get cast to ints */
2672 if (vtop
->type
.t
& VT_BITFIELD
) {
2676 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2677 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2682 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2683 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2684 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2685 c
&= dbt
!= VT_LDOUBLE
;
2688 /* constant case: we can do it now */
2689 /* XXX: in ISOC, cannot do it if error in convert */
2690 if (sbt
== VT_FLOAT
)
2691 vtop
->c
.ld
= vtop
->c
.f
;
2692 else if (sbt
== VT_DOUBLE
)
2693 vtop
->c
.ld
= vtop
->c
.d
;
2696 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2697 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2698 vtop
->c
.ld
= vtop
->c
.i
;
2700 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2702 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2703 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2705 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2708 if (dbt
== VT_FLOAT
)
2709 vtop
->c
.f
= (float)vtop
->c
.ld
;
2710 else if (dbt
== VT_DOUBLE
)
2711 vtop
->c
.d
= (double)vtop
->c
.ld
;
2712 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2713 vtop
->c
.i
= vtop
->c
.ld
;
2714 } else if (sf
&& dbt
== VT_BOOL
) {
2715 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2718 vtop
->c
.i
= vtop
->c
.ld
;
2719 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2721 else if (sbt
& VT_UNSIGNED
)
2722 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2724 else if (sbt
== VT_PTR
)
2727 else if (sbt
!= VT_LLONG
)
2728 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2729 -(vtop
->c
.i
& 0x80000000));
2731 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2733 else if (dbt
== VT_BOOL
)
2734 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2736 else if (dbt
== VT_PTR
)
2739 else if (dbt
!= VT_LLONG
) {
2740 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2741 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2744 if (!(dbt
& VT_UNSIGNED
))
2745 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2748 } else if (p
&& dbt
== VT_BOOL
) {
2752 /* non constant case: generate code */
2754 /* convert from fp to fp */
2757 /* convert int to fp */
2760 /* convert fp to int */
2761 if (dbt
== VT_BOOL
) {
2765 /* we handle char/short/etc... with generic code */
2766 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2767 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2771 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2772 /* additional cast for char/short... */
2778 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2779 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2780 /* scalar to long long */
2781 /* machine independent conversion */
2783 /* generate high word */
2784 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2788 if (sbt
== VT_PTR
) {
2789 /* cast from pointer to int before we apply
2790 shift operation, which pointers don't support*/
2797 /* patch second register */
2798 vtop
[-1].r2
= vtop
->r
;
2802 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2803 (dbt
& VT_BTYPE
) == VT_PTR
||
2804 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2805 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2806 (sbt
& VT_BTYPE
) != VT_PTR
&&
2807 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2808 /* need to convert from 32bit to 64bit */
2810 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2811 #if defined(TCC_TARGET_ARM64) || defined(TCC_TARGET_RISCV64)
2813 #elif defined(TCC_TARGET_X86_64)
2815 /* x86_64 specific: movslq */
2817 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2824 } else if (dbt
== VT_BOOL
) {
2825 /* scalar to bool */
2828 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2829 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2830 if (sbt
== VT_PTR
) {
2831 vtop
->type
.t
= VT_INT
;
2832 tcc_warning("nonportable conversion from pointer to char/short");
2834 force_charshort_cast(dbt
);
2835 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2837 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2839 /* from long long: just take low order word */
2844 vtop
->type
.t
|= VT_UNSIGNED
;
2848 /* if lvalue and single word type, nothing to do because
2849 the lvalue already contains the real type size (see
2850 VT_LVAL_xxx constants) */
2853 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2854 /* if we are casting between pointer types,
2855 we must update the VT_LVAL_xxx size */
2856 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2857 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2860 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
2863 /* return type size as known at compile time. Put alignment at 'a' */
2864 ST_FUNC
int type_size(CType
*type
, int *a
)
2869 bt
= type
->t
& VT_BTYPE
;
2870 if (bt
== VT_STRUCT
) {
2875 } else if (bt
== VT_PTR
) {
2876 if (type
->t
& VT_ARRAY
) {
2880 ts
= type_size(&s
->type
, a
);
2882 if (ts
< 0 && s
->c
< 0)
2890 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
2891 return -1; /* incomplete enum */
2892 } else if (bt
== VT_LDOUBLE
) {
2894 return LDOUBLE_SIZE
;
2895 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2896 #ifdef TCC_TARGET_I386
2897 #ifdef TCC_TARGET_PE
2902 #elif defined(TCC_TARGET_ARM)
2912 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2915 } else if (bt
== VT_SHORT
) {
2918 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2922 /* char, void, function, _Bool */
2928 /* push type size as known at runtime time on top of value stack. Put
2930 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2932 if (type
->t
& VT_VLA
) {
2933 type_size(&type
->ref
->type
, a
);
2934 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2936 vpushi(type_size(type
, a
));
2940 /* return the pointed type of t */
2941 static inline CType
*pointed_type(CType
*type
)
2943 return &type
->ref
->type
;
2946 /* modify type so that its it is a pointer to type. */
2947 ST_FUNC
void mk_pointer(CType
*type
)
2950 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2951 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2955 /* compare function types. OLD functions match any new functions */
2956 static int is_compatible_func(CType
*type1
, CType
*type2
)
2962 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2964 if (s1
->f
.func_type
!= s2
->f
.func_type
2965 && s1
->f
.func_type
!= FUNC_OLD
2966 && s2
->f
.func_type
!= FUNC_OLD
)
2968 /* we should check the function return type for FUNC_OLD too
2969 but that causes problems with the internally used support
2970 functions such as TOK_memmove */
2971 if (s1
->f
.func_type
== FUNC_OLD
&& !s1
->next
)
2973 if (s2
->f
.func_type
== FUNC_OLD
&& !s2
->next
)
2976 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2987 /* return true if type1 and type2 are the same. If unqualified is
2988 true, qualifiers on the types are ignored.
2990 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2994 t1
= type1
->t
& VT_TYPE
;
2995 t2
= type2
->t
& VT_TYPE
;
2997 /* strip qualifiers before comparing */
2998 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2999 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3002 /* Default Vs explicit signedness only matters for char */
3003 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3007 /* XXX: bitfields ? */
3012 && !(type1
->ref
->c
< 0
3013 || type2
->ref
->c
< 0
3014 || type1
->ref
->c
== type2
->ref
->c
))
3017 /* test more complicated cases */
3018 bt1
= t1
& VT_BTYPE
;
3019 if (bt1
== VT_PTR
) {
3020 type1
= pointed_type(type1
);
3021 type2
= pointed_type(type2
);
3022 return is_compatible_types(type1
, type2
);
3023 } else if (bt1
== VT_STRUCT
) {
3024 return (type1
->ref
== type2
->ref
);
3025 } else if (bt1
== VT_FUNC
) {
3026 return is_compatible_func(type1
, type2
);
3027 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
3028 return type1
->ref
== type2
->ref
;
3034 /* return true if type1 and type2 are exactly the same (including
3037 static int is_compatible_types(CType
*type1
, CType
*type2
)
3039 return compare_types(type1
,type2
,0);
3042 /* return true if type1 and type2 are the same (ignoring qualifiers).
3044 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3046 return compare_types(type1
,type2
,1);
3049 /* print a type. If 'varstr' is not NULL, then the variable is also
3050 printed in the type */
3052 /* XXX: add array and function pointers */
3053 static void type_to_str(char *buf
, int buf_size
,
3054 CType
*type
, const char *varstr
)
3066 pstrcat(buf
, buf_size
, "extern ");
3068 pstrcat(buf
, buf_size
, "static ");
3070 pstrcat(buf
, buf_size
, "typedef ");
3072 pstrcat(buf
, buf_size
, "inline ");
3073 if (t
& VT_VOLATILE
)
3074 pstrcat(buf
, buf_size
, "volatile ");
3075 if (t
& VT_CONSTANT
)
3076 pstrcat(buf
, buf_size
, "const ");
3078 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
3079 || ((t
& VT_UNSIGNED
)
3080 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
3083 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
3085 buf_size
-= strlen(buf
);
3120 tstr
= "long double";
3122 pstrcat(buf
, buf_size
, tstr
);
3129 pstrcat(buf
, buf_size
, tstr
);
3130 v
= type
->ref
->v
& ~SYM_STRUCT
;
3131 if (v
>= SYM_FIRST_ANOM
)
3132 pstrcat(buf
, buf_size
, "<anonymous>");
3134 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3139 if (varstr
&& '*' == *varstr
) {
3140 pstrcat(buf1
, sizeof(buf1
), "(");
3141 pstrcat(buf1
, sizeof(buf1
), varstr
);
3142 pstrcat(buf1
, sizeof(buf1
), ")");
3144 pstrcat(buf1
, buf_size
, "(");
3146 while (sa
!= NULL
) {
3148 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3149 pstrcat(buf1
, sizeof(buf1
), buf2
);
3152 pstrcat(buf1
, sizeof(buf1
), ", ");
3154 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3155 pstrcat(buf1
, sizeof(buf1
), ", ...");
3156 pstrcat(buf1
, sizeof(buf1
), ")");
3157 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3162 if (varstr
&& '*' == *varstr
)
3163 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3165 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3166 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3169 pstrcpy(buf1
, sizeof(buf1
), "*");
3170 if (t
& VT_CONSTANT
)
3171 pstrcat(buf1
, buf_size
, "const ");
3172 if (t
& VT_VOLATILE
)
3173 pstrcat(buf1
, buf_size
, "volatile ");
3175 pstrcat(buf1
, sizeof(buf1
), varstr
);
3176 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3180 pstrcat(buf
, buf_size
, " ");
3181 pstrcat(buf
, buf_size
, varstr
);
3186 /* verify type compatibility to store vtop in 'dt' type, and generate
3188 static void gen_assign_cast(CType
*dt
)
3190 CType
*st
, *type1
, *type2
;
3191 char buf1
[256], buf2
[256];
3192 int dbt
, sbt
, qualwarn
, lvl
;
3194 st
= &vtop
->type
; /* source type */
3195 dbt
= dt
->t
& VT_BTYPE
;
3196 sbt
= st
->t
& VT_BTYPE
;
3197 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3198 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3199 ; /* It is Ok if both are void */
3201 tcc_error("cannot cast from/to void");
3203 if (dt
->t
& VT_CONSTANT
)
3204 tcc_warning("assignment of read-only location");
3207 /* special cases for pointers */
3208 /* '0' can also be a pointer */
3209 if (is_null_pointer(vtop
))
3211 /* accept implicit pointer to integer cast with warning */
3212 if (is_integer_btype(sbt
)) {
3213 tcc_warning("assignment makes pointer from integer without a cast");
3216 type1
= pointed_type(dt
);
3218 type2
= pointed_type(st
);
3219 else if (sbt
== VT_FUNC
)
3220 type2
= st
; /* a function is implicitly a function pointer */
3223 if (is_compatible_types(type1
, type2
))
3225 for (qualwarn
= lvl
= 0;; ++lvl
) {
3226 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3227 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3229 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3230 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3231 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3233 type1
= pointed_type(type1
);
3234 type2
= pointed_type(type2
);
3236 if (!is_compatible_unqualified_types(type1
, type2
)) {
3237 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3238 /* void * can match anything */
3239 } else if (dbt
== sbt
3240 && is_integer_btype(sbt
& VT_BTYPE
)
3241 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3242 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3243 /* Like GCC don't warn by default for merely changes
3244 in pointer target signedness. Do warn for different
3245 base types, though, in particular for unsigned enums
3246 and signed int targets. */
3248 tcc_warning("assignment from incompatible pointer type");
3253 tcc_warning("assignment discards qualifiers from pointer target type");
3259 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3260 tcc_warning("assignment makes integer from pointer without a cast");
3261 } else if (sbt
== VT_STRUCT
) {
3262 goto case_VT_STRUCT
;
3264 /* XXX: more tests */
3268 if (!is_compatible_unqualified_types(dt
, st
)) {
3270 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3271 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3272 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3279 /* store vtop in lvalue pushed on stack */
3280 ST_FUNC
void vstore(void)
3282 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3284 ft
= vtop
[-1].type
.t
;
3285 sbt
= vtop
->type
.t
& VT_BTYPE
;
3286 dbt
= ft
& VT_BTYPE
;
3287 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3288 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3289 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3290 /* optimize char/short casts */
3291 delayed_cast
= VT_MUSTCAST
;
3292 vtop
->type
.t
= ft
& VT_TYPE
;
3293 /* XXX: factorize */
3294 if (ft
& VT_CONSTANT
)
3295 tcc_warning("assignment of read-only location");
3298 if (!(ft
& VT_BITFIELD
))
3299 gen_assign_cast(&vtop
[-1].type
);
3302 if (sbt
== VT_STRUCT
) {
3303 /* if structure, only generate pointer */
3304 /* structure assignment : generate memcpy */
3305 /* XXX: optimize if small size */
3306 size
= type_size(&vtop
->type
, &align
);
3310 vtop
->type
.t
= VT_PTR
;
3313 /* address of memcpy() */
3316 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3317 else if(!(align
& 3))
3318 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3321 /* Use memmove, rather than memcpy, as dest and src may be same: */
3322 vpush_global_sym(&func_old_type
, TOK_memmove
);
3327 vtop
->type
.t
= VT_PTR
;
3333 /* leave source on stack */
3334 } else if (ft
& VT_BITFIELD
) {
3335 /* bitfield store handling */
3337 /* save lvalue as expression result (example: s.b = s.a = n;) */
3338 vdup(), vtop
[-1] = vtop
[-2];
3340 bit_pos
= BIT_POS(ft
);
3341 bit_size
= BIT_SIZE(ft
);
3342 /* remove bit field info to avoid loops */
3343 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3345 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3346 gen_cast(&vtop
[-1].type
);
3347 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3350 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3351 if (r
== VT_STRUCT
) {
3352 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3353 store_packed_bf(bit_pos
, bit_size
);
3355 unsigned long long mask
= (1ULL << bit_size
) - 1;
3356 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3358 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3361 vpushi((unsigned)mask
);
3368 /* duplicate destination */
3371 /* load destination, mask and or with source */
3372 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3373 vpushll(~(mask
<< bit_pos
));
3375 vpushi(~((unsigned)mask
<< bit_pos
));
3380 /* ... and discard */
3383 } else if (dbt
== VT_VOID
) {
3386 #ifdef CONFIG_TCC_BCHECK
3387 /* bound check case */
3388 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3397 #ifdef TCC_TARGET_X86_64
3398 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3400 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3405 r
= gv(rc
); /* generate value */
3406 /* if lvalue was saved on stack, must read it */
3407 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3409 t
= get_reg(RC_INT
);
3415 sv
.r
= VT_LOCAL
| VT_LVAL
;
3416 sv
.c
.i
= vtop
[-1].c
.i
;
3418 vtop
[-1].r
= t
| VT_LVAL
;
3420 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3422 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3423 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3425 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3426 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3428 vtop
[-1].type
.t
= load_type
;
3431 /* convert to int to increment easily */
3432 vtop
->type
.t
= addr_type
;
3438 vtop
[-1].type
.t
= load_type
;
3439 /* XXX: it works because r2 is spilled last ! */
3440 store(vtop
->r2
, vtop
- 1);
3446 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3447 vtop
->r
|= delayed_cast
;
3451 /* post defines POST/PRE add. c is the token ++ or -- */
3452 ST_FUNC
void inc(int post
, int c
)
3455 vdup(); /* save lvalue */
3457 gv_dup(); /* duplicate value */
3462 vpushi(c
- TOK_MID
);
3464 vstore(); /* store value */
3466 vpop(); /* if post op, return saved value */
3469 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3471 /* read the string */
3475 while (tok
== TOK_STR
) {
3476 /* XXX: add \0 handling too ? */
3477 cstr_cat(astr
, tokc
.str
.data
, -1);
3480 cstr_ccat(astr
, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.  (For other positive values this computes the
   position of the highest set bit plus one, i.e. floor(log2(i))+1.) */
static int exact_log2p1(int i)
{
    int ret;
    if (!i)
        return 0;
    /* consume 8 bits at a time, then narrow down */
    for (ret = 1; i >= 1 << 8; ret += 8)
        i >>= 8;
    if (i >= 16)
        ret += 4, i >>= 4;
    if (i >= 4)
        ret += 2, i >>= 2;
    if (i >= 2)
        ret++;
    return ret;
}
3501 /* Parse __attribute__((...)) GNUC extension. */
3502 static void parse_attribute(AttributeDef
*ad
)
3508 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3513 while (tok
!= ')') {
3514 if (tok
< TOK_IDENT
)
3515 expect("attribute name");
3527 tcc_warning("implicit declaration of function '%s'",
3528 get_tok_str(tok
, &tokc
));
3529 s
= external_global_sym(tok
, &func_old_type
);
3531 ad
->cleanup_func
= s
;
3539 parse_mult_str(&astr
, "section name");
3540 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3547 parse_mult_str(&astr
, "alias(\"target\")");
3548 ad
->alias_target
= /* save string as token, for later */
3549 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3553 case TOK_VISIBILITY1
:
3554 case TOK_VISIBILITY2
:
3556 parse_mult_str(&astr
,
3557 "visibility(\"default|hidden|internal|protected\")");
3558 if (!strcmp (astr
.data
, "default"))
3559 ad
->a
.visibility
= STV_DEFAULT
;
3560 else if (!strcmp (astr
.data
, "hidden"))
3561 ad
->a
.visibility
= STV_HIDDEN
;
3562 else if (!strcmp (astr
.data
, "internal"))
3563 ad
->a
.visibility
= STV_INTERNAL
;
3564 else if (!strcmp (astr
.data
, "protected"))
3565 ad
->a
.visibility
= STV_PROTECTED
;
3567 expect("visibility(\"default|hidden|internal|protected\")");
3576 if (n
<= 0 || (n
& (n
- 1)) != 0)
3577 tcc_error("alignment must be a positive power of two");
3582 ad
->a
.aligned
= exact_log2p1(n
);
3583 if (n
!= 1 << (ad
->a
.aligned
- 1))
3584 tcc_error("alignment of %d is larger than implemented", n
);
3596 /* currently, no need to handle it because tcc does not
3597 track unused objects */
3601 ad
->f
.func_noreturn
= 1;
3606 ad
->f
.func_call
= FUNC_CDECL
;
3611 ad
->f
.func_call
= FUNC_STDCALL
;
3613 #ifdef TCC_TARGET_I386
3623 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3629 ad
->f
.func_call
= FUNC_FASTCALLW
;
3636 ad
->attr_mode
= VT_LLONG
+ 1;
3639 ad
->attr_mode
= VT_BYTE
+ 1;
3642 ad
->attr_mode
= VT_SHORT
+ 1;
3646 ad
->attr_mode
= VT_INT
+ 1;
3649 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3656 ad
->a
.dllexport
= 1;
3658 case TOK_NODECORATE
:
3659 ad
->a
.nodecorate
= 1;
3662 ad
->a
.dllimport
= 1;
3665 if (tcc_state
->warn_unsupported
)
3666 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3667 /* skip parameters */
3669 int parenthesis
= 0;
3673 else if (tok
== ')')
3676 } while (parenthesis
&& tok
!= -1);
3689 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3693 while ((s
= s
->next
) != NULL
) {
3694 if ((s
->v
& SYM_FIELD
) &&
3695 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3696 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3697 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
3709 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3711 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3712 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3713 int pcc
= !tcc_state
->ms_bitfields
;
3714 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3721 prevbt
= VT_STRUCT
; /* make it never match */
3726 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3727 if (f
->type
.t
& VT_BITFIELD
)
3728 bit_size
= BIT_SIZE(f
->type
.t
);
3731 size
= type_size(&f
->type
, &align
);
3732 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3735 if (pcc
&& bit_size
== 0) {
3736 /* in pcc mode, packing does not affect zero-width bitfields */
3739 /* in pcc mode, attribute packed overrides if set. */
3740 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3743 /* pragma pack overrides align if lesser and packs bitfields always */
3746 if (pragma_pack
< align
)
3747 align
= pragma_pack
;
3748 /* in pcc mode pragma pack also overrides individual align */
3749 if (pcc
&& pragma_pack
< a
)
3753 /* some individual align was specified */
3757 if (type
->ref
->type
.t
== VT_UNION
) {
3758 if (pcc
&& bit_size
>= 0)
3759 size
= (bit_size
+ 7) >> 3;
3764 } else if (bit_size
< 0) {
3766 c
+= (bit_pos
+ 7) >> 3;
3767 c
= (c
+ align
- 1) & -align
;
3776 /* A bit-field. Layout is more complicated. There are two
3777 options: PCC (GCC) compatible and MS compatible */
3779 /* In PCC layout a bit-field is placed adjacent to the
3780 preceding bit-fields, except if:
3782 - an individual alignment was given
3783 - it would overflow its base type container and
3784 there is no packing */
3785 if (bit_size
== 0) {
3787 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3789 } else if (f
->a
.aligned
) {
3791 } else if (!packed
) {
3793 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3794 if (ofs
> size
/ align
)
3798 /* in pcc mode, long long bitfields have type int if they fit */
3799 if (size
== 8 && bit_size
<= 32)
3800 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3802 while (bit_pos
>= align
* 8)
3803 c
+= align
, bit_pos
-= align
* 8;
3806 /* In PCC layout named bit-fields influence the alignment
3807 of the containing struct using the base types alignment,
3808 except for packed fields (which here have correct align). */
3809 if (f
->v
& SYM_FIRST_ANOM
3810 // && bit_size // ??? gcc on ARM/rpi does that
3815 bt
= f
->type
.t
& VT_BTYPE
;
3816 if ((bit_pos
+ bit_size
> size
* 8)
3817 || (bit_size
> 0) == (bt
!= prevbt
)
3819 c
= (c
+ align
- 1) & -align
;
3822 /* In MS bitfield mode a bit-field run always uses
3823 at least as many bits as the underlying type.
3824 To start a new run it's also required that this
3825 or the last bit-field had non-zero width. */
3826 if (bit_size
|| prev_bit_size
)
3829 /* In MS layout the records alignment is normally
3830 influenced by the field, except for a zero-width
3831 field at the start of a run (but by further zero-width
3832 fields it is again). */
3833 if (bit_size
== 0 && prevbt
!= bt
)
3836 prev_bit_size
= bit_size
;
3839 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3840 | (bit_pos
<< VT_STRUCT_SHIFT
);
3841 bit_pos
+= bit_size
;
3843 if (align
> maxalign
)
3847 printf("set field %s offset %-2d size %-2d align %-2d",
3848 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3849 if (f
->type
.t
& VT_BITFIELD
) {
3850 printf(" pos %-2d bits %-2d",
3863 c
+= (bit_pos
+ 7) >> 3;
3865 /* store size and alignment */
3866 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3870 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3871 /* can happen if individual align for some member was given. In
3872 this case MSVC ignores maxalign when aligning the size */
3877 c
= (c
+ a
- 1) & -a
;
3881 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3884 /* check whether we can access bitfields by their type */
3885 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3889 if (0 == (f
->type
.t
& VT_BITFIELD
))
3893 bit_size
= BIT_SIZE(f
->type
.t
);
3896 bit_pos
= BIT_POS(f
->type
.t
);
3897 size
= type_size(&f
->type
, &align
);
3898 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3901 /* try to access the field using a different type */
3902 c0
= -1, s
= align
= 1;
3904 px
= f
->c
* 8 + bit_pos
;
3905 cx
= (px
>> 3) & -align
;
3906 px
= px
- (cx
<< 3);
3909 s
= (px
+ bit_size
+ 7) >> 3;
3919 s
= type_size(&t
, &align
);
3923 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3924 /* update offset and bit position */
3927 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3928 | (bit_pos
<< VT_STRUCT_SHIFT
);
3932 printf("FIX field %s offset %-2d size %-2d align %-2d "
3933 "pos %-2d bits %-2d\n",
3934 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3935 cx
, s
, align
, px
, bit_size
);
3938 /* fall back to load/store single-byte wise */
3939 f
->auxtype
= VT_STRUCT
;
3941 printf("FIX field %s : load byte-wise\n",
3942 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3948 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3949 static void struct_decl(CType
*type
, int u
)
3951 int v
, c
, size
, align
, flexible
;
3952 int bit_size
, bsize
, bt
;
3954 AttributeDef ad
, ad1
;
3957 memset(&ad
, 0, sizeof ad
);
3959 parse_attribute(&ad
);
3963 /* struct already defined ? return it */
3965 expect("struct/union/enum name");
3967 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3970 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3972 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3977 /* Record the original enum/struct/union token. */
3978 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3980 /* we put an undefined size for struct/union */
3981 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3982 s
->r
= 0; /* default alignment is zero as gcc */
3984 type
->t
= s
->type
.t
;
3990 tcc_error("struct/union/enum already defined");
3992 /* cannot be empty */
3993 /* non empty enums are not allowed */
3996 long long ll
= 0, pl
= 0, nl
= 0;
3999 /* enum symbols have static storage */
4000 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4004 expect("identifier");
4006 if (ss
&& !local_stack
)
4007 tcc_error("redefinition of enumerator '%s'",
4008 get_tok_str(v
, NULL
));
4012 ll
= expr_const64();
4014 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4016 *ps
= ss
, ps
= &ss
->next
;
4025 /* NOTE: we accept a trailing comma */
4030 /* set integral type of the enum */
4033 if (pl
!= (unsigned)pl
)
4034 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4036 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4037 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4038 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4040 /* set type for enum members */
4041 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4043 if (ll
== (int)ll
) /* default is int if it fits */
4045 if (t
.t
& VT_UNSIGNED
) {
4046 ss
->type
.t
|= VT_UNSIGNED
;
4047 if (ll
== (unsigned)ll
)
4050 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4051 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4056 while (tok
!= '}') {
4057 if (!parse_btype(&btype
, &ad1
)) {
4063 tcc_error("flexible array member '%s' not at the end of struct",
4064 get_tok_str(v
, NULL
));
4070 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4072 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4073 expect("identifier");
4075 int v
= btype
.ref
->v
;
4076 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4077 if (tcc_state
->ms_extensions
== 0)
4078 expect("identifier");
4082 if (type_size(&type1
, &align
) < 0) {
4083 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4086 tcc_error("field '%s' has incomplete type",
4087 get_tok_str(v
, NULL
));
4089 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4090 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4091 (type1
.t
& VT_STORAGE
))
4092 tcc_error("invalid type for '%s'",
4093 get_tok_str(v
, NULL
));
4097 bit_size
= expr_const();
4098 /* XXX: handle v = 0 case for messages */
4100 tcc_error("negative width in bit-field '%s'",
4101 get_tok_str(v
, NULL
));
4102 if (v
&& bit_size
== 0)
4103 tcc_error("zero width for bit-field '%s'",
4104 get_tok_str(v
, NULL
));
4105 parse_attribute(&ad1
);
4107 size
= type_size(&type1
, &align
);
4108 if (bit_size
>= 0) {
4109 bt
= type1
.t
& VT_BTYPE
;
4115 tcc_error("bitfields must have scalar type");
4117 if (bit_size
> bsize
) {
4118 tcc_error("width of '%s' exceeds its type",
4119 get_tok_str(v
, NULL
));
4120 } else if (bit_size
== bsize
4121 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4122 /* no need for bit fields */
4124 } else if (bit_size
== 64) {
4125 tcc_error("field width 64 not implemented");
4127 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4129 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4132 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4133 /* Remember we've seen a real field to check
4134 for placement of flexible array member. */
4137 /* If member is a struct or bit-field, enforce
4138 placing into the struct (as anonymous). */
4140 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4145 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4150 if (tok
== ';' || tok
== TOK_EOF
)
4157 parse_attribute(&ad
);
4158 struct_layout(type
, &ad
);
4163 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4165 merge_symattr(&ad
->a
, &s
->a
);
4166 merge_funcattr(&ad
->f
, &s
->f
);
4169 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4170 are added to the element type, copied because it could be a typedef. */
4171 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4173 while (type
->t
& VT_ARRAY
) {
4174 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4175 type
= &type
->ref
->type
;
4177 type
->t
|= qualifiers
;
4180 /* return 0 if no type declaration. otherwise, return the basic type
4183 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4185 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4189 memset(ad
, 0, sizeof(AttributeDef
));
4199 /* currently, we really ignore extension */
4209 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4210 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4211 tmbt
: tcc_error("too many basic types");
4214 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4219 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4236 memset(&ad1
, 0, sizeof(AttributeDef
));
4237 if (parse_btype(&type1
, &ad1
)) {
4238 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4240 n
= 1 << (ad1
.a
.aligned
- 1);
4242 type_size(&type1
, &n
);
4245 if (n
<= 0 || (n
& (n
- 1)) != 0)
4246 tcc_error("alignment must be a positive power of two");
4249 ad
->a
.aligned
= exact_log2p1(n
);
4253 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4254 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4255 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4256 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4263 #ifdef TCC_TARGET_ARM64
4265 /* GCC's __uint128_t appears in some Linux header files. Make it a
4266 synonym for long double to get the size and alignment right. */
4277 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4278 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4286 struct_decl(&type1
, VT_ENUM
);
4289 type
->ref
= type1
.ref
;
4292 struct_decl(&type1
, VT_STRUCT
);
4295 struct_decl(&type1
, VT_UNION
);
4298 /* type modifiers */
4303 parse_btype_qualify(type
, VT_CONSTANT
);
4311 parse_btype_qualify(type
, VT_VOLATILE
);
4318 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4319 tcc_error("signed and unsigned modifier");
4332 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4333 tcc_error("signed and unsigned modifier");
4334 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4350 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4351 tcc_error("multiple storage classes");
4362 /* currently, no need to handle it because tcc does not
4363 track unused objects */
4366 /* GNUC attribute */
4367 case TOK_ATTRIBUTE1
:
4368 case TOK_ATTRIBUTE2
:
4369 parse_attribute(ad
);
4370 if (ad
->attr_mode
) {
4371 u
= ad
->attr_mode
-1;
4372 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4380 parse_expr_type(&type1
);
4381 /* remove all storage modifiers except typedef */
4382 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4384 sym_to_attr(ad
, type1
.ref
);
4390 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4394 if (tok
== ':' && !in_generic
) {
4395 /* ignore if it's a label */
4400 t
&= ~(VT_BTYPE
|VT_LONG
);
4401 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4402 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4403 type
->ref
= s
->type
.ref
;
4405 parse_btype_qualify(type
, t
);
4407 /* get attributes from typedef */
4416 if (tcc_state
->char_is_unsigned
) {
4417 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4420 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4421 bt
= t
& (VT_BTYPE
|VT_LONG
);
4423 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4424 #ifdef TCC_TARGET_PE
4425 if (bt
== VT_LDOUBLE
)
4426 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4432 /* convert a function parameter type (array to pointer and function to
4433 function pointer) */
4434 static inline void convert_parameter_type(CType
*pt
)
4436 /* remove const and volatile qualifiers (XXX: const could be used
4437 to indicate a const function parameter */
4438 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4439 /* array must be transformed to pointer according to ANSI C */
4441 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4446 ST_FUNC
void parse_asm_str(CString
*astr
)
4449 parse_mult_str(astr
, "string constant");
4452 /* Parse an asm label and return the token */
4453 static int asm_label_instr(void)
4459 parse_asm_str(&astr
);
4462 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4464 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4469 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4471 int n
, l
, t1
, arg_size
, align
, unused_align
;
4472 Sym
**plast
, *s
, *first
;
4477 /* function type, or recursive declarator (return if so) */
4479 if (td
&& !(td
& TYPE_ABSTRACT
))
4483 else if (parse_btype(&pt
, &ad1
))
4486 merge_attr (ad
, &ad1
);
4495 /* read param name and compute offset */
4496 if (l
!= FUNC_OLD
) {
4497 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4499 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4500 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4501 tcc_error("parameter declared as void");
4505 expect("identifier");
4506 pt
.t
= VT_VOID
; /* invalid type */
4510 convert_parameter_type(&pt
);
4511 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4512 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4518 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4523 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4524 tcc_error("invalid type");
4527 /* if no parameters, then old type prototype */
4530 /* NOTE: const is ignored in returned type as it has a special
4531 meaning in gcc / C++ */
4532 type
->t
&= ~VT_CONSTANT
;
4533 /* some ancient pre-K&R C allows a function to return an array
4534 and the array brackets to be put after the arguments, such
4535 that "int c()[]" means something like "int[] c()" */
4538 skip(']'); /* only handle simple "[]" */
4541 /* we push a anonymous symbol which will contain the function prototype */
4542 ad
->f
.func_args
= arg_size
;
4543 ad
->f
.func_type
= l
;
4544 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4550 } else if (tok
== '[') {
4551 int saved_nocode_wanted
= nocode_wanted
;
4552 /* array definition */
4555 /* XXX The optional type-quals and static should only be accepted
4556 in parameter decls. The '*' as well, and then even only
4557 in prototypes (not function defs). */
4559 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4574 if (!local_stack
|| (storage
& VT_STATIC
))
4575 vpushi(expr_const());
4577 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4578 length must always be evaluated, even under nocode_wanted,
4579 so that its size slot is initialized (e.g. under sizeof
4584 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4587 tcc_error("invalid array size");
4589 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4590 tcc_error("size of variable length array should be an integer");
4596 /* parse next post type */
4597 post_type(type
, ad
, storage
, 0);
4599 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4600 tcc_error("declaration of an array of functions");
4601 if ((type
->t
& VT_BTYPE
) == VT_VOID
4602 || type_size(type
, &unused_align
) < 0)
4603 tcc_error("declaration of an array of incomplete type elements");
4605 t1
|= type
->t
& VT_VLA
;
4609 tcc_error("need explicit inner array size in VLAs");
4610 loc
-= type_size(&int_type
, &align
);
4614 vla_runtime_type_size(type
, &align
);
4616 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4622 nocode_wanted
= saved_nocode_wanted
;
4624 /* we push an anonymous symbol which will contain the array
4626 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4627 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4633 /* Parse a type declarator (except basic type), and return the type
4634 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4635 expected. 'type' should contain the basic type. 'ad' is the
4636 attribute definition of the basic type. It can be modified by
4637 type_decl(). If this (possibly abstract) declarator is a pointer chain
4638 it returns the innermost pointed to type (equals *type, but is a different
4639 pointer), otherwise returns type itself, that's used for recursive calls. */
4640 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4643 int qualifiers
, storage
;
4645 /* recursive type, remove storage bits first, apply them later again */
4646 storage
= type
->t
& VT_STORAGE
;
4647 type
->t
&= ~VT_STORAGE
;
4650 while (tok
== '*') {
4658 qualifiers
|= VT_CONSTANT
;
4663 qualifiers
|= VT_VOLATILE
;
4669 /* XXX: clarify attribute handling */
4670 case TOK_ATTRIBUTE1
:
4671 case TOK_ATTRIBUTE2
:
4672 parse_attribute(ad
);
4676 type
->t
|= qualifiers
;
4678 /* innermost pointed to type is the one for the first derivation */
4679 ret
= pointed_type(type
);
4683 /* This is possibly a parameter type list for abstract declarators
4684 ('int ()'), use post_type for testing this. */
4685 if (!post_type(type
, ad
, 0, td
)) {
4686 /* It's not, so it's a nested declarator, and the post operations
4687 apply to the innermost pointed to type (if any). */
4688 /* XXX: this is not correct to modify 'ad' at this point, but
4689 the syntax is not clear */
4690 parse_attribute(ad
);
4691 post
= type_decl(type
, ad
, v
, td
);
4695 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4696 /* type identifier */
4701 if (!(td
& TYPE_ABSTRACT
))
4702 expect("identifier");
4705 post_type(post
, ad
, storage
, 0);
4706 parse_attribute(ad
);
4711 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4712 ST_FUNC
int lvalue_type(int t
)
4717 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4719 else if (bt
== VT_SHORT
)
4723 if (t
& VT_UNSIGNED
)
4724 r
|= VT_LVAL_UNSIGNED
;
4728 /* indirection with full error checking and bound check */
4729 ST_FUNC
void indir(void)
4731 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4732 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4736 if (vtop
->r
& VT_LVAL
)
4738 vtop
->type
= *pointed_type(&vtop
->type
);
4739 /* Arrays and functions are never lvalues */
4740 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4741 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4742 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4743 /* if bound checking, the referenced pointer must be checked */
4744 #ifdef CONFIG_TCC_BCHECK
4745 if (tcc_state
->do_bounds_check
)
4746 vtop
->r
|= VT_MUSTBOUND
;
4751 /* pass a parameter to a function and do type checking and casting */
4752 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4757 func_type
= func
->f
.func_type
;
4758 if (func_type
== FUNC_OLD
||
4759 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4760 /* default casting : only need to convert float to double */
4761 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4762 gen_cast_s(VT_DOUBLE
);
4763 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4764 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4765 type
.ref
= vtop
->type
.ref
;
4768 } else if (arg
== NULL
) {
4769 tcc_error("too many arguments to function");
4772 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4773 gen_assign_cast(&type
);
4777 /* parse an expression and return its type without any side effect. */
4778 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4787 /* parse an expression of the form '(type)' or '(expr)' and return its
4789 static void parse_expr_type(CType
*type
)
4795 if (parse_btype(type
, &ad
)) {
4796 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4798 expr_type(type
, gexpr
);
4803 static void parse_type(CType
*type
)
4808 if (!parse_btype(type
, &ad
)) {
4811 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4814 static void parse_builtin_params(int nc
, const char *args
)
4821 while ((c
= *args
++)) {
4825 case 'e': expr_eq(); continue;
4826 case 't': parse_type(&t
); vpush(&t
); continue;
4827 default: tcc_error("internal error"); break;
4835 ST_FUNC
void unary(void)
4837 int n
, t
, align
, size
, r
, sizeof_caller
;
4842 sizeof_caller
= in_sizeof
;
4845 /* XXX: GCC 2.95.3 does not generate a table although it should be
4853 #ifdef TCC_TARGET_PE
4854 t
= VT_SHORT
|VT_UNSIGNED
;
4862 vsetc(&type
, VT_CONST
, &tokc
);
4866 t
= VT_INT
| VT_UNSIGNED
;
4872 t
= VT_LLONG
| VT_UNSIGNED
;
4884 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4887 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4889 case TOK___FUNCTION__
:
4891 goto tok_identifier
;
4897 /* special function name identifier */
4898 len
= strlen(funcname
) + 1;
4899 /* generate char[len] type */
4904 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4905 if (!NODATA_WANTED
) {
4906 ptr
= section_ptr_add(data_section
, len
);
4907 memcpy(ptr
, funcname
, len
);
4913 #ifdef TCC_TARGET_PE
4914 t
= VT_SHORT
| VT_UNSIGNED
;
4920 /* string parsing */
4922 if (tcc_state
->char_is_unsigned
)
4923 t
= VT_BYTE
| VT_UNSIGNED
;
4925 if (tcc_state
->warn_write_strings
)
4930 memset(&ad
, 0, sizeof(AttributeDef
));
4931 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4936 if (parse_btype(&type
, &ad
)) {
4937 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4939 /* check ISOC99 compound literal */
4941 /* data is allocated locally by default */
4946 /* all except arrays are lvalues */
4947 if (!(type
.t
& VT_ARRAY
))
4948 r
|= lvalue_type(type
.t
);
4949 memset(&ad
, 0, sizeof(AttributeDef
));
4950 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4952 if (sizeof_caller
) {
4959 } else if (tok
== '{') {
4960 int saved_nocode_wanted
= nocode_wanted
;
4962 tcc_error("expected constant");
4963 /* save all registers */
4965 /* statement expression : we do not accept break/continue
4966 inside as GCC does. We do retain the nocode_wanted state,
4967 as statement expressions can't ever be entered from the
4968 outside, so any reactivation of code emission (from labels
4969 or loop heads) can be disabled again after the end of it. */
4971 nocode_wanted
= saved_nocode_wanted
;
4986 /* functions names must be treated as function pointers,
4987 except for unary '&' and sizeof. Since we consider that
4988 functions are not lvalues, we only have to handle it
4989 there and in function calls. */
4990 /* arrays can also be used although they are not lvalues */
4991 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4992 !(vtop
->type
.t
& VT_ARRAY
))
4994 mk_pointer(&vtop
->type
);
5000 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5001 gen_cast_s(VT_BOOL
);
5002 vtop
->c
.i
= !vtop
->c
.i
;
5003 } else if (vtop
->r
== VT_CMP
) {
5005 n
= vtop
->jfalse
, vtop
->jfalse
= vtop
->jtrue
, vtop
->jtrue
= n
;
5020 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5021 tcc_error("pointer not accepted for unary plus");
5022 /* In order to force cast, we add zero, except for floating point
5023 where we really need an noop (otherwise -0.0 will be transformed
5025 if (!is_float(vtop
->type
.t
)) {
5037 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5039 if (vtop
[1].r
& VT_SYM
)
5040 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5041 size
= type_size(&type
, &align
);
5042 if (s
&& s
->a
.aligned
)
5043 align
= 1 << (s
->a
.aligned
- 1);
5044 if (t
== TOK_SIZEOF
) {
5045 if (!(type
.t
& VT_VLA
)) {
5047 tcc_error("sizeof applied to an incomplete type");
5050 vla_runtime_type_size(&type
, &align
);
5055 vtop
->type
.t
|= VT_UNSIGNED
;
5058 case TOK_builtin_expect
:
5059 /* __builtin_expect is a no-op for now */
5060 parse_builtin_params(0, "ee");
5063 case TOK_builtin_types_compatible_p
:
5064 parse_builtin_params(0, "tt");
5065 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5066 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5067 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5071 case TOK_builtin_choose_expr
:
5098 case TOK_builtin_constant_p
:
5099 parse_builtin_params(1, "e");
5100 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5104 case TOK_builtin_frame_address
:
5105 case TOK_builtin_return_address
:
5111 if (tok
!= TOK_CINT
) {
5112 tcc_error("%s only takes positive integers",
5113 tok1
== TOK_builtin_return_address
?
5114 "__builtin_return_address" :
5115 "__builtin_frame_address");
5117 level
= (uint32_t)tokc
.i
;
5122 vset(&type
, VT_LOCAL
, 0); /* local frame */
5124 mk_pointer(&vtop
->type
);
5125 indir(); /* -> parent frame */
5127 if (tok1
== TOK_builtin_return_address
) {
5128 // assume return address is just above frame pointer on stack
5131 mk_pointer(&vtop
->type
);
5136 #ifdef TCC_TARGET_X86_64
5137 #ifdef TCC_TARGET_PE
5138 case TOK_builtin_va_start
:
5139 parse_builtin_params(0, "ee");
5140 r
= vtop
->r
& VT_VALMASK
;
5144 tcc_error("__builtin_va_start expects a local variable");
5146 vtop
->type
= char_pointer_type
;
5151 case TOK_builtin_va_arg_types
:
5152 parse_builtin_params(0, "t");
5153 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5160 #ifdef TCC_TARGET_ARM64
5161 case TOK___va_start
: {
5162 parse_builtin_params(0, "ee");
5166 vtop
->type
.t
= VT_VOID
;
5169 case TOK___va_arg
: {
5170 parse_builtin_params(0, "et");
5178 case TOK___arm64_clear_cache
: {
5179 parse_builtin_params(0, "ee");
5182 vtop
->type
.t
= VT_VOID
;
5186 /* pre operations */
5197 t
= vtop
->type
.t
& VT_BTYPE
;
5199 /* In IEEE negate(x) isn't subtract(0,x), but rather
5203 vtop
->c
.f
= -1.0 * 0.0;
5204 else if (t
== VT_DOUBLE
)
5205 vtop
->c
.d
= -1.0 * 0.0;
5207 vtop
->c
.ld
= -1.0 * 0.0;
5215 goto tok_identifier
;
5217 /* allow to take the address of a label */
5218 if (tok
< TOK_UIDENT
)
5219 expect("label identifier");
5220 s
= label_find(tok
);
5222 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5224 if (s
->r
== LABEL_DECLARED
)
5225 s
->r
= LABEL_FORWARD
;
5228 s
->type
.t
= VT_VOID
;
5229 mk_pointer(&s
->type
);
5230 s
->type
.t
|= VT_STATIC
;
5232 vpushsym(&s
->type
, s
);
5238 CType controlling_type
;
5239 int has_default
= 0;
5242 TokenString
*str
= NULL
;
5243 int saved_const_wanted
= const_wanted
;
5248 expr_type(&controlling_type
, expr_eq
);
5249 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5250 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5251 mk_pointer(&controlling_type
);
5252 const_wanted
= saved_const_wanted
;
5256 if (tok
== TOK_DEFAULT
) {
5258 tcc_error("too many 'default'");
5264 AttributeDef ad_tmp
;
5269 parse_btype(&cur_type
, &ad_tmp
);
5272 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5273 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5275 tcc_error("type match twice");
5285 skip_or_save_block(&str
);
5287 skip_or_save_block(NULL
);
5294 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5295 tcc_error("type '%s' does not match any association", buf
);
5297 begin_macro(str
, 1);
5306 // special qnan , snan and infinity values
5311 vtop
->type
.t
= VT_FLOAT
;
5316 goto special_math_val
;
5319 goto special_math_val
;
5326 expect("identifier");
5328 if (!s
|| IS_ASM_SYM(s
)) {
5329 const char *name
= get_tok_str(t
, NULL
);
5331 tcc_error("'%s' undeclared", name
);
5332 /* for simple function calls, we tolerate undeclared
5333 external reference to int() function */
5334 if (tcc_state
->warn_implicit_function_declaration
5335 #ifdef TCC_TARGET_PE
5336 /* people must be warned about using undeclared WINAPI functions
5337 (which usually start with uppercase letter) */
5338 || (name
[0] >= 'A' && name
[0] <= 'Z')
5341 tcc_warning("implicit declaration of function '%s'", name
);
5342 s
= external_global_sym(t
, &func_old_type
);
5346 /* A symbol that has a register is a local register variable,
5347 which starts out as VT_LOCAL value. */
5348 if ((r
& VT_VALMASK
) < VT_CONST
)
5349 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5351 vset(&s
->type
, r
, s
->c
);
5352 /* Point to s as backpointer (even without r&VT_SYM).
5353 Will be used by at least the x86 inline asm parser for
5359 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5360 vtop
->c
.i
= s
->enum_val
;
5365 /* post operations */
5367 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5370 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5371 int qualifiers
, cumofs
= 0;
5373 if (tok
== TOK_ARROW
)
5375 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5378 /* expect pointer on structure */
5379 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5380 expect("struct or union");
5381 if (tok
== TOK_CDOUBLE
)
5382 expect("field name");
5384 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5385 expect("field name");
5386 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5388 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5389 /* add field offset to pointer */
5390 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5391 vpushi(cumofs
+ s
->c
);
5393 /* change type to field type, and set to lvalue */
5394 vtop
->type
= s
->type
;
5395 vtop
->type
.t
|= qualifiers
;
5396 /* an array is never an lvalue */
5397 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5398 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5399 #ifdef CONFIG_TCC_BCHECK
5400 /* if bound checking, the referenced pointer must be checked */
5401 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5402 vtop
->r
|= VT_MUSTBOUND
;
5406 } else if (tok
== '[') {
5412 } else if (tok
== '(') {
5415 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5418 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5419 /* pointer test (no array accepted) */
5420 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5421 vtop
->type
= *pointed_type(&vtop
->type
);
5422 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5426 expect("function pointer");
5429 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5431 /* get return type */
5434 sa
= s
->next
; /* first parameter */
5435 nb_args
= regsize
= 0;
5437 /* compute first implicit argument if a structure is returned */
5438 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5439 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5440 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5441 &ret_align
, ®size
);
5443 /* get some space for the returned structure */
5444 size
= type_size(&s
->type
, &align
);
5445 #ifdef TCC_TARGET_ARM64
5446 /* On arm64, a small struct is return in registers.
5447 It is much easier to write it to memory if we know
5448 that we are allowed to write some extra bytes, so
5449 round the allocated space up to a power of 2: */
5451 while (size
& (size
- 1))
5452 size
= (size
| (size
- 1)) + 1;
5454 loc
= (loc
- size
) & -align
;
5456 ret
.r
= VT_LOCAL
| VT_LVAL
;
5457 /* pass it as 'int' to avoid structure arg passing
5459 vseti(VT_LOCAL
, loc
);
5469 /* return in register */
5470 if (is_float(ret
.type
.t
)) {
5471 ret
.r
= reg_fret(ret
.type
.t
);
5472 #ifdef TCC_TARGET_X86_64
5473 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5477 #ifndef TCC_TARGET_ARM64
5478 #ifndef TCC_TARGET_RISCV64
5479 #ifdef TCC_TARGET_X86_64
5480 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5482 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5494 gfunc_param_typed(s
, sa
);
5504 tcc_error("too few arguments to function");
5506 gfunc_call(nb_args
);
5509 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5510 vsetc(&ret
.type
, r
, &ret
.c
);
5511 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5514 /* handle packed struct return */
5515 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5518 size
= type_size(&s
->type
, &align
);
5519 /* We're writing whole regs often, make sure there's enough
5520 space. Assume register size is power of 2. */
5521 if (regsize
> align
)
5523 loc
= (loc
- size
) & -align
;
5527 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5531 if (--ret_nregs
== 0)
5535 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5537 if (s
->f
.func_noreturn
)
5545 ST_FUNC
void expr_prod(void)
5550 while (tok
== '*' || tok
== '/' || tok
== '%') {
5558 ST_FUNC
void expr_sum(void)
5563 while (tok
== '+' || tok
== '-') {
5571 static void expr_shift(void)
5576 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5584 static void expr_cmp(void)
5589 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5590 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5598 static void expr_cmpeq(void)
5603 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5611 static void expr_and(void)
5614 while (tok
== '&') {
5621 static void expr_xor(void)
5624 while (tok
== '^') {
5631 static void expr_or(void)
5634 while (tok
== '|') {
5641 static int condition_3way(void);
5643 static void expr_landor(void(*e_fn
)(void), int e_op
, int i
)
5645 int t
= 0, cc
= 1, f
= 0, c
;
5647 c
= f
? i
: condition_3way();
5649 save_regs(1), cc
= 0;
5650 } else if (c
!= i
) {
5651 nocode_wanted
++, f
= 1;
5673 static void expr_land(void)
5676 if (tok
== TOK_LAND
)
5677 expr_landor(expr_or
, TOK_LAND
, 1);
5680 static void expr_lor(void)
5684 expr_landor(expr_land
, TOK_LOR
, 0);
5687 /* Assuming vtop is a value used in a conditional context
5688 (i.e. compared with zero) return 0 if it's false, 1 if
5689 true and -1 if it can't be statically determined. */
5690 static int condition_3way(void)
5693 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5694 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5696 gen_cast_s(VT_BOOL
);
5703 static int is_cond_bool(SValue
*sv
)
5705 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
5706 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
5707 return (unsigned)sv
->c
.i
< 2;
5708 if (sv
->r
== VT_CMP
)
5713 static void expr_cond(void)
5715 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5717 CType type
, type1
, type2
;
5723 c
= condition_3way();
5724 g
= (tok
== ':' && gnu_ext
);
5734 /* needed to avoid having different registers saved in
5737 if (is_float(vtop
->type
.t
)) {
5739 #ifdef TCC_TARGET_X86_64
5740 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5751 ncw_prev
= nocode_wanted
;
5758 if (c
< 0 && vtop
->r
== VT_CMP
) {
5764 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5765 mk_pointer(&vtop
->type
);
5767 sv
= *vtop
; /* save value to handle it later */
5768 vtop
--; /* no vpop so that FP stack is not flushed */
5778 nocode_wanted
= ncw_prev
;
5784 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
5785 if (sv
.r
== VT_CMP
) {
5796 nocode_wanted
= ncw_prev
;
5797 // tcc_warning("two conditions expr_cond");
5801 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5802 mk_pointer(&vtop
->type
);
5805 bt1
= t1
& VT_BTYPE
;
5807 bt2
= t2
& VT_BTYPE
;
5810 /* cast operands to correct type according to ISOC rules */
5811 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5812 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5813 } else if (is_float(bt1
) || is_float(bt2
)) {
5814 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5815 type
.t
= VT_LDOUBLE
;
5817 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5822 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5823 /* cast to biggest op */
5824 type
.t
= VT_LLONG
| VT_LONG
;
5825 if (bt1
== VT_LLONG
)
5827 if (bt2
== VT_LLONG
)
5829 /* convert to unsigned if it does not fit in a long long */
5830 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5831 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5832 type
.t
|= VT_UNSIGNED
;
5833 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5834 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5835 /* If one is a null ptr constant the result type
5837 if (is_null_pointer (vtop
)) type
= type1
;
5838 else if (is_null_pointer (&sv
)) type
= type2
;
5839 else if (bt1
!= bt2
)
5840 tcc_error("incompatible types in conditional expressions");
5842 CType
*pt1
= pointed_type(&type1
);
5843 CType
*pt2
= pointed_type(&type2
);
5844 int pbt1
= pt1
->t
& VT_BTYPE
;
5845 int pbt2
= pt2
->t
& VT_BTYPE
;
5846 int newquals
, copied
= 0;
5847 /* pointers to void get preferred, otherwise the
5848 pointed to types minus qualifs should be compatible */
5849 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5850 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5851 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5852 tcc_warning("pointer type mismatch in conditional expression\n");
5854 /* combine qualifs */
5855 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
5856 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
5859 /* copy the pointer target symbol */
5860 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5863 pointed_type(&type
)->t
|= newquals
;
5865 /* pointers to incomplete arrays get converted to
5866 pointers to completed ones if possible */
5867 if (pt1
->t
& VT_ARRAY
5868 && pt2
->t
& VT_ARRAY
5869 && pointed_type(&type
)->ref
->c
< 0
5870 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
5873 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5875 pointed_type(&type
)->ref
=
5876 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
5877 0, pointed_type(&type
)->ref
->c
);
5878 pointed_type(&type
)->ref
->c
=
5879 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
5882 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5883 /* XXX: test structure compatibility */
5884 type
= bt1
== VT_STRUCT
? type1
: type2
;
5886 /* integer operations */
5887 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5888 /* convert to unsigned if it does not fit in an integer */
5889 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5890 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5891 type
.t
|= VT_UNSIGNED
;
5893 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5894 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5895 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5897 /* now we convert second operand */
5901 mk_pointer(&vtop
->type
);
5903 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5908 if (is_float(type
.t
)) {
5910 #ifdef TCC_TARGET_X86_64
5911 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5915 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5916 /* for long longs, we use fixed registers to avoid having
5917 to handle a complicated move */
5927 nocode_wanted
= ncw_prev
;
5929 /* this is horrible, but we must also convert first
5935 mk_pointer(&vtop
->type
);
5937 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5943 move_reg(r2
, r1
, type
.t
);
5954 static void expr_eq(void)
5960 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5961 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5962 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5977 ST_FUNC
void gexpr(void)
5988 /* parse a constant expression and return value in vtop. */
5989 static void expr_const1(void)
5998 /* parse an integer constant and return its value. */
5999 static inline int64_t expr_const64(void)
6003 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6004 expect("constant expression");
6010 /* parse an integer constant and return its value.
6011 Complain if it doesn't fit 32bit (signed or unsigned). */
6012 ST_FUNC
int expr_const(void)
6015 int64_t wc
= expr_const64();
6017 if (c
!= wc
&& (unsigned)c
!= wc
)
6018 tcc_error("constant exceeds 32 bit");
6022 /* ------------------------------------------------------------------------- */
6023 /* return from function */
6025 #ifndef TCC_TARGET_ARM64
6026 #ifndef TCC_TARGET_RISCV64
6027 static void gfunc_return(CType
*func_type
)
6029 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6030 CType type
, ret_type
;
6031 int ret_align
, ret_nregs
, regsize
;
6032 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6033 &ret_align
, ®size
);
6034 if (0 == ret_nregs
) {
6035 /* if returning structure, must copy it to implicit
6036 first pointer arg location */
6039 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6042 /* copy structure value to pointer */
6045 /* returning structure packed into registers */
6046 int r
, size
, addr
, align
;
6047 size
= type_size(func_type
,&align
);
6048 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6049 (vtop
->c
.i
& (ret_align
-1)))
6050 && (align
& (ret_align
-1))) {
6051 loc
= (loc
- size
) & -ret_align
;
6054 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6058 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6060 vtop
->type
= ret_type
;
6061 if (is_float(ret_type
.t
))
6062 r
= rc_fret(ret_type
.t
);
6073 if (--ret_nregs
== 0)
6075 /* We assume that when a structure is returned in multiple
6076 registers, their classes are consecutive values of the
6079 vtop
->c
.i
+= regsize
;
6083 } else if (is_float(func_type
->t
)) {
6084 gv(rc_fret(func_type
->t
));
6088 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6093 static void check_func_return(void)
6095 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6097 if (!strcmp (funcname
, "main")
6098 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6099 /* main returns 0 by default */
6101 gen_assign_cast(&func_vt
);
6102 gfunc_return(&func_vt
);
6104 tcc_warning("function might return no value: '%s'", funcname
);
6108 /* ------------------------------------------------------------------------- */
/* qsort() comparator for an array of 'struct case_t *' switch-case
   entries: orders them by ascending v1 (the low bound of the case
   range).  pa/pb point to the array slots, i.e. to case_t pointers. */
static int case_cmp(const void *pa, const void *pb)
{
    int64_t a = (*(struct case_t **) pa)->v1;
    int64_t b = (*(struct case_t **) pb)->v1;
    /* 'a > b' evaluates to 0 or 1, so the whole expression yields
       -1 / 0 / 1 without risking overflow from 'a - b' on int64_t */
    return a < b ? -1 : a > b;
}
/* Emit a conditional test of the value on top of the value stack and
   resolve the resulting jump (chained onto forward-jump list 't') to
   code address 'a'.
   NOTE(review): the meaning of gvtst()'s first argument (0 here,
   presumably the "inverted" flag) is defined elsewhere in this file —
   confirm against gvtst's definition. */
static void gtst_addr(int t, int a)
{
    gsym_addr(gvtst(0, t), a);
}
6123 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6127 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6144 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6146 gcase(base
, len
/2, bsym
);
6150 base
+= e
; len
-= e
;
6160 if (p
->v1
== p
->v2
) {
6162 gtst_addr(0, p
->sym
);
6172 gtst_addr(0, p
->sym
);
6176 *bsym
= gjmp(*bsym
);
6179 /* ------------------------------------------------------------------------- */
6180 /* __attribute__((cleanup(fn))) */
6182 static void try_call_scope_cleanup(Sym
*stop
)
6184 Sym
*cls
= cur_scope
->cl
.s
;
6186 for (; cls
!= stop
; cls
= cls
->ncl
) {
6187 Sym
*fs
= cls
->next
;
6188 Sym
*vs
= cls
->prev_tok
;
6190 vpushsym(&fs
->type
, fs
);
6191 vset(&vs
->type
, vs
->r
, vs
->c
);
6193 mk_pointer(&vtop
->type
);
6199 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6204 if (!cur_scope
->cl
.s
)
6207 /* search NCA of both cleanup chains given parents and initial depth */
6208 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6209 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6211 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6213 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6216 try_call_scope_cleanup(cc
);
6219 /* call 'func' for each __attribute__((cleanup(func))) */
6220 static void block_cleanup(struct scope
*o
)
6224 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6225 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6230 try_call_scope_cleanup(o
->cl
.s
);
6231 pcl
->jnext
= gjmp(0);
6233 goto remove_pending
;
6243 try_call_scope_cleanup(o
->cl
.s
);
6246 /* ------------------------------------------------------------------------- */
6249 static void vla_restore(int loc
)
6252 gen_vla_sp_restore(loc
);
/* Called when leaving scope 'o': if the current scope allocated more
   VLAs than 'o' had at entry, restore the stack pointer to the
   location 'o' recorded, releasing that VLA storage. */
static void vla_leave(struct scope *o)
{
    if (o->vla.num < cur_scope->vla.num)
        vla_restore(o->vla.loc);
}
6261 /* ------------------------------------------------------------------------- */
6264 void new_scope(struct scope
*o
)
6266 /* copy and link previous scope */
6268 o
->prev
= cur_scope
;
6271 /* record local declaration stack position */
6272 o
->lstk
= local_stack
;
6273 o
->llstk
= local_label_stack
;
6278 void prev_scope(struct scope
*o
, int is_expr
)
6282 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6283 block_cleanup(o
->prev
);
6285 /* pop locally defined labels */
6286 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6288 /* In the is_expr case (a statement expression is finished here),
6289 vtop might refer to symbols on the local_stack. Either via the
6290 type or via vtop->sym. We can't pop those nor any that in turn
6291 might be referred to. To make it easier we don't roll back
6292 any symbols in that case; some upper level call to block() will
6293 do that. We do have to remove such symbols from the lookup
6294 tables, though. sym_pop will do that. */
6296 /* pop locally defined symbols */
6297 sym_pop(&local_stack
, o
->lstk
, is_expr
);
6299 cur_scope
= o
->prev
;
6303 /* leave a scope via break/continue(/goto) */
6304 void leave_scope(struct scope
*o
)
6308 try_call_scope_cleanup(o
->cl
.s
);
6312 /* ------------------------------------------------------------------------- */
6313 /* call block from 'for do while' loops */
6315 static void lblock(int *bsym
, int *csym
)
6317 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6318 int *b
= co
->bsym
, *c
= co
->csym
;
6332 static void block(int is_expr
)
6334 int a
, b
, c
, d
, e
, t
;
6338 /* default return value is (void) */
6340 vtop
->type
.t
= VT_VOID
;
6352 if (tok
== TOK_ELSE
) {
6357 gsym(d
); /* patch else jmp */
6362 } else if (t
== TOK_WHILE
) {
6374 } else if (t
== '{') {
6378 /* handle local labels declarations */
6379 while (tok
== TOK_LABEL
) {
6382 if (tok
< TOK_UIDENT
)
6383 expect("label identifier");
6384 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6386 } while (tok
== ',');
6390 while (tok
!= '}') {
6399 prev_scope(&o
, is_expr
);
6401 if (0 == local_scope
&& !nocode_wanted
)
6402 check_func_return();
6405 } else if (t
== TOK_RETURN
) {
6407 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6409 gexpr(), gen_assign_cast(&func_vt
);
6410 leave_scope(root_scope
);
6412 gfunc_return(&func_vt
);
6416 tcc_warning("'return' with no value.");
6418 /* jump unless last stmt in top-level block */
6419 if (tok
!= '}' || local_scope
!= 1)
6423 } else if (t
== TOK_BREAK
) {
6425 if (!cur_scope
->bsym
)
6426 tcc_error("cannot break");
6427 if (!cur_switch
|| cur_scope
->bsym
!= cur_switch
->bsym
)
6428 leave_scope(loop_scope
);
6430 leave_scope(cur_switch
->scope
);
6431 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6434 } else if (t
== TOK_CONTINUE
) {
6436 if (!cur_scope
->csym
)
6437 tcc_error("cannot continue");
6438 leave_scope(loop_scope
);
6439 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6442 } else if (t
== TOK_FOR
) {
6448 /* c99 for-loop init decl? */
6449 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6450 /* no, regular for-loop init expr */
6478 } else if (t
== TOK_DO
) {
6492 } else if (t
== TOK_SWITCH
) {
6493 struct switch_t
*saved
, sw
;
6500 sw
.scope
= cur_scope
;
6508 switchval
= *vtop
--;
6511 b
= gjmp(0); /* jump to first case */
6513 a
= gjmp(a
); /* add implicit break */
6517 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6518 for (b
= 1; b
< sw
.n
; b
++)
6519 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6520 tcc_error("duplicate case value");
6522 /* Our switch table sorting is signed, so the compared
6523 value needs to be as well when it's 64bit. */
6524 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6525 switchval
.type
.t
&= ~VT_UNSIGNED
;
6528 d
= 0, gcase(sw
.p
, sw
.n
, &d
);
6531 gsym_addr(d
, sw
.def_sym
);
6537 dynarray_reset(&sw
.p
, &sw
.n
);
6540 } else if (t
== TOK_CASE
) {
6541 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6544 cr
->v1
= cr
->v2
= expr_const64();
6545 if (gnu_ext
&& tok
== TOK_DOTS
) {
6547 cr
->v2
= expr_const64();
6548 if (cr
->v2
< cr
->v1
)
6549 tcc_warning("empty case range");
6552 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6555 goto block_after_label
;
6557 } else if (t
== TOK_DEFAULT
) {
6560 if (cur_switch
->def_sym
)
6561 tcc_error("too many 'default'");
6562 cur_switch
->def_sym
= gind();
6565 goto block_after_label
;
6567 } else if (t
== TOK_GOTO
) {
6568 vla_restore(root_scope
->vla
.loc
);
6569 if (tok
== '*' && gnu_ext
) {
6573 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6577 } else if (tok
>= TOK_UIDENT
) {
6578 s
= label_find(tok
);
6579 /* put forward definition if needed */
6581 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6582 else if (s
->r
== LABEL_DECLARED
)
6583 s
->r
= LABEL_FORWARD
;
6585 if (s
->r
& LABEL_FORWARD
) {
6586 /* start new goto chain for cleanups, linked via label->next */
6587 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
6588 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
6589 pending_gotos
->prev_tok
= s
;
6590 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
6591 pending_gotos
->next
= s
;
6593 s
->jnext
= gjmp(s
->jnext
);
6595 try_call_cleanup_goto(s
->cleanupstate
);
6596 gjmp_addr(s
->jnext
);
6601 expect("label identifier");
6605 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
6609 if (tok
== ':' && t
>= TOK_UIDENT
) {
6614 if (s
->r
== LABEL_DEFINED
)
6615 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6616 s
->r
= LABEL_DEFINED
;
6618 Sym
*pcl
; /* pending cleanup goto */
6619 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
6621 sym_pop(&s
->next
, NULL
, 0);
6625 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
6628 s
->cleanupstate
= cur_scope
->cl
.s
;
6631 vla_restore(cur_scope
->vla
.loc
);
6632 /* we accept this, but it is a mistake */
6634 tcc_warning("deprecated use of label at end of compound statement");
6640 /* expression case */
6656 /* This skips over a stream of tokens containing balanced {} and ()
6657 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6658 with a '{'). If STR then allocates and stores the skipped tokens
6659 in *STR. This doesn't check if () and {} are nested correctly,
6660 i.e. "({)}" is accepted. */
6661 static void skip_or_save_block(TokenString
**str
)
6663 int braces
= tok
== '{';
6666 *str
= tok_str_alloc();
6668 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6670 if (tok
== TOK_EOF
) {
6671 if (str
|| level
> 0)
6672 tcc_error("unexpected end of file");
6677 tok_str_add_tok(*str
);
6680 if (t
== '{' || t
== '(') {
6682 } else if (t
== '}' || t
== ')') {
6684 if (level
== 0 && braces
&& t
== '}')
6689 tok_str_add(*str
, -1);
6690 tok_str_add(*str
, 0);
6694 #define EXPR_CONST 1
6697 static void parse_init_elem(int expr_type
)
6699 int saved_global_expr
;
6702 /* compound literals must be allocated globally in this case */
6703 saved_global_expr
= global_expr
;
6706 global_expr
= saved_global_expr
;
6707 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6708 (compound literals). */
6709 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6710 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6711 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6712 #ifdef TCC_TARGET_PE
6713 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6716 tcc_error("initializer element is not constant");
6724 /* put zeros for variable based init */
6725 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6728 /* nothing to do because globals are already set to zero */
6730 vpush_global_sym(&func_old_type
, TOK_memset
);
6732 #ifdef TCC_TARGET_ARM
6744 #define DIF_SIZE_ONLY 2
6745 #define DIF_HAVE_ELEM 4
6747 /* t is the array or struct type. c is the array or struct
6748 address. cur_field is the pointer to the current
6749 field, for arrays the 'c' member contains the current start
6750 index. 'flags' is as in decl_initializer.
6751 'al' contains the already initialized length of the
6752 current container (starting at c). This returns the new length of that. */
6753 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6754 Sym
**cur_field
, int flags
, int al
)
6757 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6758 unsigned long corig
= c
;
6763 if (flags
& DIF_HAVE_ELEM
)
6766 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
6773 /* NOTE: we only support ranges for last designator */
6774 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6776 if (!(type
->t
& VT_ARRAY
))
6777 expect("array type");
6779 index
= index_last
= expr_const();
6780 if (tok
== TOK_DOTS
&& gnu_ext
) {
6782 index_last
= expr_const();
6786 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6788 tcc_error("invalid index");
6790 (*cur_field
)->c
= index_last
;
6791 type
= pointed_type(type
);
6792 elem_size
= type_size(type
, &align
);
6793 c
+= index
* elem_size
;
6794 nb_elems
= index_last
- index
+ 1;
6801 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6802 expect("struct/union type");
6804 f
= find_field(type
, l
, &cumofs
);
6817 } else if (!gnu_ext
) {
6822 if (type
->t
& VT_ARRAY
) {
6823 index
= (*cur_field
)->c
;
6824 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6825 tcc_error("index too large");
6826 type
= pointed_type(type
);
6827 c
+= index
* type_size(type
, &align
);
6830 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6831 *cur_field
= f
= f
->next
;
6833 tcc_error("too many field init");
6838 /* must put zero in holes (note that doing it that way
6839 ensures that it even works with designators) */
6840 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
6841 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6842 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
6844 /* XXX: make it more general */
6845 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
6846 unsigned long c_end
;
6851 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6852 for (i
= 1; i
< nb_elems
; i
++) {
6853 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6858 } else if (!NODATA_WANTED
) {
6859 c_end
= c
+ nb_elems
* elem_size
;
6860 if (c_end
> sec
->data_allocated
)
6861 section_realloc(sec
, c_end
);
6862 src
= sec
->data
+ c
;
6864 for(i
= 1; i
< nb_elems
; i
++) {
6866 memcpy(dst
, src
, elem_size
);
6870 c
+= nb_elems
* type_size(type
, &align
);
6876 /* store a value or an expression directly in global data or in local array */
6877 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6884 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6888 /* XXX: not portable */
6889 /* XXX: generate error if incorrect relocation */
6890 gen_assign_cast(&dtype
);
6891 bt
= type
->t
& VT_BTYPE
;
6893 if ((vtop
->r
& VT_SYM
)
6896 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6897 || (type
->t
& VT_BITFIELD
))
6898 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6900 tcc_error("initializer element is not computable at load time");
6902 if (NODATA_WANTED
) {
6907 size
= type_size(type
, &align
);
6908 section_reserve(sec
, c
+ size
);
6909 ptr
= sec
->data
+ c
;
6911 /* XXX: make code faster ? */
6912 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6913 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6914 /* XXX This rejects compound literals like
6915 '(void *){ptr}'. The problem is that '&sym' is
6916 represented the same way, which would be ruled out
6917 by the SYM_FIRST_ANOM check above, but also '"string"'
6918 in 'char *p = "string"' is represented the same
6919 with the type being VT_PTR and the symbol being an
6920 anonymous one. That is, there's no difference in vtop
6921 between '(void *){x}' and '&(void *){x}'. Ignore
6922 pointer typed entities here. Hopefully no real code
6923 will every use compound literals with scalar type. */
6924 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6925 /* These come from compound literals, memcpy stuff over. */
6929 esym
= elfsym(vtop
->sym
);
6930 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6931 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6933 /* We need to copy over all memory contents, and that
6934 includes relocations. Use the fact that relocs are
6935 created it order, so look from the end of relocs
6936 until we hit one before the copied region. */
6937 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6938 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6939 while (num_relocs
--) {
6941 if (rel
->r_offset
>= esym
->st_value
+ size
)
6943 if (rel
->r_offset
< esym
->st_value
)
6945 /* Note: if the same fields are initialized multiple
6946 times (possible with designators) then we possibly
6947 add multiple relocations for the same offset here.
6948 That would lead to wrong code, the last reloc needs
6949 to win. We clean this up later after the whole
6950 initializer is parsed. */
6951 put_elf_reloca(symtab_section
, sec
,
6952 c
+ rel
->r_offset
- esym
->st_value
,
6953 ELFW(R_TYPE
)(rel
->r_info
),
6954 ELFW(R_SYM
)(rel
->r_info
),
6964 if (type
->t
& VT_BITFIELD
) {
6965 int bit_pos
, bit_size
, bits
, n
;
6966 unsigned char *p
, v
, m
;
6967 bit_pos
= BIT_POS(vtop
->type
.t
);
6968 bit_size
= BIT_SIZE(vtop
->type
.t
);
6969 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6970 bit_pos
&= 7, bits
= 0;
6975 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6976 m
= ((1 << n
) - 1) << bit_pos
;
6977 *p
= (*p
& ~m
) | (v
& m
);
6978 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6982 /* XXX: when cross-compiling we assume that each type has the
6983 same representation on host and target, which is likely to
6984 be wrong in the case of long double */
6986 vtop
->c
.i
= vtop
->c
.i
!= 0;
6988 *(char *)ptr
|= vtop
->c
.i
;
6991 *(short *)ptr
|= vtop
->c
.i
;
6994 *(float*)ptr
= vtop
->c
.f
;
6997 *(double *)ptr
= vtop
->c
.d
;
7000 #if defined TCC_IS_NATIVE_387
7001 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7002 memcpy(ptr
, &vtop
->c
.ld
, 10);
7004 else if (sizeof (long double) == sizeof (double))
7005 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7007 else if (vtop
->c
.ld
== 0.0)
7011 if (sizeof(long double) == LDOUBLE_SIZE
)
7012 *(long double*)ptr
= vtop
->c
.ld
;
7013 else if (sizeof(double) == LDOUBLE_SIZE
)
7014 *(double *)ptr
= (double)vtop
->c
.ld
;
7016 tcc_error("can't cross compile long double constants");
7020 *(long long *)ptr
|= vtop
->c
.i
;
7027 addr_t val
= vtop
->c
.i
;
7029 if (vtop
->r
& VT_SYM
)
7030 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7032 *(addr_t
*)ptr
|= val
;
7034 if (vtop
->r
& VT_SYM
)
7035 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7036 *(addr_t
*)ptr
|= val
;
7042 int val
= vtop
->c
.i
;
7044 if (vtop
->r
& VT_SYM
)
7045 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7049 if (vtop
->r
& VT_SYM
)
7050 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7059 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7066 /* 't' contains the type and storage info. 'c' is the offset of the
7067 object in section 'sec'. If 'sec' is NULL, it means stack based
7068 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7069 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7070 size only evaluation is wanted (only for arrays). */
7071 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
7074 int len
, n
, no_oblock
, nb
, i
;
7080 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7081 /* In case of strings we have special handling for arrays, so
7082 don't consume them as initializer value (which would commit them
7083 to some anonymous symbol). */
7084 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7085 !(flags
& DIF_SIZE_ONLY
)) {
7086 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7087 flags
|= DIF_HAVE_ELEM
;
7090 if ((flags
& DIF_HAVE_ELEM
) &&
7091 !(type
->t
& VT_ARRAY
) &&
7092 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7093 The source type might have VT_CONSTANT set, which is
7094 of course assignable to non-const elements. */
7095 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7096 init_putv(type
, sec
, c
);
7097 } else if (type
->t
& VT_ARRAY
) {
7100 t1
= pointed_type(type
);
7101 size1
= type_size(t1
, &align1
);
7104 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7107 tcc_error("character array initializer must be a literal,"
7108 " optionally enclosed in braces");
7113 /* only parse strings here if correct type (otherwise: handle
7114 them as ((w)char *) expressions */
7115 if ((tok
== TOK_LSTR
&&
7116 #ifdef TCC_TARGET_PE
7117 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7119 (t1
->t
& VT_BTYPE
) == VT_INT
7121 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7123 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7126 /* compute maximum number of chars wanted */
7128 cstr_len
= tokc
.str
.size
;
7130 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
7133 if (n
>= 0 && nb
> (n
- len
))
7135 if (!(flags
& DIF_SIZE_ONLY
)) {
7137 tcc_warning("initializer-string for array is too long");
7138 /* in order to go faster for common case (char
7139 string in global variable, we handle it
7141 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
7143 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
7147 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
7149 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
7151 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
7158 /* only add trailing zero if enough storage (no
7159 warning in this case since it is standard) */
7160 if (n
< 0 || len
< n
) {
7161 if (!(flags
& DIF_SIZE_ONLY
)) {
7163 init_putv(t1
, sec
, c
+ (len
* size1
));
7174 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7175 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7176 flags
&= ~DIF_HAVE_ELEM
;
7177 if (type
->t
& VT_ARRAY
) {
7179 /* special test for multi dimensional arrays (may not
7180 be strictly correct if designators are used at the
7182 if (no_oblock
&& len
>= n
*size1
)
7185 if (s
->type
.t
== VT_UNION
)
7189 if (no_oblock
&& f
== NULL
)
7198 /* put zeros at the end */
7199 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7200 init_putz(sec
, c
+ len
, n
*size1
- len
);
7203 /* patch type size if needed, which happens only for array types */
7205 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7206 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7209 if ((flags
& DIF_FIRST
) || tok
== '{') {
7217 } else if (tok
== '{') {
7218 if (flags
& DIF_HAVE_ELEM
)
7221 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7223 } else if ((flags
& DIF_SIZE_ONLY
)) {
7224 /* If we supported only ISO C we wouldn't have to accept calling
7225 this on anything than an array if DIF_SIZE_ONLY (and even then
7226 only on the outermost level, so no recursion would be needed),
7227 because initializing a flex array member isn't supported.
7228 But GNU C supports it, so we need to recurse even into
7229 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7230 /* just skip expression */
7231 skip_or_save_block(NULL
);
7233 if (!(flags
& DIF_HAVE_ELEM
)) {
7234 /* This should happen only when we haven't parsed
7235 the init element above for fear of committing a
7236 string constant to memory too early. */
7237 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7238 expect("string constant");
7239 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7241 init_putv(type
, sec
, c
);
7245 /* parse an initializer for type 't' if 'has_init' is non zero, and
7246 allocate space in local or global data space ('r' is either
7247 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7248 variable 'v' of scope 'scope' is declared before initializers
7249 are parsed. If 'v' is zero, then a reference to the new object
7250 is put in the value stack. If 'has_init' is 2, a special parsing
7251 is done to handle string constants. */
7252 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7253 int has_init
, int v
, int scope
)
7255 int size
, align
, addr
;
7256 TokenString
*init_str
= NULL
;
7259 Sym
*flexible_array
;
7261 int saved_nocode_wanted
= nocode_wanted
;
7262 #ifdef CONFIG_TCC_BCHECK
7266 /* Always allocate static or global variables */
7267 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7268 nocode_wanted
|= 0x80000000;
7270 #ifdef CONFIG_TCC_BCHECK
7271 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7274 flexible_array
= NULL
;
7275 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7276 Sym
*field
= type
->ref
->next
;
7279 field
= field
->next
;
7280 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7281 flexible_array
= field
;
7285 size
= type_size(type
, &align
);
7286 /* If unknown size, we must evaluate it before
7287 evaluating initializers because
7288 initializers can generate global data too
7289 (e.g. string pointers or ISOC99 compound
7290 literals). It also simplifies local
7291 initializers handling */
7292 if (size
< 0 || (flexible_array
&& has_init
)) {
7294 tcc_error("unknown type size");
7295 /* get all init string */
7296 if (has_init
== 2) {
7297 init_str
= tok_str_alloc();
7298 /* only get strings */
7299 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7300 tok_str_add_tok(init_str
);
7303 tok_str_add(init_str
, -1);
7304 tok_str_add(init_str
, 0);
7306 skip_or_save_block(&init_str
);
7311 begin_macro(init_str
, 1);
7313 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7314 /* prepare second initializer parsing */
7315 macro_ptr
= init_str
->str
;
7318 /* if still unknown size, error */
7319 size
= type_size(type
, &align
);
7321 tcc_error("unknown type size");
7323 /* If there's a flex member and it was used in the initializer
7325 if (flexible_array
&&
7326 flexible_array
->type
.ref
->c
> 0)
7327 size
+= flexible_array
->type
.ref
->c
7328 * pointed_size(&flexible_array
->type
);
7329 /* take into account specified alignment if bigger */
7330 if (ad
->a
.aligned
) {
7331 int speca
= 1 << (ad
->a
.aligned
- 1);
7334 } else if (ad
->a
.packed
) {
7338 if (!v
&& NODATA_WANTED
)
7339 size
= 0, align
= 1;
7341 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7343 #ifdef CONFIG_TCC_BCHECK
7344 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7348 loc
= (loc
- size
) & -align
;
7350 #ifdef CONFIG_TCC_BCHECK
7351 /* handles bounds */
7352 /* XXX: currently, since we do only one pass, we cannot track
7353 '&' operators, so we add only arrays */
7354 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7356 /* add padding between regions */
7358 /* then add local bound info */
7359 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7360 bounds_ptr
[0] = addr
;
7361 bounds_ptr
[1] = size
;
7365 /* local variable */
7366 #ifdef CONFIG_TCC_ASM
7367 if (ad
->asm_label
) {
7368 int reg
= asm_parse_regvar(ad
->asm_label
);
7370 r
= (r
& ~VT_VALMASK
) | reg
;
7373 sym
= sym_push(v
, type
, r
, addr
);
7374 if (ad
->cleanup_func
) {
7375 Sym
*cls
= sym_push2(&all_cleanups
,
7376 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7377 cls
->prev_tok
= sym
;
7378 cls
->next
= ad
->cleanup_func
;
7379 cls
->ncl
= cur_scope
->cl
.s
;
7380 cur_scope
->cl
.s
= cls
;
7385 /* push local reference */
7386 vset(type
, r
, addr
);
7389 if (v
&& scope
== VT_CONST
) {
7390 /* see if the symbol was already defined */
7393 patch_storage(sym
, ad
, type
);
7394 /* we accept several definitions of the same global variable. */
7395 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7400 /* allocate symbol in corresponding section */
7405 else if (tcc_state
->nocommon
)
7410 addr
= section_add(sec
, size
, align
);
7411 #ifdef CONFIG_TCC_BCHECK
7412 /* add padding if bound check */
7414 section_add(sec
, 1, 1);
7417 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7418 sec
= common_section
;
7423 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7424 patch_storage(sym
, ad
, NULL
);
7426 /* update symbol definition */
7427 put_extern_sym(sym
, sec
, addr
, size
);
7429 /* push global reference */
7430 vpush_ref(type
, sec
, addr
, size
);
7435 #ifdef CONFIG_TCC_BCHECK
7436 /* handles bounds now because the symbol must be defined
7437 before for the relocation */
7441 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7442 /* then add global bound info */
7443 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7444 bounds_ptr
[0] = 0; /* relocated */
7445 bounds_ptr
[1] = size
;
7450 if (type
->t
& VT_VLA
) {
7456 /* save current stack pointer */
7457 if (root_scope
->vla
.loc
== 0) {
7458 struct scope
*v
= cur_scope
;
7459 gen_vla_sp_save(loc
-= PTR_SIZE
);
7460 do v
->vla
.loc
= loc
; while ((v
= v
->prev
));
7463 vla_runtime_type_size(type
, &a
);
7464 gen_vla_alloc(type
, a
);
7465 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7466 /* on _WIN64, because of the function args scratch area, the
7467 result of alloca differs from RSP and is returned in RAX. */
7468 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7470 gen_vla_sp_save(addr
);
7471 cur_scope
->vla
.loc
= addr
;
7472 cur_scope
->vla
.num
++;
7474 } else if (has_init
) {
7475 size_t oldreloc_offset
= 0;
7476 if (sec
&& sec
->reloc
)
7477 oldreloc_offset
= sec
->reloc
->data_offset
;
7478 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
7479 if (sec
&& sec
->reloc
)
7480 squeeze_multi_relocs(sec
, oldreloc_offset
);
7481 /* patch flexible array member size back to -1, */
7482 /* for possible subsequent similar declarations */
7484 flexible_array
->type
.ref
->c
= -1;
7488 /* restore parse state if needed */
7494 nocode_wanted
= saved_nocode_wanted
;
7497 /* parse a function defined by symbol 'sym' and generate its code in
7498 'cur_text_section' */
7499 static void gen_function(Sym
*sym
)
7501 /* Initialize VLA state */
7502 struct scope f
= { 0 };
7503 cur_scope
= root_scope
= &f
;
7506 ind
= cur_text_section
->data_offset
;
7507 if (sym
->a
.aligned
) {
7508 size_t newoff
= section_add(cur_text_section
, 0,
7509 1 << (sym
->a
.aligned
- 1));
7510 gen_fill_nops(newoff
- ind
);
7512 /* NOTE: we patch the symbol size later */
7513 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7515 funcname
= get_tok_str(sym
->v
, NULL
);
7518 /* put debug symbol */
7519 tcc_debug_funcstart(tcc_state
, sym
);
7520 /* push a dummy symbol to enable local sym storage */
7521 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7522 local_scope
= 1; /* for function parameters */
7523 gfunc_prolog(&sym
->type
);
7526 clear_temp_local_var_list();
7531 cur_text_section
->data_offset
= ind
;
7532 /* reset local stack */
7533 sym_pop(&local_stack
, NULL
, 0);
7535 label_pop(&global_label_stack
, NULL
, 0);
7536 sym_pop(&all_cleanups
, NULL
, 0);
7537 /* patch symbol size */
7538 elfsym(sym
)->st_size
= ind
- func_ind
;
7539 /* end of function */
7540 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7541 /* It's better to crash than to generate wrong code */
7542 cur_text_section
= NULL
;
7543 funcname
= ""; /* for safety */
7544 func_vt
.t
= VT_VOID
; /* for safety */
7545 func_var
= 0; /* for safety */
7546 ind
= 0; /* for safety */
7547 nocode_wanted
= 0x80000000;
7551 static void gen_inline_functions(TCCState
*s
)
7554 int inline_generated
, i
;
7555 struct InlineFunc
*fn
;
7557 tcc_open_bf(s
, ":inline:", 0);
7558 /* iterate while inline function are referenced */
7560 inline_generated
= 0;
7561 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7562 fn
= s
->inline_fns
[i
];
7564 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
7565 /* the function was used or forced (and then not internal):
7566 generate its code and convert it to a normal function */
7569 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7570 begin_macro(fn
->func_str
, 1);
7572 cur_text_section
= text_section
;
7576 inline_generated
= 1;
7579 } while (inline_generated
);
7583 ST_FUNC
void free_inline_functions(TCCState
*s
)
7586 /* free tokens of unused inline functions */
7587 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7588 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7590 tok_str_free(fn
->func_str
);
7592 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7595 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7596 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7597 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7602 AttributeDef ad
, adbase
;
7605 if (tok
== TOK_STATIC_ASSERT
) {
7613 tcc_error("%s", get_tok_str(tok
, &tokc
));
7619 if (!parse_btype(&btype
, &adbase
)) {
7620 if (is_for_loop_init
)
7622 /* skip redundant ';' if not in old parameter decl scope */
7623 if (tok
== ';' && l
!= VT_CMP
) {
7629 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7630 /* global asm block */
7634 if (tok
>= TOK_UIDENT
) {
7635 /* special test for old K&R protos without explicit int
7636 type. Only accepted when defining global data */
7640 expect("declaration");
7645 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7646 int v
= btype
.ref
->v
;
7647 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7648 tcc_warning("unnamed struct/union that defines no instances");
7652 if (IS_ENUM(btype
.t
)) {
7657 while (1) { /* iterate thru each declaration */
7659 /* If the base type itself was an array type of unspecified
7660 size (like in 'typedef int arr[]; arr x = {1};') then
7661 we will overwrite the unknown size by the real one for
7662 this decl. We need to unshare the ref symbol holding
7664 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7665 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7668 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
7672 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7673 printf("type = '%s'\n", buf
);
7676 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7677 /* if old style function prototype, we accept a
7680 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7681 decl0(VT_CMP
, 0, sym
);
7682 /* always compile 'extern inline' */
7683 if (type
.t
& VT_EXTERN
)
7684 type
.t
&= ~VT_INLINE
;
7687 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7688 ad
.asm_label
= asm_label_instr();
7689 /* parse one last attribute list, after asm label */
7690 parse_attribute(&ad
);
7692 /* gcc does not allow __asm__("label") with function definition,
7699 #ifdef TCC_TARGET_PE
7700 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7701 if (type
.t
& VT_STATIC
)
7702 tcc_error("cannot have dll linkage with static");
7703 if (type
.t
& VT_TYPEDEF
) {
7704 tcc_warning("'%s' attribute ignored for typedef",
7705 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
7706 (ad
.a
.dllexport
= 0, "dllexport"));
7707 } else if (ad
.a
.dllimport
) {
7708 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7711 type
.t
|= VT_EXTERN
;
7717 tcc_error("cannot use local functions");
7718 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7719 expect("function definition");
7721 /* reject abstract declarators in function definition
7722 make old style params without decl have int type */
7724 while ((sym
= sym
->next
) != NULL
) {
7725 if (!(sym
->v
& ~SYM_FIELD
))
7726 expect("identifier");
7727 if (sym
->type
.t
== VT_VOID
)
7728 sym
->type
= int_type
;
7731 /* put function symbol */
7732 type
.t
&= ~VT_EXTERN
;
7733 sym
= external_sym(v
, &type
, 0, &ad
);
7734 /* static inline functions are just recorded as a kind
7735 of macro. Their code will be emitted at the end of
7736 the compilation unit only if they are used */
7737 if (sym
->type
.t
& VT_INLINE
) {
7738 struct InlineFunc
*fn
;
7739 const char *filename
;
7741 filename
= file
? file
->filename
: "";
7742 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7743 strcpy(fn
->filename
, filename
);
7745 skip_or_save_block(&fn
->func_str
);
7746 dynarray_add(&tcc_state
->inline_fns
,
7747 &tcc_state
->nb_inline_fns
, fn
);
7749 /* compute text section */
7750 cur_text_section
= ad
.section
;
7751 if (!cur_text_section
)
7752 cur_text_section
= text_section
;
7758 /* find parameter in function parameter list */
7759 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7760 if ((sym
->v
& ~SYM_FIELD
) == v
)
7762 tcc_error("declaration for parameter '%s' but no such parameter",
7763 get_tok_str(v
, NULL
));
7765 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7766 tcc_error("storage class specified for '%s'",
7767 get_tok_str(v
, NULL
));
7768 if (sym
->type
.t
!= VT_VOID
)
7769 tcc_error("redefinition of parameter '%s'",
7770 get_tok_str(v
, NULL
));
7771 convert_parameter_type(&type
);
7773 } else if (type
.t
& VT_TYPEDEF
) {
7774 /* save typedefed type */
7775 /* XXX: test storage specifiers ? */
7777 if (sym
&& sym
->sym_scope
== local_scope
) {
7778 if (!is_compatible_types(&sym
->type
, &type
)
7779 || !(sym
->type
.t
& VT_TYPEDEF
))
7780 tcc_error("incompatible redefinition of '%s'",
7781 get_tok_str(v
, NULL
));
7784 sym
= sym_push(v
, &type
, 0, 0);
7788 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7789 && !(type
.t
& VT_EXTERN
)) {
7790 tcc_error("declaration of void object");
7793 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7794 /* external function definition */
7795 /* specific case for func_call attribute */
7797 } else if (!(type
.t
& VT_ARRAY
)) {
7798 /* not lvalue if array */
7799 r
|= lvalue_type(type
.t
);
7801 has_init
= (tok
== '=');
7802 if (has_init
&& (type
.t
& VT_VLA
))
7803 tcc_error("variable length array cannot be initialized");
7804 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
7805 || (type
.t
& VT_BTYPE
) == VT_FUNC
7806 /* as with GCC, uninitialized global arrays with no size
7807 are considered extern: */
7808 || ((type
.t
& VT_ARRAY
) && !has_init
7809 && l
== VT_CONST
&& type
.ref
->c
< 0)
7811 /* external variable or function */
7812 type
.t
|= VT_EXTERN
;
7813 sym
= external_sym(v
, &type
, r
, &ad
);
7814 if (ad
.alias_target
) {
7817 alias_target
= sym_find(ad
.alias_target
);
7818 esym
= elfsym(alias_target
);
7820 tcc_error("unsupported forward __alias__ attribute");
7821 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7824 if (type
.t
& VT_STATIC
)
7830 else if (l
== VT_CONST
)
7831 /* uninitialized global variables may be overridden */
7832 type
.t
|= VT_EXTERN
;
7833 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7837 if (is_for_loop_init
)
7849 static void decl(int l
)
7854 /* ------------------------------------------------------------------------- */
7857 /* ------------------------------------------------------------------------- */