/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
28 anon_sym: anonymous symbol index
30 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
32 ST_DATA Sym
*sym_free_first
;
33 ST_DATA
void **sym_pools
;
34 ST_DATA
int nb_sym_pools
;
36 ST_DATA Sym
*global_stack
;
37 ST_DATA Sym
*local_stack
;
38 ST_DATA Sym
*define_stack
;
39 ST_DATA Sym
*global_label_stack
;
40 ST_DATA Sym
*local_label_stack
;
42 static Sym
*all_cleanups
, *current_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
50 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
51 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
52 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
54 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
56 ST_DATA
int const_wanted
; /* true if constant wanted */
57 ST_DATA
int nocode_wanted
; /* no code generation wanted */
58 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
59 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
61 /* Automagical code suppression ----> */
62 #define CODE_OFF() (nocode_wanted |= 0x20000000)
63 #define CODE_ON() (nocode_wanted &= ~0x20000000)
65 /* Clear 'nocode_wanted' at label if it was used */
66 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
67 static int gind(void) { CODE_ON(); return ind
; }
69 /* Set 'nocode_wanted' after unconditional jumps */
/* Emit a jump to a known address; anything following an
   unconditional jump is dead, so suppress code generation. */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit a forward jump chained onto 't' and suppress code generation
   for the (unreachable) code that follows; returns the new chain. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
73 /* These are #undef'd at the end of this file */
74 #define gjmp_addr gjmp_addr_acs
78 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
79 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
80 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
82 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
83 ST_DATA
const char *funcname
;
86 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
88 ST_DATA
struct switch_t
{
92 } **p
; int n
; /* list of case ranges */
93 int def_sym
; /* default symbol */
94 } *cur_switch
; /* current switch */
96 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
97 /*list of temporary local variables on the stack in current function. */
98 ST_DATA
struct temp_local_variable
{
99 int location
; //offset on stack. Svalue.c.i
102 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
103 short nb_temp_local_vars
;
105 /* ------------------------------------------------------------------------- */
107 static void gen_cast(CType
*type
);
108 static void gen_cast_s(int t
);
109 static inline CType
*pointed_type(CType
*type
);
110 static int is_compatible_types(CType
*type1
, CType
*type2
);
111 static int parse_btype(CType
*type
, AttributeDef
*ad
);
112 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
113 static void parse_expr_type(CType
*type
);
114 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
115 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
116 static void block(int *bsym
, Sym
*bcl
, int *csym
, Sym
*ccl
, int is_expr
);
117 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
118 static void decl(int l
);
119 static int decl0(int l
, int is_for_loop_init
, Sym
*);
120 static void expr_eq(void);
121 static void vla_runtime_type_size(CType
*type
, int *a
);
122 static void vla_sp_restore(void);
123 static void vla_sp_restore_root(void);
124 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
125 static inline int64_t expr_const64(void);
126 static void vpush64(int ty
, unsigned long long v
);
127 static void vpush(CType
*type
);
128 static int gvtst(int inv
, int t
);
129 static void gen_inline_functions(TCCState
*s
);
130 static void skip_or_save_block(TokenString
**str
);
131 static void gv_dup(void);
132 static int get_temp_local_var(int size
,int align
);
133 static void clear_temp_local_var_list();
136 static void reset_local_scope(void)
138 if (current_cleanups
)
139 tcc_error("ICE current_cleanups");
140 sym_pop(&all_cleanups
, NULL
, 0);
144 ST_INLN
int is_float(int t
)
148 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
151 /* we use our own 'finite' function to avoid potential problems with
152 non standard math libs */
153 /* XXX: endianness dependent */
154 ST_FUNC
int ieee_finite(double d
)
157 memcpy(p
, &d
, sizeof(double));
158 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
161 /* compiling intel long double natively */
162 #if (defined __i386__ || defined __x86_64__) \
163 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
164 # define TCC_IS_NATIVE_387
167 ST_FUNC
void test_lvalue(void)
169 if (!(vtop
->r
& VT_LVAL
))
173 ST_FUNC
void check_vstack(void)
176 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
179 /* ------------------------------------------------------------------------- */
180 /* vstack debugging aid */
183 void pv (const char *lbl
, int a
, int b
)
186 for (i
= a
; i
< a
+ b
; ++i
) {
187 SValue
*p
= &vtop
[-i
];
188 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
189 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
194 /* ------------------------------------------------------------------------- */
195 /* start of translation unit info */
196 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
201 /* file info: full path + filename */
202 section_sym
= put_elf_sym(symtab_section
, 0, 0,
203 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
204 text_section
->sh_num
, NULL
);
205 getcwd(buf
, sizeof(buf
));
207 normalize_slashes(buf
);
209 pstrcat(buf
, sizeof(buf
), "/");
210 put_stabs_r(buf
, N_SO
, 0, 0,
211 text_section
->data_offset
, text_section
, section_sym
);
212 put_stabs_r(file
->filename
, N_SO
, 0, 0,
213 text_section
->data_offset
, text_section
, section_sym
);
218 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
219 symbols can be safely used */
220 put_elf_sym(symtab_section
, 0, 0,
221 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
222 SHN_ABS
, file
->filename
);
225 /* put end of translation unit info */
226 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
230 put_stabs_r(NULL
, N_SO
, 0, 0,
231 text_section
->data_offset
, text_section
, section_sym
);
235 /* generate line number info */
236 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
240 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
241 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
243 last_line_num
= file
->line_num
;
247 /* put function symbol */
248 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
256 /* XXX: we put here a dummy type */
257 snprintf(buf
, sizeof(buf
), "%s:%c1",
258 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
259 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
260 cur_text_section
, sym
->c
);
261 /* //gr gdb wants a line at the function */
262 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
268 /* put function size */
269 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
273 put_stabn(N_FUN
, 0, 0, size
);
276 /* ------------------------------------------------------------------------- */
277 ST_FUNC
int tccgen_compile(TCCState
*s1
)
279 cur_text_section
= NULL
;
281 anon_sym
= SYM_FIRST_ANOM
;
284 nocode_wanted
= 0x80000000;
287 /* define some often used types */
289 char_pointer_type
.t
= VT_BYTE
;
290 mk_pointer(&char_pointer_type
);
292 size_type
.t
= VT_INT
| VT_UNSIGNED
;
293 ptrdiff_type
.t
= VT_INT
;
295 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
296 ptrdiff_type
.t
= VT_LLONG
;
298 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
299 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
301 func_old_type
.t
= VT_FUNC
;
302 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
303 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
304 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
308 #ifdef TCC_TARGET_ARM
313 printf("%s: **** new file\n", file
->filename
);
316 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
319 gen_inline_functions(s1
);
321 /* end of translation unit info */
326 /* ------------------------------------------------------------------------- */
327 ST_FUNC ElfSym
*elfsym(Sym
*s
)
331 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
334 /* apply storage attributes to Elf symbol */
335 ST_FUNC
void update_storage(Sym
*sym
)
338 int sym_bind
, old_sym_bind
;
344 if (sym
->a
.visibility
)
345 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
348 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
349 sym_bind
= STB_LOCAL
;
350 else if (sym
->a
.weak
)
353 sym_bind
= STB_GLOBAL
;
354 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
355 if (sym_bind
!= old_sym_bind
) {
356 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
360 if (sym
->a
.dllimport
)
361 esym
->st_other
|= ST_PE_IMPORT
;
362 if (sym
->a
.dllexport
)
363 esym
->st_other
|= ST_PE_EXPORT
;
367 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
368 get_tok_str(sym
->v
, NULL
),
369 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
377 /* ------------------------------------------------------------------------- */
378 /* update sym->c so that it points to an external symbol in section
379 'section' with value 'value' */
381 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
382 addr_t value
, unsigned long size
,
383 int can_add_underscore
)
385 int sym_type
, sym_bind
, info
, other
, t
;
389 #ifdef CONFIG_TCC_BCHECK
394 name
= get_tok_str(sym
->v
, NULL
);
395 #ifdef CONFIG_TCC_BCHECK
396 if (tcc_state
->do_bounds_check
) {
397 /* XXX: avoid doing that for statics ? */
398 /* if bound checking is activated, we change some function
399 names by adding the "__bound" prefix */
402 /* XXX: we rely only on malloc hooks */
415 strcpy(buf
, "__bound_");
423 if ((t
& VT_BTYPE
) == VT_FUNC
) {
425 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
426 sym_type
= STT_NOTYPE
;
428 sym_type
= STT_OBJECT
;
430 if (t
& (VT_STATIC
| VT_INLINE
))
431 sym_bind
= STB_LOCAL
;
433 sym_bind
= STB_GLOBAL
;
436 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
437 Sym
*ref
= sym
->type
.ref
;
438 if (ref
->a
.nodecorate
) {
439 can_add_underscore
= 0;
441 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
442 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
444 other
|= ST_PE_STDCALL
;
445 can_add_underscore
= 0;
449 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
451 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
455 name
= get_tok_str(sym
->asm_label
, NULL
);
456 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
457 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
460 esym
->st_value
= value
;
461 esym
->st_size
= size
;
462 esym
->st_shndx
= sh_num
;
467 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
468 addr_t value
, unsigned long size
)
470 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
471 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
474 /* add a new relocation entry to symbol 'sym' in section 's' */
475 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
480 if (nocode_wanted
&& s
== cur_text_section
)
485 put_extern_sym(sym
, NULL
, 0, 0);
489 /* now we can add ELF relocation info */
490 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
494 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
496 greloca(s
, sym
, offset
, type
, 0);
500 /* ------------------------------------------------------------------------- */
501 /* symbol allocator */
502 static Sym
*__sym_malloc(void)
504 Sym
*sym_pool
, *sym
, *last_sym
;
507 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
508 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
510 last_sym
= sym_free_first
;
512 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
513 sym
->next
= last_sym
;
517 sym_free_first
= last_sym
;
521 static inline Sym
*sym_malloc(void)
525 sym
= sym_free_first
;
527 sym
= __sym_malloc();
528 sym_free_first
= sym
->next
;
531 sym
= tcc_malloc(sizeof(Sym
));
536 ST_INLN
void sym_free(Sym
*sym
)
539 sym
->next
= sym_free_first
;
540 sym_free_first
= sym
;
546 /* push, without hashing */
547 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
552 memset(s
, 0, sizeof *s
);
562 /* find a symbol and return its associated structure. 's' is the top
563 of the symbol stack */
564 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
576 /* structure lookup */
577 ST_INLN Sym
*struct_find(int v
)
580 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
582 return table_ident
[v
]->sym_struct
;
585 /* find an identifier */
586 ST_INLN Sym
*sym_find(int v
)
589 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
591 return table_ident
[v
]->sym_identifier
;
594 static int sym_scope(Sym
*s
)
596 if (IS_ENUM_VAL (s
->type
.t
))
597 return s
->type
.ref
->sym_scope
;
602 /* push a given symbol on the symbol stack */
603 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
612 s
= sym_push2(ps
, v
, type
->t
, c
);
613 s
->type
.ref
= type
->ref
;
615 /* don't record fields or anonymous symbols */
617 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
618 /* record symbol in token array */
619 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
621 ps
= &ts
->sym_struct
;
623 ps
= &ts
->sym_identifier
;
626 s
->sym_scope
= local_scope
;
627 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
628 tcc_error("redeclaration of '%s'",
629 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
634 /* push a global identifier */
635 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
638 s
= sym_push2(&global_stack
, v
, t
, c
);
639 s
->r
= VT_CONST
| VT_SYM
;
640 /* don't record anonymous symbol */
641 if (v
< SYM_FIRST_ANOM
) {
642 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
643 /* modify the top most local identifier, so that sym_identifier will
644 point to 's' when popped; happens when called from inline asm */
645 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
646 ps
= &(*ps
)->prev_tok
;
653 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
654 pop them yet from the list, but do remove them from the token array. */
655 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
665 /* remove symbol in token array */
667 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
668 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
670 ps
= &ts
->sym_struct
;
672 ps
= &ts
->sym_identifier
;
683 /* ------------------------------------------------------------------------- */
684 static void vcheck_cmp(void)
686 /* cannot let cpu flags if other instruction are generated. Also
687 avoid leaving VT_JMP anywhere except on the top of the stack
688 because it would complicate the code generator.
690 Don't do this when nocode_wanted. vtop might come from
691 !nocode_wanted regions (see 88_codeopt.c) and transforming
692 it to a register without actually generating code is wrong
693 as their value might still be used for real. All values
694 we push under nocode_wanted will eventually be popped
695 again, so that the VT_CMP/VT_JMP value will be in vtop
696 when code is unsuppressed again. */
698 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
702 static void vsetc(CType
*type
, int r
, CValue
*vc
)
704 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
705 tcc_error("memory full (vstack)");
715 ST_FUNC
void vswap(void)
725 /* pop stack value */
726 ST_FUNC
void vpop(void)
729 v
= vtop
->r
& VT_VALMASK
;
730 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
731 /* for x86, we need to pop the FP stack */
733 o(0xd8dd); /* fstp %st(0) */
737 /* need to put correct jump if && or || without test */
744 /* push constant of type "type" with useless value */
745 ST_FUNC
void vpush(CType
*type
)
747 vset(type
, VT_CONST
, 0);
750 /* push integer constant */
751 ST_FUNC
void vpushi(int v
)
755 vsetc(&int_type
, VT_CONST
, &cval
);
758 /* push a pointer sized constant */
759 static void vpushs(addr_t v
)
763 vsetc(&size_type
, VT_CONST
, &cval
);
766 /* push arbitrary 64bit constant */
767 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
774 vsetc(&ctype
, VT_CONST
, &cval
);
777 /* push long long constant */
778 static inline void vpushll(long long v
)
780 vpush64(VT_LLONG
, v
);
783 ST_FUNC
void vset(CType
*type
, int r
, int v
)
788 vsetc(type
, r
, &cval
);
791 static void vseti(int r
, int v
)
799 ST_FUNC
void vpushv(SValue
*v
)
801 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
802 tcc_error("memory full (vstack)");
807 static void vdup(void)
812 /* rotate n first stack elements to the bottom
813 I1 ... In -> I2 ... In I1 [top is right]
815 ST_FUNC
void vrotb(int n
)
827 /* rotate the n elements before entry e towards the top
828 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
830 ST_FUNC
void vrote(SValue
*e
, int n
)
837 for(i
= 0;i
< n
- 1; i
++)
842 /* rotate n first stack elements to the top
843 I1 ... In -> In I1 ... I(n-1) [top is right]
845 ST_FUNC
void vrott(int n
)
850 /* ------------------------------------------------------------------------- */
851 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
853 /* called from generators to set the result from relational ops */
854 ST_FUNC
void vset_VT_CMP(int op
)
862 /* called once before asking generators to load VT_CMP to a register */
863 static void vset_VT_JMP(void)
865 int op
= vtop
->cmp_op
;
866 if (vtop
->jtrue
|| vtop
->jfalse
) {
867 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
868 int inv
= op
& (op
< 2); /* small optimization */
869 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
871 /* otherwise convert flags (rsp. 0/1) to register */
873 if (op
< 2) /* doesn't seem to happen */
878 /* Set CPU Flags, doesn't yet jump */
879 static void gvtst_set(int inv
, int t
)
882 if (vtop
->r
!= VT_CMP
) {
885 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
886 vset_VT_CMP(vtop
->c
.i
!= 0);
888 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
889 *p
= gjmp_append(*p
, t
);
892 /* Generate value test
894 * Generate a test for any value (jump, comparison and integers) */
895 static int gvtst(int inv
, int t
)
901 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
906 /* jump to the wanted target */
908 t
= gjmp_cond(op
^ inv
, t
);
911 /* resolve complementary jumps to here */
918 /* ------------------------------------------------------------------------- */
919 /* push a symbol value of TYPE */
920 static inline void vpushsym(CType
*type
, Sym
*sym
)
924 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
928 /* Return a static symbol pointing to a section */
929 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
935 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
936 sym
->type
.t
|= VT_STATIC
;
937 put_extern_sym(sym
, sec
, offset
, size
);
941 /* push a reference to a section offset by adding a dummy symbol */
942 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
944 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
947 /* define a new external reference to a symbol 'v' of type 'u' */
948 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
954 /* push forward reference */
955 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
956 s
->type
.ref
= type
->ref
;
957 } else if (IS_ASM_SYM(s
)) {
958 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
959 s
->type
.ref
= type
->ref
;
965 /* Merge symbol attributes. */
966 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
968 if (sa1
->aligned
&& !sa
->aligned
)
969 sa
->aligned
= sa1
->aligned
;
970 sa
->packed
|= sa1
->packed
;
971 sa
->weak
|= sa1
->weak
;
972 if (sa1
->visibility
!= STV_DEFAULT
) {
973 int vis
= sa
->visibility
;
974 if (vis
== STV_DEFAULT
975 || vis
> sa1
->visibility
)
976 vis
= sa1
->visibility
;
977 sa
->visibility
= vis
;
979 sa
->dllexport
|= sa1
->dllexport
;
980 sa
->nodecorate
|= sa1
->nodecorate
;
981 sa
->dllimport
|= sa1
->dllimport
;
984 /* Merge function attributes. */
985 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
987 if (fa1
->func_call
&& !fa
->func_call
)
988 fa
->func_call
= fa1
->func_call
;
989 if (fa1
->func_type
&& !fa
->func_type
)
990 fa
->func_type
= fa1
->func_type
;
991 if (fa1
->func_args
&& !fa
->func_args
)
992 fa
->func_args
= fa1
->func_args
;
995 /* Merge attributes. */
996 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
998 merge_symattr(&ad
->a
, &ad1
->a
);
999 merge_funcattr(&ad
->f
, &ad1
->f
);
1002 ad
->section
= ad1
->section
;
1003 if (ad1
->alias_target
)
1004 ad
->alias_target
= ad1
->alias_target
;
1006 ad
->asm_label
= ad1
->asm_label
;
1008 ad
->attr_mode
= ad1
->attr_mode
;
1011 /* Merge some type attributes. */
1012 static void patch_type(Sym
*sym
, CType
*type
)
1014 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1015 if (!(sym
->type
.t
& VT_EXTERN
))
1016 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1017 sym
->type
.t
&= ~VT_EXTERN
;
1020 if (IS_ASM_SYM(sym
)) {
1021 /* stay static if both are static */
1022 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1023 sym
->type
.ref
= type
->ref
;
1026 if (!is_compatible_types(&sym
->type
, type
)) {
1027 tcc_error("incompatible types for redefinition of '%s'",
1028 get_tok_str(sym
->v
, NULL
));
1030 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1031 int static_proto
= sym
->type
.t
& VT_STATIC
;
1032 /* warn if static follows non-static function declaration */
1033 if ((type
->t
& VT_STATIC
) && !static_proto
1034 /* XXX this test for inline shouldn't be here. Until we
1035 implement gnu-inline mode again it silences a warning for
1036 mingw caused by our workarounds. */
1037 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1038 tcc_warning("static storage ignored for redefinition of '%s'",
1039 get_tok_str(sym
->v
, NULL
));
1041 /* set 'inline' if both agree or if one has static */
1042 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1043 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1044 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1045 static_proto
|= VT_INLINE
;
1048 if (0 == (type
->t
& VT_EXTERN
)) {
1049 /* put complete type, use static from prototype */
1050 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1051 sym
->type
.ref
= type
->ref
;
1053 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1056 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1057 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1058 sym
->type
.ref
= type
->ref
;
1062 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1063 /* set array size if it was omitted in extern declaration */
1064 sym
->type
.ref
->c
= type
->ref
->c
;
1066 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1067 tcc_warning("storage mismatch for redefinition of '%s'",
1068 get_tok_str(sym
->v
, NULL
));
1072 /* Merge some storage attributes. */
1073 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1076 patch_type(sym
, type
);
1078 #ifdef TCC_TARGET_PE
1079 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1080 tcc_error("incompatible dll linkage for redefinition of '%s'",
1081 get_tok_str(sym
->v
, NULL
));
1083 merge_symattr(&sym
->a
, &ad
->a
);
1085 sym
->asm_label
= ad
->asm_label
;
1086 update_storage(sym
);
1089 /* copy sym to other stack */
1090 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1093 s
= sym_malloc(), *s
= *s0
;
1094 s
->prev
= *ps
, *ps
= s
;
1095 if (s
->v
< SYM_FIRST_ANOM
) {
1096 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1097 s
->prev_tok
= *ps
, *ps
= s
;
1102 /* copy a list of syms */
1103 static void sym_copy_ref(Sym
*s0
, Sym
**ps
)
1105 Sym
*s
, **sp
= &s0
->type
.ref
;
1106 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
)
1107 sp
= &(*sp
= sym_copy(s
, ps
))->next
;
1110 /* define a new external reference to a symbol 'v' */
1111 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1115 /* look for global symbol */
1117 while (s
&& s
->sym_scope
)
1121 /* push forward reference */
1122 s
= global_identifier_push(v
, type
->t
, 0);
1125 s
->asm_label
= ad
->asm_label
;
1126 s
->type
.ref
= type
->ref
;
1127 bt
= s
->type
.t
& (VT_BTYPE
|VT_ARRAY
);
1128 /* copy type to the global stack also */
1129 if (local_scope
&& (bt
== VT_FUNC
|| (bt
& VT_ARRAY
)))
1130 sym_copy_ref(s
, &global_stack
);
1132 patch_storage(s
, ad
, type
);
1133 bt
= s
->type
.t
& VT_BTYPE
;
1135 /* push variables to local scope if any */
1136 if (local_stack
&& bt
!= VT_FUNC
)
1137 s
= sym_copy(s
, &local_stack
);
1141 /* push a reference to global symbol v */
1142 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1144 vpushsym(type
, external_global_sym(v
, type
));
1147 /* save registers up to (vtop - n) stack entry */
1148 ST_FUNC
void save_regs(int n
)
1151 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1155 /* save r to the memory stack, and mark it as being free */
1156 ST_FUNC
void save_reg(int r
)
1158 save_reg_upstack(r
, 0);
1161 /* save r to the memory stack, and mark it as being free,
1162 if seen up to (vtop - n) stack entry */
1163 ST_FUNC
void save_reg_upstack(int r
, int n
)
1165 int l
, saved
, size
, align
;
1169 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1174 /* modify all stack values */
1177 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1178 if ((p
->r
& VT_VALMASK
) == r
||
1179 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
1180 /* must save value on stack if not already done */
1182 /* NOTE: must reload 'r' because r might be equal to r2 */
1183 r
= p
->r
& VT_VALMASK
;
1184 /* store register in the stack */
1186 if ((p
->r
& VT_LVAL
) ||
1187 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
1189 type
= &char_pointer_type
;
1193 size
= type_size(type
, &align
);
1194 l
=get_temp_local_var(size
,align
);
1195 sv
.type
.t
= type
->t
;
1196 sv
.r
= VT_LOCAL
| VT_LVAL
;
1199 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1200 /* x86 specific: need to pop fp register ST0 if saved */
1201 if (r
== TREG_ST0
) {
1202 o(0xd8dd); /* fstp %st(0) */
1206 /* special long long case */
1207 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
1214 /* mark that stack entry as being saved on the stack */
1215 if (p
->r
& VT_LVAL
) {
1216 /* also clear the bounded flag because the
1217 relocation address of the function was stored in
1219 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1221 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1229 #ifdef TCC_TARGET_ARM
1230 /* find a register of class 'rc2' with at most one reference on stack.
1231 * If none, call get_reg(rc) */
1232 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1237 for(r
=0;r
<NB_REGS
;r
++) {
1238 if (reg_classes
[r
] & rc2
) {
1241 for(p
= vstack
; p
<= vtop
; p
++) {
1242 if ((p
->r
& VT_VALMASK
) == r
||
1243 (p
->r2
& VT_VALMASK
) == r
)
1254 /* find a free register of class 'rc'. If none, save one register */
1255 ST_FUNC
int get_reg(int rc
)
1260 /* find a free register */
1261 for(r
=0;r
<NB_REGS
;r
++) {
1262 if (reg_classes
[r
] & rc
) {
1265 for(p
=vstack
;p
<=vtop
;p
++) {
1266 if ((p
->r
& VT_VALMASK
) == r
||
1267 (p
->r2
& VT_VALMASK
) == r
)
1275 /* no register left : free the first one on the stack (VERY
1276 IMPORTANT to start from the bottom to ensure that we don't
1277 spill registers used in gen_opi()) */
1278 for(p
=vstack
;p
<=vtop
;p
++) {
1279 /* look at second register (if long long) */
1280 r
= p
->r2
& VT_VALMASK
;
1281 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1283 r
= p
->r
& VT_VALMASK
;
1284 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1290 /* Should never comes here */
1294 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1295 static int get_temp_local_var(int size
,int align
){
1297 struct temp_local_variable
*temp_var
;
1304 for(i
=0;i
<nb_temp_local_vars
;i
++){
1305 temp_var
=&arr_temp_local_vars
[i
];
1306 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1309 /*check if temp_var is free*/
1311 for(p
=vstack
;p
<=vtop
;p
++) {
1313 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1314 if(p
->c
.i
==temp_var
->location
){
1321 found_var
=temp_var
->location
;
1327 loc
= (loc
- size
) & -align
;
1328 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1329 temp_var
=&arr_temp_local_vars
[i
];
1330 temp_var
->location
=loc
;
1331 temp_var
->size
=size
;
1332 temp_var
->align
=align
;
1333 nb_temp_local_vars
++;
1340 static void clear_temp_local_var_list(){
1341 nb_temp_local_vars
=0;
1344 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1346 static void move_reg(int r
, int s
, int t
)
1360 /* get address of vtop (vtop MUST BE an lvalue) */
1361 ST_FUNC
void gaddrof(void)
1363 vtop
->r
&= ~VT_LVAL
;
1364 /* tricky: if saved lvalue, then we can go back to lvalue */
1365 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1366 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1371 #ifdef CONFIG_TCC_BCHECK
1372 /* generate lvalue bound code */
1373 static void gbound(void)
1378 vtop
->r
&= ~VT_MUSTBOUND
;
1379 /* if lvalue, then use checking code before dereferencing */
1380 if (vtop
->r
& VT_LVAL
) {
1381 /* if not VT_BOUNDED value, then make one */
1382 if (!(vtop
->r
& VT_BOUNDED
)) {
1383 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1384 /* must save type because we must set it to int to get pointer */
1386 vtop
->type
.t
= VT_PTR
;
1389 gen_bounded_ptr_add();
1390 vtop
->r
|= lval_type
;
1393 /* then check for dereferencing */
1394 gen_bounded_ptr_deref();
1399 static void incr_bf_adr(int o
)
1401 vtop
->type
= char_pointer_type
;
1405 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1406 | (VT_BYTE
|VT_UNSIGNED
);
1407 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1408 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1411 /* single-byte load mode for packed or otherwise unaligned bitfields */
1412 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1415 save_reg_upstack(vtop
->r
, 1);
1416 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1417 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1426 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1428 vpushi((1 << n
) - 1), gen_op('&');
1431 vpushi(bits
), gen_op(TOK_SHL
);
1434 bits
+= n
, bit_size
-= n
, o
= 1;
1437 if (!(type
->t
& VT_UNSIGNED
)) {
1438 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1439 vpushi(n
), gen_op(TOK_SHL
);
1440 vpushi(n
), gen_op(TOK_SAR
);
1444 /* single-byte store mode for packed or otherwise unaligned bitfields */
1445 static void store_packed_bf(int bit_pos
, int bit_size
)
1447 int bits
, n
, o
, m
, c
;
1449 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1451 save_reg_upstack(vtop
->r
, 1);
1452 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1454 incr_bf_adr(o
); // X B
1456 c
? vdup() : gv_dup(); // B V X
1459 vpushi(bits
), gen_op(TOK_SHR
);
1461 vpushi(bit_pos
), gen_op(TOK_SHL
);
1466 m
= ((1 << n
) - 1) << bit_pos
;
1467 vpushi(m
), gen_op('&'); // X B V1
1468 vpushv(vtop
-1); // X B V1 B
1469 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1470 gen_op('&'); // X B V1 B1
1471 gen_op('|'); // X B V2
1473 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1474 vstore(), vpop(); // X B
1475 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1480 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1483 if (0 == sv
->type
.ref
)
1485 t
= sv
->type
.ref
->auxtype
;
1486 if (t
!= -1 && t
!= VT_STRUCT
) {
1487 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1488 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1493 /* store vtop a register belonging to class 'rc'. lvalues are
1494 converted to values. Cannot be used if cannot be converted to
1495 register value (such as structures). */
1496 ST_FUNC
int gv(int rc
)
1498 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1500 /* NOTE: get_reg can modify vstack[] */
1501 if (vtop
->type
.t
& VT_BITFIELD
) {
1504 bit_pos
= BIT_POS(vtop
->type
.t
);
1505 bit_size
= BIT_SIZE(vtop
->type
.t
);
1506 /* remove bit field info to avoid loops */
1507 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1510 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1511 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1512 type
.t
|= VT_UNSIGNED
;
1514 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1516 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1521 if (r
== VT_STRUCT
) {
1522 load_packed_bf(&type
, bit_pos
, bit_size
);
1524 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1525 /* cast to int to propagate signedness in following ops */
1527 /* generate shifts */
1528 vpushi(bits
- (bit_pos
+ bit_size
));
1530 vpushi(bits
- bit_size
);
1531 /* NOTE: transformed to SHR if unsigned */
1536 if (is_float(vtop
->type
.t
) &&
1537 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1538 unsigned long offset
;
1539 /* CPUs usually cannot use float constants, so we store them
1540 generically in data segment */
1541 size
= type_size(&vtop
->type
, &align
);
1543 size
= 0, align
= 1;
1544 offset
= section_add(data_section
, size
, align
);
1545 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1547 init_putv(&vtop
->type
, data_section
, offset
);
1550 #ifdef CONFIG_TCC_BCHECK
1551 if (vtop
->r
& VT_MUSTBOUND
)
1555 r
= vtop
->r
& VT_VALMASK
;
1556 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1557 #ifndef TCC_TARGET_ARM64
1560 #ifdef TCC_TARGET_X86_64
1561 else if (rc
== RC_FRET
)
1565 /* need to reload if:
1567 - lvalue (need to dereference pointer)
1568 - already a register, but not in the right class */
1570 || (vtop
->r
& VT_LVAL
)
1571 || !(reg_classes
[r
] & rc
)
1573 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1574 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1576 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1582 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1583 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1585 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1586 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1587 unsigned long long ll
;
1589 int r2
, original_type
;
1590 original_type
= vtop
->type
.t
;
1591 /* two register type load : expand to two words
1594 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1597 vtop
->c
.i
= ll
; /* first word */
1599 vtop
->r
= r
; /* save register value */
1600 vpushi(ll
>> 32); /* second word */
1603 if (vtop
->r
& VT_LVAL
) {
1604 /* We do not want to modifier the long long
1605 pointer here, so the safest (and less
1606 efficient) is to save all the other registers
1607 in the stack. XXX: totally inefficient. */
1611 /* lvalue_save: save only if used further down the stack */
1612 save_reg_upstack(vtop
->r
, 1);
1614 /* load from memory */
1615 vtop
->type
.t
= load_type
;
1618 vtop
[-1].r
= r
; /* save register value */
1619 /* increment pointer to get second word */
1620 vtop
->type
.t
= addr_type
;
1625 vtop
->type
.t
= load_type
;
1627 /* move registers */
1630 vtop
[-1].r
= r
; /* save register value */
1631 vtop
->r
= vtop
[-1].r2
;
1633 /* Allocate second register. Here we rely on the fact that
1634 get_reg() tries first to free r2 of an SValue. */
1638 /* write second register */
1640 vtop
->type
.t
= original_type
;
1641 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1643 /* lvalue of scalar type : need to use lvalue type
1644 because of possible cast */
1647 /* compute memory access type */
1648 if (vtop
->r
& VT_LVAL_BYTE
)
1650 else if (vtop
->r
& VT_LVAL_SHORT
)
1652 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1656 /* restore wanted type */
1659 if (vtop
->r
== VT_CMP
)
1661 /* one register type load */
1666 #ifdef TCC_TARGET_C67
1667 /* uses register pairs for doubles */
1668 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1675 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1676 ST_FUNC
void gv2(int rc1
, int rc2
)
1678 /* generate more generic register first. But VT_JMP or VT_CMP
1679 values must be generated first in all cases to avoid possible
1681 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1686 /* test if reload is needed for first register */
1687 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1697 /* test if reload is needed for first register */
1698 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1704 #ifndef TCC_TARGET_ARM64
1705 /* wrapper around RC_FRET to return a register by type */
1706 static int rc_fret(int t
)
1708 #ifdef TCC_TARGET_X86_64
1709 if (t
== VT_LDOUBLE
) {
1717 /* wrapper around REG_FRET to return a register by type */
1718 static int reg_fret(int t
)
1720 #ifdef TCC_TARGET_X86_64
1721 if (t
== VT_LDOUBLE
) {
1729 /* expand 64bit on stack in two ints */
1730 ST_FUNC
void lexpand(void)
1733 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1734 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1735 if (v
== VT_CONST
) {
1738 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1744 vtop
[0].r
= vtop
[-1].r2
;
1745 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1747 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1752 /* build a long long from two ints */
1753 static void lbuild(int t
)
1755 gv2(RC_INT
, RC_INT
);
1756 vtop
[-1].r2
= vtop
[0].r
;
1757 vtop
[-1].type
.t
= t
;
1762 /* convert stack entry to register and duplicate its value in another
1764 static void gv_dup(void)
1771 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1772 if (t
& VT_BITFIELD
) {
1782 /* stack: H L L1 H1 */
1792 /* duplicate value */
1797 #ifdef TCC_TARGET_X86_64
1798 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1808 load(r1
, &sv
); /* move r to r1 */
1810 /* duplicates value */
1817 /* generate CPU independent (unsigned) long long operations */
1818 static void gen_opl(int op
)
1820 int t
, a
, b
, op1
, c
, i
;
1822 unsigned short reg_iret
= REG_IRET
;
1823 unsigned short reg_lret
= REG_LRET
;
1829 func
= TOK___divdi3
;
1832 func
= TOK___udivdi3
;
1835 func
= TOK___moddi3
;
1838 func
= TOK___umoddi3
;
1845 /* call generic long long function */
1846 vpush_global_sym(&func_old_type
, func
);
1851 vtop
->r2
= reg_lret
;
1859 //pv("gen_opl A",0,2);
1865 /* stack: L1 H1 L2 H2 */
1870 vtop
[-2] = vtop
[-3];
1873 /* stack: H1 H2 L1 L2 */
1874 //pv("gen_opl B",0,4);
1880 /* stack: H1 H2 L1 L2 ML MH */
1883 /* stack: ML MH H1 H2 L1 L2 */
1887 /* stack: ML MH H1 L2 H2 L1 */
1892 /* stack: ML MH M1 M2 */
1895 } else if (op
== '+' || op
== '-') {
1896 /* XXX: add non carry method too (for MIPS or alpha) */
1902 /* stack: H1 H2 (L1 op L2) */
1905 gen_op(op1
+ 1); /* TOK_xxxC2 */
1908 /* stack: H1 H2 (L1 op L2) */
1911 /* stack: (L1 op L2) H1 H2 */
1913 /* stack: (L1 op L2) (H1 op H2) */
1921 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1922 t
= vtop
[-1].type
.t
;
1926 /* stack: L H shift */
1928 /* constant: simpler */
1929 /* NOTE: all comments are for SHL. the other cases are
1930 done by swapping words */
1941 if (op
!= TOK_SAR
) {
1974 /* XXX: should provide a faster fallback on x86 ? */
1977 func
= TOK___ashrdi3
;
1980 func
= TOK___lshrdi3
;
1983 func
= TOK___ashldi3
;
1989 /* compare operations */
1995 /* stack: L1 H1 L2 H2 */
1997 vtop
[-1] = vtop
[-2];
1999 /* stack: L1 L2 H1 H2 */
2002 /* when values are equal, we need to compare low words. since
2003 the jump is inverted, we invert the test too. */
2006 else if (op1
== TOK_GT
)
2008 else if (op1
== TOK_ULT
)
2010 else if (op1
== TOK_UGT
)
2020 /* generate non equal test */
2022 vset_VT_CMP(TOK_NE
);
2026 /* compare low. Always unsigned */
2030 else if (op1
== TOK_LE
)
2032 else if (op1
== TOK_GT
)
2034 else if (op1
== TOK_GE
)
2037 #if 0//def TCC_TARGET_I386
2038 if (op
== TOK_NE
) { gsym(b
); break; }
2039 if (op
== TOK_EQ
) { gsym(a
); break; }
/* signed 64-bit division implemented with unsigned operands: divide
   the magnitudes, then negate the quotient when the operand signs
   differ (bit 63 of a^b). Callers must rule out b == 0 beforehand. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? -a : a;
    uint64_t ub = (b >> 63) ? -b : b;
    uint64_t q = ua / ub;
    return ((a ^ b) >> 63) ? -q : q;
}
/* signed 64-bit "less than" on unsigned operands: XOR-ing the sign
   bit into both values biases them so that a plain unsigned compare
   yields the signed ordering */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t bias = (uint64_t)1 << 63;
    return (a ^ bias) < (b ^ bias);
}
2059 /* handle integer constant optimizations and various machine
2061 static void gen_opic(int op
)
2063 SValue
*v1
= vtop
- 1;
2065 int t1
= v1
->type
.t
& VT_BTYPE
;
2066 int t2
= v2
->type
.t
& VT_BTYPE
;
2067 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2068 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2069 uint64_t l1
= c1
? v1
->c
.i
: 0;
2070 uint64_t l2
= c2
? v2
->c
.i
: 0;
2071 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2073 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2074 l1
= ((uint32_t)l1
|
2075 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2076 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2077 l2
= ((uint32_t)l2
|
2078 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2082 case '+': l1
+= l2
; break;
2083 case '-': l1
-= l2
; break;
2084 case '&': l1
&= l2
; break;
2085 case '^': l1
^= l2
; break;
2086 case '|': l1
|= l2
; break;
2087 case '*': l1
*= l2
; break;
2094 /* if division by zero, generate explicit division */
2097 tcc_error("division by zero in constant");
2101 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2102 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2103 case TOK_UDIV
: l1
= l1
/ l2
; break;
2104 case TOK_UMOD
: l1
= l1
% l2
; break;
2107 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2108 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2110 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2113 case TOK_ULT
: l1
= l1
< l2
; break;
2114 case TOK_UGE
: l1
= l1
>= l2
; break;
2115 case TOK_EQ
: l1
= l1
== l2
; break;
2116 case TOK_NE
: l1
= l1
!= l2
; break;
2117 case TOK_ULE
: l1
= l1
<= l2
; break;
2118 case TOK_UGT
: l1
= l1
> l2
; break;
2119 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2120 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2121 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2122 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2124 case TOK_LAND
: l1
= l1
&& l2
; break;
2125 case TOK_LOR
: l1
= l1
|| l2
; break;
2129 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2130 l1
= ((uint32_t)l1
|
2131 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2135 /* if commutative ops, put c2 as constant */
2136 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2137 op
== '|' || op
== '*')) {
2139 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2140 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2142 if (!const_wanted
&&
2144 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2145 (l1
== -1 && op
== TOK_SAR
))) {
2146 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2148 } else if (!const_wanted
&&
2149 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2151 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2152 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2153 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2158 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2161 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2162 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2165 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2166 /* filter out NOP operations like x*1, x-0, x&-1... */
2168 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2169 /* try to use shifts instead of muls or divs */
2170 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2179 else if (op
== TOK_PDIV
)
2185 } else if (c2
&& (op
== '+' || op
== '-') &&
2186 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2187 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2188 /* symbol + constant case */
2192 /* The backends can't always deal with addends to symbols
2193 larger than +-1<<31. Don't construct such. */
2200 /* call low level op generator */
2201 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2202 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2210 /* generate a floating point operation with constant propagation */
2211 static void gen_opif(int op
)
2215 #if defined _MSC_VER && defined _AMD64_
2216 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2223 /* currently, we cannot do computations with forward symbols */
2224 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2225 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2227 if (v1
->type
.t
== VT_FLOAT
) {
2230 } else if (v1
->type
.t
== VT_DOUBLE
) {
2238 /* NOTE: we only do constant propagation if finite number (not
2239 NaN or infinity) (ANSI spec) */
2240 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2244 case '+': f1
+= f2
; break;
2245 case '-': f1
-= f2
; break;
2246 case '*': f1
*= f2
; break;
2249 /* If not in initializer we need to potentially generate
2250 FP exceptions at runtime, otherwise we want to fold. */
2256 /* XXX: also handles tests ? */
2260 /* XXX: overflow test ? */
2261 if (v1
->type
.t
== VT_FLOAT
) {
2263 } else if (v1
->type
.t
== VT_DOUBLE
) {
2275 static int pointed_size(CType
*type
)
2278 return type_size(pointed_type(type
), &align
);
2281 static void vla_runtime_pointed_size(CType
*type
)
2284 vla_runtime_type_size(pointed_type(type
), &align
);
2287 static inline int is_null_pointer(SValue
*p
)
2289 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2291 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2292 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2293 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2294 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2295 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2296 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2299 static inline int is_integer_btype(int bt
)
2301 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2302 bt
== VT_INT
|| bt
== VT_LLONG
);
2305 /* check types for comparison or subtraction of pointers */
2306 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2308 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2311 /* null pointers are accepted for all comparisons as gcc */
2312 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2316 bt1
= type1
->t
& VT_BTYPE
;
2317 bt2
= type2
->t
& VT_BTYPE
;
2318 /* accept comparison between pointer and integer with a warning */
2319 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2320 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2321 tcc_warning("comparison between pointer and integer");
2325 /* both must be pointers or implicit function pointers */
2326 if (bt1
== VT_PTR
) {
2327 type1
= pointed_type(type1
);
2328 } else if (bt1
!= VT_FUNC
)
2329 goto invalid_operands
;
2331 if (bt2
== VT_PTR
) {
2332 type2
= pointed_type(type2
);
2333 } else if (bt2
!= VT_FUNC
) {
2335 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2337 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2338 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2342 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2343 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2344 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2345 /* gcc-like error if '-' is used */
2347 goto invalid_operands
;
2349 tcc_warning("comparison of distinct pointer types lacks a cast");
2353 /* generic gen_op: handles types problems */
2354 ST_FUNC
void gen_op(int op
)
2356 int u
, t1
, t2
, bt1
, bt2
, t
;
2360 t1
= vtop
[-1].type
.t
;
2361 t2
= vtop
[0].type
.t
;
2362 bt1
= t1
& VT_BTYPE
;
2363 bt2
= t2
& VT_BTYPE
;
2365 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2366 tcc_error("operation on a struct");
2367 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2368 if (bt2
== VT_FUNC
) {
2369 mk_pointer(&vtop
->type
);
2372 if (bt1
== VT_FUNC
) {
2374 mk_pointer(&vtop
->type
);
2379 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2380 /* at least one operand is a pointer */
2381 /* relational op: must be both pointers */
2382 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2383 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2384 /* pointers are handled are unsigned */
2386 t
= VT_LLONG
| VT_UNSIGNED
;
2388 t
= VT_INT
| VT_UNSIGNED
;
2392 /* if both pointers, then it must be the '-' op */
2393 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2395 tcc_error("cannot use pointers here");
2396 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2397 /* XXX: check that types are compatible */
2398 if (vtop
[-1].type
.t
& VT_VLA
) {
2399 vla_runtime_pointed_size(&vtop
[-1].type
);
2401 vpushi(pointed_size(&vtop
[-1].type
));
2405 vtop
->type
.t
= ptrdiff_type
.t
;
2409 /* exactly one pointer : must be '+' or '-'. */
2410 if (op
!= '-' && op
!= '+')
2411 tcc_error("cannot use pointers here");
2412 /* Put pointer as first operand */
2413 if (bt2
== VT_PTR
) {
2415 t
= t1
, t1
= t2
, t2
= t
;
2418 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2419 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2422 type1
= vtop
[-1].type
;
2423 type1
.t
&= ~VT_ARRAY
;
2424 if (vtop
[-1].type
.t
& VT_VLA
)
2425 vla_runtime_pointed_size(&vtop
[-1].type
);
2427 u
= pointed_size(&vtop
[-1].type
);
2429 tcc_error("unknown array element size");
2433 /* XXX: cast to int ? (long long case) */
2439 /* #ifdef CONFIG_TCC_BCHECK
2440 The main reason to removing this code:
2447 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2448 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2450 When this code is on. then the output looks like
2452 v+(i-j) = 0xbff84000
2454 /* if evaluating constant expression, no code should be
2455 generated, so no bound check */
2456 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2457 /* if bounded pointers, we generate a special code to
2464 gen_bounded_ptr_add();
2470 /* put again type if gen_opic() swaped operands */
2473 } else if (is_float(bt1
) || is_float(bt2
)) {
2474 /* compute bigger type and do implicit casts */
2475 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2477 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2482 /* floats can only be used for a few operations */
2483 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2484 (op
< TOK_ULT
|| op
> TOK_GT
))
2485 tcc_error("invalid operands for binary operation");
2487 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2488 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2489 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2491 t
|= (VT_LONG
& t1
);
2493 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2494 /* cast to biggest op */
2495 t
= VT_LLONG
| VT_LONG
;
2496 if (bt1
== VT_LLONG
)
2498 if (bt2
== VT_LLONG
)
2500 /* convert to unsigned if it does not fit in a long long */
2501 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2502 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2506 /* integer operations */
2507 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2508 /* convert to unsigned if it does not fit in an integer */
2509 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2510 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2513 /* XXX: currently, some unsigned operations are explicit, so
2514 we modify them here */
2515 if (t
& VT_UNSIGNED
) {
2522 else if (op
== TOK_LT
)
2524 else if (op
== TOK_GT
)
2526 else if (op
== TOK_LE
)
2528 else if (op
== TOK_GE
)
2536 /* special case for shifts and long long: we keep the shift as
2538 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2545 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2546 /* relational op: the result is an int */
2547 vtop
->type
.t
= VT_INT
;
2552 // Make sure that we have converted to an rvalue:
2553 if (vtop
->r
& VT_LVAL
)
2554 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2557 #ifndef TCC_TARGET_ARM
2558 /* generic itof for unsigned long long case */
2559 static void gen_cvt_itof1(int t
)
2561 #ifdef TCC_TARGET_ARM64
2564 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2565 (VT_LLONG
| VT_UNSIGNED
)) {
2568 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2569 #if LDOUBLE_SIZE != 8
2570 else if (t
== VT_LDOUBLE
)
2571 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2574 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2578 vtop
->r
= reg_fret(t
);
2586 /* generic ftoi for unsigned long long case */
2587 static void gen_cvt_ftoi1(int t
)
2589 #ifdef TCC_TARGET_ARM64
2594 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2595 /* not handled natively */
2596 st
= vtop
->type
.t
& VT_BTYPE
;
2598 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2599 #if LDOUBLE_SIZE != 8
2600 else if (st
== VT_LDOUBLE
)
2601 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2604 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2609 vtop
->r2
= REG_LRET
;
2616 /* force char or short cast */
2617 static void force_charshort_cast(int t
)
2621 /* cannot cast static initializers */
2622 if (STATIC_DATA_WANTED
)
2626 /* XXX: add optimization if lvalue : just change type and offset */
2631 if (t
& VT_UNSIGNED
) {
2632 vpushi((1 << bits
) - 1);
2635 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2641 /* result must be signed or the SAR is converted to an SHL
2642 This was not the case when "t" was a signed short
2643 and the last value on the stack was an unsigned int */
2644 vtop
->type
.t
&= ~VT_UNSIGNED
;
2650 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2651 static void gen_cast_s(int t
)
2659 static void gen_cast(CType
*type
)
2661 int sbt
, dbt
, sf
, df
, c
, p
;
2663 /* special delayed cast for char/short */
2664 /* XXX: in some cases (multiple cascaded casts), it may still
2666 if (vtop
->r
& VT_MUSTCAST
) {
2667 vtop
->r
&= ~VT_MUSTCAST
;
2668 force_charshort_cast(vtop
->type
.t
);
2671 /* bitfields first get cast to ints */
2672 if (vtop
->type
.t
& VT_BITFIELD
) {
2676 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2677 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2682 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2683 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2684 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2685 c
&= dbt
!= VT_LDOUBLE
;
2688 /* constant case: we can do it now */
2689 /* XXX: in ISOC, cannot do it if error in convert */
2690 if (sbt
== VT_FLOAT
)
2691 vtop
->c
.ld
= vtop
->c
.f
;
2692 else if (sbt
== VT_DOUBLE
)
2693 vtop
->c
.ld
= vtop
->c
.d
;
2696 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2697 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2698 vtop
->c
.ld
= vtop
->c
.i
;
2700 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2702 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2703 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2705 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2708 if (dbt
== VT_FLOAT
)
2709 vtop
->c
.f
= (float)vtop
->c
.ld
;
2710 else if (dbt
== VT_DOUBLE
)
2711 vtop
->c
.d
= (double)vtop
->c
.ld
;
2712 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2713 vtop
->c
.i
= vtop
->c
.ld
;
2714 } else if (sf
&& dbt
== VT_BOOL
) {
2715 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2718 vtop
->c
.i
= vtop
->c
.ld
;
2719 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2721 else if (sbt
& VT_UNSIGNED
)
2722 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2724 else if (sbt
== VT_PTR
)
2727 else if (sbt
!= VT_LLONG
)
2728 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2729 -(vtop
->c
.i
& 0x80000000));
2731 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2733 else if (dbt
== VT_BOOL
)
2734 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2736 else if (dbt
== VT_PTR
)
2739 else if (dbt
!= VT_LLONG
) {
2740 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2741 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2744 if (!(dbt
& VT_UNSIGNED
))
2745 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2748 } else if (p
&& dbt
== VT_BOOL
) {
2752 /* non constant case: generate code */
2754 /* convert from fp to fp */
2757 /* convert int to fp */
2760 /* convert fp to int */
2761 if (dbt
== VT_BOOL
) {
2765 /* we handle char/short/etc... with generic code */
2766 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2767 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2771 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2772 /* additional cast for char/short... */
2778 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2779 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2780 /* scalar to long long */
2781 /* machine independent conversion */
2783 /* generate high word */
2784 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2788 if (sbt
== VT_PTR
) {
2789 /* cast from pointer to int before we apply
2790 shift operation, which pointers don't support*/
2797 /* patch second register */
2798 vtop
[-1].r2
= vtop
->r
;
2802 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2803 (dbt
& VT_BTYPE
) == VT_PTR
||
2804 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2805 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2806 (sbt
& VT_BTYPE
) != VT_PTR
&&
2807 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2808 /* need to convert from 32bit to 64bit */
2810 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2811 #if defined(TCC_TARGET_ARM64)
2813 #elif defined(TCC_TARGET_X86_64)
2815 /* x86_64 specific: movslq */
2817 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2824 } else if (dbt
== VT_BOOL
) {
2825 /* scalar to bool */
2828 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2829 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2830 if (sbt
== VT_PTR
) {
2831 vtop
->type
.t
= VT_INT
;
2832 tcc_warning("nonportable conversion from pointer to char/short");
2834 force_charshort_cast(dbt
);
2835 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2837 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2839 /* from long long: just take low order word */
2844 vtop
->type
.t
|= VT_UNSIGNED
;
2848 /* if lvalue and single word type, nothing to do because
2849 the lvalue already contains the real type size (see
2850 VT_LVAL_xxx constants) */
2853 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2854 /* if we are casting between pointer types,
2855 we must update the VT_LVAL_xxx size */
2856 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2857 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2860 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
2863 /* return type size as known at compile time. Put alignment at 'a' */
2864 ST_FUNC
int type_size(CType
*type
, int *a
)
2869 bt
= type
->t
& VT_BTYPE
;
2870 if (bt
== VT_STRUCT
) {
2875 } else if (bt
== VT_PTR
) {
2876 if (type
->t
& VT_ARRAY
) {
2880 ts
= type_size(&s
->type
, a
);
2882 if (ts
< 0 && s
->c
< 0)
2890 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
2891 return -1; /* incomplete enum */
2892 } else if (bt
== VT_LDOUBLE
) {
2894 return LDOUBLE_SIZE
;
2895 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2896 #ifdef TCC_TARGET_I386
2897 #ifdef TCC_TARGET_PE
2902 #elif defined(TCC_TARGET_ARM)
2912 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2915 } else if (bt
== VT_SHORT
) {
2918 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2922 /* char, void, function, _Bool */
2928 /* push type size as known at runtime time on top of value stack. Put
2930 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2932 if (type
->t
& VT_VLA
) {
2933 type_size(&type
->ref
->type
, a
);
2934 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2936 vpushi(type_size(type
, a
));
2940 static void vla_sp_restore(void) {
2941 if (vlas_in_scope
) {
2942 gen_vla_sp_restore(vla_sp_loc
);
2946 static void vla_sp_restore_root(void) {
2947 if (vlas_in_scope
) {
2948 gen_vla_sp_restore(vla_sp_root_loc
);
2952 /* return the pointed type of t */
2953 static inline CType
*pointed_type(CType
*type
)
2955 return &type
->ref
->type
;
2958 /* modify type so that its it is a pointer to type. */
2959 ST_FUNC
void mk_pointer(CType
*type
)
2962 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2963 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2967 /* compare function types. OLD functions match any new functions */
2968 static int is_compatible_func(CType
*type1
, CType
*type2
)
2974 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2976 if (s1
->f
.func_type
!= s2
->f
.func_type
2977 && s1
->f
.func_type
!= FUNC_OLD
2978 && s2
->f
.func_type
!= FUNC_OLD
)
2980 /* we should check the function return type for FUNC_OLD too
2981 but that causes problems with the internally used support
2982 functions such as TOK_memmove */
2983 if (s1
->f
.func_type
== FUNC_OLD
&& !s1
->next
)
2985 if (s2
->f
.func_type
== FUNC_OLD
&& !s2
->next
)
2988 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2999 /* return true if type1 and type2 are the same. If unqualified is
3000 true, qualifiers on the types are ignored.
3002 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3006 t1
= type1
->t
& VT_TYPE
;
3007 t2
= type2
->t
& VT_TYPE
;
3009 /* strip qualifiers before comparing */
3010 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3011 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3014 /* Default Vs explicit signedness only matters for char */
3015 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3019 /* XXX: bitfields ? */
3024 && !(type1
->ref
->c
< 0
3025 || type2
->ref
->c
< 0
3026 || type1
->ref
->c
== type2
->ref
->c
))
3029 /* test more complicated cases */
3030 bt1
= t1
& VT_BTYPE
;
3031 if (bt1
== VT_PTR
) {
3032 type1
= pointed_type(type1
);
3033 type2
= pointed_type(type2
);
3034 return is_compatible_types(type1
, type2
);
3035 } else if (bt1
== VT_STRUCT
) {
3036 return (type1
->ref
== type2
->ref
);
3037 } else if (bt1
== VT_FUNC
) {
3038 return is_compatible_func(type1
, type2
);
3039 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
3040 return type1
->ref
== type2
->ref
;
3046 /* return true if type1 and type2 are exactly the same (including
3049 static int is_compatible_types(CType
*type1
, CType
*type2
)
3051 return compare_types(type1
,type2
,0);
3054 /* return true if type1 and type2 are the same (ignoring qualifiers).
3056 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3058 return compare_types(type1
,type2
,1);
3061 /* print a type. If 'varstr' is not NULL, then the variable is also
3062 printed in the type */
3064 /* XXX: add array and function pointers */
3065 static void type_to_str(char *buf
, int buf_size
,
3066 CType
*type
, const char *varstr
)
3078 pstrcat(buf
, buf_size
, "extern ");
3080 pstrcat(buf
, buf_size
, "static ");
3082 pstrcat(buf
, buf_size
, "typedef ");
3084 pstrcat(buf
, buf_size
, "inline ");
3085 if (t
& VT_VOLATILE
)
3086 pstrcat(buf
, buf_size
, "volatile ");
3087 if (t
& VT_CONSTANT
)
3088 pstrcat(buf
, buf_size
, "const ");
3090 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
3091 || ((t
& VT_UNSIGNED
)
3092 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
3095 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
3097 buf_size
-= strlen(buf
);
3132 tstr
= "long double";
3134 pstrcat(buf
, buf_size
, tstr
);
3141 pstrcat(buf
, buf_size
, tstr
);
3142 v
= type
->ref
->v
& ~SYM_STRUCT
;
3143 if (v
>= SYM_FIRST_ANOM
)
3144 pstrcat(buf
, buf_size
, "<anonymous>");
3146 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3151 if (varstr
&& '*' == *varstr
) {
3152 pstrcat(buf1
, sizeof(buf1
), "(");
3153 pstrcat(buf1
, sizeof(buf1
), varstr
);
3154 pstrcat(buf1
, sizeof(buf1
), ")");
3156 pstrcat(buf1
, buf_size
, "(");
3158 while (sa
!= NULL
) {
3160 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3161 pstrcat(buf1
, sizeof(buf1
), buf2
);
3164 pstrcat(buf1
, sizeof(buf1
), ", ");
3166 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3167 pstrcat(buf1
, sizeof(buf1
), ", ...");
3168 pstrcat(buf1
, sizeof(buf1
), ")");
3169 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3174 if (varstr
&& '*' == *varstr
)
3175 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3177 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3178 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3181 pstrcpy(buf1
, sizeof(buf1
), "*");
3182 if (t
& VT_CONSTANT
)
3183 pstrcat(buf1
, buf_size
, "const ");
3184 if (t
& VT_VOLATILE
)
3185 pstrcat(buf1
, buf_size
, "volatile ");
3187 pstrcat(buf1
, sizeof(buf1
), varstr
);
3188 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3192 pstrcat(buf
, buf_size
, " ");
3193 pstrcat(buf
, buf_size
, varstr
);
3198 /* verify type compatibility to store vtop in 'dt' type, and generate
3200 static void gen_assign_cast(CType
*dt
)
3202 CType
*st
, *type1
, *type2
;
3203 char buf1
[256], buf2
[256];
3204 int dbt
, sbt
, qualwarn
, lvl
;
3206 st
= &vtop
->type
; /* source type */
3207 dbt
= dt
->t
& VT_BTYPE
;
3208 sbt
= st
->t
& VT_BTYPE
;
3209 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3210 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3211 ; /* It is Ok if both are void */
3213 tcc_error("cannot cast from/to void");
3215 if (dt
->t
& VT_CONSTANT
)
3216 tcc_warning("assignment of read-only location");
3219 /* special cases for pointers */
3220 /* '0' can also be a pointer */
3221 if (is_null_pointer(vtop
))
3223 /* accept implicit pointer to integer cast with warning */
3224 if (is_integer_btype(sbt
)) {
3225 tcc_warning("assignment makes pointer from integer without a cast");
3228 type1
= pointed_type(dt
);
3230 type2
= pointed_type(st
);
3231 else if (sbt
== VT_FUNC
)
3232 type2
= st
; /* a function is implicitly a function pointer */
3235 if (is_compatible_types(type1
, type2
))
3237 for (qualwarn
= lvl
= 0;; ++lvl
) {
3238 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3239 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3241 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3242 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3243 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3245 type1
= pointed_type(type1
);
3246 type2
= pointed_type(type2
);
3248 if (!is_compatible_unqualified_types(type1
, type2
)) {
3249 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3250 /* void * can match anything */
3251 } else if (dbt
== sbt
3252 && is_integer_btype(sbt
& VT_BTYPE
)
3253 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3254 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3255 /* Like GCC don't warn by default for merely changes
3256 in pointer target signedness. Do warn for different
3257 base types, though, in particular for unsigned enums
3258 and signed int targets. */
3260 tcc_warning("assignment from incompatible pointer type");
3265 tcc_warning("assignment discards qualifiers from pointer target type");
3271 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3272 tcc_warning("assignment makes integer from pointer without a cast");
3273 } else if (sbt
== VT_STRUCT
) {
3274 goto case_VT_STRUCT
;
3276 /* XXX: more tests */
3280 if (!is_compatible_unqualified_types(dt
, st
)) {
3282 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3283 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3284 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3291 /* store vtop in lvalue pushed on stack */
/* NOTE(review): this extract is line-wrapped and missing many original source
   lines (gaps in the embedded numbering, e.g. 3308-3309, 3319-3321, 3326-3327,
   3331-3332, 3335-3338, 3340-3344, 3348, 3351, 3356, 3360-3361, 3366,
   3369-3394, 3401-3408, 3411-3457). Do not edit from this copy; consult the
   complete tccgen.c. Comments below describe only what the visible code shows. */
/* vstore: the value to store is in vtop[0], the destination lvalue in
   vtop[-1].  ft = destination type, sbt/dbt = source/destination basic types. */
3292 ST_FUNC
void vstore(void)
3294 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3296 ft
= vtop
[-1].type
.t
;
3297 sbt
= vtop
->type
.t
& VT_BTYPE
;
3298 dbt
= ft
& VT_BTYPE
;
/* narrowing int/short -> byte/short stores (non-bitfield) mark the value with
   VT_MUSTCAST instead of emitting the cast immediately */
3299 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3300 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3301 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3302 /* optimize char/short casts */
3303 delayed_cast
= VT_MUSTCAST
;
3304 vtop
->type
.t
= ft
& VT_TYPE
;
3305 /* XXX: factorize */
3306 if (ft
& VT_CONSTANT
)
3307 tcc_warning("assignment of read-only location");
3310 if (!(ft
& VT_BITFIELD
))
3311 gen_assign_cast(&vtop
[-1].type
);
/* struct assignment path: performed by calling a memory-copy runtime helper
   (memcpy8/memcpy4 chosen by alignment, else memmove) on pointers */
3314 if (sbt
== VT_STRUCT
) {
3315 /* if structure, only generate pointer */
3316 /* structure assignment : generate memcpy */
3317 /* XXX: optimize if small size */
3318 size
= type_size(&vtop
->type
, &align
);
3322 vtop
->type
.t
= VT_PTR
;
3325 /* address of memcpy() */
3328 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3329 else if(!(align
& 3))
3330 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3333 /* Use memmove, rather than memcpy, as dest and src may be same: */
3334 vpush_global_sym(&func_old_type
, TOK_memmove
);
3339 vtop
->type
.t
= VT_PTR
;
3345 /* leave source on stack */
3346 } else if (ft
& VT_BITFIELD
) {
3347 /* bitfield store handling */
3349 /* save lvalue as expression result (example: s.b = s.a = n;) */
3350 vdup(), vtop
[-1] = vtop
[-2];
3352 bit_pos
= BIT_POS(ft
);
3353 bit_size
= BIT_SIZE(ft
);
3354 /* remove bit field info to avoid loops */
3355 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
/* _Bool bitfields are first normalized to 0/1 via a cast to unsigned byte */
3357 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3358 gen_cast(&vtop
[-1].type
);
3359 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
/* adjust_bf returning VT_STRUCT selects the packed (byte-wise) store path */
3362 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3363 if (r
== VT_STRUCT
) {
3364 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3365 store_packed_bf(bit_pos
, bit_size
);
3367 unsigned long long mask
= (1ULL << bit_size
) - 1;
3368 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3370 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3373 vpushi((unsigned)mask
);
3380 /* duplicate destination */
3383 /* load destination, mask and or with source */
3384 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3385 vpushll(~(mask
<< bit_pos
));
3387 vpushi(~((unsigned)mask
<< bit_pos
));
3392 /* ... and discard */
3395 } else if (dbt
== VT_VOID
) {
3398 #ifdef CONFIG_TCC_BCHECK
3399 /* bound check case */
3400 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3409 #ifdef TCC_TARGET_X86_64
3410 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3412 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3417 r
= gv(rc
); /* generate value */
3418 /* if lvalue was saved on stack, must read it */
3419 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3421 t
= get_reg(RC_INT
);
3427 sv
.r
= VT_LOCAL
| VT_LVAL
;
3428 sv
.c
.i
= vtop
[-1].c
.i
;
3430 vtop
[-1].r
= t
| VT_LVAL
;
3432 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3434 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3435 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3437 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3438 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3440 vtop
[-1].type
.t
= load_type
;
3443 /* convert to int to increment easily */
3444 vtop
->type
.t
= addr_type
;
3450 vtop
[-1].type
.t
= load_type
;
3451 /* XXX: it works because r2 is spilled last ! */
3452 store(vtop
->r2
, vtop
- 1);
3458 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3459 vtop
->r
|= delayed_cast
;
3463 /* post defines POST/PRE add. c is the token ++ or -- */
/* NOTE(review): line-wrapped extract with missing lines (3465-3466, 3468,
   3470-3473, 3475, 3477, 3479-3480); consult the full tccgen.c before editing.
   Visible flow: dup lvalue, dup value, push +/-1 (c - TOK_MID), store,
   and for the post form pop to yield the saved original value. */
3464 ST_FUNC
void inc(int post
, int c
)
3467 vdup(); /* save lvalue */
3469 gv_dup(); /* duplicate value */
3474 vpushi(c
- TOK_MID
);
3476 vstore(); /* store value */
3478 vpop(); /* if post op, return saved value */
/* Concatenate adjacent string-literal tokens into *astr and NUL-terminate it.
   NOTE(review): extract is line-wrapped and missing lines 3482, 3484-3486,
   3490-3491, 3493 (presumably the brace lines, the initial TOK_STR check
   using 'msg', cstr init and next() calls — confirm against full tccgen.c). */
3481 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3483 /* read the string */
3487 while (tok
== TOK_STR
) {
3488 /* XXX: add \0 handling too ? */
3489 cstr_cat(astr
, tokc
.str
.data
, -1);
3492 cstr_ccat(astr
, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.
   (Reconstructed: the extracted text was garbled and missing the function
   prologue, the tail of the binary reduction, and the return statements;
   rebuilt to match the documented contract.  For non-power-of-two inputs the
   result is floor(log2(i))+1, i.e. the bit position of the highest set bit.)
   Parameters:
     i - value to examine; only non-negative values are meaningful here
         (callers pass alignment values already validated as powers of two).
   Returns: 0 for i == 0, otherwise index (1-based) of the highest set bit. */
static int exact_log2p1(int i)
{
    int ret;
    if (!i)
        return 0;
    /* reduce in chunks of 8 bits, then binary-search the remaining byte */
    for (ret = 1; i >= 1 << 8; ret += 8)
        i >>= 8;
    if (i >= 1 << 4)
        ret += 4, i >>= 4;
    if (i >= 1 << 2)
        ret += 2, i >>= 2;
    if (i >= 1 << 1)
        ret++;
    return ret;
}
3513 /* Parse __attribute__((...)) GNUC extension. */
/* NOTE(review): line-wrapped extract with many missing lines (3515-3519,
   3521-3524, 3528-3538, 3542, 3544-3550, 3553-3558, 3562-3564, 3567,
   3578, 3580-3587, 3590-3593, 3597-3607, 3610-3612, 3614-3617, 3619-3622,
   3624, 3626-3634, 3636-3640, 3642-3647, 3649-3650, 3652-3653, 3655-3657,
   3659-3660, 3662-3667, 3669, 3672-3673, 3675-3676, 3680, 3682-3684,
   3686-3687, 3689-...). The switch skeleton over attribute keywords is not
   visible; consult the full tccgen.c before editing.  Fills *ad from the
   attribute list: cleanup, section, alias, visibility, aligned, cdecl /
   stdcall / fastcall, mode, dllexport / nodecorate / dllimport; unknown
   attributes warn and have their parenthesized parameters skipped. */
3514 static void parse_attribute(AttributeDef
*ad
)
3520 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3525 while (tok
!= ')') {
3526 if (tok
< TOK_IDENT
)
3527 expect("attribute name");
/* cleanup(func): an undeclared cleanup function is implicitly declared
   with the old (unprototyped) function type */
3539 tcc_warning("implicit declaration of function '%s'",
3540 get_tok_str(tok
, &tokc
));
3541 s
= external_global_sym(tok
, &func_old_type
);
3543 ad
->cleanup_func
= s
;
3551 parse_mult_str(&astr
, "section name");
3552 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3559 parse_mult_str(&astr
, "alias(\"target\")");
3560 ad
->alias_target
= /* save string as token, for later */
3561 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3565 case TOK_VISIBILITY1
:
3566 case TOK_VISIBILITY2
:
3568 parse_mult_str(&astr
,
3569 "visibility(\"default|hidden|internal|protected\")");
3570 if (!strcmp (astr
.data
, "default"))
3571 ad
->a
.visibility
= STV_DEFAULT
;
3572 else if (!strcmp (astr
.data
, "hidden"))
3573 ad
->a
.visibility
= STV_HIDDEN
;
3574 else if (!strcmp (astr
.data
, "internal"))
3575 ad
->a
.visibility
= STV_INTERNAL
;
3576 else if (!strcmp (astr
.data
, "protected"))
3577 ad
->a
.visibility
= STV_PROTECTED
;
3579 expect("visibility(\"default|hidden|internal|protected\")");
/* aligned(n): n must be a power of two; stored as log2(n)+1 via exact_log2p1 */
3588 if (n
<= 0 || (n
& (n
- 1)) != 0)
3589 tcc_error("alignment must be a positive power of two");
3594 ad
->a
.aligned
= exact_log2p1(n
);
3595 if (n
!= 1 << (ad
->a
.aligned
- 1))
3596 tcc_error("alignment of %d is larger than implemented", n
);
3608 /* currently, no need to handle it because tcc does not
3609 track unused objects */
3613 ad
->f
.func_noreturn
= 1;
3618 ad
->f
.func_call
= FUNC_CDECL
;
3623 ad
->f
.func_call
= FUNC_STDCALL
;
3625 #ifdef TCC_TARGET_I386
3635 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3641 ad
->f
.func_call
= FUNC_FASTCALLW
;
/* mode(...): attr_mode stores basic type + 1 so 0 can mean "not set" */
3648 ad
->attr_mode
= VT_LLONG
+ 1;
3651 ad
->attr_mode
= VT_BYTE
+ 1;
3654 ad
->attr_mode
= VT_SHORT
+ 1;
3658 ad
->attr_mode
= VT_INT
+ 1;
3661 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3668 ad
->a
.dllexport
= 1;
3670 case TOK_NODECORATE
:
3671 ad
->a
.nodecorate
= 1;
3674 ad
->a
.dllimport
= 1;
3677 if (tcc_state
->warn_unsupported
)
3678 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3679 /* skip parameters */
3681 int parenthesis
= 0;
3685 else if (tok
== ')')
3688 } while (parenthesis
&& tok
!= -1);
/* Search a struct/union type for field token v, recursing into anonymous
   member structs (anonymous: SYM_FIELD set and name >= SYM_FIRST_ANOM).
   NOTE(review): extract is line-wrapped and missing lines 3702-3704 and
   3710-3720 (opening brace, initialisation of 's', the direct-match test,
   cumofs accumulation and the returns — confirm against full tccgen.c).
   cumofs presumably accumulates the byte offset of enclosing anonymous
   members — verify against callers. */
3701 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3705 while ((s
= s
->next
) != NULL
) {
3706 if ((s
->v
& SYM_FIELD
) &&
3707 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3708 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3709 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
/* Compute field offsets, bit-field packing, total size and alignment of a
   struct/union type.  Two layout models: PCC/GCC-compatible (pcc != 0, the
   default) and MS-compatible bitfields (tcc_state->ms_bitfields).  A second
   pass re-checks that each bit-field can be accessed through some integer
   type without overrunning the struct, otherwise falls back to byte-wise
   access (f->auxtype = VT_STRUCT).
   NOTE(review): line-wrapped extract missing many lines (3722, 3727-3737,
   3741-3742, 3745-3746, 3749-3750, 3753-3754, 3756-3757, 3762-3764,
   3766-3768, 3772-3775, 3777, 3780-3787, 3790, 3793, 3798, 3800, 3802, 3804,
   3807-3809, 3813, 3816-3817, 3823-3826, 3830, 3832-3833, 3839-3840,
   3846-3847, 3849-3850, 3854, 3856-3858, 3863-3874, 3876, 3879-3881,
   3885-3888, 3890-3892, 3894-3895, 3898-3900, 3902-3904, 3906-3907,
   3911-3912, 3915, 3919-3920, 3922-3930, 3932-3934, 3937-3938, 3941-3943,
   3948-3949, 3952, 3955-...). Do not edit from this copy. */
3721 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3723 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3724 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3725 int pcc
= !tcc_state
->ms_bitfields
;
3726 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3733 prevbt
= VT_STRUCT
; /* make it never match */
/* first pass: place each field */
3738 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3739 if (f
->type
.t
& VT_BITFIELD
)
3740 bit_size
= BIT_SIZE(f
->type
.t
);
3743 size
= type_size(&f
->type
, &align
);
/* a = explicit per-field alignment (aligned attribute), 0 if none */
3744 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3747 if (pcc
&& bit_size
== 0) {
3748 /* in pcc mode, packing does not affect zero-width bitfields */
3751 /* in pcc mode, attribute packed overrides if set. */
3752 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3755 /* pragma pack overrides align if lesser and packs bitfields always */
3758 if (pragma_pack
< align
)
3759 align
= pragma_pack
;
3760 /* in pcc mode pragma pack also overrides individual align */
3761 if (pcc
&& pragma_pack
< a
)
3765 /* some individual align was specified */
3769 if (type
->ref
->type
.t
== VT_UNION
) {
3770 if (pcc
&& bit_size
>= 0)
3771 size
= (bit_size
+ 7) >> 3;
3776 } else if (bit_size
< 0) {
/* plain (non-bitfield) struct member: flush pending bits, align offset */
3778 c
+= (bit_pos
+ 7) >> 3;
3779 c
= (c
+ align
- 1) & -align
;
3788 /* A bit-field. Layout is more complicated. There are two
3789 options: PCC (GCC) compatible and MS compatible */
3791 /* In PCC layout a bit-field is placed adjacent to the
3792 preceding bit-fields, except if:
3794 - an individual alignment was given
3795 - it would overflow its base type container and
3796 there is no packing */
3797 if (bit_size
== 0) {
3799 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3801 } else if (f
->a
.aligned
) {
3803 } else if (!packed
) {
3805 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3806 if (ofs
> size
/ align
)
3810 /* in pcc mode, long long bitfields have type int if they fit */
3811 if (size
== 8 && bit_size
<= 32)
3812 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3814 while (bit_pos
>= align
* 8)
3815 c
+= align
, bit_pos
-= align
* 8;
3818 /* In PCC layout named bit-fields influence the alignment
3819 of the containing struct using the base types alignment,
3820 except for packed fields (which here have correct align). */
3821 if (f
->v
& SYM_FIRST_ANOM
3822 // && bit_size // ??? gcc on ARM/rpi does that
3827 bt
= f
->type
.t
& VT_BTYPE
;
3828 if ((bit_pos
+ bit_size
> size
* 8)
3829 || (bit_size
> 0) == (bt
!= prevbt
)
3831 c
= (c
+ align
- 1) & -align
;
3834 /* In MS bitfield mode a bit-field run always uses
3835 at least as many bits as the underlying type.
3836 To start a new run it's also required that this
3837 or the last bit-field had non-zero width. */
3838 if (bit_size
|| prev_bit_size
)
3841 /* In MS layout the records alignment is normally
3842 influenced by the field, except for a zero-width
3843 field at the start of a run (but by further zero-width
3844 fields it is again). */
3845 if (bit_size
== 0 && prevbt
!= bt
)
3848 prev_bit_size
= bit_size
;
/* record the final bit position in the field's type word */
3851 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3852 | (bit_pos
<< VT_STRUCT_SHIFT
);
3853 bit_pos
+= bit_size
;
3855 if (align
> maxalign
)
3859 printf("set field %s offset %-2d size %-2d align %-2d",
3860 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3861 if (f
->type
.t
& VT_BITFIELD
) {
3862 printf(" pos %-2d bits %-2d",
3875 c
+= (bit_pos
+ 7) >> 3;
3877 /* store size and alignment */
3878 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3882 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3883 /* can happen if individual align for some member was given. In
3884 this case MSVC ignores maxalign when aligning the size */
3889 c
= (c
+ a
- 1) & -a
;
3893 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3896 /* check whether we can access bitfields by their type */
3897 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3901 if (0 == (f
->type
.t
& VT_BITFIELD
))
3905 bit_size
= BIT_SIZE(f
->type
.t
);
3908 bit_pos
= BIT_POS(f
->type
.t
);
3909 size
= type_size(&f
->type
, &align
);
3910 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3913 /* try to access the field using a different type */
3914 c0
= -1, s
= align
= 1;
3916 px
= f
->c
* 8 + bit_pos
;
3917 cx
= (px
>> 3) & -align
;
3918 px
= px
- (cx
<< 3);
3921 s
= (px
+ bit_size
+ 7) >> 3;
3931 s
= type_size(&t
, &align
);
3935 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3936 /* update offset and bit position */
3939 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3940 | (bit_pos
<< VT_STRUCT_SHIFT
);
3944 printf("FIX field %s offset %-2d size %-2d align %-2d "
3945 "pos %-2d bits %-2d\n",
3946 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3947 cx
, s
, align
, px
, bit_size
);
3950 /* fall back to load/store single-byte wise */
3951 f
->auxtype
= VT_STRUCT
;
3953 printf("FIX field %s : load byte-wise\n",
3954 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3960 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
/* NOTE(review): line-wrapped extract missing many lines (3962, 3965,
   3967-3968, 3970, 3972-3974, 3976, 3978, 3980-3981, 3983, 3985-3988, 3991,
   3995, 3997-4001, 4003, 4006-4007, 4009-4010, 4013-4015, 4017, 4021-4023,
   4025, 4027, 4029-4036, 4038-4041, 4043-4044, 4047, 4051, 4054, 4056,
   4060-4061, 4064-4067, 4070-4074, 4077-4081, 4083, 4086, 4091-4093,
   4096-4097, 4100, 4106-4108, 4111, 4118, 4122-4126, 4128, 4135, 4138,
   4140, 4142-4143, 4147-4148, 4151, 4153-4156, 4158-4161, 4163-4168,
   4171-...). Do not edit from this copy; consult the full tccgen.c.
   Parses a tagged or anonymous enum/struct/union, handles forward
   declarations, enumerator values, member declarations incl. bit-fields
   and flexible array members, then calls struct_layout(). */
3961 static void struct_decl(CType
*type
, int u
)
3963 int v
, c
, size
, align
, flexible
;
3964 int bit_size
, bsize
, bt
;
3966 AttributeDef ad
, ad1
;
3969 memset(&ad
, 0, sizeof ad
);
3971 parse_attribute(&ad
);
3975 /* struct already defined ? return it */
3977 expect("struct/union/enum name");
3979 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3982 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3984 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3989 /* Record the original enum/struct/union token. */
3990 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3992 /* we put an undefined size for struct/union */
3993 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3994 s
->r
= 0; /* default alignment is zero as gcc */
3996 type
->t
= s
->type
.t
;
4002 tcc_error("struct/union/enum already defined");
4004 /* cannot be empty */
4005 /* non empty enums are not allowed */
/* ll = current enumerator value; pl/nl track the most positive/negative
   values seen, to pick the integral type of the enum afterwards */
4008 long long ll
= 0, pl
= 0, nl
= 0;
4011 /* enum symbols have static storage */
4012 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4016 expect("identifier");
4018 if (ss
&& !local_stack
)
4019 tcc_error("redefinition of enumerator '%s'",
4020 get_tok_str(v
, NULL
));
4024 ll
= expr_const64();
4026 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4028 *ps
= ss
, ps
= &ss
->next
;
4037 /* NOTE: we accept a trailing comma */
4042 /* set integral type of the enum */
4045 if (pl
!= (unsigned)pl
)
4046 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4048 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4049 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4050 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4052 /* set type for enum members */
4053 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4055 if (ll
== (int)ll
) /* default is int if it fits */
4057 if (t
.t
& VT_UNSIGNED
) {
4058 ss
->type
.t
|= VT_UNSIGNED
;
4059 if (ll
== (unsigned)ll
)
4062 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4063 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
/* struct/union member parsing loop */
4068 while (tok
!= '}') {
4069 if (!parse_btype(&btype
, &ad1
)) {
4075 tcc_error("flexible array member '%s' not at the end of struct",
4076 get_tok_str(v
, NULL
));
4082 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4084 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4085 expect("identifier");
4087 int v
= btype
.ref
->v
;
4088 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4089 if (tcc_state
->ms_extensions
== 0)
4090 expect("identifier");
4094 if (type_size(&type1
, &align
) < 0) {
4095 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4098 tcc_error("field '%s' has incomplete type",
4099 get_tok_str(v
, NULL
));
4101 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4102 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4103 (type1
.t
& VT_STORAGE
))
4104 tcc_error("invalid type for '%s'",
4105 get_tok_str(v
, NULL
));
4109 bit_size
= expr_const();
4110 /* XXX: handle v = 0 case for messages */
4112 tcc_error("negative width in bit-field '%s'",
4113 get_tok_str(v
, NULL
));
4114 if (v
&& bit_size
== 0)
4115 tcc_error("zero width for bit-field '%s'",
4116 get_tok_str(v
, NULL
));
4117 parse_attribute(&ad1
);
4119 size
= type_size(&type1
, &align
);
4120 if (bit_size
>= 0) {
4121 bt
= type1
.t
& VT_BTYPE
;
4127 tcc_error("bitfields must have scalar type");
4129 if (bit_size
> bsize
) {
4130 tcc_error("width of '%s' exceeds its type",
4131 get_tok_str(v
, NULL
));
4132 } else if (bit_size
== bsize
4133 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4134 /* no need for bit fields */
4136 } else if (bit_size
== 64) {
4137 tcc_error("field width 64 not implemented");
4139 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4141 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4144 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4145 /* Remember we've seen a real field to check
4146 for placement of flexible array member. */
4149 /* If member is a struct or bit-field, enforce
4150 placing into the struct (as anonymous). */
4152 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4157 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4162 if (tok
== ';' || tok
== TOK_EOF
)
4169 parse_attribute(&ad
);
4170 struct_layout(type
, &ad
);
/* Merge a symbol's stored symbol- and function-attributes into *ad
   (used when a typedef carries attributes that apply to the new decl).
   NOTE(review): lines 4176 and 4179 (braces) are missing from this
   line-wrapped extract. */
4175 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4177 merge_symattr(&ad
->a
, &s
->a
);
4178 merge_funcattr(&ad
->f
, &s
->f
);
4181 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4182 are added to the element type, copied because it could be a typedef. */
/* NOTE(review): line-wrapped extract; lines 4184, 4188, 4190-4191 (braces)
   are missing.  Each array level gets a fresh Sym (sym_push) so the shared
   typedef'd element type is not mutated; qualifiers land on the innermost
   element type. */
4183 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4185 while (type
->t
& VT_ARRAY
) {
4186 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4187 type
= &type
->ref
->type
;
4189 type
->t
|= qualifiers
;
4192 /* return 0 if no type declaration. otherwise, return the basic type
/* NOTE(review): line-wrapped extract missing many lines (4193-4194, 4196,
   4198-4200, 4202-4210, 4212-4220, 4224-4225, 4227-4230, 4232-4247, 4251,
   4253, 4255-4256, 4259-4260, 4262-4264, 4269-4274, 4276, 4279-4288,
   4291-4297, 4299-4300, 4302-4303, 4305-4306, 4308-4309, 4311-4314,
   4316-4322, 4324-4329, 4332-4343, 4347-4361, 4364-4373, 4376-4377, 4385-4391,
   4395, 4397-4401, 4403-4405, 4408-4411, 4416, 4418, 4420-4427, 4430-4431,
   4434, 4439-4443). The token switch skeleton is not visible; consult the
   full tccgen.c before editing.  Parses declaration-specifiers into *type
   and *ad: basic types, short/long modifiers, struct/enum/typedef names,
   qualifiers, sign, storage classes, attributes, typeof. */
4195 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4197 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4201 memset(ad
, 0, sizeof(AttributeDef
));
4211 /* currently, we really ignore extension */
/* bt = basic type seen so far, st = short/long modifier seen so far;
   -1 means "none yet" in both */
4221 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4222 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4223 tmbt
: tcc_error("too many basic types");
4226 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4231 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
/* _Alignas( type-or-expression ) */
4248 memset(&ad1
, 0, sizeof(AttributeDef
));
4249 if (parse_btype(&type1
, &ad1
)) {
4250 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4252 n
= 1 << (ad1
.a
.aligned
- 1);
4254 type_size(&type1
, &n
);
4257 if (n
<= 0 || (n
& (n
- 1)) != 0)
4258 tcc_error("alignment must be a positive power of two");
4261 ad
->a
.aligned
= exact_log2p1(n
);
/* "long double" / "long long" resolution */
4265 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4266 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4267 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4268 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4275 #ifdef TCC_TARGET_ARM64
4277 /* GCC's __uint128_t appears in some Linux header files. Make it a
4278 synonym for long double to get the size and alignment right. */
4289 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4290 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4298 struct_decl(&type1
, VT_ENUM
);
4301 type
->ref
= type1
.ref
;
4304 struct_decl(&type1
, VT_STRUCT
);
4307 struct_decl(&type1
, VT_UNION
);
4310 /* type modifiers */
4315 parse_btype_qualify(type
, VT_CONSTANT
);
4323 parse_btype_qualify(type
, VT_VOLATILE
);
4330 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4331 tcc_error("signed and unsigned modifier");
4344 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4345 tcc_error("signed and unsigned modifier");
4346 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4362 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4363 tcc_error("multiple storage classes");
4374 /* currently, no need to handle it because tcc does not
4375 track unused objects */
4378 /* GNUC attribute */
4379 case TOK_ATTRIBUTE1
:
4380 case TOK_ATTRIBUTE2
:
4381 parse_attribute(ad
);
4382 if (ad
->attr_mode
) {
4383 u
= ad
->attr_mode
-1;
4384 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
/* typeof(expr) */
4392 parse_expr_type(&type1
);
4393 /* remove all storage modifiers except typedef */
4394 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4396 sym_to_attr(ad
, type1
.ref
);
/* identifier: possibly a typedef name */
4402 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4406 if (tok
== ':' && !in_generic
) {
4407 /* ignore if it's a label */
4412 t
&= ~(VT_BTYPE
|VT_LONG
);
4413 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4414 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4415 type
->ref
= s
->type
.ref
;
4417 parse_btype_qualify(type
, t
);
4419 /* get attributes from typedef */
4428 if (tcc_state
->char_is_unsigned
) {
4429 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4432 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4433 bt
= t
& (VT_BTYPE
|VT_LONG
);
4435 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4436 #ifdef TCC_TARGET_PE
4437 if (bt
== VT_LDOUBLE
)
4438 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4444 /* convert a function parameter type (array to pointer and function to
4445 function pointer) */
/* NOTE(review): line-wrapped extract; lines 4447, 4452, 4454-4457 (braces,
   the array-to-pointer conversion, the mk_pointer call for function types)
   are missing — confirm against full tccgen.c. */
4446 static inline void convert_parameter_type(CType
*pt
)
4448 /* remove const and volatile qualifiers (XXX: const could be used
4449 to indicate a const function parameter */
4450 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4451 /* array must be transformed to pointer according to ANSI C */
4453 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
/* Parse the (possibly concatenated) string literal of an asm statement
   into *astr.  Thin wrapper around parse_mult_str.
   NOTE(review): lines 4459-4460 and 4462 (braces, and presumably a skip of
   the opening '(') are missing from this line-wrapped extract. */
4458 ST_FUNC
void parse_asm_str(CString
*astr
)
4461 parse_mult_str(astr
, "string constant");
4464 /* Parse an asm label and return the token */
/* NOTE(review): line-wrapped extract; lines 4466-4470, 4472-4473, 4475,
   4477-4478 are missing (declarations, next()/skip calls, cstr_free and the
   return). The alias string is interned as a token via tok_alloc. */
4465 static int asm_label_instr(void)
4471 parse_asm_str(&astr
);
4474 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4476 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
/* Parse the "post" part of a declarator: function parameter lists '(...)'
   and array brackets '[...]' (including VLAs).  Returns whether a post
   operator was actually parsed (caller type_decl relies on this).
   NOTE(review): line-wrapped extract missing many lines (4482, 4485-4488,
   4490, 4492-4494, 4496-4497, 4499-4506, 4510, 4514-4516, 4519-4520,
   4524-4528, 4530-4533, 4536-4537, 4539-4540, 4547-4548, 4550-4551,
   4556-4560, 4564-4565, 4569, 4571-4584, 4587, 4591-4594, 4596-4597, 4599,
   4602-4606, 4609, 4615, 4617-4619, 4622-4624, 4626, 4628-4632, 4634, 4636,
   4639-4643). Do not edit from this copy; consult the full tccgen.c. */
4481 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4483 int n
, l
, t1
, arg_size
, align
, unused_align
;
4484 Sym
**plast
, *s
, *first
;
4489 /* function type, or recursive declarator (return if so) */
4491 if (td
&& !(td
& TYPE_ABSTRACT
))
4495 else if (parse_btype(&pt
, &ad1
))
4498 merge_attr (ad
, &ad1
);
4507 /* read param name and compute offset */
4508 if (l
!= FUNC_OLD
) {
/* '(void)' parameter list: empty prototype */
4509 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4511 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4512 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4513 tcc_error("parameter declared as void");
4517 expect("identifier");
4518 pt
.t
= VT_VOID
; /* invalid type */
4521 convert_parameter_type(&pt
);
4522 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4523 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4529 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4534 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4535 tcc_error("invalid type");
4538 /* if no parameters, then old type prototype */
4541 /* NOTE: const is ignored in returned type as it has a special
4542 meaning in gcc / C++ */
4543 type
->t
&= ~VT_CONSTANT
;
4544 /* some ancient pre-K&R C allows a function to return an array
4545 and the array brackets to be put after the arguments, such
4546 that "int c()[]" means something like "int[] c()" */
4549 skip(']'); /* only handle simple "[]" */
4552 /* we push a anonymous symbol which will contain the function prototype */
4553 ad
->f
.func_args
= arg_size
;
4554 ad
->f
.func_type
= l
;
4555 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4561 } else if (tok
== '[') {
4562 int saved_nocode_wanted
= nocode_wanted
;
4563 /* array definition */
4566 /* XXX The optional type-quals and static should only be accepted
4567 in parameter decls. The '*' as well, and then even only
4568 in prototypes (not function defs). */
4570 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4585 if (!local_stack
|| (storage
& VT_STATIC
))
4586 vpushi(expr_const());
4588 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4589 length must always be evaluated, even under nocode_wanted,
4590 so that its size slot is initialized (e.g. under sizeof
4595 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4598 tcc_error("invalid array size");
4600 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4601 tcc_error("size of variable length array should be an integer");
4607 /* parse next post type */
/* recurse for multi-dimensional arrays: inner dimensions first */
4608 post_type(type
, ad
, storage
, 0);
4610 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4611 tcc_error("declaration of an array of functions");
4612 if ((type
->t
& VT_BTYPE
) == VT_VOID
4613 || type_size(type
, &unused_align
) < 0)
4614 tcc_error("declaration of an array of incomplete type elements");
4616 t1
|= type
->t
& VT_VLA
;
4620 tcc_error("need explicit inner array size in VLAs");
/* VLA: reserve a local slot holding the runtime size */
4621 loc
-= type_size(&int_type
, &align
);
4625 vla_runtime_type_size(type
, &align
);
4627 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4633 nocode_wanted
= saved_nocode_wanted
;
4635 /* we push an anonymous symbol which will contain the array
4637 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4638 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4644 /* Parse a type declarator (except basic type), and return the type
4645 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4646 expected. 'type' should contain the basic type. 'ad' is the
4647 attribute definition of the basic type. It can be modified by
4648 type_decl(). If this (possibly abstract) declarator is a pointer chain
4649 it returns the innermost pointed to type (equals *type, but is a different
4650 pointer), otherwise returns type itself, that's used for recursive calls. */
/* NOTE(review): line-wrapped extract missing lines 4652-4653, 4655, 4659-4660,
   4662-4668, 4670-4673, 4675-4679, 4684-4686, 4688, 4691-4693, 4703-4705,
   4708-4711, 4714-4715, 4718-4721 (declarations, mk_pointer calls, the
   switch skeleton, identifier capture). Consult the full tccgen.c. */
4651 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4654 int qualifiers
, storage
;
4656 /* recursive type, remove storage bits first, apply them later again */
4657 storage
= type
->t
& VT_STORAGE
;
4658 type
->t
&= ~VT_STORAGE
;
4661 while (tok
== '*') {
4669 qualifiers
|= VT_CONSTANT
;
4674 qualifiers
|= VT_VOLATILE
;
4680 /* XXX: clarify attribute handling */
4681 case TOK_ATTRIBUTE1
:
4682 case TOK_ATTRIBUTE2
:
4683 parse_attribute(ad
);
4687 type
->t
|= qualifiers
;
4689 /* innermost pointed to type is the one for the first derivation */
4690 ret
= pointed_type(type
);
4694 /* This is possibly a parameter type list for abstract declarators
4695 ('int ()'), use post_type for testing this. */
4696 if (!post_type(type
, ad
, 0, td
)) {
4697 /* It's not, so it's a nested declarator, and the post operations
4698 apply to the innermost pointed to type (if any). */
4699 /* XXX: this is not correct to modify 'ad' at this point, but
4700 the syntax is not clear */
4701 parse_attribute(ad
);
4702 post
= type_decl(type
, ad
, v
, td
);
4706 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4707 /* type identifier */
4712 if (!(td
& TYPE_ABSTRACT
))
4713 expect("identifier");
4716 post_type(post
, ad
, storage
, 0);
4717 parse_attribute(ad
);
4722 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
/* NOTE(review): line-wrapped extract missing lines 4724-4727, 4729,
   4731-4733, 4736-4738 (declarations of r/bt, the VT_LVAL_BYTE/SHORT
   assignments and the return). Visible logic: byte/bool and short types get
   a size flag, unsigned adds VT_LVAL_UNSIGNED. */
4723 ST_FUNC
int lvalue_type(int t
)
4728 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4730 else if (bt
== VT_SHORT
)
4734 if (t
& VT_UNSIGNED
)
4735 r
|= VT_LVAL_UNSIGNED
;
4739 /* indirection with full error checking and bound check */
/* NOTE(review): line-wrapped extract missing lines 4741, 4744-4746, 4748,
   4758-4761 (brace lines, the error for non-pointer/non-function operands,
   the gv() to load a stored pointer, closing braces). Consult tccgen.c. */
4740 ST_FUNC
void indir(void)
4742 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4743 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4747 if (vtop
->r
& VT_LVAL
)
4749 vtop
->type
= *pointed_type(&vtop
->type
);
4750 /* Arrays and functions are never lvalues */
4751 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4752 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4753 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4754 /* if bound checking, the referenced pointer must be checked */
4755 #ifdef CONFIG_TCC_BCHECK
4756 if (tcc_state
->do_bounds_check
)
4757 vtop
->r
|= VT_MUSTBOUND
;
4762 /* pass a parameter to a function and do type checking and casting */
/* NOTE(review): line-wrapped extract missing lines 4764-4767, 4777-4778,
   4781-4782, 4785-4787 (declarations, the bitfield gen_cast call, the
   prototyped-argument branch fetching arg->type, closing braces).
   Old-style / ellipsis calls apply default argument promotions (float ->
   double, bitfield widening); prototyped calls cast to the declared
   parameter type via gen_assign_cast. */
4763 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4768 func_type
= func
->f
.func_type
;
4769 if (func_type
== FUNC_OLD
||
4770 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4771 /* default casting : only need to convert float to double */
4772 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4773 gen_cast_s(VT_DOUBLE
);
4774 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4775 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4776 type
.ref
= vtop
->type
.ref
;
4779 } else if (arg
== NULL
) {
4780 tcc_error("too many arguments to function");
4783 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4784 gen_assign_cast(&type
);
4788 /* parse an expression and return its type without any side effect. */
/* NOTE(review): only the signature survived this extract; the body
   (lines 4790-4797, presumably nocode_wanted save/restore around expr_fn()
   and a vpop) is missing — confirm against full tccgen.c. */
4789 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4798 /* parse an expression of the form '(type)' or '(expr)' and return its
/* NOTE(review): line-wrapped extract missing lines 4799, 4801-4805, 4808,
   4810-4813 (comment tail, declarations, skip('(' / ')') calls, braces).
   If a type name follows '(' it is parsed directly, otherwise the
   parenthesized expression's type is computed side-effect-free. */
4800 static void parse_expr_type(CType
*type
)
4806 if (parse_btype(type
, &ad
)) {
4807 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4809 expr_type(type
, gexpr
);
/* Parse a type-name (abstract declarator) into *type; errors if no
   type specifier is present.
   NOTE(review): lines 4815-4818, 4820-4821, 4823-4824 are missing from this
   extract (declarations, the expect("type") error, braces). */
4814 static void parse_type(CType
*type
)
4819 if (!parse_btype(type
, &ad
)) {
4822 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
/* Parse a builtin's argument list described by the format string 'args':
   'e' = expression, 't' = type name; each parsed item is pushed on the
   value stack.  'nc' presumably saves/restores nocode_wanted — the lines
   using it (4826-4831, 4833-4835, 4839-4845) are missing from this extract;
   confirm against full tccgen.c. */
4825 static void parse_builtin_params(int nc
, const char *args
)
4832 while ((c
= *args
++)) {
4836 case 'e': expr_eq(); continue;
4837 case 't': parse_type(&t
); vpush(&t
); continue;
4838 default: tcc_error("internal error"); break;
/* Emit calls to __attribute__((cleanup)) handlers for every scope entry on
   the current_cleanups chain down to (not including) 'stop'.  Each chain
   node links the cleanup function (cls->next) and the guarded variable
   (cls->prev_tok); the variable's address is passed to the function.
   NOTE(review): lines 4847, 4849, 4853, 4856, 4858-4862 are missing from
   this extract (braces, the gaddrof/gfunc_call sequence — confirm against
   full tccgen.c). */
4846 static void try_call_scope_cleanup(Sym
*stop
)
4848 Sym
*cls
= current_cleanups
;
4850 for (; cls
!= stop
; cls
= cls
->ncl
) {
4851 Sym
*fs
= cls
->next
;
4852 Sym
*vs
= cls
->prev_tok
;
4854 vpushsym(&fs
->type
, fs
);
4855 vset(&vs
->type
, vs
->r
, vs
->c
);
4857 mk_pointer(&vtop
->type
);
/* For a goto: run the cleanups of all scopes being exited.  Finds the
   nearest common ancestor (NCA) of the goto target's cleanup chain
   ('cleanupstate') and the current chain by first equalising depths
   (ocd/ccd), then walking both chains in lockstep, and emits cleanups from
   the current position down to the NCA.
   NOTE(review): lines 4864-4867, 4869-4870, 4874, 4876, 4878-4879,
   4881-4882 are missing from this extract (declarations, early return,
   loop bodies/braces — confirm against full tccgen.c). */
4863 static void try_call_cleanup_goto(Sym
*cleanupstate
)
4868 if (!current_cleanups
)
4871 /* search NCA of both cleanup chains given parents and initial depth */
4872 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
4873 for (ccd
= ncleanups
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
4875 for (cc
= current_cleanups
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
4877 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
4880 try_call_scope_cleanup(cc
);
/* NOTE(review): parse and generate code for a unary expression, including
   primary expressions (constants, strings, identifiers, casts, compound
   literals, statement expressions, builtins, _Generic) and postfix
   operations ('.', '->', '[]', function calls, ++/--).  This extraction is
   heavily fragmentary: the majority of the original body (lines 4883-5590)
   is absent, so only the surviving lines are documented here; do not infer
   control flow across the gaps. */
4883 ST_FUNC
void unary(void)
4885 int n
, t
, align
, size
, r
, sizeof_caller
;
4890 sizeof_caller
= in_sizeof
;
4893 /* XXX: GCC 2.95.3 does not generate a table although it should be
/* NOTE(review): constant-token cases - the selected basic type 't' and
   vsetc() push the literal on the value stack. */
4901 #ifdef TCC_TARGET_PE
4902 t
= VT_SHORT
|VT_UNSIGNED
;
4910 vsetc(&type
, VT_CONST
, &tokc
);
4914 t
= VT_INT
| VT_UNSIGNED
;
4920 t
= VT_LLONG
| VT_UNSIGNED
;
4932 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4935 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4937 case TOK___FUNCTION__
:
4939 goto tok_identifier
;
4945 /* special function name identifier */
4946 len
= strlen(funcname
) + 1;
4947 /* generate char[len] type */
4952 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4953 if (!NODATA_WANTED
) {
4954 ptr
= section_ptr_add(data_section
, len
);
4955 memcpy(ptr
, funcname
, len
);
4961 #ifdef TCC_TARGET_PE
4962 t
= VT_SHORT
| VT_UNSIGNED
;
4968 /* string parsing */
4970 if (tcc_state
->char_is_unsigned
)
4971 t
= VT_BYTE
| VT_UNSIGNED
;
4973 if (tcc_state
->warn_write_strings
)
4978 memset(&ad
, 0, sizeof(AttributeDef
));
4979 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
/* NOTE(review): '(' handling - cast, compound literal, or GNU statement
   expression ('({ ... })'). */
4984 if (parse_btype(&type
, &ad
)) {
4985 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4987 /* check ISOC99 compound literal */
4989 /* data is allocated locally by default */
4994 /* all except arrays are lvalues */
4995 if (!(type
.t
& VT_ARRAY
))
4996 r
|= lvalue_type(type
.t
);
4997 memset(&ad
, 0, sizeof(AttributeDef
));
4998 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5000 if (sizeof_caller
) {
5007 } else if (tok
== '{') {
5008 int saved_nocode_wanted
= nocode_wanted
;
5010 tcc_error("expected constant");
5011 /* save all registers */
5013 /* statement expression : we do not accept break/continue
5014 inside as GCC does. We do retain the nocode_wanted state,
5015 as statement expressions can't ever be entered from the
5016 outside, so any reactivation of code emission (from labels
5017 or loop heads) can be disabled again after the end of it. */
5018 block(NULL
, NULL
, NULL
, NULL
, 1);
5019 nocode_wanted
= saved_nocode_wanted
;
5034 /* functions names must be treated as function pointers,
5035 except for unary '&' and sizeof. Since we consider that
5036 functions are not lvalues, we only have to handle it
5037 there and in function calls. */
5038 /* arrays can also be used although they are not lvalues */
5039 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5040 !(vtop
->type
.t
& VT_ARRAY
))
5042 mk_pointer(&vtop
->type
);
/* NOTE(review): '!' - fold for constants, or swap jump lists for VT_CMP. */
5048 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5049 gen_cast_s(VT_BOOL
);
5050 vtop
->c
.i
= !vtop
->c
.i
;
5051 } else if (vtop
->r
== VT_CMP
) {
5053 n
= vtop
->jfalse
, vtop
->jfalse
= vtop
->jtrue
, vtop
->jtrue
= n
;
5068 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5069 tcc_error("pointer not accepted for unary plus");
5070 /* In order to force cast, we add zero, except for floating point
5071 where we really need an noop (otherwise -0.0 will be transformed
5073 if (!is_float(vtop
->type
.t
)) {
/* NOTE(review): sizeof/_Alignof - expr_type evaluates without code gen. */
5085 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5087 if (vtop
[1].r
& VT_SYM
)
5088 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5089 size
= type_size(&type
, &align
);
5090 if (s
&& s
->a
.aligned
)
5091 align
= 1 << (s
->a
.aligned
- 1);
5092 if (t
== TOK_SIZEOF
) {
5093 if (!(type
.t
& VT_VLA
)) {
5095 tcc_error("sizeof applied to an incomplete type");
5098 vla_runtime_type_size(&type
, &align
);
5103 vtop
->type
.t
|= VT_UNSIGNED
;
5106 case TOK_builtin_expect
:
5107 /* __builtin_expect is a no-op for now */
5108 parse_builtin_params(0, "ee");
5111 case TOK_builtin_types_compatible_p
:
5112 parse_builtin_params(0, "tt");
5113 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5114 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5115 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5119 case TOK_builtin_choose_expr
:
5146 case TOK_builtin_constant_p
:
5147 parse_builtin_params(1, "e");
5148 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5152 case TOK_builtin_frame_address
:
5153 case TOK_builtin_return_address
:
5159 if (tok
!= TOK_CINT
) {
5160 tcc_error("%s only takes positive integers",
5161 tok1
== TOK_builtin_return_address
?
5162 "__builtin_return_address" :
5163 "__builtin_frame_address");
5165 level
= (uint32_t)tokc
.i
;
5170 vset(&type
, VT_LOCAL
, 0); /* local frame */
5172 mk_pointer(&vtop
->type
);
5173 indir(); /* -> parent frame */
5175 if (tok1
== TOK_builtin_return_address
) {
5176 // assume return address is just above frame pointer on stack
5179 mk_pointer(&vtop
->type
);
5184 #ifdef TCC_TARGET_X86_64
5185 #ifdef TCC_TARGET_PE
5186 case TOK_builtin_va_start
:
5187 parse_builtin_params(0, "ee");
5188 r
= vtop
->r
& VT_VALMASK
;
5192 tcc_error("__builtin_va_start expects a local variable");
5194 vtop
->type
= char_pointer_type
;
5199 case TOK_builtin_va_arg_types
:
5200 parse_builtin_params(0, "t");
5201 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5208 #ifdef TCC_TARGET_ARM64
5209 case TOK___va_start
: {
5210 parse_builtin_params(0, "ee");
5214 vtop
->type
.t
= VT_VOID
;
5217 case TOK___va_arg
: {
5218 parse_builtin_params(0, "et");
5226 case TOK___arm64_clear_cache
: {
5227 parse_builtin_params(0, "ee");
5230 vtop
->type
.t
= VT_VOID
;
5234 /* pre operations */
5245 t
= vtop
->type
.t
& VT_BTYPE
;
5247 /* In IEEE negate(x) isn't subtract(0,x), but rather
5251 vtop
->c
.f
= -1.0 * 0.0;
5252 else if (t
== VT_DOUBLE
)
5253 vtop
->c
.d
= -1.0 * 0.0;
5255 vtop
->c
.ld
= -1.0 * 0.0;
5263 goto tok_identifier
;
5265 /* allow to take the address of a label */
5266 if (tok
< TOK_UIDENT
)
5267 expect("label identifier");
5268 s
= label_find(tok
);
5270 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5272 if (s
->r
== LABEL_DECLARED
)
5273 s
->r
= LABEL_FORWARD
;
5276 s
->type
.t
= VT_VOID
;
5277 mk_pointer(&s
->type
);
5278 s
->type
.t
|= VT_STATIC
;
5280 vpushsym(&s
->type
, s
);
/* NOTE(review): _Generic handling - picks the association whose type
   matches the controlling expression's (unqualified, decayed) type. */
5286 CType controlling_type
;
5287 int has_default
= 0;
5290 TokenString
*str
= NULL
;
5291 int saved_const_wanted
= const_wanted
;
5296 expr_type(&controlling_type
, expr_eq
);
5297 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5298 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5299 mk_pointer(&controlling_type
);
5300 const_wanted
= saved_const_wanted
;
5304 if (tok
== TOK_DEFAULT
) {
5306 tcc_error("too many 'default'");
5312 AttributeDef ad_tmp
;
5317 parse_btype(&cur_type
, &ad_tmp
);
5320 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5321 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5323 tcc_error("type match twice");
5333 skip_or_save_block(&str
);
5335 skip_or_save_block(NULL
);
5342 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5343 tcc_error("type '%s' does not match any association", buf
);
5345 begin_macro(str
, 1);
5354 // special qnan , snan and infinity values
5359 vtop
->type
.t
= VT_FLOAT
;
5364 goto special_math_val
;
5367 goto special_math_val
;
/* NOTE(review): identifier lookup; undeclared names in call position get
   an implicit int() external declaration, with a warning. */
5374 expect("identifier");
5376 if (!s
|| IS_ASM_SYM(s
)) {
5377 const char *name
= get_tok_str(t
, NULL
);
5379 tcc_error("'%s' undeclared", name
);
5380 /* for simple function calls, we tolerate undeclared
5381 external reference to int() function */
5382 if (tcc_state
->warn_implicit_function_declaration
5383 #ifdef TCC_TARGET_PE
5384 /* people must be warned about using undeclared WINAPI functions
5385 (which usually start with uppercase letter) */
5386 || (name
[0] >= 'A' && name
[0] <= 'Z')
5389 tcc_warning("implicit declaration of function '%s'", name
);
5390 s
= external_global_sym(t
, &func_old_type
);
5394 /* A symbol that has a register is a local register variable,
5395 which starts out as VT_LOCAL value. */
5396 if ((r
& VT_VALMASK
) < VT_CONST
)
5397 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5399 vset(&s
->type
, r
, s
->c
);
5400 /* Point to s as backpointer (even without r&VT_SYM).
5401 Will be used by at least the x86 inline asm parser for
5407 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5408 vtop
->c
.i
= s
->enum_val
;
5413 /* post operations */
5415 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5418 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5419 int qualifiers
, cumofs
= 0;
5421 if (tok
== TOK_ARROW
)
5423 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5426 /* expect pointer on structure */
5427 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5428 expect("struct or union");
5429 if (tok
== TOK_CDOUBLE
)
5430 expect("field name");
5432 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5433 expect("field name");
5434 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5436 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5437 /* add field offset to pointer */
5438 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5439 vpushi(cumofs
+ s
->c
);
5441 /* change type to field type, and set to lvalue */
5442 vtop
->type
= s
->type
;
5443 vtop
->type
.t
|= qualifiers
;
5444 /* an array is never an lvalue */
5445 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5446 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5447 #ifdef CONFIG_TCC_BCHECK
5448 /* if bound checking, the referenced pointer must be checked */
5449 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5450 vtop
->r
|= VT_MUSTBOUND
;
5454 } else if (tok
== '[') {
/* NOTE(review): function call - decay function pointers, compute an
   implicit struct-return slot when needed, push args, gfunc_call. */
5460 } else if (tok
== '(') {
5463 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5466 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5467 /* pointer test (no array accepted) */
5468 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5469 vtop
->type
= *pointed_type(&vtop
->type
);
5470 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5474 expect("function pointer");
5477 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5479 /* get return type */
5482 sa
= s
->next
; /* first parameter */
5483 nb_args
= regsize
= 0;
5485 /* compute first implicit argument if a structure is returned */
5486 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5487 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5488 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5489 &ret_align
, ®size
);
5491 /* get some space for the returned structure */
5492 size
= type_size(&s
->type
, &align
);
5493 #ifdef TCC_TARGET_ARM64
5494 /* On arm64, a small struct is return in registers.
5495 It is much easier to write it to memory if we know
5496 that we are allowed to write some extra bytes, so
5497 round the allocated space up to a power of 2: */
5499 while (size
& (size
- 1))
5500 size
= (size
| (size
- 1)) + 1;
5502 loc
= (loc
- size
) & -align
;
5504 ret
.r
= VT_LOCAL
| VT_LVAL
;
5505 /* pass it as 'int' to avoid structure arg passing
5507 vseti(VT_LOCAL
, loc
);
5517 /* return in register */
5518 if (is_float(ret
.type
.t
)) {
5519 ret
.r
= reg_fret(ret
.type
.t
);
5520 #ifdef TCC_TARGET_X86_64
5521 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5525 #ifndef TCC_TARGET_ARM64
5526 #ifdef TCC_TARGET_X86_64
5527 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5529 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5540 gfunc_param_typed(s
, sa
);
5550 tcc_error("too few arguments to function");
5552 gfunc_call(nb_args
);
5555 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5556 vsetc(&ret
.type
, r
, &ret
.c
);
5557 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5560 /* handle packed struct return */
5561 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5564 size
= type_size(&s
->type
, &align
);
5565 /* We're writing whole regs often, make sure there's enough
5566 space. Assume register size is power of 2. */
5567 if (regsize
> align
)
5569 loc
= (loc
- size
) & -align
;
5573 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5577 if (--ret_nregs
== 0)
5581 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5583 if (s
->f
.func_noreturn
)
/* NOTE(review): multiplicative-expression level of the precedence-climbing
   parser; body between the visible lines is missing from this extraction. */
5591 ST_FUNC
void expr_prod(void)
5596 while (tok
== '*' || tok
== '/' || tok
== '%') {
/* NOTE(review): additive-expression level; interior lines missing. */
5604 ST_FUNC
void expr_sum(void)
5609 while (tok
== '+' || tok
== '-') {
/* NOTE(review): shift-expression level; interior lines missing. */
5617 static void expr_shift(void)
5622 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
/* NOTE(review): relational-expression level (<, <=, >, >= plus the
   unsigned comparison tokens); interior lines missing. */
5630 static void expr_cmp(void)
5635 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5636 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
/* NOTE(review): equality-expression level (==, !=); interior lines missing. */
5644 static void expr_cmpeq(void)
5649 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
/* NOTE(review): bitwise-AND level; interior lines missing. */
5657 static void expr_and(void)
5660 while (tok
== '&') {
/* NOTE(review): bitwise-XOR level; interior lines missing. */
5667 static void expr_xor(void)
5670 while (tok
== '^') {
/* NOTE(review): bitwise-OR level; interior lines missing. */
5677 static void expr_or(void)
5680 while (tok
== '|') {
5687 static int condition_3way(void);
/* NOTE(review): shared implementation of && and ||.  e_fn parses the next
   lower precedence level, e_op is the operator token, i is the
   short-circuit value (1 for &&, 0 for ||).  Visible logic: once one
   operand is statically known equal to i's complement behavior (c != i),
   further operands are parsed with nocode_wanted++ (f = 1) so no code is
   emitted; non-constant operands force save_regs and cc = 0.  The jump
   generation lines are missing from this extraction. */
5689 static void expr_landor(void(*e_fn
)(void), int e_op
, int i
)
5691 int t
= 0, cc
= 1, f
= 0, c
;
5693 c
= f
? i
: condition_3way();
5695 save_regs(1), cc
= 0;
5696 } else if (c
!= i
) {
5697 nocode_wanted
++, f
= 1;
/* NOTE(review): logical-AND level, delegating to expr_landor with
   short-circuit value 1; interior lines missing. */
5719 static void expr_land(void)
5722 if (tok
== TOK_LAND
)
5723 expr_landor(expr_or
, TOK_LAND
, 1);
/* NOTE(review): logical-OR level, delegating to expr_landor with
   short-circuit value 0; interior lines missing. */
5726 static void expr_lor(void)
5730 expr_landor(expr_land
, TOK_LOR
, 0);
5733 /* Assuming vtop is a value used in a conditional context
5734 (i.e. compared with zero) return 0 if it's false, 1 if
5735 true and -1 if it can't be statically determined. */
/* NOTE(review): the constant path requires a non-weak symbol (a weak
   symbol's address may be null at run time, so its truth value is not a
   compile-time constant); the cast to VT_BOOL normalizes the constant.
   The return statements are among the missing lines. */
5736 static int condition_3way(void)
5739 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5740 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5742 gen_cast_s(VT_BOOL
);
/* NOTE(review): returns nonzero when *sv is already a boolean-like value:
   either an int constant equal to 0 or 1, or a comparison result
   (VT_CMP).  Used by expr_cond to avoid materializing both arms.  The
   final return is in the missing lines. */
5749 static int is_cond_bool(SValue
*sv
)
5751 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
5752 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
5753 return (unsigned)sv
->c
.i
< 2;
5754 if (sv
->r
== VT_CMP
)
/* NOTE(review): conditional-expression level ('?:', including the GNU
   'a ?: b' extension via g/gnu_ext).  Computes the common result type of
   the two arms per ISO C rules, then converts/moves both arms into the
   same register(s).  This extraction is fragmentary - many interior lines
   (register selection, jumps, second-arm parsing) are missing; only the
   surviving lines are annotated. */
5759 static void expr_cond(void)
5761 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5763 CType type
, type1
, type2
;
5769 c
= condition_3way();
5770 g
= (tok
== ':' && gnu_ext
);
5780 /* needed to avoid having different registers saved in
5783 if (is_float(vtop
->type
.t
)) {
5785 #ifdef TCC_TARGET_X86_64
5786 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5797 ncw_prev
= nocode_wanted
;
5804 if (c
< 0 && vtop
->r
== VT_CMP
) {
5810 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5811 mk_pointer(&vtop
->type
);
5813 sv
= *vtop
; /* save value to handle it later */
5814 vtop
--; /* no vpop so that FP stack is not flushed */
5824 nocode_wanted
= ncw_prev
;
5830 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
5831 if (sv
.r
== VT_CMP
) {
5842 nocode_wanted
= ncw_prev
;
5843 // tcc_warning("two conditions expr_cond");
5847 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5848 mk_pointer(&vtop
->type
);
5851 bt1
= t1
& VT_BTYPE
;
5853 bt2
= t2
& VT_BTYPE
;
5856 /* cast operands to correct type according to ISOC rules */
5857 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5858 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5859 } else if (is_float(bt1
) || is_float(bt2
)) {
5860 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5861 type
.t
= VT_LDOUBLE
;
5863 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5868 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5869 /* cast to biggest op */
5870 type
.t
= VT_LLONG
| VT_LONG
;
5871 if (bt1
== VT_LLONG
)
5873 if (bt2
== VT_LLONG
)
5875 /* convert to unsigned if it does not fit in a long long */
5876 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5877 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5878 type
.t
|= VT_UNSIGNED
;
5879 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5880 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5881 /* If one is a null ptr constant the result type
5883 if (is_null_pointer (vtop
)) type
= type1
;
5884 else if (is_null_pointer (&sv
)) type
= type2
;
5885 else if (bt1
!= bt2
)
5886 tcc_error("incompatible types in conditional expressions");
5888 CType
*pt1
= pointed_type(&type1
);
5889 CType
*pt2
= pointed_type(&type2
);
5890 int pbt1
= pt1
->t
& VT_BTYPE
;
5891 int pbt2
= pt2
->t
& VT_BTYPE
;
5892 int newquals
, copied
= 0;
5893 /* pointers to void get preferred, otherwise the
5894 pointed to types minus qualifs should be compatible */
5895 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5896 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5897 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5898 tcc_warning("pointer type mismatch in conditional expression\n");
5900 /* combine qualifs */
5901 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
5902 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
5905 /* copy the pointer target symbol */
5906 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5909 pointed_type(&type
)->t
|= newquals
;
5911 /* pointers to incomplete arrays get converted to
5912 pointers to completed ones if possible */
5913 if (pt1
->t
& VT_ARRAY
5914 && pt2
->t
& VT_ARRAY
5915 && pointed_type(&type
)->ref
->c
< 0
5916 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
5919 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5921 pointed_type(&type
)->ref
=
5922 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
5923 0, pointed_type(&type
)->ref
->c
);
5924 pointed_type(&type
)->ref
->c
=
5925 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
5928 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5929 /* XXX: test structure compatibility */
5930 type
= bt1
== VT_STRUCT
? type1
: type2
;
5932 /* integer operations */
5933 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5934 /* convert to unsigned if it does not fit in an integer */
5935 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5936 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5937 type
.t
|= VT_UNSIGNED
;
5939 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5940 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5941 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5943 /* now we convert second operand */
5947 mk_pointer(&vtop
->type
);
5949 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5954 if (is_float(type
.t
)) {
5956 #ifdef TCC_TARGET_X86_64
5957 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5961 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5962 /* for long longs, we use fixed registers to avoid having
5963 to handle a complicated move */
5974 /* this is horrible, but we must also convert first
5980 mk_pointer(&vtop
->type
);
5982 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5988 move_reg(r2
, r1
, type
.t
);
5996 nocode_wanted
= ncw_prev
;
/* NOTE(review): assignment-expression level; the visible condition lists
   the compound-assignment tokens handled here.  Interior lines (plain '='
   and the actual store generation) are missing from this extraction. */
6000 static void expr_eq(void)
6006 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
6007 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
6008 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
/* NOTE(review): top-level expression parser (comma operator level);
   body is entirely in the missing lines of this extraction. */
6023 ST_FUNC
void gexpr(void)
6034 /* parse a constant expression and return value in vtop. */
/* NOTE(review): body missing from this extraction - presumably sets
   const_wanted around expr_cond(); TODO confirm against full source. */
6035 static void expr_const1(void)
6044 /* parse an integer constant and return its value. */
/* NOTE(review): rejects anything that did not fold to a plain constant
   (no lvalue, no symbol reference); the value extraction and return are
   among the missing lines. */
6045 static inline int64_t expr_const64(void)
6049 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6050 expect("constant expression");
6056 /* parse an integer constant and return its value.
6057 Complain if it doesn't fit 32bit (signed or unsigned). */
/* NOTE(review): 'c' is the 64-bit value truncated to int (declaration in
   the missing lines); the double comparison accepts values representable
   as either int32 or uint32. */
6058 ST_FUNC
int expr_const(void)
6061 int64_t wc
= expr_const64();
6063 if (c
!= wc
&& (unsigned)c
!= wc
)
6064 tcc_error("constant exceeds 32 bit");
6068 /* ------------------------------------------------------------------------- */
6069 /* return from function */
/* NOTE(review): emits the value-return sequence.  Struct returns either
   copy to the caller-provided pointer (ret_nregs == 0) or are repacked
   into consecutive registers; scalar/float returns go through gv with the
   target ABI return class.  Fragmentary extraction - several interior
   lines are missing. */
6071 #ifndef TCC_TARGET_ARM64
6072 static void gfunc_return(CType
*func_type
)
6074 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6075 CType type
, ret_type
;
6076 int ret_align
, ret_nregs
, regsize
;
6077 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6078 &ret_align
, ®size
);
6079 if (0 == ret_nregs
) {
6080 /* if returning structure, must copy it to implicit
6081 first pointer arg location */
6084 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6087 /* copy structure value to pointer */
6090 /* returning structure packed into registers */
6091 int r
, size
, addr
, align
;
6092 size
= type_size(func_type
,&align
);
6093 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6094 (vtop
->c
.i
& (ret_align
-1)))
6095 && (align
& (ret_align
-1))) {
6096 loc
= (loc
- size
) & -ret_align
;
6099 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6103 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6105 vtop
->type
= ret_type
;
6106 if (is_float(ret_type
.t
))
6107 r
= rc_fret(ret_type
.t
);
6118 if (--ret_nregs
== 0)
6120 /* We assume that when a structure is returned in multiple
6121 registers, their classes are consecutive values of the
6124 vtop
->c
.i
+= regsize
;
6128 } else if (is_float(func_type
->t
)) {
6129 gv(rc_fret(func_type
->t
));
6133 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6137 static int case_cmp(const void *pa
, const void *pb
)
6139 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6140 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6141 return a
< b
? -1 : a
> b
;
/* Emit a test of the value on top of the value stack (inverted sense 0,
   chained to jump list 't') and bind the resulting forward jump to the
   already-known code address 'a'. */
static void gtst_addr(int t, int a)
{
    int jmp = gvtst(0, t);
    gsym_addr(jmp, a);
}
/* NOTE(review): generates the comparison/dispatch code for a sorted array
   of switch cases using binary partitioning (recursive gcase on half the
   range), with single-value cases (v1 == v2) tested by equality and
   ranges by a pair of bound checks; 'll' selects 64-bit comparisons.
   Fragmentary extraction - the value-duplication and comparison setup
   lines are missing. */
6149 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6153 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6170 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6172 gcase(base
, len
/2, bsym
);
6176 base
+= e
; len
-= e
;
6186 if (p
->v1
== p
->v2
) {
6188 gtst_addr(0, p
->sym
);
6198 gtst_addr(0, p
->sym
);
6202 *bsym
= gjmp(*bsym
);
6205 /* call 'func' for each __attribute__((cleanup(func))) */
/* NOTE(review): runs when leaving a '{}' scope that registered cleanups.
   First resolves pending forward gotos (pending_gotos) whose recorded
   cleanup depth 'c' is deeper than this scope: their cleanups are emitted
   here and the goto is re-jumped (pcl->jnext).  Then emits this scope's
   own cleanups and restores current_cleanups/ncleanups to the values
   captured at scope entry.  Some interior lines are missing. */
6206 static void block_cleanup(Sym
*lcleanup
, int lncleanups
)
6210 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> lncleanups
;) {
6211 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6216 try_call_scope_cleanup(lcleanup
);
6217 pcl
->jnext
= gjmp(0);
6219 goto remove_pending
;
6229 try_call_scope_cleanup(lcleanup
);
6230 current_cleanups
= lcleanup
;
6231 ncleanups
= lncleanups
;
/* NOTE(review): called when control can fall off the end of a function.
   void functions need nothing; main() returning int gets an implicit
   'return 0' (C99 5.1.2.2.3); anything else only gets a warning, matching
   C's permissive rule when the value is never used. */
6234 static void check_func_return(void)
6236 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6238 if (!strcmp (funcname
, "main")
6239 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6240 /* main returns 0 by default */
6242 gen_assign_cast(&func_vt
);
6243 gfunc_return(&func_vt
);
6245 tcc_warning("function might return no value: '%s'", funcname
);
/* NOTE(review): statement parser / code generator.  bsym/csym are the
   break/continue jump lists, bcl/ccl the cleanup chains to unwind when
   breaking/continuing, is_expr marks a GNU statement expression whose
   last statement's value is kept on the value stack.  Fragmentary
   extraction - many interior lines of each statement case are missing;
   comments below only annotate what survives. */
6249 static void block(int *bsym
, Sym
*bcl
, int *csym
, Sym
*ccl
, int is_expr
)
6251 int a
, b
, c
, d
, e
, t
;
6255 /* default return value is (void) */
6257 vtop
->type
.t
= VT_VOID
;
6267 block(bsym
, bcl
, csym
, ccl
, 0);
6268 if (tok
== TOK_ELSE
) {
6272 block(bsym
, bcl
, csym
, ccl
, 0);
6273 gsym(d
); /* patch else jmp */
6278 } else if (t
== TOK_WHILE
) {
6286 block(&a
, current_cleanups
, &b
, current_cleanups
, 0);
/* NOTE(review): compound statement - saves label/cleanup/VLA state,
   parses inner statements, then unwinds labels, symbols and VLA frames. */
6291 } else if (t
== '{') {
6292 Sym
*llabel
, *lcleanup
;
6293 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
6294 int lncleanups
= ncleanups
;
6296 /* record local declaration stack position */
6298 llabel
= local_label_stack
;
6299 lcleanup
= current_cleanups
;
6302 /* handle local labels declarations */
6303 while (tok
== TOK_LABEL
) {
6306 if (tok
< TOK_UIDENT
)
6307 expect("label identifier");
6308 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6310 } while (tok
== ',');
6314 while (tok
!= '}') {
6319 block(bsym
, bcl
, csym
, ccl
, is_expr
);
6323 if (current_cleanups
!= lcleanup
)
6324 block_cleanup(lcleanup
, lncleanups
);
6326 /* pop locally defined labels */
6327 label_pop(&local_label_stack
, llabel
, is_expr
);
6329 /* In the is_expr case (a statement expression is finished here),
6330 vtop might refer to symbols on the local_stack. Either via the
6331 type or via vtop->sym. We can't pop those nor any that in turn
6332 might be referred to. To make it easier we don't roll back
6333 any symbols in that case; some upper level call to block() will
6334 do that. We do have to remove such symbols from the lookup
6335 tables, though. sym_pop will do that. */
6337 /* pop locally defined symbols */
6338 sym_pop(&local_stack
, s
, is_expr
);
6340 /* Pop VLA frames and restore stack pointer if required */
6341 if (vlas_in_scope
> saved_vlas_in_scope
) {
6342 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
6345 vlas_in_scope
= saved_vlas_in_scope
;
6347 if (0 == --local_scope
&& !nocode_wanted
)
6348 check_func_return();
6351 } else if (t
== TOK_RETURN
) {
6353 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6355 gexpr(), gen_assign_cast(&func_vt
);
6356 try_call_scope_cleanup(NULL
);
6358 gfunc_return(&func_vt
);
6362 tcc_warning("'return' with no value.");
6364 /* jump unless last stmt in top-level block */
6365 if (tok
!= '}' || local_scope
!= 1)
6369 } else if (t
== TOK_BREAK
) {
6372 tcc_error("cannot break");
6373 try_call_scope_cleanup(bcl
);
6374 *bsym
= gjmp(*bsym
);
6377 } else if (t
== TOK_CONTINUE
) {
6380 tcc_error("cannot continue");
6381 try_call_scope_cleanup(ccl
);
6382 vla_sp_restore_root();
6383 *csym
= gjmp(*csym
);
6386 } else if (t
== TOK_FOR
) {
6387 Sym
*lcleanup
= current_cleanups
;
6388 int lncleanups
= ncleanups
;
6394 /* c99 for-loop init decl? */
6395 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6396 /* no, regular for-loop init expr */
6420 block(&a
, current_cleanups
, &b
, current_cleanups
, 0);
6425 try_call_scope_cleanup(lcleanup
);
6426 ncleanups
= lncleanups
;
6427 current_cleanups
= lcleanup
;
6428 sym_pop(&local_stack
, s
, 0);
6430 } else if (t
== TOK_DO
) {
6434 block(&a
, current_cleanups
, &b
, current_cleanups
, 0);
/* NOTE(review): switch - cases are collected during body parsing, then
   sorted, checked for overlap, and dispatched via gcase. */
6445 } else if (t
== TOK_SWITCH
) {
6446 struct switch_t
*saved
, sw
;
6452 switchval
= *vtop
--;
6454 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6458 b
= gjmp(0); /* jump to first case */
6459 block(&a
, current_cleanups
, csym
, ccl
, 0);
6460 a
= gjmp(a
); /* add implicit break */
6464 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6465 for (b
= 1; b
< sw
.n
; b
++)
6466 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6467 tcc_error("duplicate case value");
6468 /* Our switch table sorting is signed, so the compared
6469 value needs to be as well when it's 64bit. */
6470 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6471 switchval
.type
.t
&= ~VT_UNSIGNED
;
6474 d
= 0, gcase(sw
.p
, sw
.n
, &d
);
6477 gsym_addr(d
, sw
.def_sym
);
6482 dynarray_reset(&sw
.p
, &sw
.n
);
6485 } else if (t
== TOK_CASE
) {
6486 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6489 cr
->v1
= cr
->v2
= expr_const64();
6490 if (gnu_ext
&& tok
== TOK_DOTS
) {
6492 cr
->v2
= expr_const64();
6493 if (cr
->v2
< cr
->v1
)
6494 tcc_warning("empty case range");
6497 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6500 goto block_after_label
;
6502 } else if (t
== TOK_DEFAULT
) {
6505 if (cur_switch
->def_sym
)
6506 tcc_error("too many 'default'");
6507 cur_switch
->def_sym
= gind();
6510 goto block_after_label
;
/* NOTE(review): goto - forward gotos through cleanup scopes are chained
   on pending_gotos so block_cleanup can emit their cleanups later. */
6512 } else if (t
== TOK_GOTO
) {
6513 if (tok
== '*' && gnu_ext
) {
6517 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6520 } else if (tok
>= TOK_UIDENT
) {
6521 s
= label_find(tok
);
6522 /* put forward definition if needed */
6524 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6525 else if (s
->r
== LABEL_DECLARED
)
6526 s
->r
= LABEL_FORWARD
;
6528 vla_sp_restore_root();
6529 if (s
->r
& LABEL_FORWARD
) {
6530 /* start new goto chain for cleanups, linked via label->next */
6531 if (current_cleanups
) {
6532 sym_push2(&pending_gotos
, SYM_FIELD
, 0, ncleanups
);
6533 pending_gotos
->prev_tok
= s
;
6534 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
6535 pending_gotos
->next
= s
;
6537 s
->jnext
= gjmp(s
->jnext
);
6539 try_call_cleanup_goto(s
->cleanupstate
);
6540 gjmp_addr(s
->jnext
);
6545 expect("label identifier");
6549 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
6553 if (tok
== ':' && t
>= TOK_UIDENT
) {
6558 if (s
->r
== LABEL_DEFINED
)
6559 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6560 s
->r
= LABEL_DEFINED
;
6562 Sym
*pcl
; /* pending cleanup goto */
6563 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
6565 sym_pop(&s
->next
, NULL
, 0);
6569 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
6572 s
->cleanupstate
= current_cleanups
;
6576 /* we accept this, but it is a mistake */
6578 tcc_warning("deprecated use of label at end of compound statement");
6582 block(bsym
, bcl
, csym
, ccl
, is_expr
);
6586 /* expression case */
6602 /* This skips over a stream of tokens containing balanced {} and ()
6603 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6604 with a '{'). If STR then allocates and stores the skipped tokens
6605 in *STR. This doesn't check if () and {} are nested correctly,
6606 i.e. "({)}" is accepted. */
/* NOTE(review): saved token strings are terminated with -1 and 0 sentinels
   (tok_str_add at the end).  EOF inside a nested level (or while saving)
   is a hard error.  Fragmentary extraction - 'level' maintenance lines
   are partially missing. */
6607 static void skip_or_save_block(TokenString
**str
)
6609 int braces
= tok
== '{';
6612 *str
= tok_str_alloc();
6614 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6616 if (tok
== TOK_EOF
) {
6617 if (str
|| level
> 0)
6618 tcc_error("unexpected end of file");
6623 tok_str_add_tok(*str
);
6626 if (t
== '{' || t
== '(') {
6628 } else if (t
== '}' || t
== ')') {
6630 if (level
== 0 && braces
&& t
== '}')
6635 tok_str_add(*str
, -1);
6636 tok_str_add(*str
, 0);
6640 #define EXPR_CONST 1
/* NOTE(review): parses one initializer element.  In the constant case
   (expr_type presumably EXPR_CONST - TODO confirm, the dispatch lines are
   missing) it forces global allocation of compound literals and then
   validates that the value is a link-time constant: either a plain
   constant, or the address of an anonymous symbol (string literal /
   compound literal); PE additionally rejects dllimport symbols because
   their address is only known at run time. */
6643 static void parse_init_elem(int expr_type
)
6645 int saved_global_expr
;
6648 /* compound literals must be allocated globally in this case */
6649 saved_global_expr
= global_expr
;
6652 global_expr
= saved_global_expr
;
6653 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6654 (compound literals). */
6655 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6656 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6657 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6658 #ifdef TCC_TARGET_PE
6659 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6662 tcc_error("initializer element is not constant");
6670 /* put zeros for variable based init */
/* NOTE(review): for local (stack) initialization this emits a call to
   memset(dst, 0, size) via vpush_global_sym; static storage needs nothing
   because .data/.bss are zero-initialized.  The argument-pushing lines
   are missing from this extraction. */
6671 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6674 /* nothing to do because globals are already set to zero */
6676 vpush_global_sym(&func_old_type
, TOK_memset
);
6678 #ifdef TCC_TARGET_ARM
6690 #define DIF_SIZE_ONLY 2
6691 #define DIF_HAVE_ELEM 4
6693 /* t is the array or struct type. c is the array or struct
6694 address. cur_field is the pointer to the current
6695 field, for arrays the 'c' member contains the current start
6696 index. 'flags' is as in decl_initializer.
6697 'al' contains the already initialized length of the
6698 current container (starting at c). This returns the new length of that. */
/* NOTE(review): handles one designated (or sequential) initializer
   element: computes the target offset 'c' from [index] / .field
   designators (GNU '...' ranges supported only as last designator),
   zero-fills any hole since 'al', recurses into decl_initializer, then
   replicates the element for range designators (nb_elems > 1) either by
   generated stores (local case, vset/vswap) or by memcpy within the
   output section.  Fragmentary extraction - several interior lines are
   missing. */
6699 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6700 Sym
**cur_field
, int flags
, int al
)
6703 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6704 unsigned long corig
= c
;
6709 if (flags
& DIF_HAVE_ELEM
)
6712 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
6719 /* NOTE: we only support ranges for last designator */
6720 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6722 if (!(type
->t
& VT_ARRAY
))
6723 expect("array type");
6725 index
= index_last
= expr_const();
6726 if (tok
== TOK_DOTS
&& gnu_ext
) {
6728 index_last
= expr_const();
6732 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6734 tcc_error("invalid index");
6736 (*cur_field
)->c
= index_last
;
6737 type
= pointed_type(type
);
6738 elem_size
= type_size(type
, &align
);
6739 c
+= index
* elem_size
;
6740 nb_elems
= index_last
- index
+ 1;
6747 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6748 expect("struct/union type");
6749 f
= find_field(type
, l
, &cumofs
);
6762 } else if (!gnu_ext
) {
6767 if (type
->t
& VT_ARRAY
) {
6768 index
= (*cur_field
)->c
;
6769 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6770 tcc_error("index too large");
6771 type
= pointed_type(type
);
6772 c
+= index
* type_size(type
, &align
);
6775 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6776 *cur_field
= f
= f
->next
;
6778 tcc_error("too many field init");
6783 /* must put zero in holes (note that doing it that way
6784 ensures that it even works with designators) */
6785 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
6786 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6787 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
6789 /* XXX: make it more general */
6790 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
6791 unsigned long c_end
;
6796 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6797 for (i
= 1; i
< nb_elems
; i
++) {
6798 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6803 } else if (!NODATA_WANTED
) {
6804 c_end
= c
+ nb_elems
* elem_size
;
6805 if (c_end
> sec
->data_allocated
)
6806 section_realloc(sec
, c_end
);
6807 src
= sec
->data
+ c
;
6809 for(i
= 1; i
< nb_elems
; i
++) {
6811 memcpy(dst
, src
, elem_size
);
6815 c
+= nb_elems
* type_size(type
, &align
);
6821 /* store a value or an expression directly in global data or in local array */
6822 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6829 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6833 /* XXX: not portable */
6834 /* XXX: generate error if incorrect relocation */
6835 gen_assign_cast(&dtype
);
6836 bt
= type
->t
& VT_BTYPE
;
6838 if ((vtop
->r
& VT_SYM
)
6841 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6842 || (type
->t
& VT_BITFIELD
))
6843 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6845 tcc_error("initializer element is not computable at load time");
6847 if (NODATA_WANTED
) {
6852 size
= type_size(type
, &align
);
6853 section_reserve(sec
, c
+ size
);
6854 ptr
= sec
->data
+ c
;
6856 /* XXX: make code faster ? */
6857 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6858 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6859 /* XXX This rejects compound literals like
6860 '(void *){ptr}'. The problem is that '&sym' is
6861 represented the same way, which would be ruled out
6862 by the SYM_FIRST_ANOM check above, but also '"string"'
6863 in 'char *p = "string"' is represented the same
6864 with the type being VT_PTR and the symbol being an
6865 anonymous one. That is, there's no difference in vtop
6866 between '(void *){x}' and '&(void *){x}'. Ignore
6867 pointer typed entities here. Hopefully no real code
6868 will every use compound literals with scalar type. */
6869 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6870 /* These come from compound literals, memcpy stuff over. */
6874 esym
= elfsym(vtop
->sym
);
6875 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6876 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6878 /* We need to copy over all memory contents, and that
6879 includes relocations. Use the fact that relocs are
6880 created it order, so look from the end of relocs
6881 until we hit one before the copied region. */
6882 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6883 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6884 while (num_relocs
--) {
6886 if (rel
->r_offset
>= esym
->st_value
+ size
)
6888 if (rel
->r_offset
< esym
->st_value
)
6890 /* Note: if the same fields are initialized multiple
6891 times (possible with designators) then we possibly
6892 add multiple relocations for the same offset here.
6893 That would lead to wrong code, the last reloc needs
6894 to win. We clean this up later after the whole
6895 initializer is parsed. */
6896 put_elf_reloca(symtab_section
, sec
,
6897 c
+ rel
->r_offset
- esym
->st_value
,
6898 ELFW(R_TYPE
)(rel
->r_info
),
6899 ELFW(R_SYM
)(rel
->r_info
),
6909 if (type
->t
& VT_BITFIELD
) {
6910 int bit_pos
, bit_size
, bits
, n
;
6911 unsigned char *p
, v
, m
;
6912 bit_pos
= BIT_POS(vtop
->type
.t
);
6913 bit_size
= BIT_SIZE(vtop
->type
.t
);
6914 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6915 bit_pos
&= 7, bits
= 0;
6920 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6921 m
= ((1 << n
) - 1) << bit_pos
;
6922 *p
= (*p
& ~m
) | (v
& m
);
6923 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6927 /* XXX: when cross-compiling we assume that each type has the
6928 same representation on host and target, which is likely to
6929 be wrong in the case of long double */
6931 vtop
->c
.i
= vtop
->c
.i
!= 0;
6933 *(char *)ptr
|= vtop
->c
.i
;
6936 *(short *)ptr
|= vtop
->c
.i
;
6939 *(float*)ptr
= vtop
->c
.f
;
6942 *(double *)ptr
= vtop
->c
.d
;
6945 #if defined TCC_IS_NATIVE_387
6946 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6947 memcpy(ptr
, &vtop
->c
.ld
, 10);
6949 else if (sizeof (long double) == sizeof (double))
6950 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
6952 else if (vtop
->c
.ld
== 0.0)
6956 if (sizeof(long double) == LDOUBLE_SIZE
)
6957 *(long double*)ptr
= vtop
->c
.ld
;
6958 else if (sizeof(double) == LDOUBLE_SIZE
)
6959 *(double *)ptr
= (double)vtop
->c
.ld
;
6961 tcc_error("can't cross compile long double constants");
6965 *(long long *)ptr
|= vtop
->c
.i
;
6972 addr_t val
= vtop
->c
.i
;
6974 if (vtop
->r
& VT_SYM
)
6975 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6977 *(addr_t
*)ptr
|= val
;
6979 if (vtop
->r
& VT_SYM
)
6980 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6981 *(addr_t
*)ptr
|= val
;
6987 int val
= vtop
->c
.i
;
6989 if (vtop
->r
& VT_SYM
)
6990 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6994 if (vtop
->r
& VT_SYM
)
6995 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7004 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7011 /* 't' contains the type and storage info. 'c' is the offset of the
7012 object in section 'sec'. If 'sec' is NULL, it means stack based
7013 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7014 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7015 size only evaluation is wanted (only for arrays). */
7016 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
7019 int len
, n
, no_oblock
, nb
, i
;
7025 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7026 /* In case of strings we have special handling for arrays, so
7027 don't consume them as initializer value (which would commit them
7028 to some anonymous symbol). */
7029 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7030 !(flags
& DIF_SIZE_ONLY
)) {
7031 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7032 flags
|= DIF_HAVE_ELEM
;
7035 if ((flags
& DIF_HAVE_ELEM
) &&
7036 !(type
->t
& VT_ARRAY
) &&
7037 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7038 The source type might have VT_CONSTANT set, which is
7039 of course assignable to non-const elements. */
7040 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7041 init_putv(type
, sec
, c
);
7042 } else if (type
->t
& VT_ARRAY
) {
7045 t1
= pointed_type(type
);
7046 size1
= type_size(t1
, &align1
);
7049 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7052 tcc_error("character array initializer must be a literal,"
7053 " optionally enclosed in braces");
7058 /* only parse strings here if correct type (otherwise: handle
7059 them as ((w)char *) expressions */
7060 if ((tok
== TOK_LSTR
&&
7061 #ifdef TCC_TARGET_PE
7062 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7064 (t1
->t
& VT_BTYPE
) == VT_INT
7066 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7068 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7071 /* compute maximum number of chars wanted */
7073 cstr_len
= tokc
.str
.size
;
7075 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
7078 if (n
>= 0 && nb
> (n
- len
))
7080 if (!(flags
& DIF_SIZE_ONLY
)) {
7082 tcc_warning("initializer-string for array is too long");
7083 /* in order to go faster for common case (char
7084 string in global variable, we handle it
7086 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
7088 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
7092 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
7094 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
7096 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
7103 /* only add trailing zero if enough storage (no
7104 warning in this case since it is standard) */
7105 if (n
< 0 || len
< n
) {
7106 if (!(flags
& DIF_SIZE_ONLY
)) {
7108 init_putv(t1
, sec
, c
+ (len
* size1
));
7119 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7120 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7121 flags
&= ~DIF_HAVE_ELEM
;
7122 if (type
->t
& VT_ARRAY
) {
7124 /* special test for multi dimensional arrays (may not
7125 be strictly correct if designators are used at the
7127 if (no_oblock
&& len
>= n
*size1
)
7130 if (s
->type
.t
== VT_UNION
)
7134 if (no_oblock
&& f
== NULL
)
7143 /* put zeros at the end */
7144 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7145 init_putz(sec
, c
+ len
, n
*size1
- len
);
7148 /* patch type size if needed, which happens only for array types */
7150 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7151 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7154 if ((flags
& DIF_FIRST
) || tok
== '{') {
7162 } else if (tok
== '{') {
7163 if (flags
& DIF_HAVE_ELEM
)
7166 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7168 } else if ((flags
& DIF_SIZE_ONLY
)) {
7169 /* If we supported only ISO C we wouldn't have to accept calling
7170 this on anything than an array if DIF_SIZE_ONLY (and even then
7171 only on the outermost level, so no recursion would be needed),
7172 because initializing a flex array member isn't supported.
7173 But GNU C supports it, so we need to recurse even into
7174 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7175 /* just skip expression */
7176 skip_or_save_block(NULL
);
7178 if (!(flags
& DIF_HAVE_ELEM
)) {
7179 /* This should happen only when we haven't parsed
7180 the init element above for fear of committing a
7181 string constant to memory too early. */
7182 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7183 expect("string constant");
7184 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7186 init_putv(type
, sec
, c
);
7190 /* parse an initializer for type 't' if 'has_init' is non zero, and
7191 allocate space in local or global data space ('r' is either
7192 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7193 variable 'v' of scope 'scope' is declared before initializers
7194 are parsed. If 'v' is zero, then a reference to the new object
7195 is put in the value stack. If 'has_init' is 2, a special parsing
7196 is done to handle string constants. */
7197 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7198 int has_init
, int v
, int scope
)
7200 int size
, align
, addr
;
7201 TokenString
*init_str
= NULL
;
7204 Sym
*flexible_array
;
7206 int saved_nocode_wanted
= nocode_wanted
;
7207 #ifdef CONFIG_TCC_BCHECK
7211 /* Always allocate static or global variables */
7212 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7213 nocode_wanted
|= 0x80000000;
7215 #ifdef CONFIG_TCC_BCHECK
7216 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7219 flexible_array
= NULL
;
7220 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7221 Sym
*field
= type
->ref
->next
;
7224 field
= field
->next
;
7225 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7226 flexible_array
= field
;
7230 size
= type_size(type
, &align
);
7231 /* If unknown size, we must evaluate it before
7232 evaluating initializers because
7233 initializers can generate global data too
7234 (e.g. string pointers or ISOC99 compound
7235 literals). It also simplifies local
7236 initializers handling */
7237 if (size
< 0 || (flexible_array
&& has_init
)) {
7239 tcc_error("unknown type size");
7240 /* get all init string */
7241 if (has_init
== 2) {
7242 init_str
= tok_str_alloc();
7243 /* only get strings */
7244 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7245 tok_str_add_tok(init_str
);
7248 tok_str_add(init_str
, -1);
7249 tok_str_add(init_str
, 0);
7251 skip_or_save_block(&init_str
);
7256 begin_macro(init_str
, 1);
7258 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7259 /* prepare second initializer parsing */
7260 macro_ptr
= init_str
->str
;
7263 /* if still unknown size, error */
7264 size
= type_size(type
, &align
);
7266 tcc_error("unknown type size");
7268 /* If there's a flex member and it was used in the initializer
7270 if (flexible_array
&&
7271 flexible_array
->type
.ref
->c
> 0)
7272 size
+= flexible_array
->type
.ref
->c
7273 * pointed_size(&flexible_array
->type
);
7274 /* take into account specified alignment if bigger */
7275 if (ad
->a
.aligned
) {
7276 int speca
= 1 << (ad
->a
.aligned
- 1);
7279 } else if (ad
->a
.packed
) {
7283 if (!v
&& NODATA_WANTED
)
7284 size
= 0, align
= 1;
7286 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7288 #ifdef CONFIG_TCC_BCHECK
7289 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7293 loc
= (loc
- size
) & -align
;
7295 #ifdef CONFIG_TCC_BCHECK
7296 /* handles bounds */
7297 /* XXX: currently, since we do only one pass, we cannot track
7298 '&' operators, so we add only arrays */
7299 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7301 /* add padding between regions */
7303 /* then add local bound info */
7304 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7305 bounds_ptr
[0] = addr
;
7306 bounds_ptr
[1] = size
;
7310 /* local variable */
7311 #ifdef CONFIG_TCC_ASM
7312 if (ad
->asm_label
) {
7313 int reg
= asm_parse_regvar(ad
->asm_label
);
7315 r
= (r
& ~VT_VALMASK
) | reg
;
7318 sym
= sym_push(v
, type
, r
, addr
);
7319 if (ad
->cleanup_func
) {
7320 Sym
*cls
= sym_push2(&all_cleanups
, SYM_FIELD
| ++ncleanups
, 0, 0);
7321 cls
->prev_tok
= sym
;
7322 cls
->next
= ad
->cleanup_func
;
7323 cls
->ncl
= current_cleanups
;
7324 current_cleanups
= cls
;
7329 /* push local reference */
7330 vset(type
, r
, addr
);
7333 if (v
&& scope
== VT_CONST
) {
7334 /* see if the symbol was already defined */
7337 patch_storage(sym
, ad
, type
);
7338 /* we accept several definitions of the same global variable. */
7339 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7344 /* allocate symbol in corresponding section */
7349 else if (tcc_state
->nocommon
)
7354 addr
= section_add(sec
, size
, align
);
7355 #ifdef CONFIG_TCC_BCHECK
7356 /* add padding if bound check */
7358 section_add(sec
, 1, 1);
7361 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7362 sec
= common_section
;
7367 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7368 patch_storage(sym
, ad
, NULL
);
7370 /* update symbol definition */
7371 put_extern_sym(sym
, sec
, addr
, size
);
7373 /* push global reference */
7374 vpush_ref(type
, sec
, addr
, size
);
7379 #ifdef CONFIG_TCC_BCHECK
7380 /* handles bounds now because the symbol must be defined
7381 before for the relocation */
7385 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7386 /* then add global bound info */
7387 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7388 bounds_ptr
[0] = 0; /* relocated */
7389 bounds_ptr
[1] = size
;
7394 if (type
->t
& VT_VLA
) {
7400 /* save current stack pointer */
7401 if (vlas_in_scope
== 0) {
7402 if (vla_sp_root_loc
== -1)
7403 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
7404 gen_vla_sp_save(vla_sp_root_loc
);
7407 vla_runtime_type_size(type
, &a
);
7408 gen_vla_alloc(type
, a
);
7409 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7410 /* on _WIN64, because of the function args scratch area, the
7411 result of alloca differs from RSP and is returned in RAX. */
7412 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7414 gen_vla_sp_save(addr
);
7418 } else if (has_init
) {
7419 size_t oldreloc_offset
= 0;
7420 if (sec
&& sec
->reloc
)
7421 oldreloc_offset
= sec
->reloc
->data_offset
;
7422 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
7423 if (sec
&& sec
->reloc
)
7424 squeeze_multi_relocs(sec
, oldreloc_offset
);
7425 /* patch flexible array member size back to -1, */
7426 /* for possible subsequent similar declarations */
7428 flexible_array
->type
.ref
->c
= -1;
7432 /* restore parse state if needed */
7438 nocode_wanted
= saved_nocode_wanted
;
7441 /* parse a function defined by symbol 'sym' and generate its code in
7442 'cur_text_section' */
7443 static void gen_function(Sym
*sym
)
7446 ind
= cur_text_section
->data_offset
;
7447 if (sym
->a
.aligned
) {
7448 size_t newoff
= section_add(cur_text_section
, 0,
7449 1 << (sym
->a
.aligned
- 1));
7450 gen_fill_nops(newoff
- ind
);
7452 /* NOTE: we patch the symbol size later */
7453 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7454 funcname
= get_tok_str(sym
->v
, NULL
);
7456 /* Initialize VLA state */
7458 vla_sp_root_loc
= -1;
7459 /* put debug symbol */
7460 tcc_debug_funcstart(tcc_state
, sym
);
7461 /* push a dummy symbol to enable local sym storage */
7462 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7463 local_scope
= 1; /* for function parameters */
7464 gfunc_prolog(&sym
->type
);
7465 reset_local_scope();
7467 clear_temp_local_var_list();
7468 block(NULL
, NULL
, NULL
, NULL
, 0);
7472 cur_text_section
->data_offset
= ind
;
7473 label_pop(&global_label_stack
, NULL
, 0);
7474 /* reset local stack */
7475 reset_local_scope();
7476 sym_pop(&local_stack
, NULL
, 0);
7477 /* end of function */
7478 /* patch symbol size */
7479 elfsym(sym
)->st_size
= ind
- func_ind
;
7480 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7481 /* It's better to crash than to generate wrong code */
7482 cur_text_section
= NULL
;
7483 funcname
= ""; /* for safety */
7484 func_vt
.t
= VT_VOID
; /* for safety */
7485 func_var
= 0; /* for safety */
7486 ind
= 0; /* for safety */
7487 nocode_wanted
= 0x80000000;
7491 static void gen_inline_functions(TCCState
*s
)
7494 int inline_generated
, i
, ln
;
7495 struct InlineFunc
*fn
;
7497 ln
= file
->line_num
;
7498 /* iterate while inline function are referenced */
7500 inline_generated
= 0;
7501 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7502 fn
= s
->inline_fns
[i
];
7504 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
7505 /* the function was used or forced (and then not internal):
7506 generate its code and convert it to a normal function */
7509 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7510 begin_macro(fn
->func_str
, 1);
7512 cur_text_section
= text_section
;
7516 inline_generated
= 1;
7519 } while (inline_generated
);
7520 file
->line_num
= ln
;
7523 ST_FUNC
void free_inline_functions(TCCState
*s
)
7526 /* free tokens of unused inline functions */
7527 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7528 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7530 tok_str_free(fn
->func_str
);
7532 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7535 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7536 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7537 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7542 AttributeDef ad
, adbase
;
7545 if (tok
== TOK_STATIC_ASSERT
) {
7553 tcc_error("%s", get_tok_str(tok
, &tokc
));
7559 if (!parse_btype(&btype
, &adbase
)) {
7560 if (is_for_loop_init
)
7562 /* skip redundant ';' if not in old parameter decl scope */
7563 if (tok
== ';' && l
!= VT_CMP
) {
7569 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7570 /* global asm block */
7574 if (tok
>= TOK_UIDENT
) {
7575 /* special test for old K&R protos without explicit int
7576 type. Only accepted when defining global data */
7580 expect("declaration");
7585 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7586 int v
= btype
.ref
->v
;
7587 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7588 tcc_warning("unnamed struct/union that defines no instances");
7592 if (IS_ENUM(btype
.t
)) {
7597 while (1) { /* iterate thru each declaration */
7599 /* If the base type itself was an array type of unspecified
7600 size (like in 'typedef int arr[]; arr x = {1};') then
7601 we will overwrite the unknown size by the real one for
7602 this decl. We need to unshare the ref symbol holding
7604 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7605 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7608 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
7612 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7613 printf("type = '%s'\n", buf
);
7616 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7617 /* if old style function prototype, we accept a
7620 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7621 decl0(VT_CMP
, 0, sym
);
7622 /* always compile 'extern inline' */
7623 if (type
.t
& VT_EXTERN
)
7624 type
.t
&= ~VT_INLINE
;
7627 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7628 ad
.asm_label
= asm_label_instr();
7629 /* parse one last attribute list, after asm label */
7630 parse_attribute(&ad
);
7632 /* gcc does not allow __asm__("label") with function definition,
7639 #ifdef TCC_TARGET_PE
7640 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7641 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7642 tcc_error("cannot have dll linkage with static or typedef");
7643 if (ad
.a
.dllimport
) {
7644 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7647 type
.t
|= VT_EXTERN
;
7653 tcc_error("cannot use local functions");
7654 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7655 expect("function definition");
7657 /* reject abstract declarators in function definition
7658 make old style params without decl have int type */
7660 while ((sym
= sym
->next
) != NULL
) {
7661 if (!(sym
->v
& ~SYM_FIELD
))
7662 expect("identifier");
7663 if (sym
->type
.t
== VT_VOID
)
7664 sym
->type
= int_type
;
7667 /* put function symbol */
7668 type
.t
&= ~VT_EXTERN
;
7669 sym
= external_sym(v
, &type
, 0, &ad
);
7670 /* static inline functions are just recorded as a kind
7671 of macro. Their code will be emitted at the end of
7672 the compilation unit only if they are used */
7673 if (sym
->type
.t
& VT_INLINE
) {
7674 struct InlineFunc
*fn
;
7675 const char *filename
;
7677 filename
= file
? file
->filename
: "";
7678 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7679 strcpy(fn
->filename
, filename
);
7681 skip_or_save_block(&fn
->func_str
);
7682 dynarray_add(&tcc_state
->inline_fns
,
7683 &tcc_state
->nb_inline_fns
, fn
);
7685 /* compute text section */
7686 cur_text_section
= ad
.section
;
7687 if (!cur_text_section
)
7688 cur_text_section
= text_section
;
7694 /* find parameter in function parameter list */
7695 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7696 if ((sym
->v
& ~SYM_FIELD
) == v
)
7698 tcc_error("declaration for parameter '%s' but no such parameter",
7699 get_tok_str(v
, NULL
));
7701 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7702 tcc_error("storage class specified for '%s'",
7703 get_tok_str(v
, NULL
));
7704 if (sym
->type
.t
!= VT_VOID
)
7705 tcc_error("redefinition of parameter '%s'",
7706 get_tok_str(v
, NULL
));
7707 convert_parameter_type(&type
);
7709 } else if (type
.t
& VT_TYPEDEF
) {
7710 /* save typedefed type */
7711 /* XXX: test storage specifiers ? */
7713 if (sym
&& sym
->sym_scope
== local_scope
) {
7714 if (!is_compatible_types(&sym
->type
, &type
)
7715 || !(sym
->type
.t
& VT_TYPEDEF
))
7716 tcc_error("incompatible redefinition of '%s'",
7717 get_tok_str(v
, NULL
));
7720 sym
= sym_push(v
, &type
, 0, 0);
7724 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7725 && !(type
.t
& VT_EXTERN
)) {
7726 tcc_error("declaration of void object");
7729 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7730 /* external function definition */
7731 /* specific case for func_call attribute */
7733 } else if (!(type
.t
& VT_ARRAY
)) {
7734 /* not lvalue if array */
7735 r
|= lvalue_type(type
.t
);
7737 has_init
= (tok
== '=');
7738 if (has_init
&& (type
.t
& VT_VLA
))
7739 tcc_error("variable length array cannot be initialized");
7740 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
7741 || (type
.t
& VT_BTYPE
) == VT_FUNC
7742 /* as with GCC, uninitialized global arrays with no size
7743 are considered extern: */
7744 || ((type
.t
& VT_ARRAY
) && !has_init
7745 && l
== VT_CONST
&& type
.ref
->c
< 0)
7747 /* external variable or function */
7748 type
.t
|= VT_EXTERN
;
7749 sym
= external_sym(v
, &type
, r
, &ad
);
7750 if (ad
.alias_target
) {
7753 alias_target
= sym_find(ad
.alias_target
);
7754 esym
= elfsym(alias_target
);
7756 tcc_error("unsupported forward __alias__ attribute");
7757 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7760 if (type
.t
& VT_STATIC
)
7766 else if (l
== VT_CONST
)
7767 /* uninitialized global variables may be overridden */
7768 type
.t
|= VT_EXTERN
;
7769 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7773 if (is_for_loop_init
)
7785 static void decl(int l
)
7790 /* ------------------------------------------------------------------------- */
7793 /* ------------------------------------------------------------------------- */