2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
28 anon_sym: anonymous symbol index
30 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
32 ST_DATA Sym
*sym_free_first
;
33 ST_DATA
void **sym_pools
;
34 ST_DATA
int nb_sym_pools
;
36 ST_DATA Sym
*global_stack
;
37 ST_DATA Sym
*local_stack
;
38 ST_DATA Sym
*define_stack
;
39 ST_DATA Sym
*global_label_stack
;
40 ST_DATA Sym
*local_label_stack
;
42 static Sym
*all_cleanups
, *current_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
50 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
51 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
52 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
54 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
56 ST_DATA
int const_wanted
; /* true if constant wanted */
57 ST_DATA
int nocode_wanted
; /* no code generation wanted */
58 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
59 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
61 /* Automagical code suppression ----> */
62 #define CODE_OFF() (nocode_wanted |= 0x20000000)
63 #define CODE_ON() (nocode_wanted &= ~0x20000000)
65 /* Clear 'nocode_wanted' at label if it was used */
66 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
67 static int gind(void) { CODE_ON(); return ind
; }
69 /* Set 'nocode_wanted' after unconditional jumps */
70 static void gjmp_addr_acs(int t
) { gjmp_addr(t
); CODE_OFF(); }
71 static int gjmp_acs(int t
) { t
= gjmp(t
); CODE_OFF(); return t
; }
73 /* These are #undef'd at the end of this file */
74 #define gjmp_addr gjmp_addr_acs
78 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializer parsing) */
79 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
80 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
82 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
83 ST_DATA
const char *funcname
;
86 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
88 ST_DATA
struct switch_t
{
92 } **p
; int n
; /* list of case ranges */
93 int def_sym
; /* default symbol */
94 } *cur_switch
; /* current switch */
96 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 0x4
97 /*list of temporary local variables on the stack in current function. */
98 ST_DATA
struct temp_local_variable
{
99 int location
; // offset on stack (stored in SValue.c.i)
102 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
103 short nb_temp_local_vars
;
105 /* ------------------------------------------------------------------------- */
107 static void gen_cast(CType
*type
);
108 static void gen_cast_s(int t
);
109 static inline CType
*pointed_type(CType
*type
);
110 static int is_compatible_types(CType
*type1
, CType
*type2
);
111 static int parse_btype(CType
*type
, AttributeDef
*ad
);
112 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
113 static void parse_expr_type(CType
*type
);
114 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
115 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
116 static void block(int *bsym
, Sym
*bcl
, int *csym
, Sym
*ccl
, int is_expr
);
117 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
118 static void decl(int l
);
119 static int decl0(int l
, int is_for_loop_init
, Sym
*);
120 static void expr_eq(void);
121 static void vla_runtime_type_size(CType
*type
, int *a
);
122 static void vla_sp_restore(void);
123 static void vla_sp_restore_root(void);
124 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
125 static inline int64_t expr_const64(void);
126 static void vpush64(int ty
, unsigned long long v
);
127 static void vpush(CType
*type
);
128 static int gvtst(int inv
, int t
);
129 static void gen_inline_functions(TCCState
*s
);
130 static void skip_or_save_block(TokenString
**str
);
131 static void gv_dup(void);
132 static int get_temp_local_var(int size
,int align
);
133 static void clear_temp_local_var_list();
136 static void reset_local_scope(void)
138 if (current_cleanups
)
139 tcc_error("ICE current_cleanups");
140 sym_pop(&all_cleanups
, NULL
, 0);
144 ST_INLN
int is_float(int t
)
148 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
151 /* we use our own 'finite' function to avoid potential problems with
152 non standard math libs */
153 /* XXX: endianness dependent */
154 ST_FUNC
int ieee_finite(double d
)
157 memcpy(p
, &d
, sizeof(double));
158 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
161 /* compiling intel long double natively */
162 #if (defined __i386__ || defined __x86_64__) \
163 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
164 # define TCC_IS_NATIVE_387
167 ST_FUNC
void test_lvalue(void)
169 if (!(vtop
->r
& VT_LVAL
))
173 ST_FUNC
void check_vstack(void)
176 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
179 /* ------------------------------------------------------------------------- */
180 /* vstack debugging aid */
183 void pv (const char *lbl
, int a
, int b
)
186 for (i
= a
; i
< a
+ b
; ++i
) {
187 SValue
*p
= &vtop
[-i
];
188 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
189 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
194 /* ------------------------------------------------------------------------- */
195 /* start of translation unit info */
196 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
201 /* file info: full path + filename */
202 section_sym
= put_elf_sym(symtab_section
, 0, 0,
203 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
204 text_section
->sh_num
, NULL
);
205 getcwd(buf
, sizeof(buf
));
207 normalize_slashes(buf
);
209 pstrcat(buf
, sizeof(buf
), "/");
210 put_stabs_r(buf
, N_SO
, 0, 0,
211 text_section
->data_offset
, text_section
, section_sym
);
212 put_stabs_r(file
->filename
, N_SO
, 0, 0,
213 text_section
->data_offset
, text_section
, section_sym
);
218 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
219 symbols can be safely used */
220 put_elf_sym(symtab_section
, 0, 0,
221 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
222 SHN_ABS
, file
->filename
);
225 /* put end of translation unit info */
226 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
230 put_stabs_r(NULL
, N_SO
, 0, 0,
231 text_section
->data_offset
, text_section
, section_sym
);
235 /* generate line number info */
236 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
240 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
241 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
243 last_line_num
= file
->line_num
;
247 /* put function symbol */
248 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
256 /* XXX: we put here a dummy type */
257 snprintf(buf
, sizeof(buf
), "%s:%c1",
258 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
259 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
260 cur_text_section
, sym
->c
);
261 /* //gr gdb wants a line at the function */
262 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
268 /* put function size */
269 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
273 put_stabn(N_FUN
, 0, 0, size
);
276 /* ------------------------------------------------------------------------- */
277 ST_FUNC
int tccgen_compile(TCCState
*s1
)
279 cur_text_section
= NULL
;
281 anon_sym
= SYM_FIRST_ANOM
;
284 nocode_wanted
= 0x80000000;
287 /* define some often used types */
289 char_pointer_type
.t
= VT_BYTE
;
290 mk_pointer(&char_pointer_type
);
292 size_type
.t
= VT_INT
| VT_UNSIGNED
;
293 ptrdiff_type
.t
= VT_INT
;
295 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
296 ptrdiff_type
.t
= VT_LLONG
;
298 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
299 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
301 func_old_type
.t
= VT_FUNC
;
302 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
303 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
304 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
308 #ifdef TCC_TARGET_ARM
313 printf("%s: **** new file\n", file
->filename
);
316 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
319 gen_inline_functions(s1
);
321 /* end of translation unit info */
326 /* ------------------------------------------------------------------------- */
327 ST_FUNC ElfSym
*elfsym(Sym
*s
)
331 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
334 /* apply storage attributes to Elf symbol */
335 ST_FUNC
void update_storage(Sym
*sym
)
338 int sym_bind
, old_sym_bind
;
344 if (sym
->a
.visibility
)
345 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
348 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
349 sym_bind
= STB_LOCAL
;
350 else if (sym
->a
.weak
)
353 sym_bind
= STB_GLOBAL
;
354 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
355 if (sym_bind
!= old_sym_bind
) {
356 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
360 if (sym
->a
.dllimport
)
361 esym
->st_other
|= ST_PE_IMPORT
;
362 if (sym
->a
.dllexport
)
363 esym
->st_other
|= ST_PE_EXPORT
;
367 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
368 get_tok_str(sym
->v
, NULL
),
369 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
377 /* ------------------------------------------------------------------------- */
378 /* update sym->c so that it points to an external symbol in section
379 'section' with value 'value' */
381 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
382 addr_t value
, unsigned long size
,
383 int can_add_underscore
)
385 int sym_type
, sym_bind
, info
, other
, t
;
389 #ifdef CONFIG_TCC_BCHECK
394 name
= get_tok_str(sym
->v
, NULL
);
395 #ifdef CONFIG_TCC_BCHECK
396 if (tcc_state
->do_bounds_check
) {
397 /* XXX: avoid doing that for statics ? */
398 /* if bound checking is activated, we change some function
399 names by adding the "__bound" prefix */
402 /* XXX: we rely only on malloc hooks */
415 strcpy(buf
, "__bound_");
423 if ((t
& VT_BTYPE
) == VT_FUNC
) {
425 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
426 sym_type
= STT_NOTYPE
;
428 sym_type
= STT_OBJECT
;
430 if (t
& (VT_STATIC
| VT_INLINE
))
431 sym_bind
= STB_LOCAL
;
433 sym_bind
= STB_GLOBAL
;
436 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
437 Sym
*ref
= sym
->type
.ref
;
438 if (ref
->a
.nodecorate
) {
439 can_add_underscore
= 0;
441 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
442 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
444 other
|= ST_PE_STDCALL
;
445 can_add_underscore
= 0;
449 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
451 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
455 name
= get_tok_str(sym
->asm_label
, NULL
);
456 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
457 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
460 esym
->st_value
= value
;
461 esym
->st_size
= size
;
462 esym
->st_shndx
= sh_num
;
467 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
468 addr_t value
, unsigned long size
)
470 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
471 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
474 /* add a new relocation entry to symbol 'sym' in section 's' */
475 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
480 if (nocode_wanted
&& s
== cur_text_section
)
485 put_extern_sym(sym
, NULL
, 0, 0);
489 /* now we can add ELF relocation info */
490 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
494 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
496 greloca(s
, sym
, offset
, type
, 0);
500 /* ------------------------------------------------------------------------- */
501 /* symbol allocator */
502 static Sym
*__sym_malloc(void)
504 Sym
*sym_pool
, *sym
, *last_sym
;
507 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
508 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
510 last_sym
= sym_free_first
;
512 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
513 sym
->next
= last_sym
;
517 sym_free_first
= last_sym
;
521 static inline Sym
*sym_malloc(void)
525 sym
= sym_free_first
;
527 sym
= __sym_malloc();
528 sym_free_first
= sym
->next
;
531 sym
= tcc_malloc(sizeof(Sym
));
536 ST_INLN
void sym_free(Sym
*sym
)
539 sym
->next
= sym_free_first
;
540 sym_free_first
= sym
;
546 /* push, without hashing */
547 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
552 memset(s
, 0, sizeof *s
);
562 /* find a symbol and return its associated structure. 's' is the top
563 of the symbol stack */
564 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
576 /* structure lookup */
577 ST_INLN Sym
*struct_find(int v
)
580 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
582 return table_ident
[v
]->sym_struct
;
585 /* find an identifier */
586 ST_INLN Sym
*sym_find(int v
)
589 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
591 return table_ident
[v
]->sym_identifier
;
594 static int sym_scope(Sym
*s
)
596 if (IS_ENUM_VAL (s
->type
.t
))
597 return s
->type
.ref
->sym_scope
;
602 /* push a given symbol on the symbol stack */
603 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
612 s
= sym_push2(ps
, v
, type
->t
, c
);
613 s
->type
.ref
= type
->ref
;
615 /* don't record fields or anonymous symbols */
617 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
618 /* record symbol in token array */
619 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
621 ps
= &ts
->sym_struct
;
623 ps
= &ts
->sym_identifier
;
626 s
->sym_scope
= local_scope
;
627 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
628 tcc_error("redeclaration of '%s'",
629 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
634 /* push a global identifier */
635 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
638 s
= sym_push2(&global_stack
, v
, t
, c
);
639 s
->r
= VT_CONST
| VT_SYM
;
640 /* don't record anonymous symbol */
641 if (v
< SYM_FIRST_ANOM
) {
642 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
643 /* modify the top most local identifier, so that sym_identifier will
644 point to 's' when popped; happens when called from inline asm */
645 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
646 ps
= &(*ps
)->prev_tok
;
653 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
654 pop them yet from the list, but do remove them from the token array. */
655 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
665 /* remove symbol in token array */
667 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
668 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
670 ps
= &ts
->sym_struct
;
672 ps
= &ts
->sym_identifier
;
683 /* ------------------------------------------------------------------------- */
685 static void vsetc(CType
*type
, int r
, CValue
*vc
)
689 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
690 tcc_error("memory full (vstack)");
691 /* cannot let cpu flags if other instruction are generated. Also
692 avoid leaving VT_JMP anywhere except on the top of the stack
693 because it would complicate the code generator.
695 Don't do this when nocode_wanted. vtop might come from
696 !nocode_wanted regions (see 88_codeopt.c) and transforming
697 it to a register without actually generating code is wrong
698 as their value might still be used for real. All values
699 we push under nocode_wanted will eventually be popped
700 again, so that the VT_CMP/VT_JMP value will be in vtop
701 when code is unsuppressed again.
703 Same logic below in vswap(); */
704 if (vtop
>= vstack
&& !nocode_wanted
) {
705 v
= vtop
->r
& VT_VALMASK
;
706 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
718 ST_FUNC
void vswap(void)
721 /* cannot vswap cpu flags. See comment at vsetc() above */
722 if (vtop
>= vstack
&& !nocode_wanted
) {
723 int v
= vtop
->r
& VT_VALMASK
;
724 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
732 /* pop stack value */
733 ST_FUNC
void vpop(void)
736 v
= vtop
->r
& VT_VALMASK
;
737 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
738 /* for x86, we need to pop the FP stack */
740 o(0xd8dd); /* fstp %st(0) */
743 if (v
== VT_JMP
|| v
== VT_JMPI
) {
744 /* need to put correct jump if && or || without test */
750 /* push constant of type "type" with useless value */
751 ST_FUNC
void vpush(CType
*type
)
753 vset(type
, VT_CONST
, 0);
756 /* push integer constant */
757 ST_FUNC
void vpushi(int v
)
761 vsetc(&int_type
, VT_CONST
, &cval
);
764 /* push a pointer sized constant */
765 static void vpushs(addr_t v
)
769 vsetc(&size_type
, VT_CONST
, &cval
);
772 /* push arbitrary 64bit constant */
773 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
780 vsetc(&ctype
, VT_CONST
, &cval
);
783 /* push long long constant */
784 static inline void vpushll(long long v
)
786 vpush64(VT_LLONG
, v
);
789 ST_FUNC
void vset(CType
*type
, int r
, int v
)
794 vsetc(type
, r
, &cval
);
797 static void vseti(int r
, int v
)
805 ST_FUNC
void vpushv(SValue
*v
)
807 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
808 tcc_error("memory full (vstack)");
813 static void vdup(void)
818 /* rotate n first stack elements to the bottom
819 I1 ... In -> I2 ... In I1 [top is right]
821 ST_FUNC
void vrotb(int n
)
832 /* rotate the n elements before entry e towards the top
833 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
835 ST_FUNC
void vrote(SValue
*e
, int n
)
841 for(i
= 0;i
< n
- 1; i
++)
846 /* rotate n first stack elements to the top
847 I1 ... In -> In I1 ... I(n-1) [top is right]
849 ST_FUNC
void vrott(int n
)
854 /* push a symbol value of TYPE */
855 static inline void vpushsym(CType
*type
, Sym
*sym
)
859 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
863 /* Return a static symbol pointing to a section */
864 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
870 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
871 sym
->type
.t
|= VT_STATIC
;
872 put_extern_sym(sym
, sec
, offset
, size
);
876 /* push a reference to a section offset by adding a dummy symbol */
877 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
879 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
882 /* define a new external reference to a symbol 'v' of type 'u' */
883 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
889 /* push forward reference */
890 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
891 s
->type
.ref
= type
->ref
;
892 } else if (IS_ASM_SYM(s
)) {
893 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
894 s
->type
.ref
= type
->ref
;
900 /* Merge symbol attributes. */
901 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
903 if (sa1
->aligned
&& !sa
->aligned
)
904 sa
->aligned
= sa1
->aligned
;
905 sa
->packed
|= sa1
->packed
;
906 sa
->weak
|= sa1
->weak
;
907 if (sa1
->visibility
!= STV_DEFAULT
) {
908 int vis
= sa
->visibility
;
909 if (vis
== STV_DEFAULT
910 || vis
> sa1
->visibility
)
911 vis
= sa1
->visibility
;
912 sa
->visibility
= vis
;
914 sa
->dllexport
|= sa1
->dllexport
;
915 sa
->nodecorate
|= sa1
->nodecorate
;
916 sa
->dllimport
|= sa1
->dllimport
;
919 /* Merge function attributes. */
920 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
922 if (fa1
->func_call
&& !fa
->func_call
)
923 fa
->func_call
= fa1
->func_call
;
924 if (fa1
->func_type
&& !fa
->func_type
)
925 fa
->func_type
= fa1
->func_type
;
926 if (fa1
->func_args
&& !fa
->func_args
)
927 fa
->func_args
= fa1
->func_args
;
930 /* Merge attributes. */
931 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
933 merge_symattr(&ad
->a
, &ad1
->a
);
934 merge_funcattr(&ad
->f
, &ad1
->f
);
937 ad
->section
= ad1
->section
;
938 if (ad1
->alias_target
)
939 ad
->alias_target
= ad1
->alias_target
;
941 ad
->asm_label
= ad1
->asm_label
;
943 ad
->attr_mode
= ad1
->attr_mode
;
946 /* Merge some type attributes. */
947 static void patch_type(Sym
*sym
, CType
*type
)
949 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
950 if (!(sym
->type
.t
& VT_EXTERN
))
951 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
952 sym
->type
.t
&= ~VT_EXTERN
;
955 if (IS_ASM_SYM(sym
)) {
956 /* stay static if both are static */
957 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
958 sym
->type
.ref
= type
->ref
;
961 if (!is_compatible_types(&sym
->type
, type
)) {
962 tcc_error("incompatible types for redefinition of '%s'",
963 get_tok_str(sym
->v
, NULL
));
965 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
966 int static_proto
= sym
->type
.t
& VT_STATIC
;
967 /* warn if static follows non-static function declaration */
968 if ((type
->t
& VT_STATIC
) && !static_proto
969 /* XXX this test for inline shouldn't be here. Until we
970 implement gnu-inline mode again it silences a warning for
971 mingw caused by our workarounds. */
972 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
973 tcc_warning("static storage ignored for redefinition of '%s'",
974 get_tok_str(sym
->v
, NULL
));
976 /* set 'inline' if both agree or if one has static */
977 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
978 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
979 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
980 static_proto
|= VT_INLINE
;
983 if (0 == (type
->t
& VT_EXTERN
)) {
984 /* put complete type, use static from prototype */
985 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
986 sym
->type
.ref
= type
->ref
;
988 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
991 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
992 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
993 sym
->type
.ref
= type
->ref
;
997 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
998 /* set array size if it was omitted in extern declaration */
999 sym
->type
.ref
->c
= type
->ref
->c
;
1001 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1002 tcc_warning("storage mismatch for redefinition of '%s'",
1003 get_tok_str(sym
->v
, NULL
));
1007 /* Merge some storage attributes. */
1008 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1011 patch_type(sym
, type
);
1013 #ifdef TCC_TARGET_PE
1014 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1015 tcc_error("incompatible dll linkage for redefinition of '%s'",
1016 get_tok_str(sym
->v
, NULL
));
1018 merge_symattr(&sym
->a
, &ad
->a
);
1020 sym
->asm_label
= ad
->asm_label
;
1021 update_storage(sym
);
1024 /* copy sym to other stack */
1025 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1028 s
= sym_malloc(), *s
= *s0
;
1029 s
->prev
= *ps
, *ps
= s
;
1030 if (s
->v
< SYM_FIRST_ANOM
) {
1031 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1032 s
->prev_tok
= *ps
, *ps
= s
;
1037 /* copy a list of syms */
1038 static void sym_copy_ref(Sym
*s0
, Sym
**ps
)
1040 Sym
*s
, **sp
= &s0
->type
.ref
;
1041 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
)
1042 sp
= &(*sp
= sym_copy(s
, ps
))->next
;
1045 /* define a new external reference to a symbol 'v' */
1046 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1050 /* look for global symbol */
1052 while (s
&& s
->sym_scope
)
1056 /* push forward reference */
1057 s
= global_identifier_push(v
, type
->t
, 0);
1060 s
->asm_label
= ad
->asm_label
;
1061 s
->type
.ref
= type
->ref
;
1062 bt
= s
->type
.t
& (VT_BTYPE
|VT_ARRAY
);
1063 /* copy type to the global stack also */
1064 if (local_scope
&& (bt
== VT_FUNC
|| (bt
& VT_ARRAY
)))
1065 sym_copy_ref(s
, &global_stack
);
1067 patch_storage(s
, ad
, type
);
1068 bt
= s
->type
.t
& VT_BTYPE
;
1070 /* push variables to local scope if any */
1071 if (local_stack
&& bt
!= VT_FUNC
)
1072 s
= sym_copy(s
, &local_stack
);
1076 /* push a reference to global symbol v */
1077 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1079 vpushsym(type
, external_global_sym(v
, type
));
1082 /* save registers up to (vtop - n) stack entry */
1083 ST_FUNC
void save_regs(int n
)
1086 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1090 /* save r to the memory stack, and mark it as being free */
1091 ST_FUNC
void save_reg(int r
)
1093 save_reg_upstack(r
, 0);
1096 /* save r to the memory stack, and mark it as being free,
1097 if seen up to (vtop - n) stack entry */
1098 ST_FUNC
void save_reg_upstack(int r
, int n
)
1100 int l
, saved
, size
, align
;
1104 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1109 /* modify all stack values */
1112 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1113 if ((p
->r
& VT_VALMASK
) == r
||
1114 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
1115 /* must save value on stack if not already done */
1117 /* NOTE: must reload 'r' because r might be equal to r2 */
1118 r
= p
->r
& VT_VALMASK
;
1119 /* store register in the stack */
1121 if ((p
->r
& VT_LVAL
) ||
1122 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
1124 type
= &char_pointer_type
;
1128 size
= type_size(type
, &align
);
1129 l
=get_temp_local_var(size
,align
);
1130 sv
.type
.t
= type
->t
;
1131 sv
.r
= VT_LOCAL
| VT_LVAL
;
1134 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1135 /* x86 specific: need to pop fp register ST0 if saved */
1136 if (r
== TREG_ST0
) {
1137 o(0xd8dd); /* fstp %st(0) */
1141 /* special long long case */
1142 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
1149 /* mark that stack entry as being saved on the stack */
1150 if (p
->r
& VT_LVAL
) {
1151 /* also clear the bounded flag because the
1152 relocation address of the function was stored in
1154 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1156 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1164 #ifdef TCC_TARGET_ARM
1165 /* find a register of class 'rc2' with at most one reference on stack.
1166 * If none, call get_reg(rc) */
1167 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1172 for(r
=0;r
<NB_REGS
;r
++) {
1173 if (reg_classes
[r
] & rc2
) {
1176 for(p
= vstack
; p
<= vtop
; p
++) {
1177 if ((p
->r
& VT_VALMASK
) == r
||
1178 (p
->r2
& VT_VALMASK
) == r
)
1189 /* find a free register of class 'rc'. If none, save one register */
1190 ST_FUNC
int get_reg(int rc
)
1195 /* find a free register */
1196 for(r
=0;r
<NB_REGS
;r
++) {
1197 if (reg_classes
[r
] & rc
) {
1200 for(p
=vstack
;p
<=vtop
;p
++) {
1201 if ((p
->r
& VT_VALMASK
) == r
||
1202 (p
->r2
& VT_VALMASK
) == r
)
1210 /* no register left : free the first one on the stack (VERY
1211 IMPORTANT to start from the bottom to ensure that we don't
1212 spill registers used in gen_opi()) */
1213 for(p
=vstack
;p
<=vtop
;p
++) {
1214 /* look at second register (if long long) */
1215 r
= p
->r2
& VT_VALMASK
;
1216 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1218 r
= p
->r
& VT_VALMASK
;
1219 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1225 /* Should never comes here */
1229 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1230 static int get_temp_local_var(int size
,int align
){
1232 struct temp_local_variable
*temp_var
;
1239 for(i
=0;i
<nb_temp_local_vars
;i
++){
1240 temp_var
=&arr_temp_local_vars
[i
];
1241 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1244 /*check if temp_var is free*/
1246 for(p
=vstack
;p
<=vtop
;p
++) {
1248 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1249 if(p
->c
.i
==temp_var
->location
){
1256 found_var
=temp_var
->location
;
1262 loc
= (loc
- size
) & -align
;
1263 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1264 temp_var
=&arr_temp_local_vars
[i
];
1265 temp_var
->location
=loc
;
1266 temp_var
->size
=size
;
1267 temp_var
->align
=align
;
1268 nb_temp_local_vars
++;
1275 static void clear_temp_local_var_list(){
1276 nb_temp_local_vars
=0;
1279 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1281 static void move_reg(int r
, int s
, int t
)
1295 /* get address of vtop (vtop MUST BE an lvalue) */
1296 ST_FUNC
void gaddrof(void)
1298 vtop
->r
&= ~VT_LVAL
;
1299 /* tricky: if saved lvalue, then we can go back to lvalue */
1300 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1301 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1306 #ifdef CONFIG_TCC_BCHECK
1307 /* generate lvalue bound code */
1308 static void gbound(void)
1313 vtop
->r
&= ~VT_MUSTBOUND
;
1314 /* if lvalue, then use checking code before dereferencing */
1315 if (vtop
->r
& VT_LVAL
) {
1316 /* if not VT_BOUNDED value, then make one */
1317 if (!(vtop
->r
& VT_BOUNDED
)) {
1318 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1319 /* must save type because we must set it to int to get pointer */
1321 vtop
->type
.t
= VT_PTR
;
1324 gen_bounded_ptr_add();
1325 vtop
->r
|= lval_type
;
1328 /* then check for dereferencing */
1329 gen_bounded_ptr_deref();
1334 static void incr_bf_adr(int o
)
1336 vtop
->type
= char_pointer_type
;
1340 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1341 | (VT_BYTE
|VT_UNSIGNED
);
1342 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1343 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1346 /* single-byte load mode for packed or otherwise unaligned bitfields */
1347 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1350 save_reg_upstack(vtop
->r
, 1);
1351 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1352 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1361 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1363 vpushi((1 << n
) - 1), gen_op('&');
1366 vpushi(bits
), gen_op(TOK_SHL
);
1369 bits
+= n
, bit_size
-= n
, o
= 1;
1372 if (!(type
->t
& VT_UNSIGNED
)) {
1373 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1374 vpushi(n
), gen_op(TOK_SHL
);
1375 vpushi(n
), gen_op(TOK_SAR
);
1379 /* single-byte store mode for packed or otherwise unaligned bitfields */
1380 static void store_packed_bf(int bit_pos
, int bit_size
)
1382 int bits
, n
, o
, m
, c
;
1384 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1386 save_reg_upstack(vtop
->r
, 1);
1387 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1389 incr_bf_adr(o
); // X B
1391 c
? vdup() : gv_dup(); // B V X
1394 vpushi(bits
), gen_op(TOK_SHR
);
1396 vpushi(bit_pos
), gen_op(TOK_SHL
);
1401 m
= ((1 << n
) - 1) << bit_pos
;
1402 vpushi(m
), gen_op('&'); // X B V1
1403 vpushv(vtop
-1); // X B V1 B
1404 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1405 gen_op('&'); // X B V1 B1
1406 gen_op('|'); // X B V2
1408 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1409 vstore(), vpop(); // X B
1410 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1415 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1418 if (0 == sv
->type
.ref
)
1420 t
= sv
->type
.ref
->auxtype
;
1421 if (t
!= -1 && t
!= VT_STRUCT
) {
1422 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1423 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1428 /* store vtop a register belonging to class 'rc'. lvalues are
1429 converted to values. Cannot be used if cannot be converted to
1430 register value (such as structures). */
1431 ST_FUNC
int gv(int rc
)
1433 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1435 /* NOTE: get_reg can modify vstack[] */
1436 if (vtop
->type
.t
& VT_BITFIELD
) {
1439 bit_pos
= BIT_POS(vtop
->type
.t
);
1440 bit_size
= BIT_SIZE(vtop
->type
.t
);
1441 /* remove bit field info to avoid loops */
1442 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1445 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1446 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1447 type
.t
|= VT_UNSIGNED
;
1449 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1451 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1456 if (r
== VT_STRUCT
) {
1457 load_packed_bf(&type
, bit_pos
, bit_size
);
1459 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1460 /* cast to int to propagate signedness in following ops */
1462 /* generate shifts */
1463 vpushi(bits
- (bit_pos
+ bit_size
));
1465 vpushi(bits
- bit_size
);
1466 /* NOTE: transformed to SHR if unsigned */
1471 if (is_float(vtop
->type
.t
) &&
1472 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1473 unsigned long offset
;
1474 /* CPUs usually cannot use float constants, so we store them
1475 generically in data segment */
1476 size
= type_size(&vtop
->type
, &align
);
1478 size
= 0, align
= 1;
1479 offset
= section_add(data_section
, size
, align
);
1480 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1482 init_putv(&vtop
->type
, data_section
, offset
);
1485 #ifdef CONFIG_TCC_BCHECK
1486 if (vtop
->r
& VT_MUSTBOUND
)
1490 r
= vtop
->r
& VT_VALMASK
;
1491 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1492 #ifndef TCC_TARGET_ARM64
1495 #ifdef TCC_TARGET_X86_64
1496 else if (rc
== RC_FRET
)
1500 /* need to reload if:
1502 - lvalue (need to dereference pointer)
1503 - already a register, but not in the right class */
1505 || (vtop
->r
& VT_LVAL
)
1506 || !(reg_classes
[r
] & rc
)
1508 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1509 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1511 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1517 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1518 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1520 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1521 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1522 unsigned long long ll
;
1524 int r2
, original_type
;
1525 original_type
= vtop
->type
.t
;
1526 /* two register type load : expand to two words
1529 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1532 vtop
->c
.i
= ll
; /* first word */
1534 vtop
->r
= r
; /* save register value */
1535 vpushi(ll
>> 32); /* second word */
1538 if (vtop
->r
& VT_LVAL
) {
1539 /* We do not want to modifier the long long
1540 pointer here, so the safest (and less
1541 efficient) is to save all the other registers
1542 in the stack. XXX: totally inefficient. */
1546 /* lvalue_save: save only if used further down the stack */
1547 save_reg_upstack(vtop
->r
, 1);
1549 /* load from memory */
1550 vtop
->type
.t
= load_type
;
1553 vtop
[-1].r
= r
; /* save register value */
1554 /* increment pointer to get second word */
1555 vtop
->type
.t
= addr_type
;
1560 vtop
->type
.t
= load_type
;
1562 /* move registers */
1565 vtop
[-1].r
= r
; /* save register value */
1566 vtop
->r
= vtop
[-1].r2
;
1568 /* Allocate second register. Here we rely on the fact that
1569 get_reg() tries first to free r2 of an SValue. */
1573 /* write second register */
1575 vtop
->type
.t
= original_type
;
1576 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1578 /* lvalue of scalar type : need to use lvalue type
1579 because of possible cast */
1582 /* compute memory access type */
1583 if (vtop
->r
& VT_LVAL_BYTE
)
1585 else if (vtop
->r
& VT_LVAL_SHORT
)
1587 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1591 /* restore wanted type */
1594 /* one register type load */
1599 #ifdef TCC_TARGET_C67
1600 /* uses register pairs for doubles */
1601 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1608 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1609 ST_FUNC
void gv2(int rc1
, int rc2
)
1613 /* generate more generic register first. But VT_JMP or VT_CMP
1614 values must be generated first in all cases to avoid possible
1616 v
= vtop
[0].r
& VT_VALMASK
;
1617 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1622 /* test if reload is needed for first register */
1623 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1633 /* test if reload is needed for first register */
1634 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1640 #ifndef TCC_TARGET_ARM64
1641 /* wrapper around RC_FRET to return a register by type */
1642 static int rc_fret(int t
)
1644 #ifdef TCC_TARGET_X86_64
1645 if (t
== VT_LDOUBLE
) {
1653 /* wrapper around REG_FRET to return a register by type */
1654 static int reg_fret(int t
)
1656 #ifdef TCC_TARGET_X86_64
1657 if (t
== VT_LDOUBLE
) {
1665 /* expand 64bit on stack in two ints */
1666 ST_FUNC
void lexpand(void)
1669 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1670 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1671 if (v
== VT_CONST
) {
1674 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1680 vtop
[0].r
= vtop
[-1].r2
;
1681 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1683 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1688 /* build a long long from two ints */
1689 static void lbuild(int t
)
1691 gv2(RC_INT
, RC_INT
);
1692 vtop
[-1].r2
= vtop
[0].r
;
1693 vtop
[-1].type
.t
= t
;
1698 /* convert stack entry to register and duplicate its value in another
1700 static void gv_dup(void)
1707 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1708 if (t
& VT_BITFIELD
) {
1718 /* stack: H L L1 H1 */
1728 /* duplicate value */
1733 #ifdef TCC_TARGET_X86_64
1734 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1744 load(r1
, &sv
); /* move r to r1 */
1746 /* duplicates value */
1752 /* Generate value test
1754 * Generate a test for any value (jump, comparison and integers) */
1755 ST_FUNC
int gvtst(int inv
, int t
)
1757 int v
= vtop
->r
& VT_VALMASK
;
1758 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1762 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1763 /* constant jmp optimization */
1764 if ((vtop
->c
.i
!= 0) != inv
)
1769 return gtst(inv
, t
);
1773 /* generate CPU independent (unsigned) long long operations */
1774 static void gen_opl(int op
)
1776 int t
, a
, b
, op1
, c
, i
;
1778 unsigned short reg_iret
= REG_IRET
;
1779 unsigned short reg_lret
= REG_LRET
;
1785 func
= TOK___divdi3
;
1788 func
= TOK___udivdi3
;
1791 func
= TOK___moddi3
;
1794 func
= TOK___umoddi3
;
1801 /* call generic long long function */
1802 vpush_global_sym(&func_old_type
, func
);
1807 vtop
->r2
= reg_lret
;
1815 //pv("gen_opl A",0,2);
1821 /* stack: L1 H1 L2 H2 */
1826 vtop
[-2] = vtop
[-3];
1829 /* stack: H1 H2 L1 L2 */
1830 //pv("gen_opl B",0,4);
1836 /* stack: H1 H2 L1 L2 ML MH */
1839 /* stack: ML MH H1 H2 L1 L2 */
1843 /* stack: ML MH H1 L2 H2 L1 */
1848 /* stack: ML MH M1 M2 */
1851 } else if (op
== '+' || op
== '-') {
1852 /* XXX: add non carry method too (for MIPS or alpha) */
1858 /* stack: H1 H2 (L1 op L2) */
1861 gen_op(op1
+ 1); /* TOK_xxxC2 */
1864 /* stack: H1 H2 (L1 op L2) */
1867 /* stack: (L1 op L2) H1 H2 */
1869 /* stack: (L1 op L2) (H1 op H2) */
1877 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1878 t
= vtop
[-1].type
.t
;
1882 /* stack: L H shift */
1884 /* constant: simpler */
1885 /* NOTE: all comments are for SHL. the other cases are
1886 done by swapping words */
1897 if (op
!= TOK_SAR
) {
1930 /* XXX: should provide a faster fallback on x86 ? */
1933 func
= TOK___ashrdi3
;
1936 func
= TOK___lshrdi3
;
1939 func
= TOK___ashldi3
;
1945 /* compare operations */
1951 /* stack: L1 H1 L2 H2 */
1953 vtop
[-1] = vtop
[-2];
1955 /* stack: L1 L2 H1 H2 */
1958 /* when values are equal, we need to compare low words. since
1959 the jump is inverted, we invert the test too. */
1962 else if (op1
== TOK_GT
)
1964 else if (op1
== TOK_ULT
)
1966 else if (op1
== TOK_UGT
)
1976 /* generate non equal test */
1982 /* compare low. Always unsigned */
1986 else if (op1
== TOK_LE
)
1988 else if (op1
== TOK_GT
)
1990 else if (op1
== TOK_GE
)
/* Signed 64-bit division emulated on unsigned operands.
   Both operands are taken as two's-complement values; the quotient
   truncates toward zero exactly like C signed division, but using
   unsigned negation/division avoids undefined behavior on INT64_MIN. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua, ub, q;

    /* take magnitudes (unsigned negation is well defined) */
    ua = (a >> 63) ? (uint64_t)0 - a : a;
    ub = (b >> 63) ? (uint64_t)0 - b : b;
    q = ua / ub;
    /* result is negative iff the operand signs differ */
    if ((a ^ b) >> 63)
        q = (uint64_t)0 - q;
    return q;
}
/* Signed 64-bit "less than" on unsigned operands: flipping the sign
   bit of both values maps two's-complement order onto unsigned order,
   so a plain unsigned compare gives the signed result. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_flip = (uint64_t)1 << 63;
    return (a ^ sign_flip) < (b ^ sign_flip);
}
2012 /* handle integer constant optimizations and various machine
2014 static void gen_opic(int op
)
2016 SValue
*v1
= vtop
- 1;
2018 int t1
= v1
->type
.t
& VT_BTYPE
;
2019 int t2
= v2
->type
.t
& VT_BTYPE
;
2020 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2021 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2022 uint64_t l1
= c1
? v1
->c
.i
: 0;
2023 uint64_t l2
= c2
? v2
->c
.i
: 0;
2024 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2026 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2027 l1
= ((uint32_t)l1
|
2028 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2029 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2030 l2
= ((uint32_t)l2
|
2031 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2035 case '+': l1
+= l2
; break;
2036 case '-': l1
-= l2
; break;
2037 case '&': l1
&= l2
; break;
2038 case '^': l1
^= l2
; break;
2039 case '|': l1
|= l2
; break;
2040 case '*': l1
*= l2
; break;
2047 /* if division by zero, generate explicit division */
2050 tcc_error("division by zero in constant");
2054 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2055 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2056 case TOK_UDIV
: l1
= l1
/ l2
; break;
2057 case TOK_UMOD
: l1
= l1
% l2
; break;
2060 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2061 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2063 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2066 case TOK_ULT
: l1
= l1
< l2
; break;
2067 case TOK_UGE
: l1
= l1
>= l2
; break;
2068 case TOK_EQ
: l1
= l1
== l2
; break;
2069 case TOK_NE
: l1
= l1
!= l2
; break;
2070 case TOK_ULE
: l1
= l1
<= l2
; break;
2071 case TOK_UGT
: l1
= l1
> l2
; break;
2072 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2073 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2074 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2075 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2077 case TOK_LAND
: l1
= l1
&& l2
; break;
2078 case TOK_LOR
: l1
= l1
|| l2
; break;
2082 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2083 l1
= ((uint32_t)l1
|
2084 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2088 /* if commutative ops, put c2 as constant */
2089 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2090 op
== '|' || op
== '*')) {
2092 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2093 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2095 if (!const_wanted
&&
2097 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2098 (l1
== -1 && op
== TOK_SAR
))) {
2099 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2101 } else if (!const_wanted
&&
2102 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2104 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2105 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2106 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2111 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2114 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2115 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2118 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2119 /* filter out NOP operations like x*1, x-0, x&-1... */
2121 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2122 /* try to use shifts instead of muls or divs */
2123 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2132 else if (op
== TOK_PDIV
)
2138 } else if (c2
&& (op
== '+' || op
== '-') &&
2139 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2140 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2141 /* symbol + constant case */
2145 /* The backends can't always deal with addends to symbols
2146 larger than +-1<<31. Don't construct such. */
2153 /* call low level op generator */
2154 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2155 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2163 /* generate a floating point operation with constant propagation */
2164 static void gen_opif(int op
)
2168 #if defined _MSC_VER && defined _AMD64_
2169 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2176 /* currently, we cannot do computations with forward symbols */
2177 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2178 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2180 if (v1
->type
.t
== VT_FLOAT
) {
2183 } else if (v1
->type
.t
== VT_DOUBLE
) {
2191 /* NOTE: we only do constant propagation if finite number (not
2192 NaN or infinity) (ANSI spec) */
2193 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2197 case '+': f1
+= f2
; break;
2198 case '-': f1
-= f2
; break;
2199 case '*': f1
*= f2
; break;
2202 /* If not in initializer we need to potentially generate
2203 FP exceptions at runtime, otherwise we want to fold. */
2209 /* XXX: also handles tests ? */
2213 /* XXX: overflow test ? */
2214 if (v1
->type
.t
== VT_FLOAT
) {
2216 } else if (v1
->type
.t
== VT_DOUBLE
) {
/* Return the compile-time size in bytes of the type 'type' points to.
   The alignment computed by type_size() is discarded here; a negative
   return means the pointed-to type is incomplete (see type_size). */
static int pointed_size(CType *type)
{
    int align;
    return type_size(pointed_type(type), &align);
}
/* VLA-aware variant of pointed_size(): pushes the (possibly runtime)
   size of the pointed-to type onto the value stack via
   vla_runtime_type_size(). The alignment out-parameter is discarded. */
static void vla_runtime_pointed_size(CType *type)
{
    int align;
    vla_runtime_type_size(pointed_type(type), &align);
}
/* Return 1 if 'p' is a null-pointer constant in the C sense:
   a compile-time constant 0 of integer type, or a constant
   (void *)0 without const/volatile qualification on the pointed type.
   Anything that is not a plain constant (register, lvalue, or
   symbol-relative value) can never be a null pointer constant. */
static inline int is_null_pointer(SValue *p)
{
    if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
        return 0;
    /* int 0, long long 0, or (void *)0 with an all-zero representation
       for the target pointer width (PTR_SIZE selects 32- vs 64-bit) */
    return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
        ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
        ((p->type.t & VT_BTYPE) == VT_PTR &&
         (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
         ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
         0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
        );
}
2252 static inline int is_integer_btype(int bt
)
2254 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2255 bt
== VT_INT
|| bt
== VT_LLONG
);
2258 /* check types for comparison or subtraction of pointers */
2259 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2261 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2264 /* null pointers are accepted for all comparisons as gcc */
2265 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2269 bt1
= type1
->t
& VT_BTYPE
;
2270 bt2
= type2
->t
& VT_BTYPE
;
2271 /* accept comparison between pointer and integer with a warning */
2272 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2273 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2274 tcc_warning("comparison between pointer and integer");
2278 /* both must be pointers or implicit function pointers */
2279 if (bt1
== VT_PTR
) {
2280 type1
= pointed_type(type1
);
2281 } else if (bt1
!= VT_FUNC
)
2282 goto invalid_operands
;
2284 if (bt2
== VT_PTR
) {
2285 type2
= pointed_type(type2
);
2286 } else if (bt2
!= VT_FUNC
) {
2288 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2290 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2291 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2295 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2296 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2297 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2298 /* gcc-like error if '-' is used */
2300 goto invalid_operands
;
2302 tcc_warning("comparison of distinct pointer types lacks a cast");
2306 /* generic gen_op: handles types problems */
2307 ST_FUNC
void gen_op(int op
)
2309 int u
, t1
, t2
, bt1
, bt2
, t
;
2313 t1
= vtop
[-1].type
.t
;
2314 t2
= vtop
[0].type
.t
;
2315 bt1
= t1
& VT_BTYPE
;
2316 bt2
= t2
& VT_BTYPE
;
2318 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2319 tcc_error("operation on a struct");
2320 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2321 if (bt2
== VT_FUNC
) {
2322 mk_pointer(&vtop
->type
);
2325 if (bt1
== VT_FUNC
) {
2327 mk_pointer(&vtop
->type
);
2332 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2333 /* at least one operand is a pointer */
2334 /* relational op: must be both pointers */
2335 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2336 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2337 /* pointers are handled are unsigned */
2339 t
= VT_LLONG
| VT_UNSIGNED
;
2341 t
= VT_INT
| VT_UNSIGNED
;
2345 /* if both pointers, then it must be the '-' op */
2346 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2348 tcc_error("cannot use pointers here");
2349 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2350 /* XXX: check that types are compatible */
2351 if (vtop
[-1].type
.t
& VT_VLA
) {
2352 vla_runtime_pointed_size(&vtop
[-1].type
);
2354 vpushi(pointed_size(&vtop
[-1].type
));
2358 vtop
->type
.t
= ptrdiff_type
.t
;
2362 /* exactly one pointer : must be '+' or '-'. */
2363 if (op
!= '-' && op
!= '+')
2364 tcc_error("cannot use pointers here");
2365 /* Put pointer as first operand */
2366 if (bt2
== VT_PTR
) {
2368 t
= t1
, t1
= t2
, t2
= t
;
2371 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2372 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2375 type1
= vtop
[-1].type
;
2376 type1
.t
&= ~VT_ARRAY
;
2377 if (vtop
[-1].type
.t
& VT_VLA
)
2378 vla_runtime_pointed_size(&vtop
[-1].type
);
2380 u
= pointed_size(&vtop
[-1].type
);
2382 tcc_error("unknown array element size");
2386 /* XXX: cast to int ? (long long case) */
2392 /* #ifdef CONFIG_TCC_BCHECK
2393 The main reason to removing this code:
2400 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2401 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2403 When this code is on. then the output looks like
2405 v+(i-j) = 0xbff84000
2407 /* if evaluating constant expression, no code should be
2408 generated, so no bound check */
2409 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2410 /* if bounded pointers, we generate a special code to
2417 gen_bounded_ptr_add();
2423 /* put again type if gen_opic() swaped operands */
2426 } else if (is_float(bt1
) || is_float(bt2
)) {
2427 /* compute bigger type and do implicit casts */
2428 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2430 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2435 /* floats can only be used for a few operations */
2436 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2437 (op
< TOK_ULT
|| op
> TOK_GT
))
2438 tcc_error("invalid operands for binary operation");
2440 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2441 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2442 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2444 t
|= (VT_LONG
& t1
);
2446 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2447 /* cast to biggest op */
2448 t
= VT_LLONG
| VT_LONG
;
2449 if (bt1
== VT_LLONG
)
2451 if (bt2
== VT_LLONG
)
2453 /* convert to unsigned if it does not fit in a long long */
2454 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2455 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2459 /* integer operations */
2460 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2461 /* convert to unsigned if it does not fit in an integer */
2462 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2463 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2466 /* XXX: currently, some unsigned operations are explicit, so
2467 we modify them here */
2468 if (t
& VT_UNSIGNED
) {
2475 else if (op
== TOK_LT
)
2477 else if (op
== TOK_GT
)
2479 else if (op
== TOK_LE
)
2481 else if (op
== TOK_GE
)
2489 /* special case for shifts and long long: we keep the shift as
2491 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2498 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2499 /* relational op: the result is an int */
2500 vtop
->type
.t
= VT_INT
;
2505 // Make sure that we have converted to an rvalue:
2506 if (vtop
->r
& VT_LVAL
)
2507 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2510 #ifndef TCC_TARGET_ARM
2511 /* generic itof for unsigned long long case */
2512 static void gen_cvt_itof1(int t
)
2514 #ifdef TCC_TARGET_ARM64
2517 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2518 (VT_LLONG
| VT_UNSIGNED
)) {
2521 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2522 #if LDOUBLE_SIZE != 8
2523 else if (t
== VT_LDOUBLE
)
2524 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2527 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2531 vtop
->r
= reg_fret(t
);
2539 /* generic ftoi for unsigned long long case */
2540 static void gen_cvt_ftoi1(int t
)
2542 #ifdef TCC_TARGET_ARM64
2547 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2548 /* not handled natively */
2549 st
= vtop
->type
.t
& VT_BTYPE
;
2551 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2552 #if LDOUBLE_SIZE != 8
2553 else if (st
== VT_LDOUBLE
)
2554 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2557 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2562 vtop
->r2
= REG_LRET
;
2569 /* force char or short cast */
2570 static void force_charshort_cast(int t
)
2574 /* cannot cast static initializers */
2575 if (STATIC_DATA_WANTED
)
2579 /* XXX: add optimization if lvalue : just change type and offset */
2584 if (t
& VT_UNSIGNED
) {
2585 vpushi((1 << bits
) - 1);
2588 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2594 /* result must be signed or the SAR is converted to an SHL
2595 This was not the case when "t" was a signed short
2596 and the last value on the stack was an unsigned int */
2597 vtop
->type
.t
&= ~VT_UNSIGNED
;
2603 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2604 static void gen_cast_s(int t
)
2612 static void gen_cast(CType
*type
)
2614 int sbt
, dbt
, sf
, df
, c
, p
;
2616 /* special delayed cast for char/short */
2617 /* XXX: in some cases (multiple cascaded casts), it may still
2619 if (vtop
->r
& VT_MUSTCAST
) {
2620 vtop
->r
&= ~VT_MUSTCAST
;
2621 force_charshort_cast(vtop
->type
.t
);
2624 /* bitfields first get cast to ints */
2625 if (vtop
->type
.t
& VT_BITFIELD
) {
2629 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2630 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2635 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2636 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2637 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2638 c
&= dbt
!= VT_LDOUBLE
;
2641 /* constant case: we can do it now */
2642 /* XXX: in ISOC, cannot do it if error in convert */
2643 if (sbt
== VT_FLOAT
)
2644 vtop
->c
.ld
= vtop
->c
.f
;
2645 else if (sbt
== VT_DOUBLE
)
2646 vtop
->c
.ld
= vtop
->c
.d
;
2649 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2650 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2651 vtop
->c
.ld
= vtop
->c
.i
;
2653 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2655 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2656 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2658 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2661 if (dbt
== VT_FLOAT
)
2662 vtop
->c
.f
= (float)vtop
->c
.ld
;
2663 else if (dbt
== VT_DOUBLE
)
2664 vtop
->c
.d
= (double)vtop
->c
.ld
;
2665 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2666 vtop
->c
.i
= vtop
->c
.ld
;
2667 } else if (sf
&& dbt
== VT_BOOL
) {
2668 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2671 vtop
->c
.i
= vtop
->c
.ld
;
2672 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2674 else if (sbt
& VT_UNSIGNED
)
2675 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2677 else if (sbt
== VT_PTR
)
2680 else if (sbt
!= VT_LLONG
)
2681 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2682 -(vtop
->c
.i
& 0x80000000));
2684 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2686 else if (dbt
== VT_BOOL
)
2687 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2689 else if (dbt
== VT_PTR
)
2692 else if (dbt
!= VT_LLONG
) {
2693 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2694 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2697 if (!(dbt
& VT_UNSIGNED
))
2698 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2701 } else if (p
&& dbt
== VT_BOOL
) {
2705 /* non constant case: generate code */
2707 /* convert from fp to fp */
2710 /* convert int to fp */
2713 /* convert fp to int */
2714 if (dbt
== VT_BOOL
) {
2718 /* we handle char/short/etc... with generic code */
2719 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2720 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2724 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2725 /* additional cast for char/short... */
2731 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2732 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2733 /* scalar to long long */
2734 /* machine independent conversion */
2736 /* generate high word */
2737 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2741 if (sbt
== VT_PTR
) {
2742 /* cast from pointer to int before we apply
2743 shift operation, which pointers don't support*/
2750 /* patch second register */
2751 vtop
[-1].r2
= vtop
->r
;
2755 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2756 (dbt
& VT_BTYPE
) == VT_PTR
||
2757 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2758 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2759 (sbt
& VT_BTYPE
) != VT_PTR
&&
2760 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2761 /* need to convert from 32bit to 64bit */
2763 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2764 #if defined(TCC_TARGET_ARM64)
2766 #elif defined(TCC_TARGET_X86_64)
2768 /* x86_64 specific: movslq */
2770 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2777 } else if (dbt
== VT_BOOL
) {
2778 /* scalar to bool */
2781 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2782 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2783 if (sbt
== VT_PTR
) {
2784 vtop
->type
.t
= VT_INT
;
2785 tcc_warning("nonportable conversion from pointer to char/short");
2787 force_charshort_cast(dbt
);
2788 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2790 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2792 /* from long long: just take low order word */
2797 vtop
->type
.t
|= VT_UNSIGNED
;
2801 /* if lvalue and single word type, nothing to do because
2802 the lvalue already contains the real type size (see
2803 VT_LVAL_xxx constants) */
2806 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2807 /* if we are casting between pointer types,
2808 we must update the VT_LVAL_xxx size */
2809 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2810 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2813 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
2816 /* return type size as known at compile time. Put alignment at 'a' */
2817 ST_FUNC
int type_size(CType
*type
, int *a
)
2822 bt
= type
->t
& VT_BTYPE
;
2823 if (bt
== VT_STRUCT
) {
2828 } else if (bt
== VT_PTR
) {
2829 if (type
->t
& VT_ARRAY
) {
2833 ts
= type_size(&s
->type
, a
);
2835 if (ts
< 0 && s
->c
< 0)
2843 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
2844 return -1; /* incomplete enum */
2845 } else if (bt
== VT_LDOUBLE
) {
2847 return LDOUBLE_SIZE
;
2848 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2849 #ifdef TCC_TARGET_I386
2850 #ifdef TCC_TARGET_PE
2855 #elif defined(TCC_TARGET_ARM)
2865 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2868 } else if (bt
== VT_SHORT
) {
2871 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2875 /* char, void, function, _Bool */
/* Push the size of 'type' on top of the value stack, and store its
   alignment through 'a'. For a VLA the size lives in a local variable
   computed at declaration time (offset type->ref->c), so an lvalue
   referencing that slot is pushed; otherwise the compile-time constant
   from type_size() is pushed. */
ST_FUNC void vla_runtime_type_size(CType *type, int *a)
{
    if (type->t & VT_VLA) {
        /* alignment comes from the element type; the size itself is
           read at runtime from the stack slot holding it */
        type_size(&type->ref->type, a);
        vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
    } else {
        vpushi(type_size(type, a));
    }
}
/* Restore the stack pointer from the innermost VLA save slot
   (vla_sp_loc), but only when VLAs are actually in scope. */
static void vla_sp_restore(void) {
    if (vlas_in_scope) {
        gen_vla_sp_restore(vla_sp_loc);
    }
}
/* Restore the stack pointer to its value before ANY VLA was pushed
   (vla_sp_root_loc) — used e.g. when leaving all VLA scopes at once. */
static void vla_sp_restore_root(void) {
    if (vlas_in_scope) {
        gen_vla_sp_restore(vla_sp_root_loc);
    }
}
/* Return the type pointed to by pointer/array type 'type'.
   For pointers the referenced Sym stores the target type. */
static inline CType *pointed_type(CType *type)
{
    return &type->ref->type;
}
/* Modify 'type' in place so that it becomes a pointer to the original
   type. The original type is pushed as an anonymous SYM_FIELD symbol
   that the new pointer type references; storage-class bits are kept. */
ST_FUNC void mk_pointer(CType *type)
{
    Sym *s;
    s = sym_push(SYM_FIELD, type, 0, -1);
    type->t = VT_PTR | (type->t & VT_STORAGE);
    /* NOTE(review): this assignment was lost in the extracted text and
       restored from the surrounding logic (the pushed Sym must become
       the pointer's referenced type) — confirm against upstream. */
    type->ref = s;
}
2920 /* compare function types. OLD functions match any new functions */
2921 static int is_compatible_func(CType
*type1
, CType
*type2
)
2927 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2929 if (s1
->f
.func_type
!= s2
->f
.func_type
2930 && s1
->f
.func_type
!= FUNC_OLD
2931 && s2
->f
.func_type
!= FUNC_OLD
)
2933 /* we should check the function return type for FUNC_OLD too
2934 but that causes problems with the internally used support
2935 functions such as TOK_memmove */
2936 if (s1
->f
.func_type
== FUNC_OLD
&& !s1
->next
)
2938 if (s2
->f
.func_type
== FUNC_OLD
&& !s2
->next
)
2941 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2952 /* return true if type1 and type2 are the same. If unqualified is
2953 true, qualifiers on the types are ignored.
2955 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2959 t1
= type1
->t
& VT_TYPE
;
2960 t2
= type2
->t
& VT_TYPE
;
2962 /* strip qualifiers before comparing */
2963 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2964 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2967 /* Default Vs explicit signedness only matters for char */
2968 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2972 /* XXX: bitfields ? */
2977 && !(type1
->ref
->c
< 0
2978 || type2
->ref
->c
< 0
2979 || type1
->ref
->c
== type2
->ref
->c
))
2982 /* test more complicated cases */
2983 bt1
= t1
& VT_BTYPE
;
2984 if (bt1
== VT_PTR
) {
2985 type1
= pointed_type(type1
);
2986 type2
= pointed_type(type2
);
2987 return is_compatible_types(type1
, type2
);
2988 } else if (bt1
== VT_STRUCT
) {
2989 return (type1
->ref
== type2
->ref
);
2990 } else if (bt1
== VT_FUNC
) {
2991 return is_compatible_func(type1
, type2
);
2992 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
2993 return type1
->ref
== type2
->ref
;
/* Return true if type1 and type2 are exactly the same, including
   qualifiers (thin wrapper over compare_types with unqualified=0). */
static int is_compatible_types(CType *type1, CType *type2)
{
    return compare_types(type1,type2,0);
}
/* Return true if type1 and type2 are the same, ignoring const/volatile
   qualifiers (thin wrapper over compare_types with unqualified=1). */
static int is_compatible_unqualified_types(CType *type1, CType *type2)
{
    return compare_types(type1,type2,1);
}
3014 /* print a type. If 'varstr' is not NULL, then the variable is also
3015 printed in the type */
3017 /* XXX: add array and function pointers */
3018 static void type_to_str(char *buf
, int buf_size
,
3019 CType
*type
, const char *varstr
)
3031 pstrcat(buf
, buf_size
, "extern ");
3033 pstrcat(buf
, buf_size
, "static ");
3035 pstrcat(buf
, buf_size
, "typedef ");
3037 pstrcat(buf
, buf_size
, "inline ");
3038 if (t
& VT_VOLATILE
)
3039 pstrcat(buf
, buf_size
, "volatile ");
3040 if (t
& VT_CONSTANT
)
3041 pstrcat(buf
, buf_size
, "const ");
3043 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
3044 || ((t
& VT_UNSIGNED
)
3045 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
3048 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
3050 buf_size
-= strlen(buf
);
3085 tstr
= "long double";
3087 pstrcat(buf
, buf_size
, tstr
);
3094 pstrcat(buf
, buf_size
, tstr
);
3095 v
= type
->ref
->v
& ~SYM_STRUCT
;
3096 if (v
>= SYM_FIRST_ANOM
)
3097 pstrcat(buf
, buf_size
, "<anonymous>");
3099 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3104 if (varstr
&& '*' == *varstr
) {
3105 pstrcat(buf1
, sizeof(buf1
), "(");
3106 pstrcat(buf1
, sizeof(buf1
), varstr
);
3107 pstrcat(buf1
, sizeof(buf1
), ")");
3109 pstrcat(buf1
, buf_size
, "(");
3111 while (sa
!= NULL
) {
3113 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3114 pstrcat(buf1
, sizeof(buf1
), buf2
);
3117 pstrcat(buf1
, sizeof(buf1
), ", ");
3119 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3120 pstrcat(buf1
, sizeof(buf1
), ", ...");
3121 pstrcat(buf1
, sizeof(buf1
), ")");
3122 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3127 if (varstr
&& '*' == *varstr
)
3128 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3130 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3131 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3134 pstrcpy(buf1
, sizeof(buf1
), "*");
3135 if (t
& VT_CONSTANT
)
3136 pstrcat(buf1
, buf_size
, "const ");
3137 if (t
& VT_VOLATILE
)
3138 pstrcat(buf1
, buf_size
, "volatile ");
3140 pstrcat(buf1
, sizeof(buf1
), varstr
);
3141 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3145 pstrcat(buf
, buf_size
, " ");
3146 pstrcat(buf
, buf_size
, varstr
);
3151 /* verify type compatibility to store vtop in 'dt' type, and generate
3153 static void gen_assign_cast(CType
*dt
)
3155 CType
*st
, *type1
, *type2
;
3156 char buf1
[256], buf2
[256];
3157 int dbt
, sbt
, qualwarn
, lvl
;
3159 st
= &vtop
->type
; /* source type */
3160 dbt
= dt
->t
& VT_BTYPE
;
3161 sbt
= st
->t
& VT_BTYPE
;
3162 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3163 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3164 ; /* It is Ok if both are void */
3166 tcc_error("cannot cast from/to void");
3168 if (dt
->t
& VT_CONSTANT
)
3169 tcc_warning("assignment of read-only location");
3172 /* special cases for pointers */
3173 /* '0' can also be a pointer */
3174 if (is_null_pointer(vtop
))
3176 /* accept implicit pointer to integer cast with warning */
3177 if (is_integer_btype(sbt
)) {
3178 tcc_warning("assignment makes pointer from integer without a cast");
3181 type1
= pointed_type(dt
);
3183 type2
= pointed_type(st
);
3184 else if (sbt
== VT_FUNC
)
3185 type2
= st
; /* a function is implicitly a function pointer */
3188 if (is_compatible_types(type1
, type2
))
3190 for (qualwarn
= lvl
= 0;; ++lvl
) {
3191 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3192 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3194 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3195 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3196 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3198 type1
= pointed_type(type1
);
3199 type2
= pointed_type(type2
);
3201 if (!is_compatible_unqualified_types(type1
, type2
)) {
3202 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3203 /* void * can match anything */
3204 } else if (dbt
== sbt
3205 && is_integer_btype(sbt
& VT_BTYPE
)
3206 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3207 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3208 /* Like GCC don't warn by default for merely changes
3209 in pointer target signedness. Do warn for different
3210 base types, though, in particular for unsigned enums
3211 and signed int targets. */
3213 tcc_warning("assignment from incompatible pointer type");
3218 tcc_warning("assignment discards qualifiers from pointer target type");
3224 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3225 tcc_warning("assignment makes integer from pointer without a cast");
3226 } else if (sbt
== VT_STRUCT
) {
3227 goto case_VT_STRUCT
;
3229 /* XXX: more tests */
3233 if (!is_compatible_unqualified_types(dt
, st
)) {
3235 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3236 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3237 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3244 /* store vtop in lvalue pushed on stack */
3245 ST_FUNC
void vstore(void)
3247 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3249 ft
= vtop
[-1].type
.t
;
3250 sbt
= vtop
->type
.t
& VT_BTYPE
;
3251 dbt
= ft
& VT_BTYPE
;
3252 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3253 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3254 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3255 /* optimize char/short casts */
3256 delayed_cast
= VT_MUSTCAST
;
3257 vtop
->type
.t
= ft
& VT_TYPE
;
3258 /* XXX: factorize */
3259 if (ft
& VT_CONSTANT
)
3260 tcc_warning("assignment of read-only location");
3263 if (!(ft
& VT_BITFIELD
))
3264 gen_assign_cast(&vtop
[-1].type
);
3267 if (sbt
== VT_STRUCT
) {
3268 /* if structure, only generate pointer */
3269 /* structure assignment : generate memcpy */
3270 /* XXX: optimize if small size */
3271 size
= type_size(&vtop
->type
, &align
);
3275 vtop
->type
.t
= VT_PTR
;
3278 /* address of memcpy() */
3281 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3282 else if(!(align
& 3))
3283 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3286 /* Use memmove, rather than memcpy, as dest and src may be same: */
3287 vpush_global_sym(&func_old_type
, TOK_memmove
);
3292 vtop
->type
.t
= VT_PTR
;
3298 /* leave source on stack */
3299 } else if (ft
& VT_BITFIELD
) {
3300 /* bitfield store handling */
3302 /* save lvalue as expression result (example: s.b = s.a = n;) */
3303 vdup(), vtop
[-1] = vtop
[-2];
3305 bit_pos
= BIT_POS(ft
);
3306 bit_size
= BIT_SIZE(ft
);
3307 /* remove bit field info to avoid loops */
3308 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3310 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3311 gen_cast(&vtop
[-1].type
);
3312 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3315 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3316 if (r
== VT_STRUCT
) {
3317 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3318 store_packed_bf(bit_pos
, bit_size
);
3320 unsigned long long mask
= (1ULL << bit_size
) - 1;
3321 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3323 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3326 vpushi((unsigned)mask
);
3333 /* duplicate destination */
3336 /* load destination, mask and or with source */
3337 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3338 vpushll(~(mask
<< bit_pos
));
3340 vpushi(~((unsigned)mask
<< bit_pos
));
3345 /* ... and discard */
3348 } else if (dbt
== VT_VOID
) {
3351 #ifdef CONFIG_TCC_BCHECK
3352 /* bound check case */
3353 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3362 #ifdef TCC_TARGET_X86_64
3363 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3365 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3370 r
= gv(rc
); /* generate value */
3371 /* if lvalue was saved on stack, must read it */
3372 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3374 t
= get_reg(RC_INT
);
3380 sv
.r
= VT_LOCAL
| VT_LVAL
;
3381 sv
.c
.i
= vtop
[-1].c
.i
;
3383 vtop
[-1].r
= t
| VT_LVAL
;
3385 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3387 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3388 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3390 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3391 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3393 vtop
[-1].type
.t
= load_type
;
3396 /* convert to int to increment easily */
3397 vtop
->type
.t
= addr_type
;
3403 vtop
[-1].type
.t
= load_type
;
3404 /* XXX: it works because r2 is spilled last ! */
3405 store(vtop
->r2
, vtop
- 1);
3411 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3412 vtop
->r
|= delayed_cast
;
3416 /* post defines POST/PRE add. c is the token ++ or -- */
3417 ST_FUNC
void inc(int post
, int c
)
3420 vdup(); /* save lvalue */
3422 gv_dup(); /* duplicate value */
3427 vpushi(c
- TOK_MID
);
3429 vstore(); /* store value */
3431 vpop(); /* if post op, return saved value */
3434 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3436 /* read the string */
3440 while (tok
== TOK_STR
) {
3441 /* XXX: add \0 handling too ? */
3442 cstr_cat(astr
, tokc
.str
.data
, -1);
3445 cstr_ccat(astr
, '\0');
3448 /* If I is >= 1 and a power of two, returns log2(i)+1.
3449 If I is 0 returns 0. */
3450 static int exact_log2p1(int i
)
3455 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3466 /* Parse __attribute__((...)) GNUC extension. */
3467 static void parse_attribute(AttributeDef
*ad
)
3473 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3478 while (tok
!= ')') {
3479 if (tok
< TOK_IDENT
)
3480 expect("attribute name");
3492 tcc_warning("implicit declaration of function '%s'",
3493 get_tok_str(tok
, &tokc
));
3494 s
= external_global_sym(tok
, &func_old_type
);
3496 ad
->cleanup_func
= s
;
3504 parse_mult_str(&astr
, "section name");
3505 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3512 parse_mult_str(&astr
, "alias(\"target\")");
3513 ad
->alias_target
= /* save string as token, for later */
3514 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3518 case TOK_VISIBILITY1
:
3519 case TOK_VISIBILITY2
:
3521 parse_mult_str(&astr
,
3522 "visibility(\"default|hidden|internal|protected\")");
3523 if (!strcmp (astr
.data
, "default"))
3524 ad
->a
.visibility
= STV_DEFAULT
;
3525 else if (!strcmp (astr
.data
, "hidden"))
3526 ad
->a
.visibility
= STV_HIDDEN
;
3527 else if (!strcmp (astr
.data
, "internal"))
3528 ad
->a
.visibility
= STV_INTERNAL
;
3529 else if (!strcmp (astr
.data
, "protected"))
3530 ad
->a
.visibility
= STV_PROTECTED
;
3532 expect("visibility(\"default|hidden|internal|protected\")");
3541 if (n
<= 0 || (n
& (n
- 1)) != 0)
3542 tcc_error("alignment must be a positive power of two");
3547 ad
->a
.aligned
= exact_log2p1(n
);
3548 if (n
!= 1 << (ad
->a
.aligned
- 1))
3549 tcc_error("alignment of %d is larger than implemented", n
);
3561 /* currently, no need to handle it because tcc does not
3562 track unused objects */
3566 ad
->f
.func_noreturn
= 1;
3571 ad
->f
.func_call
= FUNC_CDECL
;
3576 ad
->f
.func_call
= FUNC_STDCALL
;
3578 #ifdef TCC_TARGET_I386
3588 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3594 ad
->f
.func_call
= FUNC_FASTCALLW
;
3601 ad
->attr_mode
= VT_LLONG
+ 1;
3604 ad
->attr_mode
= VT_BYTE
+ 1;
3607 ad
->attr_mode
= VT_SHORT
+ 1;
3611 ad
->attr_mode
= VT_INT
+ 1;
3614 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3621 ad
->a
.dllexport
= 1;
3623 case TOK_NODECORATE
:
3624 ad
->a
.nodecorate
= 1;
3627 ad
->a
.dllimport
= 1;
3630 if (tcc_state
->warn_unsupported
)
3631 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3632 /* skip parameters */
3634 int parenthesis
= 0;
3638 else if (tok
== ')')
3641 } while (parenthesis
&& tok
!= -1);
3654 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
3658 while ((s
= s
->next
) != NULL
) {
3659 if ((s
->v
& SYM_FIELD
) &&
3660 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3661 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3662 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
3674 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3676 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3677 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3678 int pcc
= !tcc_state
->ms_bitfields
;
3679 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3686 prevbt
= VT_STRUCT
; /* make it never match */
3691 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3692 if (f
->type
.t
& VT_BITFIELD
)
3693 bit_size
= BIT_SIZE(f
->type
.t
);
3696 size
= type_size(&f
->type
, &align
);
3697 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3700 if (pcc
&& bit_size
== 0) {
3701 /* in pcc mode, packing does not affect zero-width bitfields */
3704 /* in pcc mode, attribute packed overrides if set. */
3705 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3708 /* pragma pack overrides align if lesser and packs bitfields always */
3711 if (pragma_pack
< align
)
3712 align
= pragma_pack
;
3713 /* in pcc mode pragma pack also overrides individual align */
3714 if (pcc
&& pragma_pack
< a
)
3718 /* some individual align was specified */
3722 if (type
->ref
->type
.t
== VT_UNION
) {
3723 if (pcc
&& bit_size
>= 0)
3724 size
= (bit_size
+ 7) >> 3;
3729 } else if (bit_size
< 0) {
3731 c
+= (bit_pos
+ 7) >> 3;
3732 c
= (c
+ align
- 1) & -align
;
3741 /* A bit-field. Layout is more complicated. There are two
3742 options: PCC (GCC) compatible and MS compatible */
3744 /* In PCC layout a bit-field is placed adjacent to the
3745 preceding bit-fields, except if:
3747 - an individual alignment was given
3748 - it would overflow its base type container and
3749 there is no packing */
3750 if (bit_size
== 0) {
3752 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3754 } else if (f
->a
.aligned
) {
3756 } else if (!packed
) {
3758 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3759 if (ofs
> size
/ align
)
3763 /* in pcc mode, long long bitfields have type int if they fit */
3764 if (size
== 8 && bit_size
<= 32)
3765 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3767 while (bit_pos
>= align
* 8)
3768 c
+= align
, bit_pos
-= align
* 8;
3771 /* In PCC layout named bit-fields influence the alignment
3772 of the containing struct using the base types alignment,
3773 except for packed fields (which here have correct align). */
3774 if (f
->v
& SYM_FIRST_ANOM
3775 // && bit_size // ??? gcc on ARM/rpi does that
3780 bt
= f
->type
.t
& VT_BTYPE
;
3781 if ((bit_pos
+ bit_size
> size
* 8)
3782 || (bit_size
> 0) == (bt
!= prevbt
)
3784 c
= (c
+ align
- 1) & -align
;
3787 /* In MS bitfield mode a bit-field run always uses
3788 at least as many bits as the underlying type.
3789 To start a new run it's also required that this
3790 or the last bit-field had non-zero width. */
3791 if (bit_size
|| prev_bit_size
)
3794 /* In MS layout the records alignment is normally
3795 influenced by the field, except for a zero-width
3796 field at the start of a run (but by further zero-width
3797 fields it is again). */
3798 if (bit_size
== 0 && prevbt
!= bt
)
3801 prev_bit_size
= bit_size
;
3804 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3805 | (bit_pos
<< VT_STRUCT_SHIFT
);
3806 bit_pos
+= bit_size
;
3808 if (align
> maxalign
)
3812 printf("set field %s offset %-2d size %-2d align %-2d",
3813 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3814 if (f
->type
.t
& VT_BITFIELD
) {
3815 printf(" pos %-2d bits %-2d",
3828 c
+= (bit_pos
+ 7) >> 3;
3830 /* store size and alignment */
3831 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3835 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3836 /* can happen if individual align for some member was given. In
3837 this case MSVC ignores maxalign when aligning the size */
3842 c
= (c
+ a
- 1) & -a
;
3846 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3849 /* check whether we can access bitfields by their type */
3850 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3854 if (0 == (f
->type
.t
& VT_BITFIELD
))
3858 bit_size
= BIT_SIZE(f
->type
.t
);
3861 bit_pos
= BIT_POS(f
->type
.t
);
3862 size
= type_size(&f
->type
, &align
);
3863 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3866 /* try to access the field using a different type */
3867 c0
= -1, s
= align
= 1;
3869 px
= f
->c
* 8 + bit_pos
;
3870 cx
= (px
>> 3) & -align
;
3871 px
= px
- (cx
<< 3);
3874 s
= (px
+ bit_size
+ 7) >> 3;
3884 s
= type_size(&t
, &align
);
3888 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3889 /* update offset and bit position */
3892 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3893 | (bit_pos
<< VT_STRUCT_SHIFT
);
3897 printf("FIX field %s offset %-2d size %-2d align %-2d "
3898 "pos %-2d bits %-2d\n",
3899 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3900 cx
, s
, align
, px
, bit_size
);
3903 /* fall back to load/store single-byte wise */
3904 f
->auxtype
= VT_STRUCT
;
3906 printf("FIX field %s : load byte-wise\n",
3907 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3913 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3914 static void struct_decl(CType
*type
, int u
)
3916 int v
, c
, size
, align
, flexible
;
3917 int bit_size
, bsize
, bt
;
3919 AttributeDef ad
, ad1
;
3922 memset(&ad
, 0, sizeof ad
);
3924 parse_attribute(&ad
);
3928 /* struct already defined ? return it */
3930 expect("struct/union/enum name");
3932 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3935 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3937 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3942 /* Record the original enum/struct/union token. */
3943 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3945 /* we put an undefined size for struct/union */
3946 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3947 s
->r
= 0; /* default alignment is zero as gcc */
3949 type
->t
= s
->type
.t
;
3955 tcc_error("struct/union/enum already defined");
3957 /* cannot be empty */
3958 /* non empty enums are not allowed */
3961 long long ll
= 0, pl
= 0, nl
= 0;
3964 /* enum symbols have static storage */
3965 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3969 expect("identifier");
3971 if (ss
&& !local_stack
)
3972 tcc_error("redefinition of enumerator '%s'",
3973 get_tok_str(v
, NULL
));
3977 ll
= expr_const64();
3979 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3981 *ps
= ss
, ps
= &ss
->next
;
3990 /* NOTE: we accept a trailing comma */
3995 /* set integral type of the enum */
3998 if (pl
!= (unsigned)pl
)
3999 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4001 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4002 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4003 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4005 /* set type for enum members */
4006 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4008 if (ll
== (int)ll
) /* default is int if it fits */
4010 if (t
.t
& VT_UNSIGNED
) {
4011 ss
->type
.t
|= VT_UNSIGNED
;
4012 if (ll
== (unsigned)ll
)
4015 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4016 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4021 while (tok
!= '}') {
4022 if (!parse_btype(&btype
, &ad1
)) {
4028 tcc_error("flexible array member '%s' not at the end of struct",
4029 get_tok_str(v
, NULL
));
4035 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4037 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4038 expect("identifier");
4040 int v
= btype
.ref
->v
;
4041 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4042 if (tcc_state
->ms_extensions
== 0)
4043 expect("identifier");
4047 if (type_size(&type1
, &align
) < 0) {
4048 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4051 tcc_error("field '%s' has incomplete type",
4052 get_tok_str(v
, NULL
));
4054 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4055 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4056 (type1
.t
& VT_STORAGE
))
4057 tcc_error("invalid type for '%s'",
4058 get_tok_str(v
, NULL
));
4062 bit_size
= expr_const();
4063 /* XXX: handle v = 0 case for messages */
4065 tcc_error("negative width in bit-field '%s'",
4066 get_tok_str(v
, NULL
));
4067 if (v
&& bit_size
== 0)
4068 tcc_error("zero width for bit-field '%s'",
4069 get_tok_str(v
, NULL
));
4070 parse_attribute(&ad1
);
4072 size
= type_size(&type1
, &align
);
4073 if (bit_size
>= 0) {
4074 bt
= type1
.t
& VT_BTYPE
;
4080 tcc_error("bitfields must have scalar type");
4082 if (bit_size
> bsize
) {
4083 tcc_error("width of '%s' exceeds its type",
4084 get_tok_str(v
, NULL
));
4085 } else if (bit_size
== bsize
4086 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4087 /* no need for bit fields */
4089 } else if (bit_size
== 64) {
4090 tcc_error("field width 64 not implemented");
4092 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4094 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4097 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4098 /* Remember we've seen a real field to check
4099 for placement of flexible array member. */
4102 /* If member is a struct or bit-field, enforce
4103 placing into the struct (as anonymous). */
4105 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4110 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4115 if (tok
== ';' || tok
== TOK_EOF
)
4122 parse_attribute(&ad
);
4123 struct_layout(type
, &ad
);
4128 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4130 merge_symattr(&ad
->a
, &s
->a
);
4131 merge_funcattr(&ad
->f
, &s
->f
);
4134 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4135 are added to the element type, copied because it could be a typedef. */
4136 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4138 while (type
->t
& VT_ARRAY
) {
4139 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4140 type
= &type
->ref
->type
;
4142 type
->t
|= qualifiers
;
4145 /* return 0 if no type declaration. otherwise, return the basic type
4148 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4150 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4154 memset(ad
, 0, sizeof(AttributeDef
));
4164 /* currently, we really ignore extension */
4174 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4175 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4176 tmbt
: tcc_error("too many basic types");
4179 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4184 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4201 memset(&ad1
, 0, sizeof(AttributeDef
));
4202 if (parse_btype(&type1
, &ad1
)) {
4203 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4205 n
= 1 << (ad1
.a
.aligned
- 1);
4207 type_size(&type1
, &n
);
4210 if (n
<= 0 || (n
& (n
- 1)) != 0)
4211 tcc_error("alignment must be a positive power of two");
4214 ad
->a
.aligned
= exact_log2p1(n
);
4218 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4219 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4220 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4221 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4228 #ifdef TCC_TARGET_ARM64
4230 /* GCC's __uint128_t appears in some Linux header files. Make it a
4231 synonym for long double to get the size and alignment right. */
4242 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4243 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4251 struct_decl(&type1
, VT_ENUM
);
4254 type
->ref
= type1
.ref
;
4257 struct_decl(&type1
, VT_STRUCT
);
4260 struct_decl(&type1
, VT_UNION
);
4263 /* type modifiers */
4268 parse_btype_qualify(type
, VT_CONSTANT
);
4276 parse_btype_qualify(type
, VT_VOLATILE
);
4283 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4284 tcc_error("signed and unsigned modifier");
4297 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4298 tcc_error("signed and unsigned modifier");
4299 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4315 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4316 tcc_error("multiple storage classes");
4327 /* currently, no need to handle it because tcc does not
4328 track unused objects */
4331 /* GNUC attribute */
4332 case TOK_ATTRIBUTE1
:
4333 case TOK_ATTRIBUTE2
:
4334 parse_attribute(ad
);
4335 if (ad
->attr_mode
) {
4336 u
= ad
->attr_mode
-1;
4337 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4345 parse_expr_type(&type1
);
4346 /* remove all storage modifiers except typedef */
4347 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4349 sym_to_attr(ad
, type1
.ref
);
4355 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4359 if (tok
== ':' && !in_generic
) {
4360 /* ignore if it's a label */
4365 t
&= ~(VT_BTYPE
|VT_LONG
);
4366 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4367 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4368 type
->ref
= s
->type
.ref
;
4370 parse_btype_qualify(type
, t
);
4372 /* get attributes from typedef */
4381 if (tcc_state
->char_is_unsigned
) {
4382 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4385 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4386 bt
= t
& (VT_BTYPE
|VT_LONG
);
4388 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4389 #ifdef TCC_TARGET_PE
4390 if (bt
== VT_LDOUBLE
)
4391 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4397 /* convert a function parameter type (array to pointer and function to
4398 function pointer) */
4399 static inline void convert_parameter_type(CType
*pt
)
4401 /* remove const and volatile qualifiers (XXX: const could be used
4402 to indicate a const function parameter */
4403 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4404 /* array must be transformed to pointer according to ANSI C */
4406 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4411 ST_FUNC
void parse_asm_str(CString
*astr
)
4414 parse_mult_str(astr
, "string constant");
4417 /* Parse an asm label and return the token */
4418 static int asm_label_instr(void)
4424 parse_asm_str(&astr
);
4427 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4429 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4434 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4436 int n
, l
, t1
, arg_size
, align
, unused_align
;
4437 Sym
**plast
, *s
, *first
;
4442 /* function type, or recursive declarator (return if so) */
4444 if (td
&& !(td
& TYPE_ABSTRACT
))
4448 else if (parse_btype(&pt
, &ad1
))
4451 merge_attr (ad
, &ad1
);
4460 /* read param name and compute offset */
4461 if (l
!= FUNC_OLD
) {
4462 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4464 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4465 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4466 tcc_error("parameter declared as void");
4470 expect("identifier");
4471 pt
.t
= VT_VOID
; /* invalid type */
4474 convert_parameter_type(&pt
);
4475 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4476 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4482 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4487 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4488 tcc_error("invalid type");
4491 /* if no parameters, then old type prototype */
4494 /* NOTE: const is ignored in returned type as it has a special
4495 meaning in gcc / C++ */
4496 type
->t
&= ~VT_CONSTANT
;
4497 /* some ancient pre-K&R C allows a function to return an array
4498 and the array brackets to be put after the arguments, such
4499 that "int c()[]" means something like "int[] c()" */
4502 skip(']'); /* only handle simple "[]" */
4505 /* we push a anonymous symbol which will contain the function prototype */
4506 ad
->f
.func_args
= arg_size
;
4507 ad
->f
.func_type
= l
;
4508 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4514 } else if (tok
== '[') {
4515 int saved_nocode_wanted
= nocode_wanted
;
4516 /* array definition */
4519 /* XXX The optional type-quals and static should only be accepted
4520 in parameter decls. The '*' as well, and then even only
4521 in prototypes (not function defs). */
4523 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
4538 if (!local_stack
|| (storage
& VT_STATIC
))
4539 vpushi(expr_const());
4541 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4542 length must always be evaluated, even under nocode_wanted,
4543 so that its size slot is initialized (e.g. under sizeof
4548 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4551 tcc_error("invalid array size");
4553 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4554 tcc_error("size of variable length array should be an integer");
4560 /* parse next post type */
4561 post_type(type
, ad
, storage
, 0);
4563 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
4564 tcc_error("declaration of an array of functions");
4565 if ((type
->t
& VT_BTYPE
) == VT_VOID
4566 || type_size(type
, &unused_align
) < 0)
4567 tcc_error("declaration of an array of incomplete type elements");
4569 t1
|= type
->t
& VT_VLA
;
4573 tcc_error("need explicit inner array size in VLAs");
4574 loc
-= type_size(&int_type
, &align
);
4578 vla_runtime_type_size(type
, &align
);
4580 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4586 nocode_wanted
= saved_nocode_wanted
;
4588 /* we push an anonymous symbol which will contain the array
4590 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4591 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4597 /* Parse a type declarator (except basic type), and return the type
4598 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4599 expected. 'type' should contain the basic type. 'ad' is the
4600 attribute definition of the basic type. It can be modified by
4601 type_decl(). If this (possibly abstract) declarator is a pointer chain
4602 it returns the innermost pointed to type (equals *type, but is a different
4603 pointer), otherwise returns type itself, that's used for recursive calls. */
4604 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4607 int qualifiers
, storage
;
4609 /* recursive type, remove storage bits first, apply them later again */
4610 storage
= type
->t
& VT_STORAGE
;
4611 type
->t
&= ~VT_STORAGE
;
4614 while (tok
== '*') {
4622 qualifiers
|= VT_CONSTANT
;
4627 qualifiers
|= VT_VOLATILE
;
4633 /* XXX: clarify attribute handling */
4634 case TOK_ATTRIBUTE1
:
4635 case TOK_ATTRIBUTE2
:
4636 parse_attribute(ad
);
4640 type
->t
|= qualifiers
;
4642 /* innermost pointed to type is the one for the first derivation */
4643 ret
= pointed_type(type
);
4647 /* This is possibly a parameter type list for abstract declarators
4648 ('int ()'), use post_type for testing this. */
4649 if (!post_type(type
, ad
, 0, td
)) {
4650 /* It's not, so it's a nested declarator, and the post operations
4651 apply to the innermost pointed to type (if any). */
4652 /* XXX: this is not correct to modify 'ad' at this point, but
4653 the syntax is not clear */
4654 parse_attribute(ad
);
4655 post
= type_decl(type
, ad
, v
, td
);
4659 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4660 /* type identifier */
4665 if (!(td
& TYPE_ABSTRACT
))
4666 expect("identifier");
4669 post_type(post
, ad
, storage
, 0);
4670 parse_attribute(ad
);
4675 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4676 ST_FUNC
int lvalue_type(int t
)
4681 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4683 else if (bt
== VT_SHORT
)
4687 if (t
& VT_UNSIGNED
)
4688 r
|= VT_LVAL_UNSIGNED
;
4692 /* indirection with full error checking and bound check */
4693 ST_FUNC
void indir(void)
4695 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4696 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4700 if (vtop
->r
& VT_LVAL
)
4702 vtop
->type
= *pointed_type(&vtop
->type
);
4703 /* Arrays and functions are never lvalues */
4704 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4705 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4706 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4707 /* if bound checking, the referenced pointer must be checked */
4708 #ifdef CONFIG_TCC_BCHECK
4709 if (tcc_state
->do_bounds_check
)
4710 vtop
->r
|= VT_MUSTBOUND
;
4715 /* pass a parameter to a function and do type checking and casting */
4716 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4721 func_type
= func
->f
.func_type
;
4722 if (func_type
== FUNC_OLD
||
4723 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4724 /* default casting : only need to convert float to double */
4725 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4726 gen_cast_s(VT_DOUBLE
);
4727 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4728 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4729 type
.ref
= vtop
->type
.ref
;
4732 } else if (arg
== NULL
) {
4733 tcc_error("too many arguments to function");
4736 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4737 gen_assign_cast(&type
);
4741 /* parse an expression and return its type without any side effect. */
4742 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4751 /* parse an expression of the form '(type)' or '(expr)' and return its
4753 static void parse_expr_type(CType
*type
)
4759 if (parse_btype(type
, &ad
)) {
4760 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4762 expr_type(type
, gexpr
);
4767 static void parse_type(CType
*type
)
4772 if (!parse_btype(type
, &ad
)) {
4775 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4778 static void parse_builtin_params(int nc
, const char *args
)
4785 while ((c
= *args
++)) {
4789 case 'e': expr_eq(); continue;
4790 case 't': parse_type(&t
); vpush(&t
); continue;
4791 default: tcc_error("internal error"); break;
4799 static void try_call_scope_cleanup(Sym
*stop
)
4801 Sym
*cls
= current_cleanups
;
4803 for (; cls
!= stop
; cls
= cls
->ncl
) {
4804 Sym
*fs
= cls
->next
;
4805 Sym
*vs
= cls
->prev_tok
;
4807 vpushsym(&fs
->type
, fs
);
4808 vset(&vs
->type
, vs
->r
, vs
->c
);
4810 mk_pointer(&vtop
->type
);
4816 static void try_call_cleanup_goto(Sym
*cleanupstate
)
4821 if (!current_cleanups
)
4824 /* search NCA of both cleanup chains given parents and initial depth */
4825 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
4826 for (ccd
= ncleanups
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
4828 for (cc
= current_cleanups
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
4830 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
4833 try_call_scope_cleanup(cc
);
4836 ST_FUNC
void unary(void)
4838 int n
, t
, align
, size
, r
, sizeof_caller
;
4843 sizeof_caller
= in_sizeof
;
4846 /* XXX: GCC 2.95.3 does not generate a table although it should be
4854 #ifdef TCC_TARGET_PE
4855 t
= VT_SHORT
|VT_UNSIGNED
;
4863 vsetc(&type
, VT_CONST
, &tokc
);
4867 t
= VT_INT
| VT_UNSIGNED
;
4873 t
= VT_LLONG
| VT_UNSIGNED
;
4885 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4888 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4890 case TOK___FUNCTION__
:
4892 goto tok_identifier
;
4898 /* special function name identifier */
4899 len
= strlen(funcname
) + 1;
4900 /* generate char[len] type */
4905 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4906 if (!NODATA_WANTED
) {
4907 ptr
= section_ptr_add(data_section
, len
);
4908 memcpy(ptr
, funcname
, len
);
4914 #ifdef TCC_TARGET_PE
4915 t
= VT_SHORT
| VT_UNSIGNED
;
4921 /* string parsing */
4923 if (tcc_state
->char_is_unsigned
)
4924 t
= VT_BYTE
| VT_UNSIGNED
;
4926 if (tcc_state
->warn_write_strings
)
4931 memset(&ad
, 0, sizeof(AttributeDef
));
4932 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4937 if (parse_btype(&type
, &ad
)) {
4938 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4940 /* check ISOC99 compound literal */
4942 /* data is allocated locally by default */
4947 /* all except arrays are lvalues */
4948 if (!(type
.t
& VT_ARRAY
))
4949 r
|= lvalue_type(type
.t
);
4950 memset(&ad
, 0, sizeof(AttributeDef
));
4951 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4953 if (sizeof_caller
) {
4960 } else if (tok
== '{') {
4961 int saved_nocode_wanted
= nocode_wanted
;
4963 tcc_error("expected constant");
4964 /* save all registers */
4966 /* statement expression : we do not accept break/continue
4967 inside as GCC does. We do retain the nocode_wanted state,
4968 as statement expressions can't ever be entered from the
4969 outside, so any reactivation of code emission (from labels
4970 or loop heads) can be disabled again after the end of it. */
4971 block(NULL
, NULL
, NULL
, NULL
, 1);
4972 nocode_wanted
= saved_nocode_wanted
;
4987 /* functions names must be treated as function pointers,
4988 except for unary '&' and sizeof. Since we consider that
4989 functions are not lvalues, we only have to handle it
4990 there and in function calls. */
4991 /* arrays can also be used although they are not lvalues */
4992 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4993 !(vtop
->type
.t
& VT_ARRAY
))
4995 mk_pointer(&vtop
->type
);
5001 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5002 gen_cast_s(VT_BOOL
);
5003 vtop
->c
.i
= !vtop
->c
.i
;
5004 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
5008 vseti(VT_JMP
, gvtst(1, 0));
5020 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5021 tcc_error("pointer not accepted for unary plus");
5022 /* In order to force cast, we add zero, except for floating point
5023 where we really need an noop (otherwise -0.0 will be transformed
5025 if (!is_float(vtop
->type
.t
)) {
5037 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5038 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5039 size
= type_size(&type
, &align
);
5040 if (s
&& s
->a
.aligned
)
5041 align
= 1 << (s
->a
.aligned
- 1);
5042 if (t
== TOK_SIZEOF
) {
5043 if (!(type
.t
& VT_VLA
)) {
5045 tcc_error("sizeof applied to an incomplete type");
5048 vla_runtime_type_size(&type
, &align
);
5053 vtop
->type
.t
|= VT_UNSIGNED
;
5056 case TOK_builtin_expect
:
5057 /* __builtin_expect is a no-op for now */
5058 parse_builtin_params(0, "ee");
5061 case TOK_builtin_types_compatible_p
:
5062 parse_builtin_params(0, "tt");
5063 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5064 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5065 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5069 case TOK_builtin_choose_expr
:
5096 case TOK_builtin_constant_p
:
5097 parse_builtin_params(1, "e");
5098 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5102 case TOK_builtin_frame_address
:
5103 case TOK_builtin_return_address
:
5109 if (tok
!= TOK_CINT
) {
5110 tcc_error("%s only takes positive integers",
5111 tok1
== TOK_builtin_return_address
?
5112 "__builtin_return_address" :
5113 "__builtin_frame_address");
5115 level
= (uint32_t)tokc
.i
;
5120 vset(&type
, VT_LOCAL
, 0); /* local frame */
5122 mk_pointer(&vtop
->type
);
5123 indir(); /* -> parent frame */
5125 if (tok1
== TOK_builtin_return_address
) {
5126 // assume return address is just above frame pointer on stack
5129 mk_pointer(&vtop
->type
);
5134 #ifdef TCC_TARGET_X86_64
5135 #ifdef TCC_TARGET_PE
5136 case TOK_builtin_va_start
:
5137 parse_builtin_params(0, "ee");
5138 r
= vtop
->r
& VT_VALMASK
;
5142 tcc_error("__builtin_va_start expects a local variable");
5144 vtop
->type
= char_pointer_type
;
5149 case TOK_builtin_va_arg_types
:
5150 parse_builtin_params(0, "t");
5151 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5158 #ifdef TCC_TARGET_ARM64
5159 case TOK___va_start
: {
5160 parse_builtin_params(0, "ee");
5164 vtop
->type
.t
= VT_VOID
;
5167 case TOK___va_arg
: {
5168 parse_builtin_params(0, "et");
5176 case TOK___arm64_clear_cache
: {
5177 parse_builtin_params(0, "ee");
5180 vtop
->type
.t
= VT_VOID
;
5184 /* pre operations */
5195 t
= vtop
->type
.t
& VT_BTYPE
;
5197 /* In IEEE negate(x) isn't subtract(0,x), but rather
5201 vtop
->c
.f
= -1.0 * 0.0;
5202 else if (t
== VT_DOUBLE
)
5203 vtop
->c
.d
= -1.0 * 0.0;
5205 vtop
->c
.ld
= -1.0 * 0.0;
5213 goto tok_identifier
;
5215 /* allow to take the address of a label */
5216 if (tok
< TOK_UIDENT
)
5217 expect("label identifier");
5218 s
= label_find(tok
);
5220 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5222 if (s
->r
== LABEL_DECLARED
)
5223 s
->r
= LABEL_FORWARD
;
5226 s
->type
.t
= VT_VOID
;
5227 mk_pointer(&s
->type
);
5228 s
->type
.t
|= VT_STATIC
;
5230 vpushsym(&s
->type
, s
);
5236 CType controlling_type
;
5237 int has_default
= 0;
5240 TokenString
*str
= NULL
;
5241 int saved_const_wanted
= const_wanted
;
5246 expr_type(&controlling_type
, expr_eq
);
5247 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5248 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5249 mk_pointer(&controlling_type
);
5250 const_wanted
= saved_const_wanted
;
5254 if (tok
== TOK_DEFAULT
) {
5256 tcc_error("too many 'default'");
5262 AttributeDef ad_tmp
;
5267 parse_btype(&cur_type
, &ad_tmp
);
5270 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5271 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5273 tcc_error("type match twice");
5283 skip_or_save_block(&str
);
5285 skip_or_save_block(NULL
);
5292 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5293 tcc_error("type '%s' does not match any association", buf
);
5295 begin_macro(str
, 1);
5304 // special qnan , snan and infinity values
5309 vtop
->type
.t
= VT_FLOAT
;
5314 goto special_math_val
;
5317 goto special_math_val
;
5324 expect("identifier");
5326 if (!s
|| IS_ASM_SYM(s
)) {
5327 const char *name
= get_tok_str(t
, NULL
);
5329 tcc_error("'%s' undeclared", name
);
5330 /* for simple function calls, we tolerate undeclared
5331 external reference to int() function */
5332 if (tcc_state
->warn_implicit_function_declaration
5333 #ifdef TCC_TARGET_PE
5334 /* people must be warned about using undeclared WINAPI functions
5335 (which usually start with uppercase letter) */
5336 || (name
[0] >= 'A' && name
[0] <= 'Z')
5339 tcc_warning("implicit declaration of function '%s'", name
);
5340 s
= external_global_sym(t
, &func_old_type
);
5344 /* A symbol that has a register is a local register variable,
5345 which starts out as VT_LOCAL value. */
5346 if ((r
& VT_VALMASK
) < VT_CONST
)
5347 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5349 vset(&s
->type
, r
, s
->c
);
5350 /* Point to s as backpointer (even without r&VT_SYM).
5351 Will be used by at least the x86 inline asm parser for
5357 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5358 vtop
->c
.i
= s
->enum_val
;
5363 /* post operations */
5365 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5368 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5369 int qualifiers
, cumofs
= 0;
5371 if (tok
== TOK_ARROW
)
5373 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5376 /* expect pointer on structure */
5377 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5378 expect("struct or union");
5379 if (tok
== TOK_CDOUBLE
)
5380 expect("field name");
5382 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5383 expect("field name");
5384 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5386 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5387 /* add field offset to pointer */
5388 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5389 vpushi(cumofs
+ s
->c
);
5391 /* change type to field type, and set to lvalue */
5392 vtop
->type
= s
->type
;
5393 vtop
->type
.t
|= qualifiers
;
5394 /* an array is never an lvalue */
5395 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5396 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5397 #ifdef CONFIG_TCC_BCHECK
5398 /* if bound checking, the referenced pointer must be checked */
5399 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5400 vtop
->r
|= VT_MUSTBOUND
;
5404 } else if (tok
== '[') {
5410 } else if (tok
== '(') {
5413 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5416 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5417 /* pointer test (no array accepted) */
5418 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5419 vtop
->type
= *pointed_type(&vtop
->type
);
5420 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5424 expect("function pointer");
5427 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5429 /* get return type */
5432 sa
= s
->next
; /* first parameter */
5433 nb_args
= regsize
= 0;
5435 /* compute first implicit argument if a structure is returned */
5436 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5437 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5438 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5439 &ret_align
, ®size
);
5441 /* get some space for the returned structure */
5442 size
= type_size(&s
->type
, &align
);
5443 #ifdef TCC_TARGET_ARM64
5444 /* On arm64, a small struct is return in registers.
5445 It is much easier to write it to memory if we know
5446 that we are allowed to write some extra bytes, so
5447 round the allocated space up to a power of 2: */
5449 while (size
& (size
- 1))
5450 size
= (size
| (size
- 1)) + 1;
5452 loc
= (loc
- size
) & -align
;
5454 ret
.r
= VT_LOCAL
| VT_LVAL
;
5455 /* pass it as 'int' to avoid structure arg passing
5457 vseti(VT_LOCAL
, loc
);
5467 /* return in register */
5468 if (is_float(ret
.type
.t
)) {
5469 ret
.r
= reg_fret(ret
.type
.t
);
5470 #ifdef TCC_TARGET_X86_64
5471 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5475 #ifndef TCC_TARGET_ARM64
5476 #ifdef TCC_TARGET_X86_64
5477 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5479 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5490 gfunc_param_typed(s
, sa
);
5500 tcc_error("too few arguments to function");
5502 gfunc_call(nb_args
);
5505 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5506 vsetc(&ret
.type
, r
, &ret
.c
);
5507 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5510 /* handle packed struct return */
5511 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5514 size
= type_size(&s
->type
, &align
);
5515 /* We're writing whole regs often, make sure there's enough
5516 space. Assume register size is power of 2. */
5517 if (regsize
> align
)
5519 loc
= (loc
- size
) & -align
;
5523 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5527 if (--ret_nregs
== 0)
5531 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5533 if (s
->f
.func_noreturn
)
5541 ST_FUNC
void expr_prod(void)
5546 while (tok
== '*' || tok
== '/' || tok
== '%') {
5554 ST_FUNC
void expr_sum(void)
5559 while (tok
== '+' || tok
== '-') {
5567 static void expr_shift(void)
5572 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5580 static void expr_cmp(void)
5585 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5586 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5594 static void expr_cmpeq(void)
5599 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5607 static void expr_and(void)
5610 while (tok
== '&') {
5617 static void expr_xor(void)
5620 while (tok
== '^') {
5627 static void expr_or(void)
5630 while (tok
== '|') {
5637 static int condition_3way(void);
5639 static void expr_landor(void(*e_fn
)(void), int e_op
, int i
)
5641 int t
= 0, cc
= 1, f
= 0, c
;
5643 c
= f
? i
: condition_3way();
5645 save_regs(1), cc
= 0;
5646 } else if (c
!= i
) {
5647 nocode_wanted
++, f
= 1;
5656 vseti(VT_JMP
+ i
, gvtst(i
, t
));
5669 static void expr_land(void)
5672 if (tok
== TOK_LAND
)
5673 expr_landor(expr_or
, TOK_LAND
, 1);
5676 static void expr_lor(void)
5680 expr_landor(expr_land
, TOK_LOR
, 0);
5683 /* Assuming vtop is a value used in a conditional context
5684 (i.e. compared with zero) return 0 if it's false, 1 if
5685 true and -1 if it can't be statically determined. */
5686 static int condition_3way(void)
5689 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5690 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5692 gen_cast_s(VT_BOOL
);
5699 static void expr_cond(void)
5701 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5703 CType type
, type1
, type2
;
5709 c
= condition_3way();
5710 g
= (tok
== ':' && gnu_ext
);
5720 /* needed to avoid having different registers saved in
5723 if (is_float(vtop
->type
.t
)) {
5725 #ifdef TCC_TARGET_X86_64
5726 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5737 ncw_prev
= nocode_wanted
;
5744 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5745 mk_pointer(&vtop
->type
);
5747 sv
= *vtop
; /* save value to handle it later */
5748 vtop
--; /* no vpop so that FP stack is not flushed */
5758 nocode_wanted
= ncw_prev
;
5765 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5766 mk_pointer(&vtop
->type
);
5769 bt1
= t1
& VT_BTYPE
;
5771 bt2
= t2
& VT_BTYPE
;
5774 /* cast operands to correct type according to ISOC rules */
5775 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5776 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5777 } else if (is_float(bt1
) || is_float(bt2
)) {
5778 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5779 type
.t
= VT_LDOUBLE
;
5781 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5786 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5787 /* cast to biggest op */
5788 type
.t
= VT_LLONG
| VT_LONG
;
5789 if (bt1
== VT_LLONG
)
5791 if (bt2
== VT_LLONG
)
5793 /* convert to unsigned if it does not fit in a long long */
5794 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5795 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5796 type
.t
|= VT_UNSIGNED
;
5797 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5798 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5799 /* If one is a null ptr constant the result type
5801 if (is_null_pointer (vtop
)) type
= type1
;
5802 else if (is_null_pointer (&sv
)) type
= type2
;
5803 else if (bt1
!= bt2
)
5804 tcc_error("incompatible types in conditional expressions");
5806 CType
*pt1
= pointed_type(&type1
);
5807 CType
*pt2
= pointed_type(&type2
);
5808 int pbt1
= pt1
->t
& VT_BTYPE
;
5809 int pbt2
= pt2
->t
& VT_BTYPE
;
5810 int newquals
, copied
= 0;
5811 /* pointers to void get preferred, otherwise the
5812 pointed to types minus qualifs should be compatible */
5813 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5814 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5815 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5816 tcc_warning("pointer type mismatch in conditional expression\n");
5818 /* combine qualifs */
5819 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
5820 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
5823 /* copy the pointer target symbol */
5824 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5827 pointed_type(&type
)->t
|= newquals
;
5829 /* pointers to incomplete arrays get converted to
5830 pointers to completed ones if possible */
5831 if (pt1
->t
& VT_ARRAY
5832 && pt2
->t
& VT_ARRAY
5833 && pointed_type(&type
)->ref
->c
< 0
5834 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
5837 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5839 pointed_type(&type
)->ref
=
5840 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
5841 0, pointed_type(&type
)->ref
->c
);
5842 pointed_type(&type
)->ref
->c
=
5843 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
5846 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5847 /* XXX: test structure compatibility */
5848 type
= bt1
== VT_STRUCT
? type1
: type2
;
5850 /* integer operations */
5851 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5852 /* convert to unsigned if it does not fit in an integer */
5853 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5854 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5855 type
.t
|= VT_UNSIGNED
;
5857 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5858 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5859 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5861 /* now we convert second operand */
5865 mk_pointer(&vtop
->type
);
5867 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5872 if (is_float(type
.t
)) {
5874 #ifdef TCC_TARGET_X86_64
5875 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5879 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5880 /* for long longs, we use fixed registers to avoid having
5881 to handle a complicated move */
5892 /* this is horrible, but we must also convert first
5898 mk_pointer(&vtop
->type
);
5900 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5906 move_reg(r2
, r1
, type
.t
);
5914 nocode_wanted
= ncw_prev
;
5918 static void expr_eq(void)
5924 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5925 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5926 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5941 ST_FUNC
void gexpr(void)
5952 /* parse a constant expression and return value in vtop. */
5953 static void expr_const1(void)
5962 /* parse an integer constant and return its value. */
5963 static inline int64_t expr_const64(void)
5967 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5968 expect("constant expression");
5974 /* parse an integer constant and return its value.
5975 Complain if it doesn't fit 32bit (signed or unsigned). */
5976 ST_FUNC
int expr_const(void)
5979 int64_t wc
= expr_const64();
5981 if (c
!= wc
&& (unsigned)c
!= wc
)
5982 tcc_error("constant exceeds 32 bit");
5986 /* ------------------------------------------------------------------------- */
5987 /* return from function */
5989 #ifndef TCC_TARGET_ARM64
5990 static void gfunc_return(CType
*func_type
)
5992 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5993 CType type
, ret_type
;
5994 int ret_align
, ret_nregs
, regsize
;
5995 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5996 &ret_align
, ®size
);
5997 if (0 == ret_nregs
) {
5998 /* if returning structure, must copy it to implicit
5999 first pointer arg location */
6002 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6005 /* copy structure value to pointer */
6008 /* returning structure packed into registers */
6009 int r
, size
, addr
, align
;
6010 size
= type_size(func_type
,&align
);
6011 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6012 (vtop
->c
.i
& (ret_align
-1)))
6013 && (align
& (ret_align
-1))) {
6014 loc
= (loc
- size
) & -ret_align
;
6017 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6021 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6023 vtop
->type
= ret_type
;
6024 if (is_float(ret_type
.t
))
6025 r
= rc_fret(ret_type
.t
);
6036 if (--ret_nregs
== 0)
6038 /* We assume that when a structure is returned in multiple
6039 registers, their classes are consecutive values of the
6042 vtop
->c
.i
+= regsize
;
6046 } else if (is_float(func_type
->t
)) {
6047 gv(rc_fret(func_type
->t
));
6051 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6055 static int case_cmp(const void *pa
, const void *pb
)
6057 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6058 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6059 return a
< b
? -1 : a
> b
;
6062 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6066 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6084 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6086 gcase(base
, len
/2, bsym
);
6087 if (cur_switch
->def_sym
)
6088 gjmp_addr(cur_switch
->def_sym
);
6090 *bsym
= gjmp(*bsym
);
6094 base
+= e
; len
-= e
;
6104 if (p
->v1
== p
->v2
) {
6106 gtst_addr(0, p
->sym
);
6116 gtst_addr(0, p
->sym
);
6122 /* call 'func' for each __attribute__((cleanup(func))) */
6123 static void block_cleanup(Sym
*lcleanup
, int lncleanups
)
6127 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> lncleanups
;) {
6128 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6133 try_call_scope_cleanup(lcleanup
);
6134 pcl
->jnext
= gjmp(0);
6136 goto remove_pending
;
6146 try_call_scope_cleanup(lcleanup
);
6147 current_cleanups
= lcleanup
;
6148 ncleanups
= lncleanups
;
6151 static void check_func_return(void)
6153 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6155 if (!strcmp (funcname
, "main")
6156 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6157 /* main returns 0 by default */
6159 gen_assign_cast(&func_vt
);
6160 gfunc_return(&func_vt
);
6162 tcc_warning("function might return no value: '%s'", funcname
);
6166 static void block(int *bsym
, Sym
*bcl
, int *csym
, Sym
*ccl
, int is_expr
)
6168 int a
, b
, c
, d
, e
, t
;
6172 /* default return value is (void) */
6174 vtop
->type
.t
= VT_VOID
;
6184 block(bsym
, bcl
, csym
, ccl
, 0);
6185 if (tok
== TOK_ELSE
) {
6189 block(bsym
, bcl
, csym
, ccl
, 0);
6190 gsym(d
); /* patch else jmp */
6195 } else if (t
== TOK_WHILE
) {
6203 block(&a
, current_cleanups
, &b
, current_cleanups
, 0);
6208 } else if (t
== '{') {
6209 Sym
*llabel
, *lcleanup
;
6210 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
6211 int lncleanups
= ncleanups
;
6213 /* record local declaration stack position */
6215 llabel
= local_label_stack
;
6216 lcleanup
= current_cleanups
;
6219 /* handle local labels declarations */
6220 while (tok
== TOK_LABEL
) {
6223 if (tok
< TOK_UIDENT
)
6224 expect("label identifier");
6225 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6227 } while (tok
== ',');
6231 while (tok
!= '}') {
6236 block(bsym
, bcl
, csym
, ccl
, is_expr
);
6240 if (current_cleanups
!= lcleanup
)
6241 block_cleanup(lcleanup
, lncleanups
);
6243 /* pop locally defined labels */
6244 label_pop(&local_label_stack
, llabel
, is_expr
);
6246 /* In the is_expr case (a statement expression is finished here),
6247 vtop might refer to symbols on the local_stack. Either via the
6248 type or via vtop->sym. We can't pop those nor any that in turn
6249 might be referred to. To make it easier we don't roll back
6250 any symbols in that case; some upper level call to block() will
6251 do that. We do have to remove such symbols from the lookup
6252 tables, though. sym_pop will do that. */
6254 /* pop locally defined symbols */
6255 sym_pop(&local_stack
, s
, is_expr
);
6257 /* Pop VLA frames and restore stack pointer if required */
6258 if (vlas_in_scope
> saved_vlas_in_scope
) {
6259 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
6262 vlas_in_scope
= saved_vlas_in_scope
;
6264 if (0 == --local_scope
&& !nocode_wanted
)
6265 check_func_return();
6268 } else if (t
== TOK_RETURN
) {
6270 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6272 gexpr(), gen_assign_cast(&func_vt
);
6273 try_call_scope_cleanup(NULL
);
6275 gfunc_return(&func_vt
);
6279 tcc_warning("'return' with no value.");
6281 /* jump unless last stmt in top-level block */
6282 if (tok
!= '}' || local_scope
!= 1)
6286 } else if (t
== TOK_BREAK
) {
6289 tcc_error("cannot break");
6290 try_call_scope_cleanup(bcl
);
6291 *bsym
= gjmp(*bsym
);
6294 } else if (t
== TOK_CONTINUE
) {
6297 tcc_error("cannot continue");
6298 try_call_scope_cleanup(ccl
);
6299 vla_sp_restore_root();
6300 *csym
= gjmp(*csym
);
6303 } else if (t
== TOK_FOR
) {
6304 Sym
*lcleanup
= current_cleanups
;
6305 int lncleanups
= ncleanups
;
6311 /* c99 for-loop init decl? */
6312 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6313 /* no, regular for-loop init expr */
6337 block(&a
, current_cleanups
, &b
, current_cleanups
, 0);
6342 try_call_scope_cleanup(lcleanup
);
6343 ncleanups
= lncleanups
;
6344 current_cleanups
= lcleanup
;
6345 sym_pop(&local_stack
, s
, 0);
6347 } else if (t
== TOK_DO
) {
6351 block(&a
, current_cleanups
, &b
, current_cleanups
, 0);
6362 } else if (t
== TOK_SWITCH
) {
6363 struct switch_t
*saved
, sw
;
6369 switchval
= *vtop
--;
6371 b
= gjmp(0); /* jump to first case */
6372 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6375 block(&a
, current_cleanups
, csym
, ccl
, 0);
6376 a
= gjmp(a
); /* add implicit break */
6379 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6380 for (b
= 1; b
< sw
.n
; b
++)
6381 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6382 tcc_error("duplicate case value");
6383 /* Our switch table sorting is signed, so the compared
6384 value needs to be as well when it's 64bit. */
6385 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6386 switchval
.type
.t
&= ~VT_UNSIGNED
;
6388 gcase(sw
.p
, sw
.n
, &a
);
6391 gjmp_addr(sw
.def_sym
);
6392 dynarray_reset(&sw
.p
, &sw
.n
);
6397 } else if (t
== TOK_CASE
) {
6398 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6401 cr
->v1
= cr
->v2
= expr_const64();
6402 if (gnu_ext
&& tok
== TOK_DOTS
) {
6404 cr
->v2
= expr_const64();
6405 if (cr
->v2
< cr
->v1
)
6406 tcc_warning("empty case range");
6409 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6412 goto block_after_label
;
6414 } else if (t
== TOK_DEFAULT
) {
6417 if (cur_switch
->def_sym
)
6418 tcc_error("too many 'default'");
6419 cur_switch
->def_sym
= gind();
6422 goto block_after_label
;
6424 } else if (t
== TOK_GOTO
) {
6425 if (tok
== '*' && gnu_ext
) {
6429 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6432 } else if (tok
>= TOK_UIDENT
) {
6433 s
= label_find(tok
);
6434 /* put forward definition if needed */
6436 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6437 else if (s
->r
== LABEL_DECLARED
)
6438 s
->r
= LABEL_FORWARD
;
6440 vla_sp_restore_root();
6441 if (s
->r
& LABEL_FORWARD
) {
6442 /* start new goto chain for cleanups, linked via label->next */
6443 if (current_cleanups
) {
6444 sym_push2(&pending_gotos
, SYM_FIELD
, 0, ncleanups
);
6445 pending_gotos
->prev_tok
= s
;
6446 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
6447 pending_gotos
->next
= s
;
6449 s
->jnext
= gjmp(s
->jnext
);
6451 try_call_cleanup_goto(s
->cleanupstate
);
6452 gjmp_addr(s
->jnext
);
6457 expect("label identifier");
6461 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
6465 if (tok
== ':' && t
>= TOK_UIDENT
) {
6470 if (s
->r
== LABEL_DEFINED
)
6471 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6472 s
->r
= LABEL_DEFINED
;
6474 Sym
*pcl
; /* pending cleanup goto */
6475 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
6477 sym_pop(&s
->next
, NULL
, 0);
6481 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
6484 s
->cleanupstate
= current_cleanups
;
6488 /* we accept this, but it is a mistake */
6490 tcc_warning("deprecated use of label at end of compound statement");
6494 block(bsym
, bcl
, csym
, ccl
, is_expr
);
6498 /* expression case */
6514 /* This skips over a stream of tokens containing balanced {} and ()
6515 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6516 with a '{'). If STR then allocates and stores the skipped tokens
6517 in *STR. This doesn't check if () and {} are nested correctly,
6518 i.e. "({)}" is accepted. */
6519 static void skip_or_save_block(TokenString
**str
)
6521 int braces
= tok
== '{';
6524 *str
= tok_str_alloc();
6526 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6528 if (tok
== TOK_EOF
) {
6529 if (str
|| level
> 0)
6530 tcc_error("unexpected end of file");
6535 tok_str_add_tok(*str
);
6538 if (t
== '{' || t
== '(') {
6540 } else if (t
== '}' || t
== ')') {
6542 if (level
== 0 && braces
&& t
== '}')
6547 tok_str_add(*str
, -1);
6548 tok_str_add(*str
, 0);
6552 #define EXPR_CONST 1
6555 static void parse_init_elem(int expr_type
)
6557 int saved_global_expr
;
6560 /* compound literals must be allocated globally in this case */
6561 saved_global_expr
= global_expr
;
6564 global_expr
= saved_global_expr
;
6565 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6566 (compound literals). */
6567 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6568 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6569 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6570 #ifdef TCC_TARGET_PE
6571 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6574 tcc_error("initializer element is not constant");
6582 /* put zeros for variable based init */
6583 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6586 /* nothing to do because globals are already set to zero */
6588 vpush_global_sym(&func_old_type
, TOK_memset
);
6590 #ifdef TCC_TARGET_ARM
6602 #define DIF_SIZE_ONLY 2
6603 #define DIF_HAVE_ELEM 4
6605 /* t is the array or struct type. c is the array or struct
6606 address. cur_field is the pointer to the current
6607 field, for arrays the 'c' member contains the current start
6608 index. 'flags' is as in decl_initializer.
6609 'al' contains the already initialized length of the
6610 current container (starting at c). This returns the new length of that. */
6611 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6612 Sym
**cur_field
, int flags
, int al
)
6615 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6616 unsigned long corig
= c
;
6621 if (flags
& DIF_HAVE_ELEM
)
6624 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
6631 /* NOTE: we only support ranges for last designator */
6632 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6634 if (!(type
->t
& VT_ARRAY
))
6635 expect("array type");
6637 index
= index_last
= expr_const();
6638 if (tok
== TOK_DOTS
&& gnu_ext
) {
6640 index_last
= expr_const();
6644 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6646 tcc_error("invalid index");
6648 (*cur_field
)->c
= index_last
;
6649 type
= pointed_type(type
);
6650 elem_size
= type_size(type
, &align
);
6651 c
+= index
* elem_size
;
6652 nb_elems
= index_last
- index
+ 1;
6659 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6660 expect("struct/union type");
6661 f
= find_field(type
, l
, &cumofs
);
6674 } else if (!gnu_ext
) {
6679 if (type
->t
& VT_ARRAY
) {
6680 index
= (*cur_field
)->c
;
6681 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6682 tcc_error("index too large");
6683 type
= pointed_type(type
);
6684 c
+= index
* type_size(type
, &align
);
6687 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6688 *cur_field
= f
= f
->next
;
6690 tcc_error("too many field init");
6695 /* must put zero in holes (note that doing it that way
6696 ensures that it even works with designators) */
6697 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
6698 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6699 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
6701 /* XXX: make it more general */
6702 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
6703 unsigned long c_end
;
6708 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6709 for (i
= 1; i
< nb_elems
; i
++) {
6710 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6715 } else if (!NODATA_WANTED
) {
6716 c_end
= c
+ nb_elems
* elem_size
;
6717 if (c_end
> sec
->data_allocated
)
6718 section_realloc(sec
, c_end
);
6719 src
= sec
->data
+ c
;
6721 for(i
= 1; i
< nb_elems
; i
++) {
6723 memcpy(dst
, src
, elem_size
);
6727 c
+= nb_elems
* type_size(type
, &align
);
6733 /* store a value or an expression directly in global data or in local array */
6734 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6741 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6745 /* XXX: not portable */
6746 /* XXX: generate error if incorrect relocation */
6747 gen_assign_cast(&dtype
);
6748 bt
= type
->t
& VT_BTYPE
;
6750 if ((vtop
->r
& VT_SYM
)
6753 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6754 || (type
->t
& VT_BITFIELD
))
6755 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6757 tcc_error("initializer element is not computable at load time");
6759 if (NODATA_WANTED
) {
6764 size
= type_size(type
, &align
);
6765 section_reserve(sec
, c
+ size
);
6766 ptr
= sec
->data
+ c
;
6768 /* XXX: make code faster ? */
6769 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6770 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6771 /* XXX This rejects compound literals like
6772 '(void *){ptr}'. The problem is that '&sym' is
6773 represented the same way, which would be ruled out
6774 by the SYM_FIRST_ANOM check above, but also '"string"'
6775 in 'char *p = "string"' is represented the same
6776 with the type being VT_PTR and the symbol being an
6777 anonymous one. That is, there's no difference in vtop
6778 between '(void *){x}' and '&(void *){x}'. Ignore
6779 pointer typed entities here. Hopefully no real code
6780 will every use compound literals with scalar type. */
6781 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6782 /* These come from compound literals, memcpy stuff over. */
6786 esym
= elfsym(vtop
->sym
);
6787 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6788 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6790 /* We need to copy over all memory contents, and that
6791 includes relocations. Use the fact that relocs are
6792 created it order, so look from the end of relocs
6793 until we hit one before the copied region. */
6794 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6795 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6796 while (num_relocs
--) {
6798 if (rel
->r_offset
>= esym
->st_value
+ size
)
6800 if (rel
->r_offset
< esym
->st_value
)
6802 /* Note: if the same fields are initialized multiple
6803 times (possible with designators) then we possibly
6804 add multiple relocations for the same offset here.
6805 That would lead to wrong code, the last reloc needs
6806 to win. We clean this up later after the whole
6807 initializer is parsed. */
6808 put_elf_reloca(symtab_section
, sec
,
6809 c
+ rel
->r_offset
- esym
->st_value
,
6810 ELFW(R_TYPE
)(rel
->r_info
),
6811 ELFW(R_SYM
)(rel
->r_info
),
6821 if (type
->t
& VT_BITFIELD
) {
6822 int bit_pos
, bit_size
, bits
, n
;
6823 unsigned char *p
, v
, m
;
6824 bit_pos
= BIT_POS(vtop
->type
.t
);
6825 bit_size
= BIT_SIZE(vtop
->type
.t
);
6826 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6827 bit_pos
&= 7, bits
= 0;
6832 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6833 m
= ((1 << n
) - 1) << bit_pos
;
6834 *p
= (*p
& ~m
) | (v
& m
);
6835 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6839 /* XXX: when cross-compiling we assume that each type has the
6840 same representation on host and target, which is likely to
6841 be wrong in the case of long double */
6843 vtop
->c
.i
= vtop
->c
.i
!= 0;
6845 *(char *)ptr
|= vtop
->c
.i
;
6848 *(short *)ptr
|= vtop
->c
.i
;
6851 *(float*)ptr
= vtop
->c
.f
;
6854 *(double *)ptr
= vtop
->c
.d
;
6857 #if defined TCC_IS_NATIVE_387
6858 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6859 memcpy(ptr
, &vtop
->c
.ld
, 10);
6861 else if (sizeof (long double) == sizeof (double))
6862 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
6864 else if (vtop
->c
.ld
== 0.0)
6868 if (sizeof(long double) == LDOUBLE_SIZE
)
6869 *(long double*)ptr
= vtop
->c
.ld
;
6870 else if (sizeof(double) == LDOUBLE_SIZE
)
6871 *(double *)ptr
= (double)vtop
->c
.ld
;
6873 tcc_error("can't cross compile long double constants");
6877 *(long long *)ptr
|= vtop
->c
.i
;
6884 addr_t val
= vtop
->c
.i
;
6886 if (vtop
->r
& VT_SYM
)
6887 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6889 *(addr_t
*)ptr
|= val
;
6891 if (vtop
->r
& VT_SYM
)
6892 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6893 *(addr_t
*)ptr
|= val
;
6899 int val
= vtop
->c
.i
;
6901 if (vtop
->r
& VT_SYM
)
6902 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6906 if (vtop
->r
& VT_SYM
)
6907 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6916 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6923 /* 't' contains the type and storage info. 'c' is the offset of the
6924 object in section 'sec'. If 'sec' is NULL, it means stack based
6925 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
6926 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
6927 size only evaluation is wanted (only for arrays). */
6928 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6931 int len
, n
, no_oblock
, nb
, i
;
6937 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
6938 /* In case of strings we have special handling for arrays, so
6939 don't consume them as initializer value (which would commit them
6940 to some anonymous symbol). */
6941 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6942 !(flags
& DIF_SIZE_ONLY
)) {
6943 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6944 flags
|= DIF_HAVE_ELEM
;
6947 if ((flags
& DIF_HAVE_ELEM
) &&
6948 !(type
->t
& VT_ARRAY
) &&
6949 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6950 The source type might have VT_CONSTANT set, which is
6951 of course assignable to non-const elements. */
6952 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6953 init_putv(type
, sec
, c
);
6954 } else if (type
->t
& VT_ARRAY
) {
6957 t1
= pointed_type(type
);
6958 size1
= type_size(t1
, &align1
);
6961 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6964 tcc_error("character array initializer must be a literal,"
6965 " optionally enclosed in braces");
6970 /* only parse strings here if correct type (otherwise: handle
6971 them as ((w)char *) expressions */
6972 if ((tok
== TOK_LSTR
&&
6973 #ifdef TCC_TARGET_PE
6974 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6976 (t1
->t
& VT_BTYPE
) == VT_INT
6978 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6980 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6983 /* compute maximum number of chars wanted */
6985 cstr_len
= tokc
.str
.size
;
6987 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6990 if (n
>= 0 && nb
> (n
- len
))
6992 if (!(flags
& DIF_SIZE_ONLY
)) {
6994 tcc_warning("initializer-string for array is too long");
6995 /* in order to go faster for common case (char
6996 string in global variable, we handle it
6998 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
7000 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
7004 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
7006 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
7008 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
7015 /* only add trailing zero if enough storage (no
7016 warning in this case since it is standard) */
7017 if (n
< 0 || len
< n
) {
7018 if (!(flags
& DIF_SIZE_ONLY
)) {
7020 init_putv(t1
, sec
, c
+ (len
* size1
));
7031 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7032 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7033 flags
&= ~DIF_HAVE_ELEM
;
7034 if (type
->t
& VT_ARRAY
) {
7036 /* special test for multi dimensional arrays (may not
7037 be strictly correct if designators are used at the
7039 if (no_oblock
&& len
>= n
*size1
)
7042 if (s
->type
.t
== VT_UNION
)
7046 if (no_oblock
&& f
== NULL
)
7055 /* put zeros at the end */
7056 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7057 init_putz(sec
, c
+ len
, n
*size1
- len
);
7060 /* patch type size if needed, which happens only for array types */
7062 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7063 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7066 if ((flags
& DIF_FIRST
) || tok
== '{') {
7074 } else if (tok
== '{') {
7075 if (flags
& DIF_HAVE_ELEM
)
7078 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7080 } else if ((flags
& DIF_SIZE_ONLY
)) {
7081 /* If we supported only ISO C we wouldn't have to accept calling
7082 this on anything than an array if DIF_SIZE_ONLY (and even then
7083 only on the outermost level, so no recursion would be needed),
7084 because initializing a flex array member isn't supported.
7085 But GNU C supports it, so we need to recurse even into
7086 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7087 /* just skip expression */
7088 skip_or_save_block(NULL
);
7090 if (!(flags
& DIF_HAVE_ELEM
)) {
7091 /* This should happen only when we haven't parsed
7092 the init element above for fear of committing a
7093 string constant to memory too early. */
7094 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7095 expect("string constant");
7096 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7098 init_putv(type
, sec
, c
);
7102 /* parse an initializer for type 't' if 'has_init' is non zero, and
7103 allocate space in local or global data space ('r' is either
7104 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7105 variable 'v' of scope 'scope' is declared before initializers
7106 are parsed. If 'v' is zero, then a reference to the new object
7107 is put in the value stack. If 'has_init' is 2, a special parsing
7108 is done to handle string constants. */
7109 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7110 int has_init
, int v
, int scope
)
7112 int size
, align
, addr
;
7113 TokenString
*init_str
= NULL
;
7116 Sym
*flexible_array
;
7118 int saved_nocode_wanted
= nocode_wanted
;
7119 #ifdef CONFIG_TCC_BCHECK
7123 /* Always allocate static or global variables */
7124 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7125 nocode_wanted
|= 0x80000000;
7127 #ifdef CONFIG_TCC_BCHECK
7128 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7131 flexible_array
= NULL
;
7132 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7133 Sym
*field
= type
->ref
->next
;
7136 field
= field
->next
;
7137 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7138 flexible_array
= field
;
7142 size
= type_size(type
, &align
);
7143 /* If unknown size, we must evaluate it before
7144 evaluating initializers because
7145 initializers can generate global data too
7146 (e.g. string pointers or ISOC99 compound
7147 literals). It also simplifies local
7148 initializers handling */
7149 if (size
< 0 || (flexible_array
&& has_init
)) {
7151 tcc_error("unknown type size");
7152 /* get all init string */
7153 if (has_init
== 2) {
7154 init_str
= tok_str_alloc();
7155 /* only get strings */
7156 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7157 tok_str_add_tok(init_str
);
7160 tok_str_add(init_str
, -1);
7161 tok_str_add(init_str
, 0);
7163 skip_or_save_block(&init_str
);
7168 begin_macro(init_str
, 1);
7170 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7171 /* prepare second initializer parsing */
7172 macro_ptr
= init_str
->str
;
7175 /* if still unknown size, error */
7176 size
= type_size(type
, &align
);
7178 tcc_error("unknown type size");
7180 /* If there's a flex member and it was used in the initializer
7182 if (flexible_array
&&
7183 flexible_array
->type
.ref
->c
> 0)
7184 size
+= flexible_array
->type
.ref
->c
7185 * pointed_size(&flexible_array
->type
);
7186 /* take into account specified alignment if bigger */
7187 if (ad
->a
.aligned
) {
7188 int speca
= 1 << (ad
->a
.aligned
- 1);
7191 } else if (ad
->a
.packed
) {
7195 if (!v
&& NODATA_WANTED
)
7196 size
= 0, align
= 1;
7198 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7200 #ifdef CONFIG_TCC_BCHECK
7201 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7205 loc
= (loc
- size
) & -align
;
7207 #ifdef CONFIG_TCC_BCHECK
7208 /* handles bounds */
7209 /* XXX: currently, since we do only one pass, we cannot track
7210 '&' operators, so we add only arrays */
7211 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7213 /* add padding between regions */
7215 /* then add local bound info */
7216 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7217 bounds_ptr
[0] = addr
;
7218 bounds_ptr
[1] = size
;
7222 /* local variable */
7223 #ifdef CONFIG_TCC_ASM
7224 if (ad
->asm_label
) {
7225 int reg
= asm_parse_regvar(ad
->asm_label
);
7227 r
= (r
& ~VT_VALMASK
) | reg
;
7230 sym
= sym_push(v
, type
, r
, addr
);
7231 if (ad
->cleanup_func
) {
7232 Sym
*cls
= sym_push2(&all_cleanups
, SYM_FIELD
| ++ncleanups
, 0, 0);
7233 cls
->prev_tok
= sym
;
7234 cls
->next
= ad
->cleanup_func
;
7235 cls
->ncl
= current_cleanups
;
7236 current_cleanups
= cls
;
7241 /* push local reference */
7242 vset(type
, r
, addr
);
7245 if (v
&& scope
== VT_CONST
) {
7246 /* see if the symbol was already defined */
7249 patch_storage(sym
, ad
, type
);
7250 /* we accept several definitions of the same global variable. */
7251 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7256 /* allocate symbol in corresponding section */
7261 else if (tcc_state
->nocommon
)
7266 addr
= section_add(sec
, size
, align
);
7267 #ifdef CONFIG_TCC_BCHECK
7268 /* add padding if bound check */
7270 section_add(sec
, 1, 1);
7273 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7274 sec
= common_section
;
7279 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7280 patch_storage(sym
, ad
, NULL
);
7282 /* update symbol definition */
7283 put_extern_sym(sym
, sec
, addr
, size
);
7285 /* push global reference */
7286 vpush_ref(type
, sec
, addr
, size
);
7291 #ifdef CONFIG_TCC_BCHECK
7292 /* handles bounds now because the symbol must be defined
7293 before for the relocation */
7297 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7298 /* then add global bound info */
7299 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7300 bounds_ptr
[0] = 0; /* relocated */
7301 bounds_ptr
[1] = size
;
7306 if (type
->t
& VT_VLA
) {
7312 /* save current stack pointer */
7313 if (vlas_in_scope
== 0) {
7314 if (vla_sp_root_loc
== -1)
7315 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
7316 gen_vla_sp_save(vla_sp_root_loc
);
7319 vla_runtime_type_size(type
, &a
);
7320 gen_vla_alloc(type
, a
);
7321 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7322 /* on _WIN64, because of the function args scratch area, the
7323 result of alloca differs from RSP and is returned in RAX. */
7324 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7326 gen_vla_sp_save(addr
);
7330 } else if (has_init
) {
7331 size_t oldreloc_offset
= 0;
7332 if (sec
&& sec
->reloc
)
7333 oldreloc_offset
= sec
->reloc
->data_offset
;
7334 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
7335 if (sec
&& sec
->reloc
)
7336 squeeze_multi_relocs(sec
, oldreloc_offset
);
7337 /* patch flexible array member size back to -1, */
7338 /* for possible subsequent similar declarations */
7340 flexible_array
->type
.ref
->c
= -1;
7344 /* restore parse state if needed */
7350 nocode_wanted
= saved_nocode_wanted
;
7353 /* parse a function defined by symbol 'sym' and generate its code in
7354 'cur_text_section' */
7355 static void gen_function(Sym
*sym
)
7358 ind
= cur_text_section
->data_offset
;
7359 if (sym
->a
.aligned
) {
7360 size_t newoff
= section_add(cur_text_section
, 0,
7361 1 << (sym
->a
.aligned
- 1));
7362 gen_fill_nops(newoff
- ind
);
7364 /* NOTE: we patch the symbol size later */
7365 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7366 funcname
= get_tok_str(sym
->v
, NULL
);
7368 /* Initialize VLA state */
7370 vla_sp_root_loc
= -1;
7371 /* put debug symbol */
7372 tcc_debug_funcstart(tcc_state
, sym
);
7373 /* push a dummy symbol to enable local sym storage */
7374 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7375 local_scope
= 1; /* for function parameters */
7376 gfunc_prolog(&sym
->type
);
7377 reset_local_scope();
7379 clear_temp_local_var_list();
7380 block(NULL
, NULL
, NULL
, NULL
, 0);
7384 cur_text_section
->data_offset
= ind
;
7385 label_pop(&global_label_stack
, NULL
, 0);
7386 /* reset local stack */
7387 reset_local_scope();
7388 sym_pop(&local_stack
, NULL
, 0);
7389 /* end of function */
7390 /* patch symbol size */
7391 elfsym(sym
)->st_size
= ind
- func_ind
;
7392 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7393 /* It's better to crash than to generate wrong code */
7394 cur_text_section
= NULL
;
7395 funcname
= ""; /* for safety */
7396 func_vt
.t
= VT_VOID
; /* for safety */
7397 func_var
= 0; /* for safety */
7398 ind
= 0; /* for safety */
7399 nocode_wanted
= 0x80000000;
7403 static void gen_inline_functions(TCCState
*s
)
7406 int inline_generated
, i
, ln
;
7407 struct InlineFunc
*fn
;
7409 ln
= file
->line_num
;
7410 /* iterate while inline function are referenced */
7412 inline_generated
= 0;
7413 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7414 fn
= s
->inline_fns
[i
];
7416 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
7417 /* the function was used or forced (and then not internal):
7418 generate its code and convert it to a normal function */
7421 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7422 begin_macro(fn
->func_str
, 1);
7424 cur_text_section
= text_section
;
7428 inline_generated
= 1;
7431 } while (inline_generated
);
7432 file
->line_num
= ln
;
7435 ST_FUNC
void free_inline_functions(TCCState
*s
)
7438 /* free tokens of unused inline functions */
7439 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7440 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7442 tok_str_free(fn
->func_str
);
7444 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7447 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7448 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7449 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7454 AttributeDef ad
, adbase
;
7457 if (tok
== TOK_STATIC_ASSERT
) {
7465 tcc_error("%s", get_tok_str(tok
, &tokc
));
7471 if (!parse_btype(&btype
, &adbase
)) {
7472 if (is_for_loop_init
)
7474 /* skip redundant ';' if not in old parameter decl scope */
7475 if (tok
== ';' && l
!= VT_CMP
) {
7481 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7482 /* global asm block */
7486 if (tok
>= TOK_UIDENT
) {
7487 /* special test for old K&R protos without explicit int
7488 type. Only accepted when defining global data */
7492 expect("declaration");
7497 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7498 int v
= btype
.ref
->v
;
7499 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7500 tcc_warning("unnamed struct/union that defines no instances");
7504 if (IS_ENUM(btype
.t
)) {
7509 while (1) { /* iterate thru each declaration */
7511 /* If the base type itself was an array type of unspecified
7512 size (like in 'typedef int arr[]; arr x = {1};') then
7513 we will overwrite the unknown size by the real one for
7514 this decl. We need to unshare the ref symbol holding
7516 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7517 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7520 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
7524 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7525 printf("type = '%s'\n", buf
);
7528 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7529 /* if old style function prototype, we accept a
7532 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7533 decl0(VT_CMP
, 0, sym
);
7534 /* always compile 'extern inline' */
7535 if (type
.t
& VT_EXTERN
)
7536 type
.t
&= ~VT_INLINE
;
7539 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7540 ad
.asm_label
= asm_label_instr();
7541 /* parse one last attribute list, after asm label */
7542 parse_attribute(&ad
);
7544 /* gcc does not allow __asm__("label") with function definition,
7551 #ifdef TCC_TARGET_PE
7552 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7553 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7554 tcc_error("cannot have dll linkage with static or typedef");
7555 if (ad
.a
.dllimport
) {
7556 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7559 type
.t
|= VT_EXTERN
;
7565 tcc_error("cannot use local functions");
7566 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7567 expect("function definition");
7569 /* reject abstract declarators in function definition
7570 make old style params without decl have int type */
7572 while ((sym
= sym
->next
) != NULL
) {
7573 if (!(sym
->v
& ~SYM_FIELD
))
7574 expect("identifier");
7575 if (sym
->type
.t
== VT_VOID
)
7576 sym
->type
= int_type
;
7579 /* put function symbol */
7580 type
.t
&= ~VT_EXTERN
;
7581 sym
= external_sym(v
, &type
, 0, &ad
);
7582 /* static inline functions are just recorded as a kind
7583 of macro. Their code will be emitted at the end of
7584 the compilation unit only if they are used */
7585 if (sym
->type
.t
& VT_INLINE
) {
7586 struct InlineFunc
*fn
;
7587 const char *filename
;
7589 filename
= file
? file
->filename
: "";
7590 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7591 strcpy(fn
->filename
, filename
);
7593 skip_or_save_block(&fn
->func_str
);
7594 dynarray_add(&tcc_state
->inline_fns
,
7595 &tcc_state
->nb_inline_fns
, fn
);
7597 /* compute text section */
7598 cur_text_section
= ad
.section
;
7599 if (!cur_text_section
)
7600 cur_text_section
= text_section
;
7606 /* find parameter in function parameter list */
7607 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7608 if ((sym
->v
& ~SYM_FIELD
) == v
)
7610 tcc_error("declaration for parameter '%s' but no such parameter",
7611 get_tok_str(v
, NULL
));
7613 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7614 tcc_error("storage class specified for '%s'",
7615 get_tok_str(v
, NULL
));
7616 if (sym
->type
.t
!= VT_VOID
)
7617 tcc_error("redefinition of parameter '%s'",
7618 get_tok_str(v
, NULL
));
7619 convert_parameter_type(&type
);
7621 } else if (type
.t
& VT_TYPEDEF
) {
7622 /* save typedefed type */
7623 /* XXX: test storage specifiers ? */
7625 if (sym
&& sym
->sym_scope
== local_scope
) {
7626 if (!is_compatible_types(&sym
->type
, &type
)
7627 || !(sym
->type
.t
& VT_TYPEDEF
))
7628 tcc_error("incompatible redefinition of '%s'",
7629 get_tok_str(v
, NULL
));
7632 sym
= sym_push(v
, &type
, 0, 0);
7636 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7637 && !(type
.t
& VT_EXTERN
)) {
7638 tcc_error("declaration of void object");
7641 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7642 /* external function definition */
7643 /* specific case for func_call attribute */
7645 } else if (!(type
.t
& VT_ARRAY
)) {
7646 /* not lvalue if array */
7647 r
|= lvalue_type(type
.t
);
7649 has_init
= (tok
== '=');
7650 if (has_init
&& (type
.t
& VT_VLA
))
7651 tcc_error("variable length array cannot be initialized");
7652 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
7653 || (type
.t
& VT_BTYPE
) == VT_FUNC
7654 /* as with GCC, uninitialized global arrays with no size
7655 are considered extern: */
7656 || ((type
.t
& VT_ARRAY
) && !has_init
7657 && l
== VT_CONST
&& type
.ref
->c
< 0)
7659 /* external variable or function */
7660 type
.t
|= VT_EXTERN
;
7661 sym
= external_sym(v
, &type
, r
, &ad
);
7662 if (ad
.alias_target
) {
7665 alias_target
= sym_find(ad
.alias_target
);
7666 esym
= elfsym(alias_target
);
7668 tcc_error("unsupported forward __alias__ attribute");
7669 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7672 if (type
.t
& VT_STATIC
)
7678 else if (l
== VT_CONST
)
7679 /* uninitialized global variables may be overridden */
7680 type
.t
|= VT_EXTERN
;
7681 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7685 if (is_for_loop_init
)
7697 static void decl(int l
)
7702 /* ------------------------------------------------------------------------- */
7705 /* ------------------------------------------------------------------------- */