/*
 * TCC - Tiny C Compiler
 *
 * Copyright (c) 2001-2004 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   anon_sym: anonymous symbol index
*/
31 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Sym
*sym_free_first
;
34 ST_DATA
void **sym_pools
;
35 ST_DATA
int nb_sym_pools
;
37 ST_DATA Sym
*global_stack
;
38 ST_DATA Sym
*local_stack
;
39 ST_DATA Sym
*define_stack
;
40 ST_DATA Sym
*global_label_stack
;
41 ST_DATA Sym
*local_label_stack
;
42 static int local_scope
;
44 static int section_sym
;
46 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
47 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
52 ST_DATA
int const_wanted
; /* true if constant wanted */
53 ST_DATA
int nocode_wanted
; /* no code generation wanted */
54 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
55 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
56 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
57 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
58 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
60 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
61 ST_DATA
const char *funcname
;
64 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
66 ST_DATA
struct switch_t
{
70 } **p
; int n
; /* list of case ranges */
71 int def_sym
; /* default symbol */
72 } *cur_switch
; /* current switch */
74 /* ------------------------------------------------------------------------- */
76 static void gen_cast(CType
*type
);
77 static void gen_cast_s(int t
);
78 static inline CType
*pointed_type(CType
*type
);
79 static int is_compatible_types(CType
*type1
, CType
*type2
);
80 static int parse_btype(CType
*type
, AttributeDef
*ad
);
81 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
82 static void parse_expr_type(CType
*type
);
83 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
84 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
85 static void block(int *bsym
, int *csym
, int is_expr
);
86 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
87 static void decl(int l
);
88 static int decl0(int l
, int is_for_loop_init
, Sym
*);
89 static void expr_eq(void);
90 static void vla_runtime_type_size(CType
*type
, int *a
);
91 static void vla_sp_restore(void);
92 static void vla_sp_restore_root(void);
93 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
94 static inline int64_t expr_const64(void);
95 static void vpush64(int ty
, unsigned long long v
);
96 static void vpush(CType
*type
);
97 static int gvtst(int inv
, int t
);
98 static void gen_inline_functions(TCCState
*s
);
99 static void skip_or_save_block(TokenString
**str
);
100 static void gv_dup(void);
102 ST_INLN
int is_float(int t
)
106 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
109 /* we use our own 'finite' function to avoid potential problems with
110 non standard math libs */
111 /* XXX: endianness dependent */
112 ST_FUNC
int ieee_finite(double d
)
115 memcpy(p
, &d
, sizeof(double));
116 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
119 /* compiling intel long double natively */
120 #if (defined __i386__ || defined __x86_64__) \
121 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
122 # define TCC_IS_NATIVE_387
125 ST_FUNC
void test_lvalue(void)
127 if (!(vtop
->r
& VT_LVAL
))
131 ST_FUNC
void check_vstack(void)
134 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
137 /* ------------------------------------------------------------------------- */
138 /* vstack debugging aid */
141 void pv (const char *lbl
, int a
, int b
)
144 for (i
= a
; i
< a
+ b
; ++i
) {
145 SValue
*p
= &vtop
[-i
];
146 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
147 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
152 /* ------------------------------------------------------------------------- */
153 /* start of translation unit info */
154 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
159 /* file info: full path + filename */
160 section_sym
= put_elf_sym(symtab_section
, 0, 0,
161 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
162 text_section
->sh_num
, NULL
);
163 getcwd(buf
, sizeof(buf
));
165 normalize_slashes(buf
);
167 pstrcat(buf
, sizeof(buf
), "/");
168 put_stabs_r(buf
, N_SO
, 0, 0,
169 text_section
->data_offset
, text_section
, section_sym
);
170 put_stabs_r(file
->filename
, N_SO
, 0, 0,
171 text_section
->data_offset
, text_section
, section_sym
);
176 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
177 symbols can be safely used */
178 put_elf_sym(symtab_section
, 0, 0,
179 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
180 SHN_ABS
, file
->filename
);
183 /* put end of translation unit info */
184 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
188 put_stabs_r(NULL
, N_SO
, 0, 0,
189 text_section
->data_offset
, text_section
, section_sym
);
193 /* generate line number info */
194 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
198 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
199 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
201 last_line_num
= file
->line_num
;
205 /* put function symbol */
206 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
214 /* XXX: we put here a dummy type */
215 snprintf(buf
, sizeof(buf
), "%s:%c1",
216 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
217 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
218 cur_text_section
, sym
->c
);
219 /* //gr gdb wants a line at the function */
220 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
226 /* put function size */
227 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
231 put_stabn(N_FUN
, 0, 0, size
);
234 /* ------------------------------------------------------------------------- */
235 ST_FUNC
int tccgen_compile(TCCState
*s1
)
237 cur_text_section
= NULL
;
239 anon_sym
= SYM_FIRST_ANOM
;
242 nocode_wanted
= 0x80000000;
244 /* define some often used types */
246 char_pointer_type
.t
= VT_BYTE
;
247 mk_pointer(&char_pointer_type
);
249 size_type
.t
= VT_INT
| VT_UNSIGNED
;
250 ptrdiff_type
.t
= VT_INT
;
252 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
253 ptrdiff_type
.t
= VT_LLONG
;
255 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
256 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
258 func_old_type
.t
= VT_FUNC
;
259 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
260 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
261 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
265 #ifdef TCC_TARGET_ARM
270 printf("%s: **** new file\n", file
->filename
);
273 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
276 gen_inline_functions(s1
);
279 #ifdef CONFIG_TCC_ASM
283 /* end of translation unit info */
288 /* ------------------------------------------------------------------------- */
289 ST_FUNC ElfSym
*elfsym(Sym
*s
)
294 return &tcc_state
->esym_dot
;
296 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
299 /* apply storage attributes to Elf symbol */
301 static void update_storage(Sym
*sym
)
303 ElfSym
*esym
= elfsym(sym
);
306 if (sym
->a
.visibility
)
307 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
310 esym
->st_info
= ELFW(ST_INFO
)(STB_WEAK
, ELFW(ST_TYPE
)(esym
->st_info
));
312 if (sym
->a
.dllimport
)
313 esym
->st_other
|= ST_PE_IMPORT
;
314 if (sym
->a
.dllexport
)
315 esym
->st_other
|= ST_PE_EXPORT
;
318 printf("storage %s: vis=%d weak=%d exp=%d imp=%d\n",
319 get_tok_str(sym
->v
, NULL
),
328 /* ------------------------------------------------------------------------- */
329 /* update sym->c so that it points to an external symbol in section
330 'section' with value 'value' */
332 ST_FUNC
void put_extern_sym2(Sym
*sym
, Section
*section
,
333 addr_t value
, unsigned long size
,
334 int can_add_underscore
)
336 int sym_type
, sym_bind
, sh_num
, info
, other
, t
;
340 #ifdef CONFIG_TCC_BCHECK
346 else if (section
== SECTION_ABS
)
349 sh_num
= section
->sh_num
;
352 name
= get_tok_str(sym
->v
, NULL
);
353 #ifdef CONFIG_TCC_BCHECK
354 if (tcc_state
->do_bounds_check
) {
355 /* XXX: avoid doing that for statics ? */
356 /* if bound checking is activated, we change some function
357 names by adding the "__bound" prefix */
360 /* XXX: we rely only on malloc hooks */
373 strcpy(buf
, "__bound_");
381 if ((t
& VT_BTYPE
) == VT_FUNC
) {
383 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
384 sym_type
= STT_NOTYPE
;
386 sym_type
= STT_OBJECT
;
389 sym_bind
= STB_LOCAL
;
391 sym_bind
= STB_GLOBAL
;
394 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
395 Sym
*ref
= sym
->type
.ref
;
396 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
397 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
399 other
|= ST_PE_STDCALL
;
400 can_add_underscore
= 0;
404 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
406 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
410 name
= get_tok_str(sym
->asm_label
, NULL
);
411 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
412 sym
->c
= set_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
415 esym
->st_value
= value
;
416 esym
->st_size
= size
;
417 esym
->st_shndx
= sh_num
;
422 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
423 addr_t value
, unsigned long size
)
425 put_extern_sym2(sym
, section
, value
, size
, 1);
428 /* add a new relocation entry to symbol 'sym' in section 's' */
429 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
434 if (nocode_wanted
&& s
== cur_text_section
)
439 put_extern_sym(sym
, NULL
, 0, 0);
443 /* now we can add ELF relocation info */
444 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
448 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
450 greloca(s
, sym
, offset
, type
, 0);
454 /* ------------------------------------------------------------------------- */
455 /* symbol allocator */
456 static Sym
*__sym_malloc(void)
458 Sym
*sym_pool
, *sym
, *last_sym
;
461 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
462 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
464 last_sym
= sym_free_first
;
466 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
467 sym
->next
= last_sym
;
471 sym_free_first
= last_sym
;
475 static inline Sym
*sym_malloc(void)
479 sym
= sym_free_first
;
481 sym
= __sym_malloc();
482 sym_free_first
= sym
->next
;
485 sym
= tcc_malloc(sizeof(Sym
));
490 ST_INLN
void sym_free(Sym
*sym
)
493 sym
->next
= sym_free_first
;
494 sym_free_first
= sym
;
500 /* push, without hashing */
501 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
506 memset(s
, 0, sizeof *s
);
516 /* find a symbol and return its associated structure. 's' is the top
517 of the symbol stack */
518 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
530 /* structure lookup */
531 ST_INLN Sym
*struct_find(int v
)
534 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
536 return table_ident
[v
]->sym_struct
;
539 /* find an identifier */
540 ST_INLN Sym
*sym_find(int v
)
543 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
545 return table_ident
[v
]->sym_identifier
;
548 /* push a given symbol on the symbol stack */
549 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
558 s
= sym_push2(ps
, v
, type
->t
, c
);
559 s
->type
.ref
= type
->ref
;
561 /* don't record fields or anonymous symbols */
563 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
564 /* record symbol in token array */
565 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
567 ps
= &ts
->sym_struct
;
569 ps
= &ts
->sym_identifier
;
572 s
->sym_scope
= local_scope
;
573 if (s
->prev_tok
&& s
->prev_tok
->sym_scope
== s
->sym_scope
)
574 tcc_error("redeclaration of '%s'",
575 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
580 /* push a global identifier */
581 ST_FUNC Sym
*global_identifier_push_1(Sym
**ptop
, int v
, int t
, int c
)
584 s
= sym_push2(ptop
, v
, t
, c
);
585 /* don't record anonymous symbol */
586 if (v
< SYM_FIRST_ANOM
) {
587 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
588 /* modify the top most local identifier, so that
589 sym_identifier will point to 's' when popped */
590 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
591 ps
= &(*ps
)->prev_tok
;
598 static Sym
*global_identifier_push(int v
, int t
, int c
)
600 return global_identifier_push_1(&global_stack
, v
, t
, c
);
603 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
604 pop them yet from the list, but do remove them from the token array. */
605 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
615 /* remove symbol in token array */
617 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
618 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
620 ps
= &ts
->sym_struct
;
622 ps
= &ts
->sym_identifier
;
633 /* ------------------------------------------------------------------------- */
635 static void vsetc(CType
*type
, int r
, CValue
*vc
)
639 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
640 tcc_error("memory full (vstack)");
641 /* cannot let cpu flags if other instruction are generated. Also
642 avoid leaving VT_JMP anywhere except on the top of the stack
643 because it would complicate the code generator.
645 Don't do this when nocode_wanted. vtop might come from
646 !nocode_wanted regions (see 88_codeopt.c) and transforming
647 it to a register without actually generating code is wrong
648 as their value might still be used for real. All values
649 we push under nocode_wanted will eventually be popped
650 again, so that the VT_CMP/VT_JMP value will be in vtop
651 when code is unsuppressed again.
653 Same logic below in vswap(); */
654 if (vtop
>= vstack
&& !nocode_wanted
) {
655 v
= vtop
->r
& VT_VALMASK
;
656 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
668 ST_FUNC
void vswap(void)
671 /* cannot vswap cpu flags. See comment at vsetc() above */
672 if (vtop
>= vstack
&& !nocode_wanted
) {
673 int v
= vtop
->r
& VT_VALMASK
;
674 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
682 /* pop stack value */
683 ST_FUNC
void vpop(void)
686 v
= vtop
->r
& VT_VALMASK
;
687 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
688 /* for x86, we need to pop the FP stack */
690 o(0xd8dd); /* fstp %st(0) */
693 if (v
== VT_JMP
|| v
== VT_JMPI
) {
694 /* need to put correct jump if && or || without test */
700 /* push constant of type "type" with useless value */
701 ST_FUNC
void vpush(CType
*type
)
703 vset(type
, VT_CONST
, 0);
706 /* push integer constant */
707 ST_FUNC
void vpushi(int v
)
711 vsetc(&int_type
, VT_CONST
, &cval
);
714 /* push a pointer sized constant */
715 static void vpushs(addr_t v
)
719 vsetc(&size_type
, VT_CONST
, &cval
);
722 /* push arbitrary 64bit constant */
723 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
730 vsetc(&ctype
, VT_CONST
, &cval
);
733 /* push long long constant */
734 static inline void vpushll(long long v
)
736 vpush64(VT_LLONG
, v
);
739 ST_FUNC
void vset(CType
*type
, int r
, int v
)
744 vsetc(type
, r
, &cval
);
747 static void vseti(int r
, int v
)
755 ST_FUNC
void vpushv(SValue
*v
)
757 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
758 tcc_error("memory full (vstack)");
763 static void vdup(void)
768 /* rotate n first stack elements to the bottom
769 I1 ... In -> I2 ... In I1 [top is right]
771 ST_FUNC
void vrotb(int n
)
782 /* rotate the n elements before entry e towards the top
783 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
785 ST_FUNC
void vrote(SValue
*e
, int n
)
791 for(i
= 0;i
< n
- 1; i
++)
796 /* rotate n first stack elements to the top
797 I1 ... In -> In I1 ... I(n-1) [top is right]
799 ST_FUNC
void vrott(int n
)
804 /* push a symbol value of TYPE */
805 static inline void vpushsym(CType
*type
, Sym
*sym
)
809 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
813 /* Return a static symbol pointing to a section */
814 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
820 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
821 sym
->type
.ref
= type
->ref
;
822 sym
->r
= VT_CONST
| VT_SYM
;
823 put_extern_sym(sym
, sec
, offset
, size
);
827 /* push a reference to a section offset by adding a dummy symbol */
828 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
830 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
833 /* define a new external reference to a symbol 'v' of type 'u' */
834 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
840 /* push forward reference */
841 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
842 s
->type
.ref
= type
->ref
;
843 s
->r
= r
| VT_CONST
| VT_SYM
;
848 /* Merge some storage attributes. */
849 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
852 if ((sym
->type
.t
& VT_BTYPE
) == VT_VOID
) /* from asm */
854 else if (!is_compatible_types(&sym
->type
, type
))
855 tcc_error("incompatible types for redefinition of '%s'",
856 get_tok_str(sym
->v
, NULL
));
859 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
860 tcc_error("incompatible dll linkage for redefinition of '%s'",
861 get_tok_str(sym
->v
, NULL
));
863 sym
->a
.dllexport
|= ad
->a
.dllexport
;
864 sym
->a
.weak
|= ad
->a
.weak
;
865 if (ad
->a
.visibility
) {
866 int vis
= sym
->a
.visibility
;
867 int vis2
= ad
->a
.visibility
;
868 if (vis
== STV_DEFAULT
)
870 else if (vis2
!= STV_DEFAULT
)
871 vis
= (vis
< vis2
) ? vis
: vis2
;
872 sym
->a
.visibility
= vis
;
875 sym
->a
.aligned
= ad
->a
.aligned
;
877 sym
->asm_label
= ad
->asm_label
;
881 /* define a new external reference to a symbol 'v' */
882 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
887 /* push forward reference */
888 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
889 s
->type
.t
|= VT_EXTERN
;
893 if (s
->type
.ref
== func_old_type
.ref
) {
894 s
->type
.ref
= type
->ref
;
895 s
->r
= r
| VT_CONST
| VT_SYM
;
896 s
->type
.t
|= VT_EXTERN
;
898 patch_storage(s
, ad
, type
);
903 /* push a reference to global symbol v */
904 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
906 vpushsym(type
, external_global_sym(v
, type
, 0));
909 /* save registers up to (vtop - n) stack entry */
910 ST_FUNC
void save_regs(int n
)
913 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
917 /* save r to the memory stack, and mark it as being free */
918 ST_FUNC
void save_reg(int r
)
920 save_reg_upstack(r
, 0);
923 /* save r to the memory stack, and mark it as being free,
924 if seen up to (vtop - n) stack entry */
925 ST_FUNC
void save_reg_upstack(int r
, int n
)
927 int l
, saved
, size
, align
;
931 if ((r
&= VT_VALMASK
) >= VT_CONST
)
936 /* modify all stack values */
939 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
940 if ((p
->r
& VT_VALMASK
) == r
||
941 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
942 /* must save value on stack if not already done */
944 /* NOTE: must reload 'r' because r might be equal to r2 */
945 r
= p
->r
& VT_VALMASK
;
946 /* store register in the stack */
948 if ((p
->r
& VT_LVAL
) ||
949 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
951 type
= &char_pointer_type
;
955 size
= type_size(type
, &align
);
956 loc
= (loc
- size
) & -align
;
958 sv
.r
= VT_LOCAL
| VT_LVAL
;
961 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
962 /* x86 specific: need to pop fp register ST0 if saved */
964 o(0xd8dd); /* fstp %st(0) */
968 /* special long long case */
969 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
977 /* mark that stack entry as being saved on the stack */
978 if (p
->r
& VT_LVAL
) {
979 /* also clear the bounded flag because the
980 relocation address of the function was stored in
982 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
984 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
992 #ifdef TCC_TARGET_ARM
993 /* find a register of class 'rc2' with at most one reference on stack.
994 * If none, call get_reg(rc) */
995 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1000 for(r
=0;r
<NB_REGS
;r
++) {
1001 if (reg_classes
[r
] & rc2
) {
1004 for(p
= vstack
; p
<= vtop
; p
++) {
1005 if ((p
->r
& VT_VALMASK
) == r
||
1006 (p
->r2
& VT_VALMASK
) == r
)
1017 /* find a free register of class 'rc'. If none, save one register */
1018 ST_FUNC
int get_reg(int rc
)
1023 /* find a free register */
1024 for(r
=0;r
<NB_REGS
;r
++) {
1025 if (reg_classes
[r
] & rc
) {
1028 for(p
=vstack
;p
<=vtop
;p
++) {
1029 if ((p
->r
& VT_VALMASK
) == r
||
1030 (p
->r2
& VT_VALMASK
) == r
)
1038 /* no register left : free the first one on the stack (VERY
1039 IMPORTANT to start from the bottom to ensure that we don't
1040 spill registers used in gen_opi()) */
1041 for(p
=vstack
;p
<=vtop
;p
++) {
1042 /* look at second register (if long long) */
1043 r
= p
->r2
& VT_VALMASK
;
1044 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1046 r
= p
->r
& VT_VALMASK
;
1047 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1053 /* Should never comes here */
1057 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1059 static void move_reg(int r
, int s
, int t
)
1073 /* get address of vtop (vtop MUST BE an lvalue) */
1074 ST_FUNC
void gaddrof(void)
1076 vtop
->r
&= ~VT_LVAL
;
1077 /* tricky: if saved lvalue, then we can go back to lvalue */
1078 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1079 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1084 #ifdef CONFIG_TCC_BCHECK
1085 /* generate lvalue bound code */
1086 static void gbound(void)
1091 vtop
->r
&= ~VT_MUSTBOUND
;
1092 /* if lvalue, then use checking code before dereferencing */
1093 if (vtop
->r
& VT_LVAL
) {
1094 /* if not VT_BOUNDED value, then make one */
1095 if (!(vtop
->r
& VT_BOUNDED
)) {
1096 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1097 /* must save type because we must set it to int to get pointer */
1099 vtop
->type
.t
= VT_PTR
;
1102 gen_bounded_ptr_add();
1103 vtop
->r
|= lval_type
;
1106 /* then check for dereferencing */
1107 gen_bounded_ptr_deref();
1112 static void incr_bf_adr(int o
)
1114 vtop
->type
= char_pointer_type
;
1118 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1119 | (VT_BYTE
|VT_UNSIGNED
);
1120 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1121 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1124 /* single-byte load mode for packed or otherwise unaligned bitfields */
1125 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1128 save_reg_upstack(vtop
->r
, 1);
1129 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1130 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1139 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1141 vpushi((1 << n
) - 1), gen_op('&');
1144 vpushi(bits
), gen_op(TOK_SHL
);
1147 bits
+= n
, bit_size
-= n
, o
= 1;
1150 if (!(type
->t
& VT_UNSIGNED
)) {
1151 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1152 vpushi(n
), gen_op(TOK_SHL
);
1153 vpushi(n
), gen_op(TOK_SAR
);
1157 /* single-byte store mode for packed or otherwise unaligned bitfields */
1158 static void store_packed_bf(int bit_pos
, int bit_size
)
1160 int bits
, n
, o
, m
, c
;
1162 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1164 save_reg_upstack(vtop
->r
, 1);
1165 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1167 incr_bf_adr(o
); // X B
1169 c
? vdup() : gv_dup(); // B V X
1172 vpushi(bits
), gen_op(TOK_SHR
);
1174 vpushi(bit_pos
), gen_op(TOK_SHL
);
1179 m
= ((1 << n
) - 1) << bit_pos
;
1180 vpushi(m
), gen_op('&'); // X B V1
1181 vpushv(vtop
-1); // X B V1 B
1182 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1183 gen_op('&'); // X B V1 B1
1184 gen_op('|'); // X B V2
1186 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1187 vstore(), vpop(); // X B
1188 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1193 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1196 if (0 == sv
->type
.ref
)
1198 t
= sv
->type
.ref
->auxtype
;
1199 if (t
!= -1 && t
!= VT_STRUCT
) {
1200 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1201 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1206 /* store vtop a register belonging to class 'rc'. lvalues are
1207 converted to values. Cannot be used if cannot be converted to
1208 register value (such as structures). */
1209 ST_FUNC
int gv(int rc
)
1211 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1213 /* NOTE: get_reg can modify vstack[] */
1214 if (vtop
->type
.t
& VT_BITFIELD
) {
1217 bit_pos
= BIT_POS(vtop
->type
.t
);
1218 bit_size
= BIT_SIZE(vtop
->type
.t
);
1219 /* remove bit field info to avoid loops */
1220 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1223 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1224 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1225 type
.t
|= VT_UNSIGNED
;
1227 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1229 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1234 if (r
== VT_STRUCT
) {
1235 load_packed_bf(&type
, bit_pos
, bit_size
);
1237 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1238 /* cast to int to propagate signedness in following ops */
1240 /* generate shifts */
1241 vpushi(bits
- (bit_pos
+ bit_size
));
1243 vpushi(bits
- bit_size
);
1244 /* NOTE: transformed to SHR if unsigned */
1249 if (is_float(vtop
->type
.t
) &&
1250 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1251 unsigned long offset
;
1252 /* CPUs usually cannot use float constants, so we store them
1253 generically in data segment */
1254 size
= type_size(&vtop
->type
, &align
);
1256 size
= 0, align
= 1;
1257 offset
= section_add(data_section
, size
, align
);
1258 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1260 init_putv(&vtop
->type
, data_section
, offset
);
1263 #ifdef CONFIG_TCC_BCHECK
1264 if (vtop
->r
& VT_MUSTBOUND
)
1268 r
= vtop
->r
& VT_VALMASK
;
1269 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1270 #ifndef TCC_TARGET_ARM64
1273 #ifdef TCC_TARGET_X86_64
1274 else if (rc
== RC_FRET
)
1278 /* need to reload if:
1280 - lvalue (need to dereference pointer)
1281 - already a register, but not in the right class */
1283 || (vtop
->r
& VT_LVAL
)
1284 || !(reg_classes
[r
] & rc
)
1286 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1287 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1289 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1295 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1296 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1298 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1299 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1300 unsigned long long ll
;
1302 int r2
, original_type
;
1303 original_type
= vtop
->type
.t
;
1304 /* two register type load : expand to two words
1307 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1310 vtop
->c
.i
= ll
; /* first word */
1312 vtop
->r
= r
; /* save register value */
1313 vpushi(ll
>> 32); /* second word */
1316 if (vtop
->r
& VT_LVAL
) {
1317 /* We do not want to modifier the long long
1318 pointer here, so the safest (and less
1319 efficient) is to save all the other registers
1320 in the stack. XXX: totally inefficient. */
1324 /* lvalue_save: save only if used further down the stack */
1325 save_reg_upstack(vtop
->r
, 1);
1327 /* load from memory */
1328 vtop
->type
.t
= load_type
;
1331 vtop
[-1].r
= r
; /* save register value */
1332 /* increment pointer to get second word */
1333 vtop
->type
.t
= addr_type
;
1338 vtop
->type
.t
= load_type
;
1340 /* move registers */
1343 vtop
[-1].r
= r
; /* save register value */
1344 vtop
->r
= vtop
[-1].r2
;
1346 /* Allocate second register. Here we rely on the fact that
1347 get_reg() tries first to free r2 of an SValue. */
1351 /* write second register */
1353 vtop
->type
.t
= original_type
;
1354 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1356 /* lvalue of scalar type : need to use lvalue type
1357 because of possible cast */
1360 /* compute memory access type */
1361 if (vtop
->r
& VT_LVAL_BYTE
)
1363 else if (vtop
->r
& VT_LVAL_SHORT
)
1365 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1369 /* restore wanted type */
1372 /* one register type load */
1377 #ifdef TCC_TARGET_C67
1378 /* uses register pairs for doubles */
1379 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1386 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1387 ST_FUNC
void gv2(int rc1
, int rc2
)
1391 /* generate more generic register first. But VT_JMP or VT_CMP
1392 values must be generated first in all cases to avoid possible
1394 v
= vtop
[0].r
& VT_VALMASK
;
1395 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1400 /* test if reload is needed for first register */
1401 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1411 /* test if reload is needed for first register */
1412 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1418 #ifndef TCC_TARGET_ARM64
1419 /* wrapper around RC_FRET to return a register by type */
1420 static int rc_fret(int t
)
1422 #ifdef TCC_TARGET_X86_64
1423 if (t
== VT_LDOUBLE
) {
1431 /* wrapper around REG_FRET to return a register by type */
1432 static int reg_fret(int t
)
1434 #ifdef TCC_TARGET_X86_64
1435 if (t
== VT_LDOUBLE
) {
1443 /* expand 64bit on stack in two ints */
1444 static void lexpand(void)
1447 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1448 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1449 if (v
== VT_CONST
) {
1452 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1458 vtop
[0].r
= vtop
[-1].r2
;
1459 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1461 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1465 #ifdef TCC_TARGET_ARM
1466 /* expand long long on stack */
1467 ST_FUNC
void lexpand_nr(void)
1471 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1473 vtop
->r2
= VT_CONST
;
1474 vtop
->type
.t
= VT_INT
| u
;
1475 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1476 if (v
== VT_CONST
) {
1477 vtop
[-1].c
.i
= vtop
->c
.i
;
1478 vtop
->c
.i
= vtop
->c
.i
>> 32;
1480 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1482 vtop
->r
= vtop
[-1].r
;
1483 } else if (v
> VT_CONST
) {
1487 vtop
->r
= vtop
[-1].r2
;
1488 vtop
[-1].r2
= VT_CONST
;
1489 vtop
[-1].type
.t
= VT_INT
| u
;
1494 /* build a long long from two ints */
1495 static void lbuild(int t
)
1497 gv2(RC_INT
, RC_INT
);
1498 vtop
[-1].r2
= vtop
[0].r
;
1499 vtop
[-1].type
.t
= t
;
1504 /* convert stack entry to register and duplicate its value in another
1506 static void gv_dup(void)
1513 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1514 if (t
& VT_BITFIELD
) {
1524 /* stack: H L L1 H1 */
1534 /* duplicate value */
1539 #ifdef TCC_TARGET_X86_64
1540 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1550 load(r1
, &sv
); /* move r to r1 */
1552 /* duplicates value */
1558 /* Generate value test
1560 * Generate a test for any value (jump, comparison and integers) */
1561 ST_FUNC
int gvtst(int inv
, int t
)
1563 int v
= vtop
->r
& VT_VALMASK
;
1564 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1568 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1569 /* constant jmp optimization */
1570 if ((vtop
->c
.i
!= 0) != inv
)
1575 return gtst(inv
, t
);
1579 /* generate CPU independent (unsigned) long long operations */
1580 static void gen_opl(int op
)
1582 int t
, a
, b
, op1
, c
, i
;
1584 unsigned short reg_iret
= REG_IRET
;
1585 unsigned short reg_lret
= REG_LRET
;
1591 func
= TOK___divdi3
;
1594 func
= TOK___udivdi3
;
1597 func
= TOK___moddi3
;
1600 func
= TOK___umoddi3
;
1607 /* call generic long long function */
1608 vpush_global_sym(&func_old_type
, func
);
1613 vtop
->r2
= reg_lret
;
1621 //pv("gen_opl A",0,2);
1627 /* stack: L1 H1 L2 H2 */
1632 vtop
[-2] = vtop
[-3];
1635 /* stack: H1 H2 L1 L2 */
1636 //pv("gen_opl B",0,4);
1642 /* stack: H1 H2 L1 L2 ML MH */
1645 /* stack: ML MH H1 H2 L1 L2 */
1649 /* stack: ML MH H1 L2 H2 L1 */
1654 /* stack: ML MH M1 M2 */
1657 } else if (op
== '+' || op
== '-') {
1658 /* XXX: add non carry method too (for MIPS or alpha) */
1664 /* stack: H1 H2 (L1 op L2) */
1667 gen_op(op1
+ 1); /* TOK_xxxC2 */
1670 /* stack: H1 H2 (L1 op L2) */
1673 /* stack: (L1 op L2) H1 H2 */
1675 /* stack: (L1 op L2) (H1 op H2) */
1683 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1684 t
= vtop
[-1].type
.t
;
1688 /* stack: L H shift */
1690 /* constant: simpler */
1691 /* NOTE: all comments are for SHL. the other cases are
1692 done by swapping words */
1703 if (op
!= TOK_SAR
) {
1736 /* XXX: should provide a faster fallback on x86 ? */
1739 func
= TOK___ashrdi3
;
1742 func
= TOK___lshrdi3
;
1745 func
= TOK___ashldi3
;
1751 /* compare operations */
1757 /* stack: L1 H1 L2 H2 */
1759 vtop
[-1] = vtop
[-2];
1761 /* stack: L1 L2 H1 H2 */
1764 /* when values are equal, we need to compare low words. since
1765 the jump is inverted, we invert the test too. */
1768 else if (op1
== TOK_GT
)
1770 else if (op1
== TOK_ULT
)
1772 else if (op1
== TOK_UGT
)
1782 /* generate non equal test */
1788 /* compare low. Always unsigned */
1792 else if (op1
== TOK_LE
)
1794 else if (op1
== TOK_GT
)
1796 else if (op1
== TOK_GE
)
1807 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
1809 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
1810 return (a
^ b
) >> 63 ? -x
: x
;
1813 static int gen_opic_lt(uint64_t a
, uint64_t b
)
1815 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
1818 /* handle integer constant optimizations and various machine
1820 static void gen_opic(int op
)
1822 SValue
*v1
= vtop
- 1;
1824 int t1
= v1
->type
.t
& VT_BTYPE
;
1825 int t2
= v2
->type
.t
& VT_BTYPE
;
1826 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1827 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1828 uint64_t l1
= c1
? v1
->c
.i
: 0;
1829 uint64_t l2
= c2
? v2
->c
.i
: 0;
1830 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1832 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1833 l1
= ((uint32_t)l1
|
1834 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1835 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1836 l2
= ((uint32_t)l2
|
1837 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1841 case '+': l1
+= l2
; break;
1842 case '-': l1
-= l2
; break;
1843 case '&': l1
&= l2
; break;
1844 case '^': l1
^= l2
; break;
1845 case '|': l1
|= l2
; break;
1846 case '*': l1
*= l2
; break;
1853 /* if division by zero, generate explicit division */
1856 tcc_error("division by zero in constant");
1860 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1861 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1862 case TOK_UDIV
: l1
= l1
/ l2
; break;
1863 case TOK_UMOD
: l1
= l1
% l2
; break;
1866 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1867 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1869 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1872 case TOK_ULT
: l1
= l1
< l2
; break;
1873 case TOK_UGE
: l1
= l1
>= l2
; break;
1874 case TOK_EQ
: l1
= l1
== l2
; break;
1875 case TOK_NE
: l1
= l1
!= l2
; break;
1876 case TOK_ULE
: l1
= l1
<= l2
; break;
1877 case TOK_UGT
: l1
= l1
> l2
; break;
1878 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1879 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1880 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1881 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1883 case TOK_LAND
: l1
= l1
&& l2
; break;
1884 case TOK_LOR
: l1
= l1
|| l2
; break;
1888 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1889 l1
= ((uint32_t)l1
|
1890 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1894 /* if commutative ops, put c2 as constant */
1895 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1896 op
== '|' || op
== '*')) {
1898 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1899 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1901 if (!const_wanted
&&
1903 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1904 (l1
== -1 && op
== TOK_SAR
))) {
1905 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1907 } else if (!const_wanted
&&
1908 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1910 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
1911 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1912 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1917 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1920 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1921 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1924 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
1925 /* filter out NOP operations like x*1, x-0, x&-1... */
1927 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1928 /* try to use shifts instead of muls or divs */
1929 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1938 else if (op
== TOK_PDIV
)
1944 } else if (c2
&& (op
== '+' || op
== '-') &&
1945 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1946 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1947 /* symbol + constant case */
1951 /* The backends can't always deal with addends to symbols
1952 larger than +-1<<31. Don't construct such. */
1959 /* call low level op generator */
1960 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
1961 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
1969 /* generate a floating point operation with constant propagation */
1970 static void gen_opif(int op
)
1974 #if defined _MSC_VER && defined _AMD64_
1975 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
1982 /* currently, we cannot do computations with forward symbols */
1983 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1984 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1986 if (v1
->type
.t
== VT_FLOAT
) {
1989 } else if (v1
->type
.t
== VT_DOUBLE
) {
1997 /* NOTE: we only do constant propagation if finite number (not
1998 NaN or infinity) (ANSI spec) */
1999 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2003 case '+': f1
+= f2
; break;
2004 case '-': f1
-= f2
; break;
2005 case '*': f1
*= f2
; break;
2009 tcc_error("division by zero in constant");
2014 /* XXX: also handles tests ? */
2018 /* XXX: overflow test ? */
2019 if (v1
->type
.t
== VT_FLOAT
) {
2021 } else if (v1
->type
.t
== VT_DOUBLE
) {
2033 static int pointed_size(CType
*type
)
2036 return type_size(pointed_type(type
), &align
);
2039 static void vla_runtime_pointed_size(CType
*type
)
2042 vla_runtime_type_size(pointed_type(type
), &align
);
2045 static inline int is_null_pointer(SValue
*p
)
2047 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2049 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2050 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2051 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2052 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
2055 static inline int is_integer_btype(int bt
)
2057 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2058 bt
== VT_INT
|| bt
== VT_LLONG
);
2061 /* check types for comparison or subtraction of pointers */
2062 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2064 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2067 /* null pointers are accepted for all comparisons as gcc */
2068 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2072 bt1
= type1
->t
& VT_BTYPE
;
2073 bt2
= type2
->t
& VT_BTYPE
;
2074 /* accept comparison between pointer and integer with a warning */
2075 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2076 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2077 tcc_warning("comparison between pointer and integer");
2081 /* both must be pointers or implicit function pointers */
2082 if (bt1
== VT_PTR
) {
2083 type1
= pointed_type(type1
);
2084 } else if (bt1
!= VT_FUNC
)
2085 goto invalid_operands
;
2087 if (bt2
== VT_PTR
) {
2088 type2
= pointed_type(type2
);
2089 } else if (bt2
!= VT_FUNC
) {
2091 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2093 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2094 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2098 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2099 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2100 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2101 /* gcc-like error if '-' is used */
2103 goto invalid_operands
;
2105 tcc_warning("comparison of distinct pointer types lacks a cast");
2109 /* generic gen_op: handles types problems */
2110 ST_FUNC
void gen_op(int op
)
2112 int u
, t1
, t2
, bt1
, bt2
, t
;
2116 t1
= vtop
[-1].type
.t
;
2117 t2
= vtop
[0].type
.t
;
2118 bt1
= t1
& VT_BTYPE
;
2119 bt2
= t2
& VT_BTYPE
;
2121 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2122 tcc_error("operation on a struct");
2123 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2124 if (bt2
== VT_FUNC
) {
2125 mk_pointer(&vtop
->type
);
2128 if (bt1
== VT_FUNC
) {
2130 mk_pointer(&vtop
->type
);
2135 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2136 /* at least one operand is a pointer */
2137 /* relational op: must be both pointers */
2138 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2139 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2140 /* pointers are handled are unsigned */
2142 t
= VT_LLONG
| VT_UNSIGNED
;
2144 t
= VT_INT
| VT_UNSIGNED
;
2148 /* if both pointers, then it must be the '-' op */
2149 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2151 tcc_error("cannot use pointers here");
2152 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2153 /* XXX: check that types are compatible */
2154 if (vtop
[-1].type
.t
& VT_VLA
) {
2155 vla_runtime_pointed_size(&vtop
[-1].type
);
2157 vpushi(pointed_size(&vtop
[-1].type
));
2161 vtop
->type
.t
= ptrdiff_type
.t
;
2165 /* exactly one pointer : must be '+' or '-'. */
2166 if (op
!= '-' && op
!= '+')
2167 tcc_error("cannot use pointers here");
2168 /* Put pointer as first operand */
2169 if (bt2
== VT_PTR
) {
2171 t
= t1
, t1
= t2
, t2
= t
;
2174 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2175 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2178 type1
= vtop
[-1].type
;
2179 type1
.t
&= ~VT_ARRAY
;
2180 if (vtop
[-1].type
.t
& VT_VLA
)
2181 vla_runtime_pointed_size(&vtop
[-1].type
);
2183 u
= pointed_size(&vtop
[-1].type
);
2185 tcc_error("unknown array element size");
2189 /* XXX: cast to int ? (long long case) */
2195 /* #ifdef CONFIG_TCC_BCHECK
2196 The main reason to removing this code:
2203 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2204 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2206 When this code is on. then the output looks like
2208 v+(i-j) = 0xbff84000
2210 /* if evaluating constant expression, no code should be
2211 generated, so no bound check */
2212 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2213 /* if bounded pointers, we generate a special code to
2220 gen_bounded_ptr_add();
2226 /* put again type if gen_opic() swaped operands */
2229 } else if (is_float(bt1
) || is_float(bt2
)) {
2230 /* compute bigger type and do implicit casts */
2231 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2233 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2238 /* floats can only be used for a few operations */
2239 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2240 (op
< TOK_ULT
|| op
> TOK_GT
))
2241 tcc_error("invalid operands for binary operation");
2243 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2244 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2245 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2247 t
|= (VT_LONG
& t1
);
2249 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2250 /* cast to biggest op */
2251 t
= VT_LLONG
| VT_LONG
;
2252 if (bt1
== VT_LLONG
)
2254 if (bt2
== VT_LLONG
)
2256 /* convert to unsigned if it does not fit in a long long */
2257 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2258 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2262 /* integer operations */
2263 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2264 /* convert to unsigned if it does not fit in an integer */
2265 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2266 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2269 /* XXX: currently, some unsigned operations are explicit, so
2270 we modify them here */
2271 if (t
& VT_UNSIGNED
) {
2278 else if (op
== TOK_LT
)
2280 else if (op
== TOK_GT
)
2282 else if (op
== TOK_LE
)
2284 else if (op
== TOK_GE
)
2292 /* special case for shifts and long long: we keep the shift as
2294 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2301 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2302 /* relational op: the result is an int */
2303 vtop
->type
.t
= VT_INT
;
2308 // Make sure that we have converted to an rvalue:
2309 if (vtop
->r
& VT_LVAL
)
2310 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2313 #ifndef TCC_TARGET_ARM
2314 /* generic itof for unsigned long long case */
2315 static void gen_cvt_itof1(int t
)
2317 #ifdef TCC_TARGET_ARM64
2320 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2321 (VT_LLONG
| VT_UNSIGNED
)) {
2324 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2325 #if LDOUBLE_SIZE != 8
2326 else if (t
== VT_LDOUBLE
)
2327 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2330 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2334 vtop
->r
= reg_fret(t
);
2342 /* generic ftoi for unsigned long long case */
2343 static void gen_cvt_ftoi1(int t
)
2345 #ifdef TCC_TARGET_ARM64
2350 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2351 /* not handled natively */
2352 st
= vtop
->type
.t
& VT_BTYPE
;
2354 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2355 #if LDOUBLE_SIZE != 8
2356 else if (st
== VT_LDOUBLE
)
2357 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2360 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2365 vtop
->r2
= REG_LRET
;
2372 /* force char or short cast */
2373 static void force_charshort_cast(int t
)
2377 /* cannot cast static initializers */
2378 if (STATIC_DATA_WANTED
)
2382 /* XXX: add optimization if lvalue : just change type and offset */
2387 if (t
& VT_UNSIGNED
) {
2388 vpushi((1 << bits
) - 1);
2391 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2397 /* result must be signed or the SAR is converted to an SHL
2398 This was not the case when "t" was a signed short
2399 and the last value on the stack was an unsigned int */
2400 vtop
->type
.t
&= ~VT_UNSIGNED
;
2406 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2407 static void gen_cast_s(int t
)
2415 static void gen_cast(CType
*type
)
2417 int sbt
, dbt
, sf
, df
, c
, p
;
2419 /* special delayed cast for char/short */
2420 /* XXX: in some cases (multiple cascaded casts), it may still
2422 if (vtop
->r
& VT_MUSTCAST
) {
2423 vtop
->r
&= ~VT_MUSTCAST
;
2424 force_charshort_cast(vtop
->type
.t
);
2427 /* bitfields first get cast to ints */
2428 if (vtop
->type
.t
& VT_BITFIELD
) {
2432 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2433 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2438 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2439 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2440 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2441 c
&= dbt
!= VT_LDOUBLE
;
2444 /* constant case: we can do it now */
2445 /* XXX: in ISOC, cannot do it if error in convert */
2446 if (sbt
== VT_FLOAT
)
2447 vtop
->c
.ld
= vtop
->c
.f
;
2448 else if (sbt
== VT_DOUBLE
)
2449 vtop
->c
.ld
= vtop
->c
.d
;
2452 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2453 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2454 vtop
->c
.ld
= vtop
->c
.i
;
2456 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2458 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2459 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2461 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2464 if (dbt
== VT_FLOAT
)
2465 vtop
->c
.f
= (float)vtop
->c
.ld
;
2466 else if (dbt
== VT_DOUBLE
)
2467 vtop
->c
.d
= (double)vtop
->c
.ld
;
2468 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2469 vtop
->c
.i
= vtop
->c
.ld
;
2470 } else if (sf
&& dbt
== VT_BOOL
) {
2471 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2474 vtop
->c
.i
= vtop
->c
.ld
;
2475 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2477 else if (sbt
& VT_UNSIGNED
)
2478 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2480 else if (sbt
== VT_PTR
)
2483 else if (sbt
!= VT_LLONG
)
2484 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2485 -(vtop
->c
.i
& 0x80000000));
2487 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2489 else if (dbt
== VT_BOOL
)
2490 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2492 else if (dbt
== VT_PTR
)
2495 else if (dbt
!= VT_LLONG
) {
2496 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2497 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2500 if (!(dbt
& VT_UNSIGNED
))
2501 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2504 } else if (p
&& dbt
== VT_BOOL
) {
2508 /* non constant case: generate code */
2510 /* convert from fp to fp */
2513 /* convert int to fp */
2516 /* convert fp to int */
2517 if (dbt
== VT_BOOL
) {
2521 /* we handle char/short/etc... with generic code */
2522 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2523 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2527 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2528 /* additional cast for char/short... */
2534 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2535 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2536 /* scalar to long long */
2537 /* machine independent conversion */
2539 /* generate high word */
2540 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2544 if (sbt
== VT_PTR
) {
2545 /* cast from pointer to int before we apply
2546 shift operation, which pointers don't support*/
2553 /* patch second register */
2554 vtop
[-1].r2
= vtop
->r
;
2558 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2559 (dbt
& VT_BTYPE
) == VT_PTR
||
2560 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2561 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2562 (sbt
& VT_BTYPE
) != VT_PTR
&&
2563 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2564 /* need to convert from 32bit to 64bit */
2566 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2567 #if defined(TCC_TARGET_ARM64)
2569 #elif defined(TCC_TARGET_X86_64)
2571 /* x86_64 specific: movslq */
2573 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2580 } else if (dbt
== VT_BOOL
) {
2581 /* scalar to bool */
2584 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2585 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2586 if (sbt
== VT_PTR
) {
2587 vtop
->type
.t
= VT_INT
;
2588 tcc_warning("nonportable conversion from pointer to char/short");
2590 force_charshort_cast(dbt
);
2592 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2594 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2595 /* from long long: just take low order word */
2599 /* if lvalue and single word type, nothing to do because
2600 the lvalue already contains the real type size (see
2601 VT_LVAL_xxx constants) */
2605 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2606 /* if we are casting between pointer types,
2607 we must update the VT_LVAL_xxx size */
2608 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2609 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2614 /* return type size as known at compile time. Put alignment at 'a' */
2615 ST_FUNC
int type_size(CType
*type
, int *a
)
2620 bt
= type
->t
& VT_BTYPE
;
2621 if (bt
== VT_STRUCT
) {
2626 } else if (bt
== VT_PTR
) {
2627 if (type
->t
& VT_ARRAY
) {
2631 ts
= type_size(&s
->type
, a
);
2633 if (ts
< 0 && s
->c
< 0)
2641 } else if (IS_ENUM(type
->t
) && type
->ref
->c
== -1) {
2642 return -1; /* incomplete enum */
2643 } else if (bt
== VT_LDOUBLE
) {
2645 return LDOUBLE_SIZE
;
2646 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2647 #ifdef TCC_TARGET_I386
2648 #ifdef TCC_TARGET_PE
2653 #elif defined(TCC_TARGET_ARM)
2663 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2666 } else if (bt
== VT_SHORT
) {
2669 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2673 /* char, void, function, _Bool */
2679 /* push type size as known at runtime time on top of value stack. Put
2681 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2683 if (type
->t
& VT_VLA
) {
2684 type_size(&type
->ref
->type
, a
);
2685 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2687 vpushi(type_size(type
, a
));
2691 static void vla_sp_restore(void) {
2692 if (vlas_in_scope
) {
2693 gen_vla_sp_restore(vla_sp_loc
);
2697 static void vla_sp_restore_root(void) {
2698 if (vlas_in_scope
) {
2699 gen_vla_sp_restore(vla_sp_root_loc
);
2703 /* return the pointed type of t */
2704 static inline CType
*pointed_type(CType
*type
)
2706 return &type
->ref
->type
;
2709 /* modify type so that its it is a pointer to type. */
2710 ST_FUNC
void mk_pointer(CType
*type
)
2713 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2714 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2718 /* compare function types. OLD functions match any new functions */
2719 static int is_compatible_func(CType
*type1
, CType
*type2
)
2725 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2727 /* check func_call */
2728 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2730 /* XXX: not complete */
2731 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2733 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2735 while (s1
!= NULL
) {
2738 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2748 /* return true if type1 and type2 are the same. If unqualified is
2749 true, qualifiers on the types are ignored.
2751 - enums are not checked as gcc __builtin_types_compatible_p ()
2753 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2757 t1
= type1
->t
& VT_TYPE
;
2758 t2
= type2
->t
& VT_TYPE
;
2760 /* strip qualifiers before comparing */
2761 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2762 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2765 /* Default Vs explicit signedness only matters for char */
2766 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2770 /* XXX: bitfields ? */
2773 /* test more complicated cases */
2774 bt1
= t1
& VT_BTYPE
;
2775 if (bt1
== VT_PTR
) {
2776 type1
= pointed_type(type1
);
2777 type2
= pointed_type(type2
);
2778 return is_compatible_types(type1
, type2
);
2779 } else if (bt1
== VT_STRUCT
) {
2780 return (type1
->ref
== type2
->ref
);
2781 } else if (bt1
== VT_FUNC
) {
2782 return is_compatible_func(type1
, type2
);
2788 /* return true if type1 and type2 are exactly the same (including
2791 static int is_compatible_types(CType
*type1
, CType
*type2
)
2793 return compare_types(type1
,type2
,0);
2796 /* return true if type1 and type2 are the same (ignoring qualifiers).
2798 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2800 return compare_types(type1
,type2
,1);
2803 /* print a type. If 'varstr' is not NULL, then the variable is also
2804 printed in the type */
2806 /* XXX: add array and function pointers */
2807 static void type_to_str(char *buf
, int buf_size
,
2808 CType
*type
, const char *varstr
)
2820 pstrcat(buf
, buf_size
, "extern ");
2822 pstrcat(buf
, buf_size
, "static ");
2824 pstrcat(buf
, buf_size
, "typedef ");
2826 pstrcat(buf
, buf_size
, "inline ");
2827 if (t
& VT_VOLATILE
)
2828 pstrcat(buf
, buf_size
, "volatile ");
2829 if (t
& VT_CONSTANT
)
2830 pstrcat(buf
, buf_size
, "const ");
2832 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2833 || ((t
& VT_UNSIGNED
)
2834 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2837 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2839 buf_size
-= strlen(buf
);
2874 tstr
= "long double";
2876 pstrcat(buf
, buf_size
, tstr
);
2883 pstrcat(buf
, buf_size
, tstr
);
2884 v
= type
->ref
->v
& ~SYM_STRUCT
;
2885 if (v
>= SYM_FIRST_ANOM
)
2886 pstrcat(buf
, buf_size
, "<anonymous>");
2888 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2892 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2893 pstrcat(buf
, buf_size
, "(");
2895 while (sa
!= NULL
) {
2896 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2897 pstrcat(buf
, buf_size
, buf1
);
2900 pstrcat(buf
, buf_size
, ", ");
2902 pstrcat(buf
, buf_size
, ")");
2907 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2908 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2911 pstrcpy(buf1
, sizeof(buf1
), "*");
2912 if (t
& VT_CONSTANT
)
2913 pstrcat(buf1
, buf_size
, "const ");
2914 if (t
& VT_VOLATILE
)
2915 pstrcat(buf1
, buf_size
, "volatile ");
2917 pstrcat(buf1
, sizeof(buf1
), varstr
);
2918 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2922 pstrcat(buf
, buf_size
, " ");
2923 pstrcat(buf
, buf_size
, varstr
);
2928 /* verify type compatibility to store vtop in 'dt' type, and generate
2930 static void gen_assign_cast(CType
*dt
)
2932 CType
*st
, *type1
, *type2
;
2933 char buf1
[256], buf2
[256];
2936 st
= &vtop
->type
; /* source type */
2937 dbt
= dt
->t
& VT_BTYPE
;
2938 sbt
= st
->t
& VT_BTYPE
;
2939 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
2940 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
2942 It is Ok if both are void
2948 gcc accepts this program
2951 tcc_error("cannot cast from/to void");
2953 if (dt
->t
& VT_CONSTANT
)
2954 tcc_warning("assignment of read-only location");
2957 /* special cases for pointers */
2958 /* '0' can also be a pointer */
2959 if (is_null_pointer(vtop
))
2961 /* accept implicit pointer to integer cast with warning */
2962 if (is_integer_btype(sbt
)) {
2963 tcc_warning("assignment makes pointer from integer without a cast");
2966 type1
= pointed_type(dt
);
2967 /* a function is implicitly a function pointer */
2968 if (sbt
== VT_FUNC
) {
2969 if ((type1
->t
& VT_BTYPE
) != VT_VOID
&&
2970 !is_compatible_types(pointed_type(dt
), st
))
2971 tcc_warning("assignment from incompatible pointer type");
2976 type2
= pointed_type(st
);
2977 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2978 (type2
->t
& VT_BTYPE
) == VT_VOID
) {
2979 /* void * can match anything */
2981 //printf("types %08x %08x\n", type1->t, type2->t);
2982 /* exact type match, except for qualifiers */
2983 if (!is_compatible_unqualified_types(type1
, type2
)) {
2984 /* Like GCC don't warn by default for merely changes
2985 in pointer target signedness. Do warn for different
2986 base types, though, in particular for unsigned enums
2987 and signed int targets. */
2988 if ((type1
->t
& (VT_BTYPE
|VT_LONG
)) != (type2
->t
& (VT_BTYPE
|VT_LONG
))
2989 || IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)
2991 tcc_warning("assignment from incompatible pointer type");
2994 /* check const and volatile */
2995 if ((!(type1
->t
& VT_CONSTANT
) && (type2
->t
& VT_CONSTANT
)) ||
2996 (!(type1
->t
& VT_VOLATILE
) && (type2
->t
& VT_VOLATILE
)))
2997 tcc_warning("assignment discards qualifiers from pointer target type");
3003 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3004 tcc_warning("assignment makes integer from pointer without a cast");
3005 } else if (sbt
== VT_STRUCT
) {
3006 goto case_VT_STRUCT
;
3008 /* XXX: more tests */
3012 if (!is_compatible_unqualified_types(dt
, st
)) {
3014 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3015 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3016 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3024 /* store vtop in lvalue pushed on stack */
3025 ST_FUNC
void vstore(void)
3027 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3029 ft
= vtop
[-1].type
.t
;
3030 sbt
= vtop
->type
.t
& VT_BTYPE
;
3031 dbt
= ft
& VT_BTYPE
;
3032 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3033 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3034 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3035 /* optimize char/short casts */
3036 delayed_cast
= VT_MUSTCAST
;
3037 vtop
->type
.t
= ft
& VT_TYPE
;
3038 /* XXX: factorize */
3039 if (ft
& VT_CONSTANT
)
3040 tcc_warning("assignment of read-only location");
3043 if (!(ft
& VT_BITFIELD
))
3044 gen_assign_cast(&vtop
[-1].type
);
3047 if (sbt
== VT_STRUCT
) {
3048 /* if structure, only generate pointer */
3049 /* structure assignment : generate memcpy */
3050 /* XXX: optimize if small size */
3051 size
= type_size(&vtop
->type
, &align
);
3055 vtop
->type
.t
= VT_PTR
;
3058 /* address of memcpy() */
3061 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3062 else if(!(align
& 3))
3063 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3066 /* Use memmove, rather than memcpy, as dest and src may be same: */
3067 vpush_global_sym(&func_old_type
, TOK_memmove
);
3072 vtop
->type
.t
= VT_PTR
;
3078 /* leave source on stack */
3079 } else if (ft
& VT_BITFIELD
) {
3080 /* bitfield store handling */
3082 /* save lvalue as expression result (example: s.b = s.a = n;) */
3083 vdup(), vtop
[-1] = vtop
[-2];
3085 bit_pos
= BIT_POS(ft
);
3086 bit_size
= BIT_SIZE(ft
);
3087 /* remove bit field info to avoid loops */
3088 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3090 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3091 gen_cast(&vtop
[-1].type
);
3092 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3095 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3096 if (r
== VT_STRUCT
) {
3097 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3098 store_packed_bf(bit_pos
, bit_size
);
3100 unsigned long long mask
= (1ULL << bit_size
) - 1;
3101 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3103 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3106 vpushi((unsigned)mask
);
3113 /* duplicate destination */
3116 /* load destination, mask and or with source */
3117 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3118 vpushll(~(mask
<< bit_pos
));
3120 vpushi(~((unsigned)mask
<< bit_pos
));
3125 /* ... and discard */
3128 } else if (dbt
== VT_VOID
) {
3131 #ifdef CONFIG_TCC_BCHECK
3132 /* bound check case */
3133 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3142 #ifdef TCC_TARGET_X86_64
3143 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3145 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3150 r
= gv(rc
); /* generate value */
3151 /* if lvalue was saved on stack, must read it */
3152 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3154 t
= get_reg(RC_INT
);
3160 sv
.r
= VT_LOCAL
| VT_LVAL
;
3161 sv
.c
.i
= vtop
[-1].c
.i
;
3163 vtop
[-1].r
= t
| VT_LVAL
;
3165 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3167 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3168 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3170 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3171 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3173 vtop
[-1].type
.t
= load_type
;
3176 /* convert to int to increment easily */
3177 vtop
->type
.t
= addr_type
;
3183 vtop
[-1].type
.t
= load_type
;
3184 /* XXX: it works because r2 is spilled last ! */
3185 store(vtop
->r2
, vtop
- 1);
3191 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3192 vtop
->r
|= delayed_cast
;
3196 /* post defines POST/PRE add. c is the token ++ or -- */
3197 ST_FUNC
void inc(int post
, int c
)
3200 vdup(); /* save lvalue */
3202 gv_dup(); /* duplicate value */
3207 vpushi(c
- TOK_MID
);
3209 vstore(); /* store value */
3211 vpop(); /* if post op, return saved value */
3214 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3216 /* read the string */
3220 while (tok
== TOK_STR
) {
3221 /* XXX: add \0 handling too ? */
3222 cstr_cat(astr
, tokc
.str
.data
, -1);
3225 cstr_ccat(astr
, '\0');
3228 /* If I is >= 1 and a power of two, returns log2(i)+1.
3229 If I is 0 returns 0. */
3230 static int exact_log2p1(int i
)
3235 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3246 /* Parse __attribute__((...)) GNUC extension. */
3247 static void parse_attribute(AttributeDef
*ad
)
3253 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3258 while (tok
!= ')') {
3259 if (tok
< TOK_IDENT
)
3260 expect("attribute name");
3267 parse_mult_str(&astr
, "section name");
3268 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3275 parse_mult_str(&astr
, "alias(\"target\")");
3276 ad
->alias_target
= /* save string as token, for later */
3277 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3281 case TOK_VISIBILITY1
:
3282 case TOK_VISIBILITY2
:
3284 parse_mult_str(&astr
,
3285 "visibility(\"default|hidden|internal|protected\")");
3286 if (!strcmp (astr
.data
, "default"))
3287 ad
->a
.visibility
= STV_DEFAULT
;
3288 else if (!strcmp (astr
.data
, "hidden"))
3289 ad
->a
.visibility
= STV_HIDDEN
;
3290 else if (!strcmp (astr
.data
, "internal"))
3291 ad
->a
.visibility
= STV_INTERNAL
;
3292 else if (!strcmp (astr
.data
, "protected"))
3293 ad
->a
.visibility
= STV_PROTECTED
;
3295 expect("visibility(\"default|hidden|internal|protected\")");
3304 if (n
<= 0 || (n
& (n
- 1)) != 0)
3305 tcc_error("alignment must be a positive power of two");
3310 ad
->a
.aligned
= exact_log2p1(n
);
3311 if (n
!= 1 << (ad
->a
.aligned
- 1))
3312 tcc_error("alignment of %d is larger than implemented", n
);
3324 /* currently, no need to handle it because tcc does not
3325 track unused objects */
3329 /* currently, no need to handle it because tcc does not
3330 track unused objects */
3335 ad
->f
.func_call
= FUNC_CDECL
;
3340 ad
->f
.func_call
= FUNC_STDCALL
;
3342 #ifdef TCC_TARGET_I386
3352 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3358 ad
->f
.func_call
= FUNC_FASTCALLW
;
3365 ad
->attr_mode
= VT_LLONG
+ 1;
3368 ad
->attr_mode
= VT_BYTE
+ 1;
3371 ad
->attr_mode
= VT_SHORT
+ 1;
3375 ad
->attr_mode
= VT_INT
+ 1;
3378 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3385 ad
->a
.dllexport
= 1;
3388 ad
->a
.dllimport
= 1;
3391 if (tcc_state
->warn_unsupported
)
3392 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3393 /* skip parameters */
3395 int parenthesis
= 0;
3399 else if (tok
== ')')
3402 } while (parenthesis
&& tok
!= -1);
3415 static Sym
* find_field (CType
*type
, int v
)
3419 while ((s
= s
->next
) != NULL
) {
3420 if ((s
->v
& SYM_FIELD
) &&
3421 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3422 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3423 Sym
*ret
= find_field (&s
->type
, v
);
3433 static void struct_add_offset (Sym
*s
, int offset
)
3435 while ((s
= s
->next
) != NULL
) {
3436 if ((s
->v
& SYM_FIELD
) &&
3437 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3438 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3439 struct_add_offset(s
->type
.ref
, offset
);
3445 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3447 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3448 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3449 int pcc
= !tcc_state
->ms_bitfields
;
3450 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3457 prevbt
= VT_STRUCT
; /* make it never match */
3462 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3463 if (f
->type
.t
& VT_BITFIELD
)
3464 bit_size
= BIT_SIZE(f
->type
.t
);
3467 size
= type_size(&f
->type
, &align
);
3468 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3471 if (pcc
&& bit_size
== 0) {
3472 /* in pcc mode, packing does not affect zero-width bitfields */
3475 /* in pcc mode, attribute packed overrides if set. */
3476 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3479 /* pragma pack overrides align if lesser and packs bitfields always */
3482 if (pragma_pack
< align
)
3483 align
= pragma_pack
;
3484 /* in pcc mode pragma pack also overrides individual align */
3485 if (pcc
&& pragma_pack
< a
)
3489 /* some individual align was specified */
3493 if (type
->ref
->type
.t
== VT_UNION
) {
3494 if (pcc
&& bit_size
>= 0)
3495 size
= (bit_size
+ 7) >> 3;
3500 } else if (bit_size
< 0) {
3502 c
+= (bit_pos
+ 7) >> 3;
3503 c
= (c
+ align
- 1) & -align
;
3512 /* A bit-field. Layout is more complicated. There are two
3513 options: PCC (GCC) compatible and MS compatible */
3515 /* In PCC layout a bit-field is placed adjacent to the
3516 preceding bit-fields, except if:
3518 - an individual alignment was given
3519 - it would overflow its base type container and
3520 there is no packing */
3521 if (bit_size
== 0) {
3523 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3525 } else if (f
->a
.aligned
) {
3527 } else if (!packed
) {
3529 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3530 if (ofs
> size
/ align
)
3534 /* in pcc mode, long long bitfields have type int if they fit */
3535 if (size
== 8 && bit_size
<= 32)
3536 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3538 while (bit_pos
>= align
* 8)
3539 c
+= align
, bit_pos
-= align
* 8;
3542 /* In PCC layout named bit-fields influence the alignment
3543 of the containing struct using the base types alignment,
3544 except for packed fields (which here have correct align). */
3545 if (f
->v
& SYM_FIRST_ANOM
3546 // && bit_size // ??? gcc on ARM/rpi does that
3551 bt
= f
->type
.t
& VT_BTYPE
;
3552 if ((bit_pos
+ bit_size
> size
* 8)
3553 || (bit_size
> 0) == (bt
!= prevbt
)
3555 c
= (c
+ align
- 1) & -align
;
3558 /* In MS bitfield mode a bit-field run always uses
3559 at least as many bits as the underlying type.
3560 To start a new run it's also required that this
3561 or the last bit-field had non-zero width. */
3562 if (bit_size
|| prev_bit_size
)
3565 /* In MS layout the records alignment is normally
3566 influenced by the field, except for a zero-width
3567 field at the start of a run (but by further zero-width
3568 fields it is again). */
3569 if (bit_size
== 0 && prevbt
!= bt
)
3572 prev_bit_size
= bit_size
;
3575 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3576 | (bit_pos
<< VT_STRUCT_SHIFT
);
3577 bit_pos
+= bit_size
;
3579 if (align
> maxalign
)
3583 printf("set field %s offset %-2d size %-2d align %-2d",
3584 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3585 if (f
->type
.t
& VT_BITFIELD
) {
3586 printf(" pos %-2d bits %-2d",
3594 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3596 /* An anonymous struct/union. Adjust member offsets
3597 to reflect the real offset of our containing struct.
3598 Also set the offset of this anon member inside
3599 the outer struct to be zero. Via this it
3600 works when accessing the field offset directly
3601 (from base object), as well as when recursing
3602 members in initializer handling. */
3603 int v2
= f
->type
.ref
->v
;
3604 if (!(v2
& SYM_FIELD
) &&
3605 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3607 /* This happens only with MS extensions. The
3608 anon member has a named struct type, so it
3609 potentially is shared with other references.
3610 We need to unshare members so we can modify
3613 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3614 &f
->type
.ref
->type
, 0,
3616 pps
= &f
->type
.ref
->next
;
3617 while ((ass
= ass
->next
) != NULL
) {
3618 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3619 pps
= &((*pps
)->next
);
3623 struct_add_offset(f
->type
.ref
, offset
);
3633 c
+= (bit_pos
+ 7) >> 3;
3635 /* store size and alignment */
3636 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3640 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3641 /* can happen if individual align for some member was given. In
3642 this case MSVC ignores maxalign when aligning the size */
3647 c
= (c
+ a
- 1) & -a
;
3651 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3654 /* check whether we can access bitfields by their type */
3655 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3659 if (0 == (f
->type
.t
& VT_BITFIELD
))
3663 bit_size
= BIT_SIZE(f
->type
.t
);
3666 bit_pos
= BIT_POS(f
->type
.t
);
3667 size
= type_size(&f
->type
, &align
);
3668 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3671 /* try to access the field using a different type */
3672 c0
= -1, s
= align
= 1;
3674 px
= f
->c
* 8 + bit_pos
;
3675 cx
= (px
>> 3) & -align
;
3676 px
= px
- (cx
<< 3);
3679 s
= (px
+ bit_size
+ 7) >> 3;
3689 s
= type_size(&t
, &align
);
3693 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3694 /* update offset and bit position */
3697 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3698 | (bit_pos
<< VT_STRUCT_SHIFT
);
3702 printf("FIX field %s offset %-2d size %-2d align %-2d "
3703 "pos %-2d bits %-2d\n",
3704 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3705 cx
, s
, align
, px
, bit_size
);
3708 /* fall back to load/store single-byte wise */
3709 f
->auxtype
= VT_STRUCT
;
3711 printf("FIX field %s : load byte-wise\n",
3712 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3718 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3719 static void struct_decl(CType
*type
, int u
)
3721 int v
, c
, size
, align
, flexible
;
3722 int bit_size
, bsize
, bt
;
3724 AttributeDef ad
, ad1
;
3727 memset(&ad
, 0, sizeof ad
);
3729 parse_attribute(&ad
);
3733 /* struct already defined ? return it */
3735 expect("struct/union/enum name");
3737 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3740 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3742 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3747 /* Record the original enum/struct/union token. */
3748 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3750 /* we put an undefined size for struct/union */
3751 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3752 s
->r
= 0; /* default alignment is zero as gcc */
3754 type
->t
= s
->type
.t
;
3760 tcc_error("struct/union/enum already defined");
3761 /* cannot be empty */
3762 /* non empty enums are not allowed */
3765 long long ll
= 0, pl
= 0, nl
= 0;
3768 /* enum symbols have static storage */
3769 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3773 expect("identifier");
3775 if (ss
&& !local_stack
)
3776 tcc_error("redefinition of enumerator '%s'",
3777 get_tok_str(v
, NULL
));
3781 ll
= expr_const64();
3783 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3785 *ps
= ss
, ps
= &ss
->next
;
3794 /* NOTE: we accept a trailing comma */
3799 /* set integral type of the enum */
3802 if (pl
!= (unsigned)pl
)
3803 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3805 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
3806 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3807 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
3809 /* set type for enum members */
3810 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
3812 if (ll
== (int)ll
) /* default is int if it fits */
3814 if (t
.t
& VT_UNSIGNED
) {
3815 ss
->type
.t
|= VT_UNSIGNED
;
3816 if (ll
== (unsigned)ll
)
3819 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
3820 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3825 while (tok
!= '}') {
3826 if (!parse_btype(&btype
, &ad1
)) {
3832 tcc_error("flexible array member '%s' not at the end of struct",
3833 get_tok_str(v
, NULL
));
3839 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
3841 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3842 expect("identifier");
3844 int v
= btype
.ref
->v
;
3845 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3846 if (tcc_state
->ms_extensions
== 0)
3847 expect("identifier");
3851 if (type_size(&type1
, &align
) < 0) {
3852 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3855 tcc_error("field '%s' has incomplete type",
3856 get_tok_str(v
, NULL
));
3858 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3859 (type1
.t
& VT_STORAGE
))
3860 tcc_error("invalid type for '%s'",
3861 get_tok_str(v
, NULL
));
3865 bit_size
= expr_const();
3866 /* XXX: handle v = 0 case for messages */
3868 tcc_error("negative width in bit-field '%s'",
3869 get_tok_str(v
, NULL
));
3870 if (v
&& bit_size
== 0)
3871 tcc_error("zero width for bit-field '%s'",
3872 get_tok_str(v
, NULL
));
3873 parse_attribute(&ad1
);
3875 size
= type_size(&type1
, &align
);
3876 if (bit_size
>= 0) {
3877 bt
= type1
.t
& VT_BTYPE
;
3883 tcc_error("bitfields must have scalar type");
3885 if (bit_size
> bsize
) {
3886 tcc_error("width of '%s' exceeds its type",
3887 get_tok_str(v
, NULL
));
3888 } else if (bit_size
== bsize
3889 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
3890 /* no need for bit fields */
3892 } else if (bit_size
== 64) {
3893 tcc_error("field width 64 not implemented");
3895 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
3897 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
3900 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3901 /* Remember we've seen a real field to check
3902 for placement of flexible array member. */
3905 /* If member is a struct or bit-field, enforce
3906 placing into the struct (as anonymous). */
3908 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
3913 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
3918 if (tok
== ';' || tok
== TOK_EOF
)
3925 parse_attribute(&ad
);
3926 struct_layout(type
, &ad
);
3931 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
3933 if (s
->a
.aligned
&& 0 == ad
->a
.aligned
)
3934 ad
->a
.aligned
= s
->a
.aligned
;
3935 if (s
->f
.func_call
&& 0 == ad
->f
.func_call
)
3936 ad
->f
.func_call
= s
->f
.func_call
;
3937 if (s
->f
.func_type
&& 0 == ad
->f
.func_type
)
3938 ad
->f
.func_type
= s
->f
.func_type
;
3943 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3944 are added to the element type, copied because it could be a typedef. */
3945 static void parse_btype_qualify(CType
*type
, int qualifiers
)
3947 while (type
->t
& VT_ARRAY
) {
3948 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
3949 type
= &type
->ref
->type
;
3951 type
->t
|= qualifiers
;
3954 /* return 0 if no type declaration. otherwise, return the basic type
3957 static int parse_btype(CType
*type
, AttributeDef
*ad
)
3959 int t
, u
, bt
, st
, type_found
, typespec_found
, g
;
3963 memset(ad
, 0, sizeof(AttributeDef
));
3973 /* currently, we really ignore extension */
3983 if (u
== VT_SHORT
|| u
== VT_LONG
) {
3984 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
3985 tmbt
: tcc_error("too many basic types");
3988 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
3993 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4006 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4007 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4008 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4009 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4016 #ifdef TCC_TARGET_ARM64
4018 /* GCC's __uint128_t appears in some Linux header files. Make it a
4019 synonym for long double to get the size and alignment right. */
4030 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4031 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4039 struct_decl(&type1
, VT_ENUM
);
4042 type
->ref
= type1
.ref
;
4045 struct_decl(&type1
, VT_STRUCT
);
4048 struct_decl(&type1
, VT_UNION
);
4051 /* type modifiers */
4056 parse_btype_qualify(type
, VT_CONSTANT
);
4064 parse_btype_qualify(type
, VT_VOLATILE
);
4071 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4072 tcc_error("signed and unsigned modifier");
4085 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4086 tcc_error("signed and unsigned modifier");
4087 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4103 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4104 tcc_error("multiple storage classes");
4115 /* GNUC attribute */
4116 case TOK_ATTRIBUTE1
:
4117 case TOK_ATTRIBUTE2
:
4118 parse_attribute(ad
);
4119 if (ad
->attr_mode
) {
4120 u
= ad
->attr_mode
-1;
4121 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4129 parse_expr_type(&type1
);
4130 /* remove all storage modifiers except typedef */
4131 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4133 sym_to_attr(ad
, type1
.ref
);
4139 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4141 t
&= ~(VT_BTYPE
|VT_LONG
);
4142 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4143 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4144 type
->ref
= s
->type
.ref
;
4146 parse_btype_qualify(type
, t
);
4148 /* get attributes from typedef */
4158 if (tcc_state
->char_is_unsigned
) {
4159 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4162 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4163 bt
= t
& (VT_BTYPE
|VT_LONG
);
4165 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4166 #ifdef TCC_TARGET_PE
4167 if (bt
== VT_LDOUBLE
)
4168 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4174 /* convert a function parameter type (array to pointer and function to
4175 function pointer) */
4176 static inline void convert_parameter_type(CType
*pt
)
4178 /* remove const and volatile qualifiers (XXX: const could be used
4179 to indicate a const function parameter */
4180 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4181 /* array must be transformed to pointer according to ANSI C */
4183 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4188 ST_FUNC
void parse_asm_str(CString
*astr
)
4191 parse_mult_str(astr
, "string constant");
4194 /* Parse an asm label and return the token */
4195 static int asm_label_instr(void)
4201 parse_asm_str(&astr
);
4204 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4206 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4211 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4213 int n
, l
, t1
, arg_size
, align
;
4214 Sym
**plast
, *s
, *first
;
4219 /* function type, or recursive declarator (return if so) */
4221 if (td
&& !(td
& TYPE_ABSTRACT
))
4225 else if (parse_btype(&pt
, &ad1
))
4236 /* read param name and compute offset */
4237 if (l
!= FUNC_OLD
) {
4238 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4240 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4241 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4242 tcc_error("parameter declared as void");
4243 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4247 expect("identifier");
4248 pt
.t
= VT_VOID
; /* invalid type */
4251 convert_parameter_type(&pt
);
4252 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4258 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4263 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4264 tcc_error("invalid type");
4267 /* if no parameters, then old type prototype */
4270 /* NOTE: const is ignored in returned type as it has a special
4271 meaning in gcc / C++ */
4272 type
->t
&= ~VT_CONSTANT
;
4273 /* some ancient pre-K&R C allows a function to return an array
4274 and the array brackets to be put after the arguments, such
4275 that "int c()[]" means something like "int[] c()" */
4278 skip(']'); /* only handle simple "[]" */
4281 /* we push a anonymous symbol which will contain the function prototype */
4282 ad
->f
.func_args
= arg_size
;
4283 ad
->f
.func_type
= l
;
4284 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4290 } else if (tok
== '[') {
4291 int saved_nocode_wanted
= nocode_wanted
;
4292 /* array definition */
4294 if (tok
== TOK_RESTRICT1
)
4299 if (!local_stack
|| (storage
& VT_STATIC
))
4300 vpushi(expr_const());
4302 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4303 length must always be evaluated, even under nocode_wanted,
4304 so that its size slot is initialized (e.g. under sizeof
4309 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4312 tcc_error("invalid array size");
4314 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4315 tcc_error("size of variable length array should be an integer");
4320 /* parse next post type */
4321 post_type(type
, ad
, storage
, 0);
4322 if (type
->t
== VT_FUNC
)
4323 tcc_error("declaration of an array of functions");
4324 t1
|= type
->t
& VT_VLA
;
4327 loc
-= type_size(&int_type
, &align
);
4331 vla_runtime_type_size(type
, &align
);
4333 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4339 nocode_wanted
= saved_nocode_wanted
;
4341 /* we push an anonymous symbol which will contain the array
4343 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4344 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4350 /* Parse a type declarator (except basic type), and return the type
4351 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4352 expected. 'type' should contain the basic type. 'ad' is the
4353 attribute definition of the basic type. It can be modified by
4354 type_decl(). If this (possibly abstract) declarator is a pointer chain
4355 it returns the innermost pointed to type (equals *type, but is a different
4356 pointer), otherwise returns type itself, that's used for recursive calls. */
4357 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4360 int qualifiers
, storage
;
4362 /* recursive type, remove storage bits first, apply them later again */
4363 storage
= type
->t
& VT_STORAGE
;
4364 type
->t
&= ~VT_STORAGE
;
4367 while (tok
== '*') {
4375 qualifiers
|= VT_CONSTANT
;
4380 qualifiers
|= VT_VOLATILE
;
4386 /* XXX: clarify attribute handling */
4387 case TOK_ATTRIBUTE1
:
4388 case TOK_ATTRIBUTE2
:
4389 parse_attribute(ad
);
4393 type
->t
|= qualifiers
;
4395 /* innermost pointed to type is the one for the first derivation */
4396 ret
= pointed_type(type
);
4400 /* This is possibly a parameter type list for abstract declarators
4401 ('int ()'), use post_type for testing this. */
4402 if (!post_type(type
, ad
, 0, td
)) {
4403 /* It's not, so it's a nested declarator, and the post operations
4404 apply to the innermost pointed to type (if any). */
4405 /* XXX: this is not correct to modify 'ad' at this point, but
4406 the syntax is not clear */
4407 parse_attribute(ad
);
4408 post
= type_decl(type
, ad
, v
, td
);
4411 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4412 /* type identifier */
4416 if (!(td
& TYPE_ABSTRACT
))
4417 expect("identifier");
4420 post_type(post
, ad
, storage
, 0);
4421 parse_attribute(ad
);
4426 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4427 ST_FUNC
int lvalue_type(int t
)
4432 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4434 else if (bt
== VT_SHORT
)
4438 if (t
& VT_UNSIGNED
)
4439 r
|= VT_LVAL_UNSIGNED
;
4443 /* indirection with full error checking and bound check */
4444 ST_FUNC
void indir(void)
4446 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4447 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4451 if (vtop
->r
& VT_LVAL
)
4453 vtop
->type
= *pointed_type(&vtop
->type
);
4454 /* Arrays and functions are never lvalues */
4455 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4456 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4457 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4458 /* if bound checking, the referenced pointer must be checked */
4459 #ifdef CONFIG_TCC_BCHECK
4460 if (tcc_state
->do_bounds_check
)
4461 vtop
->r
|= VT_MUSTBOUND
;
4466 /* pass a parameter to a function and do type checking and casting */
4467 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4472 func_type
= func
->f
.func_type
;
4473 if (func_type
== FUNC_OLD
||
4474 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4475 /* default casting : only need to convert float to double */
4476 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4477 gen_cast_s(VT_DOUBLE
);
4478 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4479 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4480 type
.ref
= vtop
->type
.ref
;
4483 } else if (arg
== NULL
) {
4484 tcc_error("too many arguments to function");
4487 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4488 gen_assign_cast(&type
);
4492 /* parse an expression and return its type without any side effect. */
4493 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4502 /* parse an expression of the form '(type)' or '(expr)' and return its
4504 static void parse_expr_type(CType
*type
)
4510 if (parse_btype(type
, &ad
)) {
4511 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4513 expr_type(type
, gexpr
);
4518 static void parse_type(CType
*type
)
4523 if (!parse_btype(type
, &ad
)) {
4526 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4529 static void parse_builtin_params(int nc
, const char *args
)
4536 while ((c
= *args
++)) {
4540 case 'e': expr_eq(); continue;
4541 case 't': parse_type(&t
); vpush(&t
); continue;
4542 default: tcc_error("internal error"); break;
4550 ST_FUNC
void unary(void)
4552 int n
, t
, align
, size
, r
, sizeof_caller
;
4557 sizeof_caller
= in_sizeof
;
4560 /* XXX: GCC 2.95.3 does not generate a table although it should be
4568 #ifdef TCC_TARGET_PE
4569 t
= VT_SHORT
|VT_UNSIGNED
;
4577 vsetc(&type
, VT_CONST
, &tokc
);
4581 t
= VT_INT
| VT_UNSIGNED
;
4587 t
= VT_LLONG
| VT_UNSIGNED
;
4599 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4602 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4604 case TOK___FUNCTION__
:
4606 goto tok_identifier
;
4612 /* special function name identifier */
4613 len
= strlen(funcname
) + 1;
4614 /* generate char[len] type */
4619 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4620 if (!NODATA_WANTED
) {
4621 ptr
= section_ptr_add(data_section
, len
);
4622 memcpy(ptr
, funcname
, len
);
4628 #ifdef TCC_TARGET_PE
4629 t
= VT_SHORT
| VT_UNSIGNED
;
4635 /* string parsing */
4637 if (tcc_state
->char_is_unsigned
)
4638 t
= VT_BYTE
| VT_UNSIGNED
;
4640 if (tcc_state
->warn_write_strings
)
4645 memset(&ad
, 0, sizeof(AttributeDef
));
4646 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4651 if (parse_btype(&type
, &ad
)) {
4652 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4654 /* check ISOC99 compound literal */
4656 /* data is allocated locally by default */
4661 /* all except arrays are lvalues */
4662 if (!(type
.t
& VT_ARRAY
))
4663 r
|= lvalue_type(type
.t
);
4664 memset(&ad
, 0, sizeof(AttributeDef
));
4665 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4667 if (sizeof_caller
) {
4674 } else if (tok
== '{') {
4675 int saved_nocode_wanted
= nocode_wanted
;
4677 tcc_error("expected constant");
4678 /* save all registers */
4680 /* statement expression : we do not accept break/continue
4681 inside as GCC does. We do retain the nocode_wanted state,
4682 as statement expressions can't ever be entered from the
4683 outside, so any reactivation of code emission (from labels
4684 or loop heads) can be disabled again after the end of it. */
4685 block(NULL
, NULL
, 1);
4686 nocode_wanted
= saved_nocode_wanted
;
4701 /* functions names must be treated as function pointers,
4702 except for unary '&' and sizeof. Since we consider that
4703 functions are not lvalues, we only have to handle it
4704 there and in function calls. */
4705 /* arrays can also be used although they are not lvalues */
4706 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4707 !(vtop
->type
.t
& VT_ARRAY
))
4709 mk_pointer(&vtop
->type
);
4715 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4716 gen_cast_s(VT_BOOL
);
4717 vtop
->c
.i
= !vtop
->c
.i
;
4718 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4722 vseti(VT_JMP
, gvtst(1, 0));
4734 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4735 tcc_error("pointer not accepted for unary plus");
4736 /* In order to force cast, we add zero, except for floating point
4737 where we really need an noop (otherwise -0.0 will be transformed
4739 if (!is_float(vtop
->type
.t
)) {
4750 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
4751 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
4752 size
= type_size(&type
, &align
);
4753 if (s
&& s
->a
.aligned
)
4754 align
= 1 << (s
->a
.aligned
- 1);
4755 if (t
== TOK_SIZEOF
) {
4756 if (!(type
.t
& VT_VLA
)) {
4758 tcc_error("sizeof applied to an incomplete type");
4761 vla_runtime_type_size(&type
, &align
);
4766 vtop
->type
.t
|= VT_UNSIGNED
;
4769 case TOK_builtin_expect
:
4770 /* __builtin_expect is a no-op for now */
4771 parse_builtin_params(0, "ee");
4774 case TOK_builtin_types_compatible_p
:
4775 parse_builtin_params(0, "tt");
4776 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4777 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4778 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4782 case TOK_builtin_choose_expr
:
4809 case TOK_builtin_constant_p
:
4810 parse_builtin_params(1, "e");
4811 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4815 case TOK_builtin_frame_address
:
4816 case TOK_builtin_return_address
:
4822 if (tok
!= TOK_CINT
) {
4823 tcc_error("%s only takes positive integers",
4824 tok1
== TOK_builtin_return_address
?
4825 "__builtin_return_address" :
4826 "__builtin_frame_address");
4828 level
= (uint32_t)tokc
.i
;
4833 vset(&type
, VT_LOCAL
, 0); /* local frame */
4835 mk_pointer(&vtop
->type
);
4836 indir(); /* -> parent frame */
4838 if (tok1
== TOK_builtin_return_address
) {
4839 // assume return address is just above frame pointer on stack
4842 mk_pointer(&vtop
->type
);
4847 #ifdef TCC_TARGET_X86_64
4848 #ifdef TCC_TARGET_PE
4849 case TOK_builtin_va_start
:
4850 parse_builtin_params(0, "ee");
4851 r
= vtop
->r
& VT_VALMASK
;
4855 tcc_error("__builtin_va_start expects a local variable");
4857 vtop
->type
= char_pointer_type
;
4862 case TOK_builtin_va_arg_types
:
4863 parse_builtin_params(0, "t");
4864 vpushi(classify_x86_64_va_arg(&vtop
->type
));
4871 #ifdef TCC_TARGET_ARM64
4872 case TOK___va_start
: {
4873 parse_builtin_params(0, "ee");
4877 vtop
->type
.t
= VT_VOID
;
4880 case TOK___va_arg
: {
4881 parse_builtin_params(0, "et");
4889 case TOK___arm64_clear_cache
: {
4890 parse_builtin_params(0, "ee");
4893 vtop
->type
.t
= VT_VOID
;
4897 /* pre operations */
4908 t
= vtop
->type
.t
& VT_BTYPE
;
4910 /* In IEEE negate(x) isn't subtract(0,x), but rather
4914 vtop
->c
.f
= -1.0 * 0.0;
4915 else if (t
== VT_DOUBLE
)
4916 vtop
->c
.d
= -1.0 * 0.0;
4918 vtop
->c
.ld
= -1.0 * 0.0;
4926 goto tok_identifier
;
4928 /* allow to take the address of a label */
4929 if (tok
< TOK_UIDENT
)
4930 expect("label identifier");
4931 s
= label_find(tok
);
4933 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
4935 if (s
->r
== LABEL_DECLARED
)
4936 s
->r
= LABEL_FORWARD
;
4939 s
->type
.t
= VT_VOID
;
4940 mk_pointer(&s
->type
);
4941 s
->type
.t
|= VT_STATIC
;
4943 vpushsym(&s
->type
, s
);
4949 CType controlling_type
;
4950 int has_default
= 0;
4953 TokenString
*str
= NULL
;
4957 expr_type(&controlling_type
, expr_eq
);
4958 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
4962 if (tok
== TOK_DEFAULT
) {
4964 tcc_error("too many 'default'");
4970 AttributeDef ad_tmp
;
4973 parse_btype(&cur_type
, &ad_tmp
);
4974 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
4975 if (compare_types(&controlling_type
, &cur_type
, 0)) {
4977 tcc_error("type match twice");
4987 skip_or_save_block(&str
);
4989 skip_or_save_block(NULL
);
4996 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
4997 tcc_error("type '%s' does not match any association", buf
);
4999 begin_macro(str
, 1);
5008 // special qnan , snan and infinity values
5010 vpush64(VT_DOUBLE
, 0x7ff8000000000000ULL
);
5014 vpush64(VT_DOUBLE
, 0x7ff0000000000001ULL
);
5018 vpush64(VT_DOUBLE
, 0x7ff0000000000000ULL
);
5027 expect("identifier");
5030 const char *name
= get_tok_str(t
, NULL
);
5032 tcc_error("'%s' undeclared", name
);
5033 /* for simple function calls, we tolerate undeclared
5034 external reference to int() function */
5035 if (tcc_state
->warn_implicit_function_declaration
5036 #ifdef TCC_TARGET_PE
5037 /* people must be warned about using undeclared WINAPI functions
5038 (which usually start with uppercase letter) */
5039 || (name
[0] >= 'A' && name
[0] <= 'Z')
5042 tcc_warning("implicit declaration of function '%s'", name
);
5043 s
= external_global_sym(t
, &func_old_type
, 0);
5047 /* A symbol that has a register is a local register variable,
5048 which starts out as VT_LOCAL value. */
5049 if ((r
& VT_VALMASK
) < VT_CONST
)
5050 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5052 vset(&s
->type
, r
, s
->c
);
5053 /* Point to s as backpointer (even without r&VT_SYM).
5054 Will be used by at least the x86 inline asm parser for
5060 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5061 vtop
->c
.i
= s
->enum_val
;
5066 /* post operations */
5068 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5071 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5074 if (tok
== TOK_ARROW
)
5076 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5079 /* expect pointer on structure */
5080 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5081 expect("struct or union");
5082 if (tok
== TOK_CDOUBLE
)
5083 expect("field name");
5085 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5086 expect("field name");
5087 s
= find_field(&vtop
->type
, tok
);
5089 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5090 /* add field offset to pointer */
5091 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5094 /* change type to field type, and set to lvalue */
5095 vtop
->type
= s
->type
;
5096 vtop
->type
.t
|= qualifiers
;
5097 /* an array is never an lvalue */
5098 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5099 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5100 #ifdef CONFIG_TCC_BCHECK
5101 /* if bound checking, the referenced pointer must be checked */
5102 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5103 vtop
->r
|= VT_MUSTBOUND
;
5107 } else if (tok
== '[') {
5113 } else if (tok
== '(') {
5116 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5119 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5120 /* pointer test (no array accepted) */
5121 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5122 vtop
->type
= *pointed_type(&vtop
->type
);
5123 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5127 expect("function pointer");
5130 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5132 /* get return type */
5135 sa
= s
->next
; /* first parameter */
5136 nb_args
= regsize
= 0;
5138 /* compute first implicit argument if a structure is returned */
5139 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5140 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5141 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5142 &ret_align
, ®size
);
5144 /* get some space for the returned structure */
5145 size
= type_size(&s
->type
, &align
);
5146 #ifdef TCC_TARGET_ARM64
5147 /* On arm64, a small struct is return in registers.
5148 It is much easier to write it to memory if we know
5149 that we are allowed to write some extra bytes, so
5150 round the allocated space up to a power of 2: */
5152 while (size
& (size
- 1))
5153 size
= (size
| (size
- 1)) + 1;
5155 loc
= (loc
- size
) & -align
;
5157 ret
.r
= VT_LOCAL
| VT_LVAL
;
5158 /* pass it as 'int' to avoid structure arg passing
5160 vseti(VT_LOCAL
, loc
);
5170 /* return in register */
5171 if (is_float(ret
.type
.t
)) {
5172 ret
.r
= reg_fret(ret
.type
.t
);
5173 #ifdef TCC_TARGET_X86_64
5174 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5178 #ifndef TCC_TARGET_ARM64
5179 #ifdef TCC_TARGET_X86_64
5180 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5182 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5193 gfunc_param_typed(s
, sa
);
5203 tcc_error("too few arguments to function");
5205 gfunc_call(nb_args
);
5208 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5209 vsetc(&ret
.type
, r
, &ret
.c
);
5210 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5213 /* handle packed struct return */
5214 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5217 size
= type_size(&s
->type
, &align
);
5218 /* We're writing whole regs often, make sure there's enough
5219 space. Assume register size is power of 2. */
5220 if (regsize
> align
)
5222 loc
= (loc
- size
) & -align
;
5226 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5230 if (--ret_nregs
== 0)
5234 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5242 ST_FUNC
void expr_prod(void)
5247 while (tok
== '*' || tok
== '/' || tok
== '%') {
5255 ST_FUNC
void expr_sum(void)
5260 while (tok
== '+' || tok
== '-') {
5268 static void expr_shift(void)
5273 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5281 static void expr_cmp(void)
5286 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5287 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5295 static void expr_cmpeq(void)
5300 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5308 static void expr_and(void)
5311 while (tok
== '&') {
5318 static void expr_xor(void)
5321 while (tok
== '^') {
5328 static void expr_or(void)
5331 while (tok
== '|') {
5338 static void expr_land(void)
5341 if (tok
== TOK_LAND
) {
5344 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5345 gen_cast_s(VT_BOOL
);
5350 while (tok
== TOK_LAND
) {
5366 if (tok
!= TOK_LAND
) {
5379 static void expr_lor(void)
5382 if (tok
== TOK_LOR
) {
5385 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5386 gen_cast_s(VT_BOOL
);
5391 while (tok
== TOK_LOR
) {
5407 if (tok
!= TOK_LOR
) {
5420 /* Assuming vtop is a value used in a conditional context
5421 (i.e. compared with zero) return 0 if it's false, 1 if
5422 true and -1 if it can't be statically determined. */
5423 static int condition_3way(void)
5426 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5427 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5429 gen_cast_s(VT_BOOL
);
5436 static void expr_cond(void)
5438 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5440 CType type
, type1
, type2
;
5445 c
= condition_3way();
5446 g
= (tok
== ':' && gnu_ext
);
5448 /* needed to avoid having different registers saved in
5450 if (is_float(vtop
->type
.t
)) {
5452 #ifdef TCC_TARGET_X86_64
5453 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5478 sv
= *vtop
; /* save value to handle it later */
5479 vtop
--; /* no vpop so that FP stack is not flushed */
5497 bt1
= t1
& VT_BTYPE
;
5499 bt2
= t2
& VT_BTYPE
;
5502 /* cast operands to correct type according to ISOC rules */
5503 if (is_float(bt1
) || is_float(bt2
)) {
5504 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5505 type
.t
= VT_LDOUBLE
;
5507 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5512 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5513 /* cast to biggest op */
5514 type
.t
= VT_LLONG
| VT_LONG
;
5515 if (bt1
== VT_LLONG
)
5517 if (bt2
== VT_LLONG
)
5519 /* convert to unsigned if it does not fit in a long long */
5520 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5521 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5522 type
.t
|= VT_UNSIGNED
;
5523 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5524 /* If one is a null ptr constant the result type
5526 if (is_null_pointer (vtop
))
5528 else if (is_null_pointer (&sv
))
5530 /* XXX: test pointer compatibility, C99 has more elaborate
5534 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
5535 /* XXX: test function pointer compatibility */
5536 type
= bt1
== VT_FUNC
? type1
: type2
;
5537 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5538 /* XXX: test structure compatibility */
5539 type
= bt1
== VT_STRUCT
? type1
: type2
;
5540 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5541 /* NOTE: as an extension, we accept void on only one side */
5544 /* integer operations */
5545 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5546 /* convert to unsigned if it does not fit in an integer */
5547 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5548 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5549 type
.t
|= VT_UNSIGNED
;
5551 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5552 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5553 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5556 /* now we convert second operand */
5560 mk_pointer(&vtop
->type
);
5562 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5567 if (is_float(type
.t
)) {
5569 #ifdef TCC_TARGET_X86_64
5570 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5574 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5575 /* for long longs, we use fixed registers to avoid having
5576 to handle a complicated move */
5587 /* this is horrible, but we must also convert first
5593 mk_pointer(&vtop
->type
);
5595 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5601 move_reg(r2
, r1
, type
.t
);
5611 static void expr_eq(void)
5617 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5618 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5619 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5634 ST_FUNC
void gexpr(void)
5645 /* parse a constant expression and return value in vtop. */
5646 static void expr_const1(void)
5655 /* parse an integer constant and return its value. */
5656 static inline int64_t expr_const64(void)
5660 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5661 expect("constant expression");
5667 /* parse an integer constant and return its value.
5668 Complain if it doesn't fit 32bit (signed or unsigned). */
5669 ST_FUNC
int expr_const(void)
5672 int64_t wc
= expr_const64();
5674 if (c
!= wc
&& (unsigned)c
!= wc
)
5675 tcc_error("constant exceeds 32 bit");
5679 /* return the label token if current token is a label, otherwise
5681 static int is_label(void)
5685 /* fast test first */
5686 if (tok
< TOK_UIDENT
)
5688 /* no need to save tokc because tok is an identifier */
5694 unget_tok(last_tok
);
5699 #ifndef TCC_TARGET_ARM64
5700 static void gfunc_return(CType
*func_type
)
5702 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5703 CType type
, ret_type
;
5704 int ret_align
, ret_nregs
, regsize
;
5705 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5706 &ret_align
, ®size
);
5707 if (0 == ret_nregs
) {
5708 /* if returning structure, must copy it to implicit
5709 first pointer arg location */
5712 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5715 /* copy structure value to pointer */
5718 /* returning structure packed into registers */
5719 int r
, size
, addr
, align
;
5720 size
= type_size(func_type
,&align
);
5721 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5722 (vtop
->c
.i
& (ret_align
-1)))
5723 && (align
& (ret_align
-1))) {
5724 loc
= (loc
- size
) & -ret_align
;
5727 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5731 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5733 vtop
->type
= ret_type
;
5734 if (is_float(ret_type
.t
))
5735 r
= rc_fret(ret_type
.t
);
5746 if (--ret_nregs
== 0)
5748 /* We assume that when a structure is returned in multiple
5749 registers, their classes are consecutive values of the
5752 vtop
->c
.i
+= regsize
;
5756 } else if (is_float(func_type
->t
)) {
5757 gv(rc_fret(func_type
->t
));
5761 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5765 static int case_cmp(const void *pa
, const void *pb
)
5767 int64_t a
= (*(struct case_t
**) pa
)->v1
;
5768 int64_t b
= (*(struct case_t
**) pb
)->v1
;
5769 return a
< b
? -1 : a
> b
;
5772 static void gcase(struct case_t
**base
, int len
, int *bsym
)
5776 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
5794 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
5796 gcase(base
, len
/2, bsym
);
5797 if (cur_switch
->def_sym
)
5798 gjmp_addr(cur_switch
->def_sym
);
5800 *bsym
= gjmp(*bsym
);
5804 base
+= e
; len
-= e
;
5814 if (p
->v1
== p
->v2
) {
5816 gtst_addr(0, p
->sym
);
5826 gtst_addr(0, p
->sym
);
5832 static void block(int *bsym
, int *csym
, int is_expr
)
5834 int a
, b
, c
, d
, cond
;
5837 /* generate line number info */
5838 if (tcc_state
->do_debug
)
5839 tcc_debug_line(tcc_state
);
5842 /* default return value is (void) */
5844 vtop
->type
.t
= VT_VOID
;
5847 if (tok
== TOK_IF
) {
5849 int saved_nocode_wanted
= nocode_wanted
;
5854 cond
= condition_3way();
5860 nocode_wanted
|= 0x20000000;
5861 block(bsym
, csym
, 0);
5863 nocode_wanted
= saved_nocode_wanted
;
5865 if (c
== TOK_ELSE
) {
5870 nocode_wanted
|= 0x20000000;
5871 block(bsym
, csym
, 0);
5872 gsym(d
); /* patch else jmp */
5874 nocode_wanted
= saved_nocode_wanted
;
5877 } else if (tok
== TOK_WHILE
) {
5878 int saved_nocode_wanted
;
5879 nocode_wanted
&= ~0x20000000;
5889 saved_nocode_wanted
= nocode_wanted
;
5891 nocode_wanted
= saved_nocode_wanted
;
5896 } else if (tok
== '{') {
5898 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
5901 /* record local declaration stack position */
5903 llabel
= local_label_stack
;
5906 /* handle local labels declarations */
5907 if (tok
== TOK_LABEL
) {
5910 if (tok
< TOK_UIDENT
)
5911 expect("label identifier");
5912 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
5922 while (tok
!= '}') {
5923 if ((a
= is_label()))
5930 block(bsym
, csym
, is_expr
);
5933 /* pop locally defined labels */
5934 label_pop(&local_label_stack
, llabel
, is_expr
);
5935 /* pop locally defined symbols */
5937 /* In the is_expr case (a statement expression is finished here),
5938 vtop might refer to symbols on the local_stack. Either via the
5939 type or via vtop->sym. We can't pop those nor any that in turn
5940 might be referred to. To make it easier we don't roll back
5941 any symbols in that case; some upper level call to block() will
5942 do that. We do have to remove such symbols from the lookup
5943 tables, though. sym_pop will do that. */
5944 sym_pop(&local_stack
, s
, is_expr
);
5946 /* Pop VLA frames and restore stack pointer if required */
5947 if (vlas_in_scope
> saved_vlas_in_scope
) {
5948 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
5951 vlas_in_scope
= saved_vlas_in_scope
;
5954 } else if (tok
== TOK_RETURN
) {
5958 gen_assign_cast(&func_vt
);
5959 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
5962 gfunc_return(&func_vt
);
5965 /* jump unless last stmt in top-level block */
5966 if (tok
!= '}' || local_scope
!= 1)
5968 nocode_wanted
|= 0x20000000;
5969 } else if (tok
== TOK_BREAK
) {
5972 tcc_error("cannot break");
5973 *bsym
= gjmp(*bsym
);
5976 nocode_wanted
|= 0x20000000;
5977 } else if (tok
== TOK_CONTINUE
) {
5980 tcc_error("cannot continue");
5981 vla_sp_restore_root();
5982 *csym
= gjmp(*csym
);
5985 } else if (tok
== TOK_FOR
) {
5987 int saved_nocode_wanted
;
5988 nocode_wanted
&= ~0x20000000;
5994 /* c99 for-loop init decl? */
5995 if (!decl0(VT_LOCAL
, 1, NULL
)) {
5996 /* no, regular for-loop init expr */
6022 saved_nocode_wanted
= nocode_wanted
;
6024 nocode_wanted
= saved_nocode_wanted
;
6029 sym_pop(&local_stack
, s
, 0);
6032 if (tok
== TOK_DO
) {
6033 int saved_nocode_wanted
;
6034 nocode_wanted
&= ~0x20000000;
6040 saved_nocode_wanted
= nocode_wanted
;
6048 nocode_wanted
= saved_nocode_wanted
;
6053 if (tok
== TOK_SWITCH
) {
6054 struct switch_t
*saved
, sw
;
6055 int saved_nocode_wanted
= nocode_wanted
;
6061 switchval
= *vtop
--;
6063 b
= gjmp(0); /* jump to first case */
6064 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6068 nocode_wanted
= saved_nocode_wanted
;
6069 a
= gjmp(a
); /* add implicit break */
6072 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6073 for (b
= 1; b
< sw
.n
; b
++)
6074 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6075 tcc_error("duplicate case value");
6076 /* Our switch table sorting is signed, so the compared
6077 value needs to be as well when it's 64bit. */
6078 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6079 switchval
.type
.t
&= ~VT_UNSIGNED
;
6081 gcase(sw
.p
, sw
.n
, &a
);
6084 gjmp_addr(sw
.def_sym
);
6085 dynarray_reset(&sw
.p
, &sw
.n
);
6090 if (tok
== TOK_CASE
) {
6091 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6094 nocode_wanted
&= ~0x20000000;
6096 cr
->v1
= cr
->v2
= expr_const64();
6097 if (gnu_ext
&& tok
== TOK_DOTS
) {
6099 cr
->v2
= expr_const64();
6100 if (cr
->v2
< cr
->v1
)
6101 tcc_warning("empty case range");
6104 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6107 goto block_after_label
;
6109 if (tok
== TOK_DEFAULT
) {
6114 if (cur_switch
->def_sym
)
6115 tcc_error("too many 'default'");
6116 cur_switch
->def_sym
= ind
;
6118 goto block_after_label
;
6120 if (tok
== TOK_GOTO
) {
6122 if (tok
== '*' && gnu_ext
) {
6126 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6129 } else if (tok
>= TOK_UIDENT
) {
6130 s
= label_find(tok
);
6131 /* put forward definition if needed */
6133 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6135 if (s
->r
== LABEL_DECLARED
)
6136 s
->r
= LABEL_FORWARD
;
6138 vla_sp_restore_root();
6139 if (s
->r
& LABEL_FORWARD
)
6140 s
->jnext
= gjmp(s
->jnext
);
6142 gjmp_addr(s
->jnext
);
6145 expect("label identifier");
6148 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
6157 if (s
->r
== LABEL_DEFINED
)
6158 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6160 s
->r
= LABEL_DEFINED
;
6162 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
6166 /* we accept this, but it is a mistake */
6168 nocode_wanted
&= ~0x20000000;
6170 tcc_warning("deprecated use of label at end of compound statement");
6174 block(bsym
, csym
, is_expr
);
6177 /* expression case */
6192 /* This skips over a stream of tokens containing balanced {} and ()
6193 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6194 with a '{'). If STR then allocates and stores the skipped tokens
6195 in *STR. This doesn't check if () and {} are nested correctly,
6196 i.e. "({)}" is accepted. */
6197 static void skip_or_save_block(TokenString
**str
)
6199 int braces
= tok
== '{';
6202 *str
= tok_str_alloc();
6204 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6206 if (tok
== TOK_EOF
) {
6207 if (str
|| level
> 0)
6208 tcc_error("unexpected end of file");
6213 tok_str_add_tok(*str
);
6216 if (t
== '{' || t
== '(') {
6218 } else if (t
== '}' || t
== ')') {
6220 if (level
== 0 && braces
&& t
== '}')
6225 tok_str_add(*str
, -1);
6226 tok_str_add(*str
, 0);
6230 #define EXPR_CONST 1
6233 static void parse_init_elem(int expr_type
)
6235 int saved_global_expr
;
6238 /* compound literals must be allocated globally in this case */
6239 saved_global_expr
= global_expr
;
6242 global_expr
= saved_global_expr
;
6243 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6244 (compound literals). */
6245 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6246 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6247 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6248 #ifdef TCC_TARGET_PE
6249 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6252 tcc_error("initializer element is not constant");
6260 /* put zeros for variable based init */
6261 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6264 /* nothing to do because globals are already set to zero */
6266 vpush_global_sym(&func_old_type
, TOK_memset
);
6268 #ifdef TCC_TARGET_ARM
6279 /* t is the array or struct type. c is the array or struct
6280 address. cur_field is the pointer to the current
6281 field, for arrays the 'c' member contains the current start
6282 index. 'size_only' is true if only size info is needed (only used
6283 in arrays). al contains the already initialized length of the
6284 current container (starting at c). This returns the new length of that. */
6285 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6286 Sym
**cur_field
, int size_only
, int al
)
6289 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6290 unsigned long corig
= c
;
6294 if (gnu_ext
&& (l
= is_label()) != 0)
6296 /* NOTE: we only support ranges for last designator */
6297 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6299 if (!(type
->t
& VT_ARRAY
))
6300 expect("array type");
6302 index
= index_last
= expr_const();
6303 if (tok
== TOK_DOTS
&& gnu_ext
) {
6305 index_last
= expr_const();
6309 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6311 tcc_error("invalid index");
6313 (*cur_field
)->c
= index_last
;
6314 type
= pointed_type(type
);
6315 elem_size
= type_size(type
, &align
);
6316 c
+= index
* elem_size
;
6317 nb_elems
= index_last
- index
+ 1;
6323 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6324 expect("struct/union type");
6325 f
= find_field(type
, l
);
6338 } else if (!gnu_ext
) {
6342 if (type
->t
& VT_ARRAY
) {
6343 index
= (*cur_field
)->c
;
6344 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6345 tcc_error("index too large");
6346 type
= pointed_type(type
);
6347 c
+= index
* type_size(type
, &align
);
6350 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6351 *cur_field
= f
= f
->next
;
6353 tcc_error("too many field init");
6358 /* must put zero in holes (note that doing it that way
6359 ensures that it even works with designators) */
6360 if (!size_only
&& c
- corig
> al
)
6361 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6362 decl_initializer(type
, sec
, c
, 0, size_only
);
6364 /* XXX: make it more general */
6365 if (!size_only
&& nb_elems
> 1) {
6366 unsigned long c_end
;
6371 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6372 for (i
= 1; i
< nb_elems
; i
++) {
6373 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6378 } else if (!NODATA_WANTED
) {
6379 c_end
= c
+ nb_elems
* elem_size
;
6380 if (c_end
> sec
->data_allocated
)
6381 section_realloc(sec
, c_end
);
6382 src
= sec
->data
+ c
;
6384 for(i
= 1; i
< nb_elems
; i
++) {
6386 memcpy(dst
, src
, elem_size
);
6390 c
+= nb_elems
* type_size(type
, &align
);
6396 /* store a value or an expression directly in global data or in local array */
6397 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6404 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6408 /* XXX: not portable */
6409 /* XXX: generate error if incorrect relocation */
6410 gen_assign_cast(&dtype
);
6411 bt
= type
->t
& VT_BTYPE
;
6413 if ((vtop
->r
& VT_SYM
)
6416 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6417 || (type
->t
& VT_BITFIELD
))
6418 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6420 tcc_error("initializer element is not computable at load time");
6422 if (NODATA_WANTED
) {
6427 size
= type_size(type
, &align
);
6428 section_reserve(sec
, c
+ size
);
6429 ptr
= sec
->data
+ c
;
6431 /* XXX: make code faster ? */
6432 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6433 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6434 /* XXX This rejects compound literals like
6435 '(void *){ptr}'. The problem is that '&sym' is
6436 represented the same way, which would be ruled out
6437 by the SYM_FIRST_ANOM check above, but also '"string"'
6438 in 'char *p = "string"' is represented the same
6439 with the type being VT_PTR and the symbol being an
6440 anonymous one. That is, there's no difference in vtop
6441 between '(void *){x}' and '&(void *){x}'. Ignore
6442 pointer typed entities here. Hopefully no real code
6443 will every use compound literals with scalar type. */
6444 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6445 /* These come from compound literals, memcpy stuff over. */
6449 esym
= elfsym(vtop
->sym
);
6450 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6451 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6453 /* We need to copy over all memory contents, and that
6454 includes relocations. Use the fact that relocs are
6455 created it order, so look from the end of relocs
6456 until we hit one before the copied region. */
6457 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6458 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6459 while (num_relocs
--) {
6461 if (rel
->r_offset
>= esym
->st_value
+ size
)
6463 if (rel
->r_offset
< esym
->st_value
)
6465 /* Note: if the same fields are initialized multiple
6466 times (possible with designators) then we possibly
6467 add multiple relocations for the same offset here.
6468 That would lead to wrong code, the last reloc needs
6469 to win. We clean this up later after the whole
6470 initializer is parsed. */
6471 put_elf_reloca(symtab_section
, sec
,
6472 c
+ rel
->r_offset
- esym
->st_value
,
6473 ELFW(R_TYPE
)(rel
->r_info
),
6474 ELFW(R_SYM
)(rel
->r_info
),
6484 if (type
->t
& VT_BITFIELD
) {
6485 int bit_pos
, bit_size
, bits
, n
;
6486 unsigned char *p
, v
, m
;
6487 bit_pos
= BIT_POS(vtop
->type
.t
);
6488 bit_size
= BIT_SIZE(vtop
->type
.t
);
6489 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6490 bit_pos
&= 7, bits
= 0;
6495 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6496 m
= ((1 << n
) - 1) << bit_pos
;
6497 *p
= (*p
& ~m
) | (v
& m
);
6498 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6502 /* XXX: when cross-compiling we assume that each type has the
6503 same representation on host and target, which is likely to
6504 be wrong in the case of long double */
6506 vtop
->c
.i
= vtop
->c
.i
!= 0;
6508 *(char *)ptr
|= vtop
->c
.i
;
6511 *(short *)ptr
|= vtop
->c
.i
;
6514 *(float*)ptr
= vtop
->c
.f
;
6517 *(double *)ptr
= vtop
->c
.d
;
6520 #if defined TCC_IS_NATIVE_387
6521 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6522 memcpy(ptr
, &vtop
->c
.ld
, 10);
6524 else if (sizeof (long double) == sizeof (double))
6525 __asm__("fldl %1\nfstpt %0\n" : "=m" (ptr
) : "m" (vtop
->c
.ld
));
6529 if (sizeof(long double) == LDOUBLE_SIZE
)
6530 *(long double*)ptr
= vtop
->c
.ld
;
6531 else if (sizeof(double) == LDOUBLE_SIZE
)
6532 *(double *)ptr
= (double)vtop
->c
.ld
;
6534 tcc_error("can't cross compile long double constants");
6538 *(long long *)ptr
|= vtop
->c
.i
;
6545 addr_t val
= vtop
->c
.i
;
6547 if (vtop
->r
& VT_SYM
)
6548 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6550 *(addr_t
*)ptr
|= val
;
6552 if (vtop
->r
& VT_SYM
)
6553 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6554 *(addr_t
*)ptr
|= val
;
6560 int val
= vtop
->c
.i
;
6562 if (vtop
->r
& VT_SYM
)
6563 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6567 if (vtop
->r
& VT_SYM
)
6568 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6577 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6584 /* 't' contains the type and storage info. 'c' is the offset of the
6585 object in section 'sec'. If 'sec' is NULL, it means stack based
6586 allocation. 'first' is true if array '{' must be read (multi
6587 dimension implicit array init handling). 'size_only' is true if
6588 size only evaluation is wanted (only for arrays). */
6589 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6590 int first
, int size_only
)
6592 int len
, n
, no_oblock
, nb
, i
;
6599 /* If we currently are at an '}' or ',' we have read an initializer
6600 element in one of our callers, and not yet consumed it. */
6601 have_elem
= tok
== '}' || tok
== ',';
6602 if (!have_elem
&& tok
!= '{' &&
6603 /* In case of strings we have special handling for arrays, so
6604 don't consume them as initializer value (which would commit them
6605 to some anonymous symbol). */
6606 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6608 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6613 !(type
->t
& VT_ARRAY
) &&
6614 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6615 The source type might have VT_CONSTANT set, which is
6616 of course assignable to non-const elements. */
6617 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6618 init_putv(type
, sec
, c
);
6619 } else if (type
->t
& VT_ARRAY
) {
6622 t1
= pointed_type(type
);
6623 size1
= type_size(t1
, &align1
);
6626 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6629 tcc_error("character array initializer must be a literal,"
6630 " optionally enclosed in braces");
6635 /* only parse strings here if correct type (otherwise: handle
6636 them as ((w)char *) expressions */
6637 if ((tok
== TOK_LSTR
&&
6638 #ifdef TCC_TARGET_PE
6639 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6641 (t1
->t
& VT_BTYPE
) == VT_INT
6643 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6645 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6648 /* compute maximum number of chars wanted */
6650 cstr_len
= tokc
.str
.size
;
6652 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6655 if (n
>= 0 && nb
> (n
- len
))
6659 tcc_warning("initializer-string for array is too long");
6660 /* in order to go faster for common case (char
6661 string in global variable, we handle it
6663 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6665 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
6669 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6671 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6673 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
6680 /* only add trailing zero if enough storage (no
6681 warning in this case since it is standard) */
6682 if (n
< 0 || len
< n
) {
6685 init_putv(t1
, sec
, c
+ (len
* size1
));
6696 while (tok
!= '}' || have_elem
) {
6697 len
= decl_designator(type
, sec
, c
, &f
, size_only
, len
);
6699 if (type
->t
& VT_ARRAY
) {
6701 /* special test for multi dimensional arrays (may not
6702 be strictly correct if designators are used at the
6704 if (no_oblock
&& len
>= n
*size1
)
6707 if (s
->type
.t
== VT_UNION
)
6711 if (no_oblock
&& f
== NULL
)
6720 /* put zeros at the end */
6721 if (!size_only
&& len
< n
*size1
)
6722 init_putz(sec
, c
+ len
, n
*size1
- len
);
6725 /* patch type size if needed, which happens only for array types */
6727 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
6728 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6731 if (first
|| tok
== '{') {
6739 } else if (tok
== '{') {
6741 decl_initializer(type
, sec
, c
, first
, size_only
);
6743 } else if (size_only
) {
6744 /* If we supported only ISO C we wouldn't have to accept calling
6745 this on anything than an array size_only==1 (and even then
6746 only on the outermost level, so no recursion would be needed),
6747 because initializing a flex array member isn't supported.
6748 But GNU C supports it, so we need to recurse even into
6749 subfields of structs and arrays when size_only is set. */
6750 /* just skip expression */
6751 skip_or_save_block(NULL
);
6754 /* This should happen only when we haven't parsed
6755 the init element above for fear of committing a
6756 string constant to memory too early. */
6757 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
6758 expect("string constant");
6759 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6761 init_putv(type
, sec
, c
);
6765 /* parse an initializer for type 't' if 'has_init' is non zero, and
6766 allocate space in local or global data space ('r' is either
6767 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6768 variable 'v' of scope 'scope' is declared before initializers
6769 are parsed. If 'v' is zero, then a reference to the new object
6770 is put in the value stack. If 'has_init' is 2, a special parsing
6771 is done to handle string constants. */
6772 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
6773 int has_init
, int v
, int scope
)
6775 int size
, align
, addr
;
6776 TokenString
*init_str
= NULL
;
6779 Sym
*flexible_array
;
6781 int saved_nocode_wanted
= nocode_wanted
;
6782 #ifdef CONFIG_TCC_BCHECK
6783 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
6786 if (type
->t
& VT_STATIC
)
6787 nocode_wanted
|= NODATA_WANTED
? 0x40000000 : 0x80000000;
6789 flexible_array
= NULL
;
6790 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6791 Sym
*field
= type
->ref
->next
;
6794 field
= field
->next
;
6795 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
6796 flexible_array
= field
;
6800 size
= type_size(type
, &align
);
6801 /* If unknown size, we must evaluate it before
6802 evaluating initializers because
6803 initializers can generate global data too
6804 (e.g. string pointers or ISOC99 compound
6805 literals). It also simplifies local
6806 initializers handling */
6807 if (size
< 0 || (flexible_array
&& has_init
)) {
6809 tcc_error("unknown type size");
6810 /* get all init string */
6811 if (has_init
== 2) {
6812 init_str
= tok_str_alloc();
6813 /* only get strings */
6814 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6815 tok_str_add_tok(init_str
);
6818 tok_str_add(init_str
, -1);
6819 tok_str_add(init_str
, 0);
6821 skip_or_save_block(&init_str
);
6826 begin_macro(init_str
, 1);
6828 decl_initializer(type
, NULL
, 0, 1, 1);
6829 /* prepare second initializer parsing */
6830 macro_ptr
= init_str
->str
;
6833 /* if still unknown size, error */
6834 size
= type_size(type
, &align
);
6836 tcc_error("unknown type size");
6838 /* If there's a flex member and it was used in the initializer
6840 if (flexible_array
&&
6841 flexible_array
->type
.ref
->c
> 0)
6842 size
+= flexible_array
->type
.ref
->c
6843 * pointed_size(&flexible_array
->type
);
6844 /* take into account specified alignment if bigger */
6845 if (ad
->a
.aligned
) {
6846 int speca
= 1 << (ad
->a
.aligned
- 1);
6849 } else if (ad
->a
.packed
) {
6854 size
= 0, align
= 1;
6856 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
6858 #ifdef CONFIG_TCC_BCHECK
6859 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
6863 loc
= (loc
- size
) & -align
;
6865 #ifdef CONFIG_TCC_BCHECK
6866 /* handles bounds */
6867 /* XXX: currently, since we do only one pass, we cannot track
6868 '&' operators, so we add only arrays */
6869 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
6871 /* add padding between regions */
6873 /* then add local bound info */
6874 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
6875 bounds_ptr
[0] = addr
;
6876 bounds_ptr
[1] = size
;
6880 /* local variable */
6881 #ifdef CONFIG_TCC_ASM
6882 if (ad
->asm_label
) {
6883 int reg
= asm_parse_regvar(ad
->asm_label
);
6885 r
= (r
& ~VT_VALMASK
) | reg
;
6888 sym
= sym_push(v
, type
, r
, addr
);
6891 /* push local reference */
6892 vset(type
, r
, addr
);
6895 if (v
&& scope
== VT_CONST
) {
6896 /* see if the symbol was already defined */
6899 patch_storage(sym
, ad
, type
);
6900 if (sym
->type
.t
& VT_EXTERN
) {
6901 /* if the variable is extern, it was not allocated */
6902 sym
->type
.t
&= ~VT_EXTERN
;
6903 /* set array size if it was omitted in extern
6905 if ((sym
->type
.t
& VT_ARRAY
) &&
6906 sym
->type
.ref
->c
< 0 &&
6908 sym
->type
.ref
->c
= type
->ref
->c
;
6909 } else if (!has_init
) {
6910 /* we accept several definitions of the same
6911 global variable. this is tricky, because we
6912 must play with the SHN_COMMON type of the symbol */
6913 /* no init data, we won't add more to the symbol */
6915 } else if (sym
->c
) {
6916 ElfSym
*esym
= elfsym(sym
);
6917 if (esym
->st_shndx
== data_section
->sh_num
)
6918 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6923 /* allocate symbol in corresponding section */
6928 else if (tcc_state
->nocommon
)
6933 addr
= section_add(sec
, size
, align
);
6934 #ifdef CONFIG_TCC_BCHECK
6935 /* add padding if bound check */
6937 section_add(sec
, 1, 1);
6940 addr
= align
; /* SHN_COMMON is special, symbol value is align */
6941 sec
= common_section
;
6946 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
6947 patch_storage(sym
, ad
, NULL
);
6949 /* Local statics have a scope until now (for
6950 warnings), remove it here. */
6952 /* update symbol definition */
6953 put_extern_sym(sym
, sec
, addr
, size
);
6955 /* push global reference */
6956 sym
= get_sym_ref(type
, sec
, addr
, size
);
6957 vpushsym(type
, sym
);
6961 #ifdef CONFIG_TCC_BCHECK
6962 /* handles bounds now because the symbol must be defined
6963 before for the relocation */
6967 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
6968 /* then add global bound info */
6969 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
6970 bounds_ptr
[0] = 0; /* relocated */
6971 bounds_ptr
[1] = size
;
6976 if (type
->t
& VT_VLA
) {
6982 /* save current stack pointer */
6983 if (vlas_in_scope
== 0) {
6984 if (vla_sp_root_loc
== -1)
6985 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
6986 gen_vla_sp_save(vla_sp_root_loc
);
6989 vla_runtime_type_size(type
, &a
);
6990 gen_vla_alloc(type
, a
);
6991 gen_vla_sp_save(addr
);
6995 } else if (has_init
) {
6996 size_t oldreloc_offset
= 0;
6997 if (sec
&& sec
->reloc
)
6998 oldreloc_offset
= sec
->reloc
->data_offset
;
6999 decl_initializer(type
, sec
, addr
, 1, 0);
7000 if (sec
&& sec
->reloc
)
7001 squeeze_multi_relocs(sec
, oldreloc_offset
);
7002 /* patch flexible array member size back to -1, */
7003 /* for possible subsequent similar declarations */
7005 flexible_array
->type
.ref
->c
= -1;
7009 /* restore parse state if needed */
7015 nocode_wanted
= saved_nocode_wanted
;
7018 /* parse a function defined by symbol 'sym' and generate its code in
7019 'cur_text_section' */
7020 static void gen_function(Sym
*sym
)
7023 ind
= cur_text_section
->data_offset
;
7024 /* NOTE: we patch the symbol size later */
7025 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7026 funcname
= get_tok_str(sym
->v
, NULL
);
7028 /* Initialize VLA state */
7030 vla_sp_root_loc
= -1;
7031 /* put debug symbol */
7032 tcc_debug_funcstart(tcc_state
, sym
);
7033 /* push a dummy symbol to enable local sym storage */
7034 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7035 local_scope
= 1; /* for function parameters */
7036 gfunc_prolog(&sym
->type
);
7039 block(NULL
, NULL
, 0);
7043 cur_text_section
->data_offset
= ind
;
7044 label_pop(&global_label_stack
, NULL
, 0);
7045 /* reset local stack */
7047 sym_pop(&local_stack
, NULL
, 0);
7048 /* end of function */
7049 /* patch symbol size */
7050 elfsym(sym
)->st_size
= ind
- func_ind
;
7051 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7052 /* It's better to crash than to generate wrong code */
7053 cur_text_section
= NULL
;
7054 funcname
= ""; /* for safety */
7055 func_vt
.t
= VT_VOID
; /* for safety */
7056 func_var
= 0; /* for safety */
7057 ind
= 0; /* for safety */
7058 nocode_wanted
= 0x80000000;
7062 static void gen_inline_functions(TCCState
*s
)
7065 int inline_generated
, i
, ln
;
7066 struct InlineFunc
*fn
;
7068 ln
= file
->line_num
;
7069 /* iterate while inline function are referenced */
7071 inline_generated
= 0;
7072 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7073 fn
= s
->inline_fns
[i
];
7075 if (sym
&& sym
->c
) {
7076 /* the function was used: generate its code and
7077 convert it to a normal function */
7080 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7081 sym
->type
.t
&= ~VT_INLINE
;
7083 begin_macro(fn
->func_str
, 1);
7085 cur_text_section
= text_section
;
7089 inline_generated
= 1;
7092 } while (inline_generated
);
7093 file
->line_num
= ln
;
/* Release the recorded bodies of inline functions that were never emitted.
   NOTE(review): mangled fragment — the declaration of loop index 'i' and the
   surrounding braces were lost in extraction. */
7096 ST_FUNC
void free_inline_functions(TCCState
*s
)
7099 /* free tokens of unused inline functions */
7100 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7101 struct InlineFunc
*fn
= s
->inline_fns
[i
];
/* free the saved token string of this inline body */
7103 tok_str_free(fn
->func_str
);
/* then drop the dynarray holding the InlineFunc records themselves */
7105 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7108 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7109 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* Parse a sequence of declarations/definitions at the given scope level.
   NOTE(review): heavily extraction-mangled fragment — original line numbers
   are fused into the text, many interior lines (braces, local declarations
   of 'btype', 'type', 'ad', 'v', 'r', 'sym', whole statements) are missing,
   and three multi-line comments below had lost their closing delimiters
   (restored here, marked "(review)").  All code tokens are otherwise kept
   byte-identical; reconcile against the upstream file before building. */
7110 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7118 if (!parse_btype(&btype
, &ad
)) {
7119 if (is_for_loop_init
)
7121 /* skip redundant ';' if not in old parameter decl scope */
7122 if (tok
== ';' && l
!= VT_CMP
) {
7128 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7129 /* global asm block */
7133 if (tok
>= TOK_UIDENT
) {
7134 /* special test for old K&R protos without explicit int
7135 type. Only accepted when defining global data */
7139 expect("declaration");
/* warn about 'struct { ... };' style declarations that name nothing */
7144 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7145 int v
= btype
.ref
->v
;
7146 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7147 tcc_warning("unnamed struct/union that defines no instances");
7151 if (IS_ENUM(btype
.t
)) {
7156 while (1) { /* iterate thru each declaration */
7158 /* If the base type itself was an array type of unspecified
7159 size (like in 'typedef int arr[]; arr x = {1};') then
7160 we will overwrite the unknown size by the real one for
7161 this decl. We need to unshare the ref symbol holding
   that size.  (review: closing delimiter restored — the original
   tail of this comment was lost in extraction) */
7163 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7164 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7166 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed declarator (upstream guards this with -v) */
7170 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7171 printf("type = '%s'\n", buf
);
/* function declarator: distinguish definition from prototype below */
7174 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7175 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
7176 tcc_error("function without file scope cannot be static");
7178 /* if old style function prototype, we accept a
   declaration list.  (review: closing delimiter restored — the
   original tail of this comment was lost in extraction) */
7181 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7182 decl0(VT_CMP
, 0, sym
);
/* GNU extension: asm("label") renames the emitted symbol */
7185 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7186 ad
.asm_label
= asm_label_instr();
7187 /* parse one last attribute list, after asm label */
7188 parse_attribute(&ad
);
7193 #ifdef TCC_TARGET_PE
7194 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7195 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7196 tcc_error("cannot have dll linkage with static or typedef");
7197 if (ad
.a
.dllimport
) {
7198 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7201 type
.t
|= VT_EXTERN
;
7207 tcc_error("cannot use local functions");
7208 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7209 expect("function definition");
7211 /* reject abstract declarators in function definition
7212 make old style params without decl have int type */
7214 while ((sym
= sym
->next
) != NULL
) {
7215 if (!(sym
->v
& ~SYM_FIELD
))
7216 expect("identifier");
7217 if (sym
->type
.t
== VT_VOID
)
7218 sym
->type
= int_type
;
7221 /* XXX: cannot do better now: convert extern line to static inline */
7222 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7223 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7228 /* If type is VT_VOID the symbol was created by tccasm
7229 first, and we see the first reference from C now. */
7230 if ((sym
->type
.t
& VT_BTYPE
) == VT_VOID
)
7233 if ((sym
->type
.t
& VT_BTYPE
) != VT_FUNC
)
/* merge attributes of an earlier prototype into this definition */
7236 ref
= sym
->type
.ref
;
7238 /* use func_call from prototype if not defined */
7239 if (ref
->f
.func_call
!= FUNC_CDECL
7240 && type
.ref
->f
.func_call
== FUNC_CDECL
)
7241 type
.ref
->f
.func_call
= ref
->f
.func_call
;
7243 /* use static from prototype */
7244 if (sym
->type
.t
& VT_STATIC
)
7245 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7247 /* If the definition has no visibility use the
7248 one from prototype. */
7249 if (!type
.ref
->a
.visibility
)
7250 type
.ref
->a
.visibility
= ref
->a
.visibility
;
7251 /* apply other storage attributes from prototype */
7252 type
.ref
->a
.dllexport
|= ref
->a
.dllexport
;
7253 type
.ref
->a
.weak
|= ref
->a
.weak
;
7255 if (!is_compatible_types(&sym
->type
, &type
)) {
7257 tcc_error("incompatible types for redefinition of '%s'",
7258 get_tok_str(v
, NULL
));
7260 if (ref
->f
.func_body
)
7261 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
7262 /* if symbol is already defined, then put complete type */
7266 /* put function symbol */
7267 sym
= global_identifier_push(v
, type
.t
, 0);
7268 sym
->type
.ref
= type
.ref
;
/* mark the symbol as defined (has a body) so later redefinitions error */
7271 sym
->type
.ref
->f
.func_body
= 1;
7272 sym
->r
= VT_SYM
| VT_CONST
;
7273 patch_storage(sym
, &ad
, NULL
);
7275 /* static inline functions are just recorded as a kind
7276 of macro. Their code will be emitted at the end of
7277 the compilation unit only if they are used */
7278 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7279 (VT_INLINE
| VT_STATIC
)) {
7280 struct InlineFunc
*fn
;
7281 const char *filename
;
7283 filename
= file
? file
->filename
: "";
/* InlineFunc uses a flexible trailing filename buffer: allocate
   header plus string in one block */
7284 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7285 strcpy(fn
->filename
, filename
);
7287 skip_or_save_block(&fn
->func_str
);
7288 dynarray_add(&tcc_state
->inline_fns
,
7289 &tcc_state
->nb_inline_fns
, fn
);
7291 /* compute text section */
7292 cur_text_section
= ad
.section
;
7293 if (!cur_text_section
)
7294 cur_text_section
= text_section
;
7300 /* find parameter in function parameter list */
7301 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7302 if ((sym
->v
& ~SYM_FIELD
) == v
)
7304 tcc_error("declaration for parameter '%s' but no such parameter",
7305 get_tok_str(v
, NULL
));
7307 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7308 tcc_error("storage class specified for '%s'",
7309 get_tok_str(v
, NULL
));
7310 if (sym
->type
.t
!= VT_VOID
)
7311 tcc_error("redefinition of parameter '%s'",
7312 get_tok_str(v
, NULL
));
7313 convert_parameter_type(&type
);
/* typedef declaration: record the name/type pair, emit nothing */
7315 } else if (type
.t
& VT_TYPEDEF
) {
7316 /* save typedefed type */
7317 /* XXX: test storage specifiers ? */
7319 if (sym
&& sym
->sym_scope
== local_scope
) {
7320 if (!is_compatible_types(&sym
->type
, &type
)
7321 || !(sym
->type
.t
& VT_TYPEDEF
))
7322 tcc_error("incompatible redefinition of '%s'",
7323 get_tok_str(v
, NULL
));
7326 sym
= sym_push(v
, &type
, 0, 0);
7332 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7333 /* external function definition */
7334 /* specific case for func_call attribute */
7336 } else if (!(type
.t
& VT_ARRAY
)) {
7337 /* not lvalue if array */
7338 r
|= lvalue_type(type
.t
);
7340 has_init
= (tok
== '=');
7341 if (has_init
&& (type
.t
& VT_VLA
))
7342 tcc_error("variable length array cannot be initialized");
/* no storage here: externs without initializer, function declarations,
   and uninitialized global static arrays of unknown size */
7343 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
7344 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7345 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7346 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7347 /* external variable or function */
7348 /* NOTE: as GCC, uninitialized global static
7349 arrays of null size are considered as
   extern.  (review: closing delimiter restored — the original tail
   of this comment was lost in extraction) */
7351 sym
= external_sym(v
, &type
, r
, &ad
);
7352 if (ad
.alias_target
) {
/* __attribute__((alias("target"))): bind this symbol to the
   already-defined target's section/value/size */
7356 alias_target
= sym_find(ad
.alias_target
);
7357 esym
= elfsym(alias_target
);
7359 tcc_error("unsupported forward __alias__ attribute");
7360 tsec
.sh_num
= esym
->st_shndx
;
7361 /* Local statics have a scope until now (for
7362 warnings), remove it here. */
7364 put_extern_sym2(sym
, &tsec
, esym
->st_value
, esym
->st_size
, 0);
7367 if (type
.t
& VT_STATIC
)
/* allocate storage and parse the initializer, if any */
7373 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7377 if (is_for_loop_init
)
/* Top-level entry for parsing declarations at storage level 'l'. */
7390 static void decl(int l
)
/* NOTE(review): the body of decl() was lost in extraction (original lines
   7391-7394 are missing); upstream it is a thin wrapper delegating to
   decl0(l, 0, NULL) — TODO restore and verify against the upstream file. */
7395 /* ------------------------------------------------------------------------- */