/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
23 /********************************************************/
24 /* global variables */
/* loc : local variable index
   ind : output code index
   anon_sym: anonymous symbol index
*/
31 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
33 ST_DATA Sym
*sym_free_first
;
34 ST_DATA
void **sym_pools
;
35 ST_DATA
int nb_sym_pools
;
37 ST_DATA Sym
*global_stack
;
38 ST_DATA Sym
*local_stack
;
39 ST_DATA Sym
*define_stack
;
40 ST_DATA Sym
*global_label_stack
;
41 ST_DATA Sym
*local_label_stack
;
42 static int local_scope
;
44 static int section_sym
;
46 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
47 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
48 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
50 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
52 ST_DATA
int const_wanted
; /* true if constant wanted */
53 ST_DATA
int nocode_wanted
; /* no code generation wanted */
54 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
55 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
56 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
57 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
58 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
60 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
61 ST_DATA
const char *funcname
;
64 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
66 ST_DATA
struct switch_t
{
70 } **p
; int n
; /* list of case ranges */
71 int def_sym
; /* default symbol */
72 } *cur_switch
; /* current switch */
74 /* ------------------------------------------------------------------------- */
76 static void gen_cast(CType
*type
);
77 static void gen_cast_s(int t
);
78 static inline CType
*pointed_type(CType
*type
);
79 static int is_compatible_types(CType
*type1
, CType
*type2
);
80 static int parse_btype(CType
*type
, AttributeDef
*ad
);
81 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
82 static void parse_expr_type(CType
*type
);
83 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
84 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
85 static void block(int *bsym
, int *csym
, int is_expr
);
86 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
87 static void decl(int l
);
88 static int decl0(int l
, int is_for_loop_init
, Sym
*);
89 static void expr_eq(void);
90 static void vla_runtime_type_size(CType
*type
, int *a
);
91 static void vla_sp_restore(void);
92 static void vla_sp_restore_root(void);
93 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
94 static inline int64_t expr_const64(void);
95 static void vpush64(int ty
, unsigned long long v
);
96 static void vpush(CType
*type
);
97 static int gvtst(int inv
, int t
);
98 static void gen_inline_functions(TCCState
*s
);
99 static void skip_or_save_block(TokenString
**str
);
100 static void gv_dup(void);
102 ST_INLN
int is_float(int t
)
106 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
109 /* we use our own 'finite' function to avoid potential problems with
110 non standard math libs */
111 /* XXX: endianness dependent */
112 ST_FUNC
int ieee_finite(double d
)
115 memcpy(p
, &d
, sizeof(double));
116 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
119 /* compiling intel long double natively */
120 #if (defined __i386__ || defined __x86_64__) \
121 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
122 # define TCC_IS_NATIVE_387
125 ST_FUNC
void test_lvalue(void)
127 if (!(vtop
->r
& VT_LVAL
))
131 ST_FUNC
void check_vstack(void)
134 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
137 /* ------------------------------------------------------------------------- */
138 /* vstack debugging aid */
141 void pv (const char *lbl
, int a
, int b
)
144 for (i
= a
; i
< a
+ b
; ++i
) {
145 SValue
*p
= &vtop
[-i
];
146 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
147 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
152 /* ------------------------------------------------------------------------- */
153 /* start of translation unit info */
154 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
159 /* file info: full path + filename */
160 section_sym
= put_elf_sym(symtab_section
, 0, 0,
161 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
162 text_section
->sh_num
, NULL
);
163 getcwd(buf
, sizeof(buf
));
165 normalize_slashes(buf
);
167 pstrcat(buf
, sizeof(buf
), "/");
168 put_stabs_r(buf
, N_SO
, 0, 0,
169 text_section
->data_offset
, text_section
, section_sym
);
170 put_stabs_r(file
->filename
, N_SO
, 0, 0,
171 text_section
->data_offset
, text_section
, section_sym
);
176 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
177 symbols can be safely used */
178 put_elf_sym(symtab_section
, 0, 0,
179 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
180 SHN_ABS
, file
->filename
);
183 /* put end of translation unit info */
184 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
188 put_stabs_r(NULL
, N_SO
, 0, 0,
189 text_section
->data_offset
, text_section
, section_sym
);
193 /* generate line number info */
194 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
198 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
199 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
201 last_line_num
= file
->line_num
;
205 /* put function symbol */
206 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
214 /* XXX: we put here a dummy type */
215 snprintf(buf
, sizeof(buf
), "%s:%c1",
216 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
217 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
218 cur_text_section
, sym
->c
);
219 /* //gr gdb wants a line at the function */
220 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
226 /* put function size */
227 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
231 put_stabn(N_FUN
, 0, 0, size
);
234 /* ------------------------------------------------------------------------- */
235 ST_FUNC
int tccgen_compile(TCCState
*s1
)
237 cur_text_section
= NULL
;
239 anon_sym
= SYM_FIRST_ANOM
;
242 nocode_wanted
= 0x80000000;
244 /* define some often used types */
246 char_pointer_type
.t
= VT_BYTE
;
247 mk_pointer(&char_pointer_type
);
249 size_type
.t
= VT_INT
| VT_UNSIGNED
;
250 ptrdiff_type
.t
= VT_INT
;
252 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
253 ptrdiff_type
.t
= VT_LLONG
;
255 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
256 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
258 func_old_type
.t
= VT_FUNC
;
259 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
260 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
261 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
265 #ifdef TCC_TARGET_ARM
270 printf("%s: **** new file\n", file
->filename
);
273 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
276 gen_inline_functions(s1
);
278 /* end of translation unit info */
283 /* ------------------------------------------------------------------------- */
284 /* apply storage attributes to Elf symbol */
286 static void update_storage(Sym
*sym
)
291 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
292 if (sym
->a
.visibility
)
293 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
296 esym
->st_info
= ELFW(ST_INFO
)(STB_WEAK
, ELFW(ST_TYPE
)(esym
->st_info
));
298 if (sym
->a
.dllimport
)
299 esym
->st_other
|= ST_PE_IMPORT
;
300 if (sym
->a
.dllexport
)
301 esym
->st_other
|= ST_PE_EXPORT
;
304 printf("storage %s: vis=%d weak=%d exp=%d imp=%d\n",
305 get_tok_str(sym
->v
, NULL
),
314 /* ------------------------------------------------------------------------- */
315 /* update sym->c so that it points to an external symbol in section
316 'section' with value 'value' */
318 ST_FUNC
void put_extern_sym2(Sym
*sym
, Section
*section
,
319 addr_t value
, unsigned long size
,
320 int can_add_underscore
)
322 int sym_type
, sym_bind
, sh_num
, info
, other
, t
;
326 #ifdef CONFIG_TCC_BCHECK
332 else if (section
== SECTION_ABS
)
335 sh_num
= section
->sh_num
;
338 name
= get_tok_str(sym
->v
, NULL
);
339 #ifdef CONFIG_TCC_BCHECK
340 if (tcc_state
->do_bounds_check
) {
341 /* XXX: avoid doing that for statics ? */
342 /* if bound checking is activated, we change some function
343 names by adding the "__bound" prefix */
346 /* XXX: we rely only on malloc hooks */
359 strcpy(buf
, "__bound_");
367 if ((t
& VT_BTYPE
) == VT_FUNC
) {
369 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
370 sym_type
= STT_NOTYPE
;
372 sym_type
= STT_OBJECT
;
375 sym_bind
= STB_LOCAL
;
377 sym_bind
= STB_GLOBAL
;
380 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
381 Sym
*ref
= sym
->type
.ref
;
382 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
383 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
385 other
|= ST_PE_STDCALL
;
386 can_add_underscore
= 0;
390 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
392 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
396 name
= get_tok_str(sym
->asm_label
, NULL
);
397 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
398 sym
->c
= set_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
400 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
401 esym
->st_value
= value
;
402 esym
->st_size
= size
;
403 esym
->st_shndx
= sh_num
;
408 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
409 addr_t value
, unsigned long size
)
411 put_extern_sym2(sym
, section
, value
, size
, 1);
414 /* add a new relocation entry to symbol 'sym' in section 's' */
415 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
420 if (nocode_wanted
&& s
== cur_text_section
)
425 put_extern_sym(sym
, NULL
, 0, 0);
429 /* now we can add ELF relocation info */
430 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
434 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
436 greloca(s
, sym
, offset
, type
, 0);
440 /* ------------------------------------------------------------------------- */
441 /* symbol allocator */
442 static Sym
*__sym_malloc(void)
444 Sym
*sym_pool
, *sym
, *last_sym
;
447 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
448 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
450 last_sym
= sym_free_first
;
452 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
453 sym
->next
= last_sym
;
457 sym_free_first
= last_sym
;
461 static inline Sym
*sym_malloc(void)
465 sym
= sym_free_first
;
467 sym
= __sym_malloc();
468 sym_free_first
= sym
->next
;
471 sym
= tcc_malloc(sizeof(Sym
));
476 ST_INLN
void sym_free(Sym
*sym
)
479 sym
->next
= sym_free_first
;
480 sym_free_first
= sym
;
486 /* push, without hashing */
487 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
492 memset(s
, 0, sizeof *s
);
502 /* find a symbol and return its associated structure. 's' is the top
503 of the symbol stack */
504 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
516 /* structure lookup */
517 ST_INLN Sym
*struct_find(int v
)
520 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
522 return table_ident
[v
]->sym_struct
;
525 /* find an identifier */
526 ST_INLN Sym
*sym_find(int v
)
529 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
531 return table_ident
[v
]->sym_identifier
;
534 /* push a given symbol on the symbol stack */
535 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
544 s
= sym_push2(ps
, v
, type
->t
, c
);
545 s
->type
.ref
= type
->ref
;
547 /* don't record fields or anonymous symbols */
549 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
550 /* record symbol in token array */
551 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
553 ps
= &ts
->sym_struct
;
555 ps
= &ts
->sym_identifier
;
558 s
->sym_scope
= local_scope
;
559 if (s
->prev_tok
&& s
->prev_tok
->sym_scope
== s
->sym_scope
)
560 tcc_error("redeclaration of '%s'",
561 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
566 /* push a global identifier */
567 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
570 s
= sym_push2(&global_stack
, v
, t
, c
);
571 /* don't record anonymous symbol */
572 if (v
< SYM_FIRST_ANOM
) {
573 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
574 /* modify the top most local identifier, so that
575 sym_identifier will point to 's' when popped */
577 ps
= &(*ps
)->prev_tok
;
584 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
585 pop them yet from the list, but do remove them from the token array. */
586 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
596 /* remove symbol in token array */
598 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
599 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
601 ps
= &ts
->sym_struct
;
603 ps
= &ts
->sym_identifier
;
614 /* ------------------------------------------------------------------------- */
616 static void vsetc(CType
*type
, int r
, CValue
*vc
)
620 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
621 tcc_error("memory full (vstack)");
622 /* cannot let cpu flags if other instruction are generated. Also
623 avoid leaving VT_JMP anywhere except on the top of the stack
624 because it would complicate the code generator.
626 Don't do this when nocode_wanted. vtop might come from
627 !nocode_wanted regions (see 88_codeopt.c) and transforming
628 it to a register without actually generating code is wrong
629 as their value might still be used for real. All values
630 we push under nocode_wanted will eventually be popped
631 again, so that the VT_CMP/VT_JMP value will be in vtop
632 when code is unsuppressed again.
634 Same logic below in vswap(); */
635 if (vtop
>= vstack
&& !nocode_wanted
) {
636 v
= vtop
->r
& VT_VALMASK
;
637 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
649 ST_FUNC
void vswap(void)
652 /* cannot vswap cpu flags. See comment at vsetc() above */
653 if (vtop
>= vstack
&& !nocode_wanted
) {
654 int v
= vtop
->r
& VT_VALMASK
;
655 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
663 /* pop stack value */
664 ST_FUNC
void vpop(void)
667 v
= vtop
->r
& VT_VALMASK
;
668 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
669 /* for x86, we need to pop the FP stack */
671 o(0xd8dd); /* fstp %st(0) */
674 if (v
== VT_JMP
|| v
== VT_JMPI
) {
675 /* need to put correct jump if && or || without test */
681 /* push constant of type "type" with useless value */
682 ST_FUNC
void vpush(CType
*type
)
684 vset(type
, VT_CONST
, 0);
687 /* push integer constant */
688 ST_FUNC
void vpushi(int v
)
692 vsetc(&int_type
, VT_CONST
, &cval
);
695 /* push a pointer sized constant */
696 static void vpushs(addr_t v
)
700 vsetc(&size_type
, VT_CONST
, &cval
);
703 /* push arbitrary 64bit constant */
704 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
711 vsetc(&ctype
, VT_CONST
, &cval
);
714 /* push long long constant */
715 static inline void vpushll(long long v
)
717 vpush64(VT_LLONG
, v
);
720 ST_FUNC
void vset(CType
*type
, int r
, int v
)
725 vsetc(type
, r
, &cval
);
728 static void vseti(int r
, int v
)
736 ST_FUNC
void vpushv(SValue
*v
)
738 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
739 tcc_error("memory full (vstack)");
744 static void vdup(void)
749 /* rotate n first stack elements to the bottom
750 I1 ... In -> I2 ... In I1 [top is right]
752 ST_FUNC
void vrotb(int n
)
763 /* rotate the n elements before entry e towards the top
764 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
766 ST_FUNC
void vrote(SValue
*e
, int n
)
772 for(i
= 0;i
< n
- 1; i
++)
777 /* rotate n first stack elements to the top
778 I1 ... In -> In I1 ... I(n-1) [top is right]
780 ST_FUNC
void vrott(int n
)
785 /* push a symbol value of TYPE */
786 static inline void vpushsym(CType
*type
, Sym
*sym
)
790 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
794 /* Return a static symbol pointing to a section */
795 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
801 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
802 sym
->type
.ref
= type
->ref
;
803 sym
->r
= VT_CONST
| VT_SYM
;
804 put_extern_sym(sym
, sec
, offset
, size
);
808 /* push a reference to a section offset by adding a dummy symbol */
809 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
811 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
814 /* define a new external reference to a symbol 'v' of type 'u' */
815 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
821 /* push forward reference */
822 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
823 s
->type
.ref
= type
->ref
;
824 s
->r
= r
| VT_CONST
| VT_SYM
;
829 /* Merge some storage attributes. */
830 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
832 if (type
&& !is_compatible_types(&sym
->type
, type
))
833 tcc_error("incompatible types for redefinition of '%s'",
834 get_tok_str(sym
->v
, NULL
));
836 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
837 tcc_error("incompatible dll linkage for redefinition of '%s'",
838 get_tok_str(sym
->v
, NULL
));
840 sym
->a
.dllexport
|= ad
->a
.dllexport
;
841 sym
->a
.weak
|= ad
->a
.weak
;
842 if (ad
->a
.visibility
) {
843 int vis
= sym
->a
.visibility
;
844 int vis2
= ad
->a
.visibility
;
845 if (vis
== STV_DEFAULT
)
847 else if (vis2
!= STV_DEFAULT
)
848 vis
= (vis
< vis2
) ? vis
: vis2
;
849 sym
->a
.visibility
= vis
;
852 sym
->a
.aligned
= ad
->a
.aligned
;
854 sym
->asm_label
= ad
->asm_label
;
858 /* define a new external reference to a symbol 'v' */
859 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
864 /* push forward reference */
865 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
866 s
->type
.t
|= VT_EXTERN
;
870 if (s
->type
.ref
== func_old_type
.ref
) {
871 s
->type
.ref
= type
->ref
;
872 s
->r
= r
| VT_CONST
| VT_SYM
;
873 s
->type
.t
|= VT_EXTERN
;
875 patch_storage(s
, ad
, type
);
880 /* push a reference to global symbol v */
881 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
883 vpushsym(type
, external_global_sym(v
, type
, 0));
886 /* save registers up to (vtop - n) stack entry */
887 ST_FUNC
void save_regs(int n
)
890 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
894 /* save r to the memory stack, and mark it as being free */
895 ST_FUNC
void save_reg(int r
)
897 save_reg_upstack(r
, 0);
900 /* save r to the memory stack, and mark it as being free,
901 if seen up to (vtop - n) stack entry */
902 ST_FUNC
void save_reg_upstack(int r
, int n
)
904 int l
, saved
, size
, align
;
908 if ((r
&= VT_VALMASK
) >= VT_CONST
)
913 /* modify all stack values */
916 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
917 if ((p
->r
& VT_VALMASK
) == r
||
918 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
919 /* must save value on stack if not already done */
921 /* NOTE: must reload 'r' because r might be equal to r2 */
922 r
= p
->r
& VT_VALMASK
;
923 /* store register in the stack */
925 if ((p
->r
& VT_LVAL
) ||
926 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
928 type
= &char_pointer_type
;
932 size
= type_size(type
, &align
);
933 loc
= (loc
- size
) & -align
;
935 sv
.r
= VT_LOCAL
| VT_LVAL
;
938 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
939 /* x86 specific: need to pop fp register ST0 if saved */
941 o(0xd8dd); /* fstp %st(0) */
945 /* special long long case */
946 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
954 /* mark that stack entry as being saved on the stack */
955 if (p
->r
& VT_LVAL
) {
956 /* also clear the bounded flag because the
957 relocation address of the function was stored in
959 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
961 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
969 #ifdef TCC_TARGET_ARM
970 /* find a register of class 'rc2' with at most one reference on stack.
971 * If none, call get_reg(rc) */
972 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
977 for(r
=0;r
<NB_REGS
;r
++) {
978 if (reg_classes
[r
] & rc2
) {
981 for(p
= vstack
; p
<= vtop
; p
++) {
982 if ((p
->r
& VT_VALMASK
) == r
||
983 (p
->r2
& VT_VALMASK
) == r
)
994 /* find a free register of class 'rc'. If none, save one register */
995 ST_FUNC
int get_reg(int rc
)
1000 /* find a free register */
1001 for(r
=0;r
<NB_REGS
;r
++) {
1002 if (reg_classes
[r
] & rc
) {
1005 for(p
=vstack
;p
<=vtop
;p
++) {
1006 if ((p
->r
& VT_VALMASK
) == r
||
1007 (p
->r2
& VT_VALMASK
) == r
)
1015 /* no register left : free the first one on the stack (VERY
1016 IMPORTANT to start from the bottom to ensure that we don't
1017 spill registers used in gen_opi()) */
1018 for(p
=vstack
;p
<=vtop
;p
++) {
1019 /* look at second register (if long long) */
1020 r
= p
->r2
& VT_VALMASK
;
1021 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1023 r
= p
->r
& VT_VALMASK
;
1024 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1030 /* Should never comes here */
1034 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1036 static void move_reg(int r
, int s
, int t
)
1050 /* get address of vtop (vtop MUST BE an lvalue) */
1051 ST_FUNC
void gaddrof(void)
1053 vtop
->r
&= ~VT_LVAL
;
1054 /* tricky: if saved lvalue, then we can go back to lvalue */
1055 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1056 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1061 #ifdef CONFIG_TCC_BCHECK
1062 /* generate lvalue bound code */
1063 static void gbound(void)
1068 vtop
->r
&= ~VT_MUSTBOUND
;
1069 /* if lvalue, then use checking code before dereferencing */
1070 if (vtop
->r
& VT_LVAL
) {
1071 /* if not VT_BOUNDED value, then make one */
1072 if (!(vtop
->r
& VT_BOUNDED
)) {
1073 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1074 /* must save type because we must set it to int to get pointer */
1076 vtop
->type
.t
= VT_PTR
;
1079 gen_bounded_ptr_add();
1080 vtop
->r
|= lval_type
;
1083 /* then check for dereferencing */
1084 gen_bounded_ptr_deref();
1089 static void incr_bf_adr(int o
)
1091 vtop
->type
= char_pointer_type
;
1095 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1096 | (VT_BYTE
|VT_UNSIGNED
);
1097 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1098 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1101 /* single-byte load mode for packed or otherwise unaligned bitfields */
1102 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1105 save_reg_upstack(vtop
->r
, 1);
1106 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1107 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1116 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1118 vpushi((1 << n
) - 1), gen_op('&');
1121 vpushi(bits
), gen_op(TOK_SHL
);
1124 bits
+= n
, bit_size
-= n
, o
= 1;
1127 if (!(type
->t
& VT_UNSIGNED
)) {
1128 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1129 vpushi(n
), gen_op(TOK_SHL
);
1130 vpushi(n
), gen_op(TOK_SAR
);
1134 /* single-byte store mode for packed or otherwise unaligned bitfields */
1135 static void store_packed_bf(int bit_pos
, int bit_size
)
1137 int bits
, n
, o
, m
, c
;
1139 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1141 save_reg_upstack(vtop
->r
, 1);
1142 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1144 incr_bf_adr(o
); // X B
1146 c
? vdup() : gv_dup(); // B V X
1149 vpushi(bits
), gen_op(TOK_SHR
);
1151 vpushi(bit_pos
), gen_op(TOK_SHL
);
1156 m
= ((1 << n
) - 1) << bit_pos
;
1157 vpushi(m
), gen_op('&'); // X B V1
1158 vpushv(vtop
-1); // X B V1 B
1159 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1160 gen_op('&'); // X B V1 B1
1161 gen_op('|'); // X B V2
1163 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1164 vstore(), vpop(); // X B
1165 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1170 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1173 if (0 == sv
->type
.ref
)
1175 t
= sv
->type
.ref
->auxtype
;
1176 if (t
!= -1 && t
!= VT_STRUCT
) {
1177 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1178 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1183 /* store vtop a register belonging to class 'rc'. lvalues are
1184 converted to values. Cannot be used if cannot be converted to
1185 register value (such as structures). */
1186 ST_FUNC
int gv(int rc
)
1188 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1190 /* NOTE: get_reg can modify vstack[] */
1191 if (vtop
->type
.t
& VT_BITFIELD
) {
1194 bit_pos
= BIT_POS(vtop
->type
.t
);
1195 bit_size
= BIT_SIZE(vtop
->type
.t
);
1196 /* remove bit field info to avoid loops */
1197 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1200 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1201 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1202 type
.t
|= VT_UNSIGNED
;
1204 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1206 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1211 if (r
== VT_STRUCT
) {
1212 load_packed_bf(&type
, bit_pos
, bit_size
);
1214 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1215 /* cast to int to propagate signedness in following ops */
1217 /* generate shifts */
1218 vpushi(bits
- (bit_pos
+ bit_size
));
1220 vpushi(bits
- bit_size
);
1221 /* NOTE: transformed to SHR if unsigned */
1226 if (is_float(vtop
->type
.t
) &&
1227 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1228 unsigned long offset
;
1229 /* CPUs usually cannot use float constants, so we store them
1230 generically in data segment */
1231 size
= type_size(&vtop
->type
, &align
);
1233 size
= 0, align
= 1;
1234 offset
= section_add(data_section
, size
, align
);
1235 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1237 init_putv(&vtop
->type
, data_section
, offset
);
1240 #ifdef CONFIG_TCC_BCHECK
1241 if (vtop
->r
& VT_MUSTBOUND
)
1245 r
= vtop
->r
& VT_VALMASK
;
1246 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1247 #ifndef TCC_TARGET_ARM64
1250 #ifdef TCC_TARGET_X86_64
1251 else if (rc
== RC_FRET
)
1255 /* need to reload if:
1257 - lvalue (need to dereference pointer)
1258 - already a register, but not in the right class */
1260 || (vtop
->r
& VT_LVAL
)
1261 || !(reg_classes
[r
] & rc
)
1263 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1264 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1266 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1272 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1273 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1275 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1276 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1277 unsigned long long ll
;
1279 int r2
, original_type
;
1280 original_type
= vtop
->type
.t
;
1281 /* two register type load : expand to two words
1284 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1287 vtop
->c
.i
= ll
; /* first word */
1289 vtop
->r
= r
; /* save register value */
1290 vpushi(ll
>> 32); /* second word */
1293 if (vtop
->r
& VT_LVAL
) {
1294 /* We do not want to modifier the long long
1295 pointer here, so the safest (and less
1296 efficient) is to save all the other registers
1297 in the stack. XXX: totally inefficient. */
1301 /* lvalue_save: save only if used further down the stack */
1302 save_reg_upstack(vtop
->r
, 1);
1304 /* load from memory */
1305 vtop
->type
.t
= load_type
;
1308 vtop
[-1].r
= r
; /* save register value */
1309 /* increment pointer to get second word */
1310 vtop
->type
.t
= addr_type
;
1315 vtop
->type
.t
= load_type
;
1317 /* move registers */
1320 vtop
[-1].r
= r
; /* save register value */
1321 vtop
->r
= vtop
[-1].r2
;
1323 /* Allocate second register. Here we rely on the fact that
1324 get_reg() tries first to free r2 of an SValue. */
1328 /* write second register */
1330 vtop
->type
.t
= original_type
;
1331 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1333 /* lvalue of scalar type : need to use lvalue type
1334 because of possible cast */
1337 /* compute memory access type */
1338 if (vtop
->r
& VT_LVAL_BYTE
)
1340 else if (vtop
->r
& VT_LVAL_SHORT
)
1342 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1346 /* restore wanted type */
1349 /* one register type load */
1354 #ifdef TCC_TARGET_C67
1355 /* uses register pairs for doubles */
1356 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1363 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1364 ST_FUNC
void gv2(int rc1
, int rc2
)
1368 /* generate more generic register first. But VT_JMP or VT_CMP
1369 values must be generated first in all cases to avoid possible
1371 v
= vtop
[0].r
& VT_VALMASK
;
1372 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1377 /* test if reload is needed for first register */
1378 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1388 /* test if reload is needed for first register */
1389 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1395 #ifndef TCC_TARGET_ARM64
1396 /* wrapper around RC_FRET to return a register by type */
1397 static int rc_fret(int t
)
1399 #ifdef TCC_TARGET_X86_64
1400 if (t
== VT_LDOUBLE
) {
1408 /* wrapper around REG_FRET to return a register by type */
1409 static int reg_fret(int t
)
1411 #ifdef TCC_TARGET_X86_64
1412 if (t
== VT_LDOUBLE
) {
1420 /* expand 64bit on stack in two ints */
1421 static void lexpand(void)
1424 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1425 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1426 if (v
== VT_CONST
) {
1429 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1435 vtop
[0].r
= vtop
[-1].r2
;
1436 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1438 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1442 #ifdef TCC_TARGET_ARM
1443 /* expand long long on stack */
1444 ST_FUNC
void lexpand_nr(void)
1448 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1450 vtop
->r2
= VT_CONST
;
1451 vtop
->type
.t
= VT_INT
| u
;
1452 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1453 if (v
== VT_CONST
) {
1454 vtop
[-1].c
.i
= vtop
->c
.i
;
1455 vtop
->c
.i
= vtop
->c
.i
>> 32;
1457 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1459 vtop
->r
= vtop
[-1].r
;
1460 } else if (v
> VT_CONST
) {
1464 vtop
->r
= vtop
[-1].r2
;
1465 vtop
[-1].r2
= VT_CONST
;
1466 vtop
[-1].type
.t
= VT_INT
| u
;
1471 /* build a long long from two ints */
1472 static void lbuild(int t
)
1474 gv2(RC_INT
, RC_INT
);
1475 vtop
[-1].r2
= vtop
[0].r
;
1476 vtop
[-1].type
.t
= t
;
1481 /* convert stack entry to register and duplicate its value in another
1483 static void gv_dup(void)
1490 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1491 if (t
& VT_BITFIELD
) {
1501 /* stack: H L L1 H1 */
1511 /* duplicate value */
1516 #ifdef TCC_TARGET_X86_64
1517 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1527 load(r1
, &sv
); /* move r to r1 */
1529 /* duplicates value */
1535 /* Generate value test
1537 * Generate a test for any value (jump, comparison and integers) */
1538 ST_FUNC
int gvtst(int inv
, int t
)
1540 int v
= vtop
->r
& VT_VALMASK
;
1541 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1545 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1546 /* constant jmp optimization */
1547 if ((vtop
->c
.i
!= 0) != inv
)
1552 return gtst(inv
, t
);
1556 /* generate CPU independent (unsigned) long long operations */
1557 static void gen_opl(int op
)
1559 int t
, a
, b
, op1
, c
, i
;
1561 unsigned short reg_iret
= REG_IRET
;
1562 unsigned short reg_lret
= REG_LRET
;
1568 func
= TOK___divdi3
;
1571 func
= TOK___udivdi3
;
1574 func
= TOK___moddi3
;
1577 func
= TOK___umoddi3
;
1584 /* call generic long long function */
1585 vpush_global_sym(&func_old_type
, func
);
1590 vtop
->r2
= reg_lret
;
1598 //pv("gen_opl A",0,2);
1604 /* stack: L1 H1 L2 H2 */
1609 vtop
[-2] = vtop
[-3];
1612 /* stack: H1 H2 L1 L2 */
1613 //pv("gen_opl B",0,4);
1619 /* stack: H1 H2 L1 L2 ML MH */
1622 /* stack: ML MH H1 H2 L1 L2 */
1626 /* stack: ML MH H1 L2 H2 L1 */
1631 /* stack: ML MH M1 M2 */
1634 } else if (op
== '+' || op
== '-') {
1635 /* XXX: add non carry method too (for MIPS or alpha) */
1641 /* stack: H1 H2 (L1 op L2) */
1644 gen_op(op1
+ 1); /* TOK_xxxC2 */
1647 /* stack: H1 H2 (L1 op L2) */
1650 /* stack: (L1 op L2) H1 H2 */
1652 /* stack: (L1 op L2) (H1 op H2) */
1660 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1661 t
= vtop
[-1].type
.t
;
1665 /* stack: L H shift */
1667 /* constant: simpler */
1668 /* NOTE: all comments are for SHL. the other cases are
1669 done by swapping words */
1680 if (op
!= TOK_SAR
) {
1713 /* XXX: should provide a faster fallback on x86 ? */
1716 func
= TOK___ashrdi3
;
1719 func
= TOK___lshrdi3
;
1722 func
= TOK___ashldi3
;
1728 /* compare operations */
1734 /* stack: L1 H1 L2 H2 */
1736 vtop
[-1] = vtop
[-2];
1738 /* stack: L1 L2 H1 H2 */
1741 /* when values are equal, we need to compare low words. since
1742 the jump is inverted, we invert the test too. */
1745 else if (op1
== TOK_GT
)
1747 else if (op1
== TOK_ULT
)
1749 else if (op1
== TOK_UGT
)
1759 /* generate non equal test */
1765 /* compare low. Always unsigned */
1769 else if (op1
== TOK_LE
)
1771 else if (op1
== TOK_GT
)
1773 else if (op1
== TOK_GE
)
1784 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
1786 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
1787 return (a
^ b
) >> 63 ? -x
: x
;
1790 static int gen_opic_lt(uint64_t a
, uint64_t b
)
1792 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
1795 /* handle integer constant optimizations and various machine
1797 static void gen_opic(int op
)
1799 SValue
*v1
= vtop
- 1;
1801 int t1
= v1
->type
.t
& VT_BTYPE
;
1802 int t2
= v2
->type
.t
& VT_BTYPE
;
1803 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1804 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1805 uint64_t l1
= c1
? v1
->c
.i
: 0;
1806 uint64_t l2
= c2
? v2
->c
.i
: 0;
1807 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1809 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1810 l1
= ((uint32_t)l1
|
1811 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1812 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1813 l2
= ((uint32_t)l2
|
1814 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1818 case '+': l1
+= l2
; break;
1819 case '-': l1
-= l2
; break;
1820 case '&': l1
&= l2
; break;
1821 case '^': l1
^= l2
; break;
1822 case '|': l1
|= l2
; break;
1823 case '*': l1
*= l2
; break;
1830 /* if division by zero, generate explicit division */
1833 tcc_error("division by zero in constant");
1837 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1838 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1839 case TOK_UDIV
: l1
= l1
/ l2
; break;
1840 case TOK_UMOD
: l1
= l1
% l2
; break;
1843 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1844 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1846 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1849 case TOK_ULT
: l1
= l1
< l2
; break;
1850 case TOK_UGE
: l1
= l1
>= l2
; break;
1851 case TOK_EQ
: l1
= l1
== l2
; break;
1852 case TOK_NE
: l1
= l1
!= l2
; break;
1853 case TOK_ULE
: l1
= l1
<= l2
; break;
1854 case TOK_UGT
: l1
= l1
> l2
; break;
1855 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1856 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1857 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1858 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1860 case TOK_LAND
: l1
= l1
&& l2
; break;
1861 case TOK_LOR
: l1
= l1
|| l2
; break;
1865 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1866 l1
= ((uint32_t)l1
|
1867 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1871 /* if commutative ops, put c2 as constant */
1872 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1873 op
== '|' || op
== '*')) {
1875 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1876 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1878 if (!const_wanted
&&
1880 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1881 (l1
== -1 && op
== TOK_SAR
))) {
1882 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1884 } else if (!const_wanted
&&
1885 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1887 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
1888 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1889 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1894 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
1897 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
1898 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
1901 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
1902 /* filter out NOP operations like x*1, x-0, x&-1... */
1904 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
1905 /* try to use shifts instead of muls or divs */
1906 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
1915 else if (op
== TOK_PDIV
)
1921 } else if (c2
&& (op
== '+' || op
== '-') &&
1922 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
1923 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
1924 /* symbol + constant case */
1928 /* The backends can't always deal with addends to symbols
1929 larger than +-1<<31. Don't construct such. */
1936 /* call low level op generator */
1937 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
1938 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
1946 /* generate a floating point operation with constant propagation */
1947 static void gen_opif(int op
)
1951 #if defined _MSC_VER && defined _AMD64_
1952 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
1959 /* currently, we cannot do computations with forward symbols */
1960 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1961 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1963 if (v1
->type
.t
== VT_FLOAT
) {
1966 } else if (v1
->type
.t
== VT_DOUBLE
) {
1974 /* NOTE: we only do constant propagation if finite number (not
1975 NaN or infinity) (ANSI spec) */
1976 if (!ieee_finite(f1
) || !ieee_finite(f2
))
1980 case '+': f1
+= f2
; break;
1981 case '-': f1
-= f2
; break;
1982 case '*': f1
*= f2
; break;
1986 tcc_error("division by zero in constant");
1991 /* XXX: also handles tests ? */
1995 /* XXX: overflow test ? */
1996 if (v1
->type
.t
== VT_FLOAT
) {
1998 } else if (v1
->type
.t
== VT_DOUBLE
) {
2010 static int pointed_size(CType
*type
)
2013 return type_size(pointed_type(type
), &align
);
2016 static void vla_runtime_pointed_size(CType
*type
)
2019 vla_runtime_type_size(pointed_type(type
), &align
);
2022 static inline int is_null_pointer(SValue
*p
)
2024 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2026 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2027 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2028 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2029 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0));
2032 static inline int is_integer_btype(int bt
)
2034 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2035 bt
== VT_INT
|| bt
== VT_LLONG
);
2038 /* check types for comparison or subtraction of pointers */
2039 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2041 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2044 /* null pointers are accepted for all comparisons as gcc */
2045 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2049 bt1
= type1
->t
& VT_BTYPE
;
2050 bt2
= type2
->t
& VT_BTYPE
;
2051 /* accept comparison between pointer and integer with a warning */
2052 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2053 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2054 tcc_warning("comparison between pointer and integer");
2058 /* both must be pointers or implicit function pointers */
2059 if (bt1
== VT_PTR
) {
2060 type1
= pointed_type(type1
);
2061 } else if (bt1
!= VT_FUNC
)
2062 goto invalid_operands
;
2064 if (bt2
== VT_PTR
) {
2065 type2
= pointed_type(type2
);
2066 } else if (bt2
!= VT_FUNC
) {
2068 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2070 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2071 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2075 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2076 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2077 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2078 /* gcc-like error if '-' is used */
2080 goto invalid_operands
;
2082 tcc_warning("comparison of distinct pointer types lacks a cast");
2086 /* generic gen_op: handles types problems */
2087 ST_FUNC
void gen_op(int op
)
2089 int u
, t1
, t2
, bt1
, bt2
, t
;
2093 t1
= vtop
[-1].type
.t
;
2094 t2
= vtop
[0].type
.t
;
2095 bt1
= t1
& VT_BTYPE
;
2096 bt2
= t2
& VT_BTYPE
;
2098 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2099 tcc_error("operation on a struct");
2100 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2101 if (bt2
== VT_FUNC
) {
2102 mk_pointer(&vtop
->type
);
2105 if (bt1
== VT_FUNC
) {
2107 mk_pointer(&vtop
->type
);
2112 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2113 /* at least one operand is a pointer */
2114 /* relational op: must be both pointers */
2115 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2116 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2117 /* pointers are handled are unsigned */
2119 t
= VT_LLONG
| VT_UNSIGNED
;
2121 t
= VT_INT
| VT_UNSIGNED
;
2125 /* if both pointers, then it must be the '-' op */
2126 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2128 tcc_error("cannot use pointers here");
2129 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2130 /* XXX: check that types are compatible */
2131 if (vtop
[-1].type
.t
& VT_VLA
) {
2132 vla_runtime_pointed_size(&vtop
[-1].type
);
2134 vpushi(pointed_size(&vtop
[-1].type
));
2138 vtop
->type
.t
= ptrdiff_type
.t
;
2142 /* exactly one pointer : must be '+' or '-'. */
2143 if (op
!= '-' && op
!= '+')
2144 tcc_error("cannot use pointers here");
2145 /* Put pointer as first operand */
2146 if (bt2
== VT_PTR
) {
2148 t
= t1
, t1
= t2
, t2
= t
;
2151 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2152 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2155 type1
= vtop
[-1].type
;
2156 type1
.t
&= ~VT_ARRAY
;
2157 if (vtop
[-1].type
.t
& VT_VLA
)
2158 vla_runtime_pointed_size(&vtop
[-1].type
);
2160 u
= pointed_size(&vtop
[-1].type
);
2162 tcc_error("unknown array element size");
2166 /* XXX: cast to int ? (long long case) */
2172 /* #ifdef CONFIG_TCC_BCHECK
2173 The main reason to removing this code:
2180 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2181 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2183 When this code is on. then the output looks like
2185 v+(i-j) = 0xbff84000
2187 /* if evaluating constant expression, no code should be
2188 generated, so no bound check */
2189 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2190 /* if bounded pointers, we generate a special code to
2197 gen_bounded_ptr_add();
2203 /* put again type if gen_opic() swaped operands */
2206 } else if (is_float(bt1
) || is_float(bt2
)) {
2207 /* compute bigger type and do implicit casts */
2208 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2210 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2215 /* floats can only be used for a few operations */
2216 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2217 (op
< TOK_ULT
|| op
> TOK_GT
))
2218 tcc_error("invalid operands for binary operation");
2220 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2221 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2222 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2224 t
|= (VT_LONG
& t1
);
2226 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2227 /* cast to biggest op */
2228 t
= VT_LLONG
| VT_LONG
;
2229 if (bt1
== VT_LLONG
)
2231 if (bt2
== VT_LLONG
)
2233 /* convert to unsigned if it does not fit in a long long */
2234 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2235 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2239 /* integer operations */
2240 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2241 /* convert to unsigned if it does not fit in an integer */
2242 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2243 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2246 /* XXX: currently, some unsigned operations are explicit, so
2247 we modify them here */
2248 if (t
& VT_UNSIGNED
) {
2255 else if (op
== TOK_LT
)
2257 else if (op
== TOK_GT
)
2259 else if (op
== TOK_LE
)
2261 else if (op
== TOK_GE
)
2269 /* special case for shifts and long long: we keep the shift as
2271 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2278 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2279 /* relational op: the result is an int */
2280 vtop
->type
.t
= VT_INT
;
2285 // Make sure that we have converted to an rvalue:
2286 if (vtop
->r
& VT_LVAL
)
2287 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2290 #ifndef TCC_TARGET_ARM
2291 /* generic itof for unsigned long long case */
2292 static void gen_cvt_itof1(int t
)
2294 #ifdef TCC_TARGET_ARM64
2297 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2298 (VT_LLONG
| VT_UNSIGNED
)) {
2301 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2302 #if LDOUBLE_SIZE != 8
2303 else if (t
== VT_LDOUBLE
)
2304 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2307 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2311 vtop
->r
= reg_fret(t
);
2319 /* generic ftoi for unsigned long long case */
2320 static void gen_cvt_ftoi1(int t
)
2322 #ifdef TCC_TARGET_ARM64
2327 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2328 /* not handled natively */
2329 st
= vtop
->type
.t
& VT_BTYPE
;
2331 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2332 #if LDOUBLE_SIZE != 8
2333 else if (st
== VT_LDOUBLE
)
2334 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2337 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2342 vtop
->r2
= REG_LRET
;
2349 /* force char or short cast */
2350 static void force_charshort_cast(int t
)
2354 /* cannot cast static initializers */
2355 if (STATIC_DATA_WANTED
)
2359 /* XXX: add optimization if lvalue : just change type and offset */
2364 if (t
& VT_UNSIGNED
) {
2365 vpushi((1 << bits
) - 1);
2368 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2374 /* result must be signed or the SAR is converted to an SHL
2375 This was not the case when "t" was a signed short
2376 and the last value on the stack was an unsigned int */
2377 vtop
->type
.t
&= ~VT_UNSIGNED
;
2383 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2384 static void gen_cast_s(int t
)
2392 static void gen_cast(CType
*type
)
2394 int sbt
, dbt
, sf
, df
, c
, p
;
2396 /* special delayed cast for char/short */
2397 /* XXX: in some cases (multiple cascaded casts), it may still
2399 if (vtop
->r
& VT_MUSTCAST
) {
2400 vtop
->r
&= ~VT_MUSTCAST
;
2401 force_charshort_cast(vtop
->type
.t
);
2404 /* bitfields first get cast to ints */
2405 if (vtop
->type
.t
& VT_BITFIELD
) {
2409 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2410 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2415 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2416 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2417 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2418 c
&= dbt
!= VT_LDOUBLE
;
2421 /* constant case: we can do it now */
2422 /* XXX: in ISOC, cannot do it if error in convert */
2423 if (sbt
== VT_FLOAT
)
2424 vtop
->c
.ld
= vtop
->c
.f
;
2425 else if (sbt
== VT_DOUBLE
)
2426 vtop
->c
.ld
= vtop
->c
.d
;
2429 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2430 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2431 vtop
->c
.ld
= vtop
->c
.i
;
2433 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2435 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2436 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2438 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2441 if (dbt
== VT_FLOAT
)
2442 vtop
->c
.f
= (float)vtop
->c
.ld
;
2443 else if (dbt
== VT_DOUBLE
)
2444 vtop
->c
.d
= (double)vtop
->c
.ld
;
2445 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2446 vtop
->c
.i
= vtop
->c
.ld
;
2447 } else if (sf
&& dbt
== VT_BOOL
) {
2448 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2451 vtop
->c
.i
= vtop
->c
.ld
;
2452 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2454 else if (sbt
& VT_UNSIGNED
)
2455 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2457 else if (sbt
== VT_PTR
)
2460 else if (sbt
!= VT_LLONG
)
2461 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2462 -(vtop
->c
.i
& 0x80000000));
2464 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2466 else if (dbt
== VT_BOOL
)
2467 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2469 else if (dbt
== VT_PTR
)
2472 else if (dbt
!= VT_LLONG
) {
2473 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2474 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2477 if (!(dbt
& VT_UNSIGNED
))
2478 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2481 } else if (p
&& dbt
== VT_BOOL
) {
2485 /* non constant case: generate code */
2487 /* convert from fp to fp */
2490 /* convert int to fp */
2493 /* convert fp to int */
2494 if (dbt
== VT_BOOL
) {
2498 /* we handle char/short/etc... with generic code */
2499 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2500 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2504 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2505 /* additional cast for char/short... */
2511 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2512 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2513 /* scalar to long long */
2514 /* machine independent conversion */
2516 /* generate high word */
2517 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2521 if (sbt
== VT_PTR
) {
2522 /* cast from pointer to int before we apply
2523 shift operation, which pointers don't support*/
2530 /* patch second register */
2531 vtop
[-1].r2
= vtop
->r
;
2535 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2536 (dbt
& VT_BTYPE
) == VT_PTR
||
2537 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2538 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2539 (sbt
& VT_BTYPE
) != VT_PTR
&&
2540 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2541 /* need to convert from 32bit to 64bit */
2543 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2544 #if defined(TCC_TARGET_ARM64)
2546 #elif defined(TCC_TARGET_X86_64)
2548 /* x86_64 specific: movslq */
2550 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2557 } else if (dbt
== VT_BOOL
) {
2558 /* scalar to bool */
2561 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2562 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2563 if (sbt
== VT_PTR
) {
2564 vtop
->type
.t
= VT_INT
;
2565 tcc_warning("nonportable conversion from pointer to char/short");
2567 force_charshort_cast(dbt
);
2569 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2571 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2572 /* from long long: just take low order word */
2576 /* if lvalue and single word type, nothing to do because
2577 the lvalue already contains the real type size (see
2578 VT_LVAL_xxx constants) */
2582 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2583 /* if we are casting between pointer types,
2584 we must update the VT_LVAL_xxx size */
2585 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2586 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2591 /* return type size as known at compile time. Put alignment at 'a' */
2592 ST_FUNC
int type_size(CType
*type
, int *a
)
2597 bt
= type
->t
& VT_BTYPE
;
2598 if (bt
== VT_STRUCT
) {
2603 } else if (bt
== VT_PTR
) {
2604 if (type
->t
& VT_ARRAY
) {
2608 ts
= type_size(&s
->type
, a
);
2610 if (ts
< 0 && s
->c
< 0)
2618 } else if (IS_ENUM(type
->t
) && type
->ref
->c
== -1) {
2619 return -1; /* incomplete enum */
2620 } else if (bt
== VT_LDOUBLE
) {
2622 return LDOUBLE_SIZE
;
2623 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2624 #ifdef TCC_TARGET_I386
2625 #ifdef TCC_TARGET_PE
2630 #elif defined(TCC_TARGET_ARM)
2640 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2643 } else if (bt
== VT_SHORT
) {
2646 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2650 /* char, void, function, _Bool */
2656 /* push type size as known at runtime time on top of value stack. Put
2658 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2660 if (type
->t
& VT_VLA
) {
2661 type_size(&type
->ref
->type
, a
);
2662 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2664 vpushi(type_size(type
, a
));
2668 static void vla_sp_restore(void) {
2669 if (vlas_in_scope
) {
2670 gen_vla_sp_restore(vla_sp_loc
);
2674 static void vla_sp_restore_root(void) {
2675 if (vlas_in_scope
) {
2676 gen_vla_sp_restore(vla_sp_root_loc
);
2680 /* return the pointed type of t */
2681 static inline CType
*pointed_type(CType
*type
)
2683 return &type
->ref
->type
;
2686 /* modify type so that its it is a pointer to type. */
2687 ST_FUNC
void mk_pointer(CType
*type
)
2690 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2691 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2695 /* compare function types. OLD functions match any new functions */
2696 static int is_compatible_func(CType
*type1
, CType
*type2
)
2702 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2704 /* check func_call */
2705 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2707 /* XXX: not complete */
2708 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2710 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2712 while (s1
!= NULL
) {
2715 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2725 /* return true if type1 and type2 are the same. If unqualified is
2726 true, qualifiers on the types are ignored.
2728 - enums are not checked as gcc __builtin_types_compatible_p ()
2730 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2734 t1
= type1
->t
& VT_TYPE
;
2735 t2
= type2
->t
& VT_TYPE
;
2737 /* strip qualifiers before comparing */
2738 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2739 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2742 /* Default Vs explicit signedness only matters for char */
2743 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2747 /* XXX: bitfields ? */
2750 /* test more complicated cases */
2751 bt1
= t1
& VT_BTYPE
;
2752 if (bt1
== VT_PTR
) {
2753 type1
= pointed_type(type1
);
2754 type2
= pointed_type(type2
);
2755 return is_compatible_types(type1
, type2
);
2756 } else if (bt1
== VT_STRUCT
) {
2757 return (type1
->ref
== type2
->ref
);
2758 } else if (bt1
== VT_FUNC
) {
2759 return is_compatible_func(type1
, type2
);
2765 /* return true if type1 and type2 are exactly the same (including
2768 static int is_compatible_types(CType
*type1
, CType
*type2
)
2770 return compare_types(type1
,type2
,0);
2773 /* return true if type1 and type2 are the same (ignoring qualifiers).
2775 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2777 return compare_types(type1
,type2
,1);
2780 /* print a type. If 'varstr' is not NULL, then the variable is also
2781 printed in the type */
2783 /* XXX: add array and function pointers */
2784 static void type_to_str(char *buf
, int buf_size
,
2785 CType
*type
, const char *varstr
)
2797 pstrcat(buf
, buf_size
, "extern ");
2799 pstrcat(buf
, buf_size
, "static ");
2801 pstrcat(buf
, buf_size
, "typedef ");
2803 pstrcat(buf
, buf_size
, "inline ");
2804 if (t
& VT_VOLATILE
)
2805 pstrcat(buf
, buf_size
, "volatile ");
2806 if (t
& VT_CONSTANT
)
2807 pstrcat(buf
, buf_size
, "const ");
2809 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2810 || ((t
& VT_UNSIGNED
)
2811 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2814 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2816 buf_size
-= strlen(buf
);
2851 tstr
= "long double";
2853 pstrcat(buf
, buf_size
, tstr
);
2860 pstrcat(buf
, buf_size
, tstr
);
2861 v
= type
->ref
->v
& ~SYM_STRUCT
;
2862 if (v
>= SYM_FIRST_ANOM
)
2863 pstrcat(buf
, buf_size
, "<anonymous>");
2865 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2869 type_to_str(buf
, buf_size
, &s
->type
, varstr
);
2870 pstrcat(buf
, buf_size
, "(");
2872 while (sa
!= NULL
) {
2873 type_to_str(buf1
, sizeof(buf1
), &sa
->type
, NULL
);
2874 pstrcat(buf
, buf_size
, buf1
);
2877 pstrcat(buf
, buf_size
, ", ");
2879 pstrcat(buf
, buf_size
, ")");
2884 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2885 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2888 pstrcpy(buf1
, sizeof(buf1
), "*");
2889 if (t
& VT_CONSTANT
)
2890 pstrcat(buf1
, buf_size
, "const ");
2891 if (t
& VT_VOLATILE
)
2892 pstrcat(buf1
, buf_size
, "volatile ");
2894 pstrcat(buf1
, sizeof(buf1
), varstr
);
2895 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2899 pstrcat(buf
, buf_size
, " ");
2900 pstrcat(buf
, buf_size
, varstr
);
2905 /* verify type compatibility to store vtop in 'dt' type, and generate
2907 static void gen_assign_cast(CType
*dt
)
2909 CType
*st
, *type1
, *type2
;
2910 char buf1
[256], buf2
[256];
2913 st
= &vtop
->type
; /* source type */
2914 dbt
= dt
->t
& VT_BTYPE
;
2915 sbt
= st
->t
& VT_BTYPE
;
2916 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
2917 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
2919 It is Ok if both are void
2925 gcc accepts this program
2928 tcc_error("cannot cast from/to void");
2930 if (dt
->t
& VT_CONSTANT
)
2931 tcc_warning("assignment of read-only location");
2934 /* special cases for pointers */
2935 /* '0' can also be a pointer */
2936 if (is_null_pointer(vtop
))
2938 /* accept implicit pointer to integer cast with warning */
2939 if (is_integer_btype(sbt
)) {
2940 tcc_warning("assignment makes pointer from integer without a cast");
2943 type1
= pointed_type(dt
);
2944 /* a function is implicitly a function pointer */
2945 if (sbt
== VT_FUNC
) {
2946 if ((type1
->t
& VT_BTYPE
) != VT_VOID
&&
2947 !is_compatible_types(pointed_type(dt
), st
))
2948 tcc_warning("assignment from incompatible pointer type");
2953 type2
= pointed_type(st
);
2954 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2955 (type2
->t
& VT_BTYPE
) == VT_VOID
) {
2956 /* void * can match anything */
2958 //printf("types %08x %08x\n", type1->t, type2->t);
2959 /* exact type match, except for qualifiers */
2960 if (!is_compatible_unqualified_types(type1
, type2
)) {
2961 /* Like GCC don't warn by default for merely changes
2962 in pointer target signedness. Do warn for different
2963 base types, though, in particular for unsigned enums
2964 and signed int targets. */
2965 if ((type1
->t
& (VT_BTYPE
|VT_LONG
)) != (type2
->t
& (VT_BTYPE
|VT_LONG
))
2966 || IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)
2968 tcc_warning("assignment from incompatible pointer type");
2971 /* check const and volatile */
2972 if ((!(type1
->t
& VT_CONSTANT
) && (type2
->t
& VT_CONSTANT
)) ||
2973 (!(type1
->t
& VT_VOLATILE
) && (type2
->t
& VT_VOLATILE
)))
2974 tcc_warning("assignment discards qualifiers from pointer target type");
2980 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
2981 tcc_warning("assignment makes integer from pointer without a cast");
2982 } else if (sbt
== VT_STRUCT
) {
2983 goto case_VT_STRUCT
;
2985 /* XXX: more tests */
2989 if (!is_compatible_unqualified_types(dt
, st
)) {
2991 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2992 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2993 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3001 /* store vtop in lvalue pushed on stack */
3002 ST_FUNC
void vstore(void)
3004 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
3006 ft
= vtop
[-1].type
.t
;
3007 sbt
= vtop
->type
.t
& VT_BTYPE
;
3008 dbt
= ft
& VT_BTYPE
;
3009 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3010 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3011 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3012 /* optimize char/short casts */
3013 delayed_cast
= VT_MUSTCAST
;
3014 vtop
->type
.t
= ft
& VT_TYPE
;
3015 /* XXX: factorize */
3016 if (ft
& VT_CONSTANT
)
3017 tcc_warning("assignment of read-only location");
3020 if (!(ft
& VT_BITFIELD
))
3021 gen_assign_cast(&vtop
[-1].type
);
3024 if (sbt
== VT_STRUCT
) {
3025 /* if structure, only generate pointer */
3026 /* structure assignment : generate memcpy */
3027 /* XXX: optimize if small size */
3028 size
= type_size(&vtop
->type
, &align
);
3032 vtop
->type
.t
= VT_PTR
;
3035 /* address of memcpy() */
3038 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3039 else if(!(align
& 3))
3040 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3043 /* Use memmove, rather than memcpy, as dest and src may be same: */
3044 vpush_global_sym(&func_old_type
, TOK_memmove
);
3049 vtop
->type
.t
= VT_PTR
;
3055 /* leave source on stack */
3056 } else if (ft
& VT_BITFIELD
) {
3057 /* bitfield store handling */
3059 /* save lvalue as expression result (example: s.b = s.a = n;) */
3060 vdup(), vtop
[-1] = vtop
[-2];
3062 bit_pos
= BIT_POS(ft
);
3063 bit_size
= BIT_SIZE(ft
);
3064 /* remove bit field info to avoid loops */
3065 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3067 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3068 gen_cast(&vtop
[-1].type
);
3069 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3072 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3073 if (r
== VT_STRUCT
) {
3074 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3075 store_packed_bf(bit_pos
, bit_size
);
3077 unsigned long long mask
= (1ULL << bit_size
) - 1;
3078 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3080 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3083 vpushi((unsigned)mask
);
3090 /* duplicate destination */
3093 /* load destination, mask and or with source */
3094 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3095 vpushll(~(mask
<< bit_pos
));
3097 vpushi(~((unsigned)mask
<< bit_pos
));
3102 /* ... and discard */
3105 } else if (dbt
== VT_VOID
) {
3108 #ifdef CONFIG_TCC_BCHECK
3109 /* bound check case */
3110 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3119 #ifdef TCC_TARGET_X86_64
3120 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3122 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3127 r
= gv(rc
); /* generate value */
3128 /* if lvalue was saved on stack, must read it */
3129 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3131 t
= get_reg(RC_INT
);
3137 sv
.r
= VT_LOCAL
| VT_LVAL
;
3138 sv
.c
.i
= vtop
[-1].c
.i
;
3140 vtop
[-1].r
= t
| VT_LVAL
;
3142 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3144 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3145 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3147 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3148 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3150 vtop
[-1].type
.t
= load_type
;
3153 /* convert to int to increment easily */
3154 vtop
->type
.t
= addr_type
;
3160 vtop
[-1].type
.t
= load_type
;
3161 /* XXX: it works because r2 is spilled last ! */
3162 store(vtop
->r2
, vtop
- 1);
3168 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3169 vtop
->r
|= delayed_cast
;
3173 /* post defines POST/PRE add. c is the token ++ or -- */
3174 ST_FUNC
void inc(int post
, int c
)
3177 vdup(); /* save lvalue */
3179 gv_dup(); /* duplicate value */
3184 vpushi(c
- TOK_MID
);
3186 vstore(); /* store value */
3188 vpop(); /* if post op, return saved value */
/* Parse one or more adjacent string-literal tokens and concatenate them
   (plus a terminating NUL) into *astr. NOTE(review): this chunk is a damaged
   extraction — interior lines of this function (e.g. the initial cstr_new and
   the error path using 'msg') are missing from view; only the visible
   fragments are annotated below. */
3191 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3193 /* read the string */
3197 while (tok
== TOK_STR
) {
3198 /* XXX: add \0 handling too ? */
3199 cstr_cat(astr
, tokc
.str
.data
, -1);
3202 cstr_ccat(astr
, '\0');
3205 /* If I is >= 1 and a power of two, returns log2(i)+1.
3206 If I is 0 returns 0. */
/* If i is >= 1 and a power of two, returns log2(i)+1; if i is 0 returns 0.
   NOTE(review): only the first reduction step of the binary search
   (handling bits above 1<<8) is visible here — the remaining steps and the
   return are missing from this damaged extraction. */
3207 static int exact_log2p1(int i
)
3212 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3223 /* Parse __attribute__((...)) GNUC extension. */
3224 static void parse_attribute(AttributeDef
*ad
)
3230 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3235 while (tok
!= ')') {
3236 if (tok
< TOK_IDENT
)
3237 expect("attribute name");
3244 parse_mult_str(&astr
, "section name");
3245 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
3252 parse_mult_str(&astr
, "alias(\"target\")");
3253 ad
->alias_target
= /* save string as token, for later */
3254 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3258 case TOK_VISIBILITY1
:
3259 case TOK_VISIBILITY2
:
3261 parse_mult_str(&astr
,
3262 "visibility(\"default|hidden|internal|protected\")");
3263 if (!strcmp (astr
.data
, "default"))
3264 ad
->a
.visibility
= STV_DEFAULT
;
3265 else if (!strcmp (astr
.data
, "hidden"))
3266 ad
->a
.visibility
= STV_HIDDEN
;
3267 else if (!strcmp (astr
.data
, "internal"))
3268 ad
->a
.visibility
= STV_INTERNAL
;
3269 else if (!strcmp (astr
.data
, "protected"))
3270 ad
->a
.visibility
= STV_PROTECTED
;
3272 expect("visibility(\"default|hidden|internal|protected\")");
3281 if (n
<= 0 || (n
& (n
- 1)) != 0)
3282 tcc_error("alignment must be a positive power of two");
3287 ad
->a
.aligned
= exact_log2p1(n
);
3288 if (n
!= 1 << (ad
->a
.aligned
- 1))
3289 tcc_error("alignment of %d is larger than implemented", n
);
3301 /* currently, no need to handle it because tcc does not
3302 track unused objects */
3306 /* currently, no need to handle it because tcc does not
3307 track unused objects */
3312 ad
->f
.func_call
= FUNC_CDECL
;
3317 ad
->f
.func_call
= FUNC_STDCALL
;
3319 #ifdef TCC_TARGET_I386
3329 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3335 ad
->f
.func_call
= FUNC_FASTCALLW
;
3342 ad
->attr_mode
= VT_LLONG
+ 1;
3345 ad
->attr_mode
= VT_BYTE
+ 1;
3348 ad
->attr_mode
= VT_SHORT
+ 1;
3352 ad
->attr_mode
= VT_INT
+ 1;
3355 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3362 ad
->a
.dllexport
= 1;
3365 ad
->a
.dllimport
= 1;
3368 if (tcc_state
->warn_unsupported
)
3369 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3370 /* skip parameters */
3372 int parenthesis
= 0;
3376 else if (tok
== ')')
3379 } while (parenthesis
&& tok
!= -1);
/* Look up field token v in struct/union type, recursing into anonymous
   struct/union members (those whose symbol is SYM_FIELD with an anonymous
   name >= SYM_FIRST_ANOM). NOTE(review): the direct-match test and both
   return statements are missing from this damaged extraction; only the
   anonymous-member recursion is visible. */
3392 static Sym
* find_field (CType
*type
, int v
)
3396 while ((s
= s
->next
) != NULL
) {
3397 if ((s
->v
& SYM_FIELD
) &&
3398 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3399 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3400 Sym
*ret
= find_field (&s
->type
, v
);
/* Add 'offset' to the offset of every member of struct symbol list s,
   recursing into anonymous struct/union members so their fields also get
   the enclosing offset. NOTE(review): the non-anonymous branch (where the
   member's own offset is adjusted) is missing from this damaged
   extraction; only the recursive case is visible. */
3410 static void struct_add_offset (Sym
*s
, int offset
)
3412 while ((s
= s
->next
) != NULL
) {
3413 if ((s
->v
& SYM_FIELD
) &&
3414 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3415 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3416 struct_add_offset(s
->type
.ref
, offset
);
3422 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3424 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3425 int packed
, a
, bt
, prevbt
, prev_bit_size
;
3426 int pcc
= !tcc_state
->ms_bitfields
;
3427 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3434 prevbt
= VT_STRUCT
; /* make it never match */
3439 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3440 if (f
->type
.t
& VT_BITFIELD
)
3441 bit_size
= BIT_SIZE(f
->type
.t
);
3444 size
= type_size(&f
->type
, &align
);
3445 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3448 if (pcc
&& bit_size
== 0) {
3449 /* in pcc mode, packing does not affect zero-width bitfields */
3452 /* in pcc mode, attribute packed overrides if set. */
3453 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3456 /* pragma pack overrides align if lesser and packs bitfields always */
3459 if (pragma_pack
< align
)
3460 align
= pragma_pack
;
3461 /* in pcc mode pragma pack also overrides individual align */
3462 if (pcc
&& pragma_pack
< a
)
3466 /* some individual align was specified */
3470 if (type
->ref
->type
.t
== VT_UNION
) {
3471 if (pcc
&& bit_size
>= 0)
3472 size
= (bit_size
+ 7) >> 3;
3477 } else if (bit_size
< 0) {
3479 c
+= (bit_pos
+ 7) >> 3;
3480 c
= (c
+ align
- 1) & -align
;
3489 /* A bit-field. Layout is more complicated. There are two
3490 options: PCC (GCC) compatible and MS compatible */
3492 /* In PCC layout a bit-field is placed adjacent to the
3493 preceding bit-fields, except if:
3495 - an individual alignment was given
3496 - it would overflow its base type container and
3497 there is no packing */
3498 if (bit_size
== 0) {
3500 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3502 } else if (f
->a
.aligned
) {
3504 } else if (!packed
) {
3506 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3507 if (ofs
> size
/ align
)
3511 /* in pcc mode, long long bitfields have type int if they fit */
3512 if (size
== 8 && bit_size
<= 32)
3513 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
3515 while (bit_pos
>= align
* 8)
3516 c
+= align
, bit_pos
-= align
* 8;
3519 /* In PCC layout named bit-fields influence the alignment
3520 of the containing struct using the base types alignment,
3521 except for packed fields (which here have correct align). */
3522 if (f
->v
& SYM_FIRST_ANOM
3523 // && bit_size // ??? gcc on ARM/rpi does that
3528 bt
= f
->type
.t
& VT_BTYPE
;
3529 if ((bit_pos
+ bit_size
> size
* 8)
3530 || (bit_size
> 0) == (bt
!= prevbt
)
3532 c
= (c
+ align
- 1) & -align
;
3535 /* In MS bitfield mode a bit-field run always uses
3536 at least as many bits as the underlying type.
3537 To start a new run it's also required that this
3538 or the last bit-field had non-zero width. */
3539 if (bit_size
|| prev_bit_size
)
3542 /* In MS layout the records alignment is normally
3543 influenced by the field, except for a zero-width
3544 field at the start of a run (but by further zero-width
3545 fields it is again). */
3546 if (bit_size
== 0 && prevbt
!= bt
)
3549 prev_bit_size
= bit_size
;
3552 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3553 | (bit_pos
<< VT_STRUCT_SHIFT
);
3554 bit_pos
+= bit_size
;
3556 if (align
> maxalign
)
3560 printf("set field %s offset %-2d size %-2d align %-2d",
3561 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3562 if (f
->type
.t
& VT_BITFIELD
) {
3563 printf(" pos %-2d bits %-2d",
3571 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3573 /* An anonymous struct/union. Adjust member offsets
3574 to reflect the real offset of our containing struct.
3575 Also set the offset of this anon member inside
3576 the outer struct to be zero. Via this it
3577 works when accessing the field offset directly
3578 (from base object), as well as when recursing
3579 members in initializer handling. */
3580 int v2
= f
->type
.ref
->v
;
3581 if (!(v2
& SYM_FIELD
) &&
3582 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3584 /* This happens only with MS extensions. The
3585 anon member has a named struct type, so it
3586 potentially is shared with other references.
3587 We need to unshare members so we can modify
3590 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3591 &f
->type
.ref
->type
, 0,
3593 pps
= &f
->type
.ref
->next
;
3594 while ((ass
= ass
->next
) != NULL
) {
3595 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3596 pps
= &((*pps
)->next
);
3600 struct_add_offset(f
->type
.ref
, offset
);
3610 c
+= (bit_pos
+ 7) >> 3;
3612 /* store size and alignment */
3613 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3617 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3618 /* can happen if individual align for some member was given. In
3619 this case MSVC ignores maxalign when aligning the size */
3624 c
= (c
+ a
- 1) & -a
;
3628 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3631 /* check whether we can access bitfields by their type */
3632 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3636 if (0 == (f
->type
.t
& VT_BITFIELD
))
3640 bit_size
= BIT_SIZE(f
->type
.t
);
3643 bit_pos
= BIT_POS(f
->type
.t
);
3644 size
= type_size(&f
->type
, &align
);
3645 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3648 /* try to access the field using a different type */
3649 c0
= -1, s
= align
= 1;
3651 px
= f
->c
* 8 + bit_pos
;
3652 cx
= (px
>> 3) & -align
;
3653 px
= px
- (cx
<< 3);
3656 s
= (px
+ bit_size
+ 7) >> 3;
3666 s
= type_size(&t
, &align
);
3670 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3671 /* update offset and bit position */
3674 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3675 | (bit_pos
<< VT_STRUCT_SHIFT
);
3679 printf("FIX field %s offset %-2d size %-2d align %-2d "
3680 "pos %-2d bits %-2d\n",
3681 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3682 cx
, s
, align
, px
, bit_size
);
3685 /* fall back to load/store single-byte wise */
3686 f
->auxtype
= VT_STRUCT
;
3688 printf("FIX field %s : load byte-wise\n",
3689 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3695 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
3696 static void struct_decl(CType
*type
, int u
)
3698 int v
, c
, size
, align
, flexible
;
3699 int bit_size
, bsize
, bt
;
3701 AttributeDef ad
, ad1
;
3704 memset(&ad
, 0, sizeof ad
);
3706 parse_attribute(&ad
);
3710 /* struct already defined ? return it */
3712 expect("struct/union/enum name");
3714 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3717 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3719 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3724 /* Record the original enum/struct/union token. */
3725 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3727 /* we put an undefined size for struct/union */
3728 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3729 s
->r
= 0; /* default alignment is zero as gcc */
3731 type
->t
= s
->type
.t
;
3737 tcc_error("struct/union/enum already defined");
3738 /* cannot be empty */
3739 /* non empty enums are not allowed */
3742 long long ll
= 0, pl
= 0, nl
= 0;
3745 /* enum symbols have static storage */
3746 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3750 expect("identifier");
3752 if (ss
&& !local_stack
)
3753 tcc_error("redefinition of enumerator '%s'",
3754 get_tok_str(v
, NULL
));
3758 ll
= expr_const64();
3760 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3762 *ps
= ss
, ps
= &ss
->next
;
3771 /* NOTE: we accept a trailing comma */
3776 /* set integral type of the enum */
3779 if (pl
!= (unsigned)pl
)
3780 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3782 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
3783 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3784 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
3786 /* set type for enum members */
3787 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
3789 if (ll
== (int)ll
) /* default is int if it fits */
3791 if (t
.t
& VT_UNSIGNED
) {
3792 ss
->type
.t
|= VT_UNSIGNED
;
3793 if (ll
== (unsigned)ll
)
3796 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
3797 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3802 while (tok
!= '}') {
3803 if (!parse_btype(&btype
, &ad1
)) {
3809 tcc_error("flexible array member '%s' not at the end of struct",
3810 get_tok_str(v
, NULL
));
3816 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
3818 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3819 expect("identifier");
3821 int v
= btype
.ref
->v
;
3822 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3823 if (tcc_state
->ms_extensions
== 0)
3824 expect("identifier");
3828 if (type_size(&type1
, &align
) < 0) {
3829 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3832 tcc_error("field '%s' has incomplete type",
3833 get_tok_str(v
, NULL
));
3835 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3836 (type1
.t
& VT_STORAGE
))
3837 tcc_error("invalid type for '%s'",
3838 get_tok_str(v
, NULL
));
3842 bit_size
= expr_const();
3843 /* XXX: handle v = 0 case for messages */
3845 tcc_error("negative width in bit-field '%s'",
3846 get_tok_str(v
, NULL
));
3847 if (v
&& bit_size
== 0)
3848 tcc_error("zero width for bit-field '%s'",
3849 get_tok_str(v
, NULL
));
3850 parse_attribute(&ad1
);
3852 size
= type_size(&type1
, &align
);
3853 if (bit_size
>= 0) {
3854 bt
= type1
.t
& VT_BTYPE
;
3860 tcc_error("bitfields must have scalar type");
3862 if (bit_size
> bsize
) {
3863 tcc_error("width of '%s' exceeds its type",
3864 get_tok_str(v
, NULL
));
3865 } else if (bit_size
== bsize
3866 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
3867 /* no need for bit fields */
3869 } else if (bit_size
== 64) {
3870 tcc_error("field width 64 not implemented");
3872 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
3874 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
3877 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
3878 /* Remember we've seen a real field to check
3879 for placement of flexible array member. */
3882 /* If member is a struct or bit-field, enforce
3883 placing into the struct (as anonymous). */
3885 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
3890 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
3895 if (tok
== ';' || tok
== TOK_EOF
)
3902 parse_attribute(&ad
);
3903 struct_layout(type
, &ad
);
/* Merge attributes from symbol s into attribute set *ad: each of alignment,
   calling convention and function type is copied from the symbol only when
   *ad does not already specify it (existing ad settings win).
   NOTE(review): original lines 3916-3919 are absent from this extraction —
   upstream versions also propagate a.dllimport here; confirm against the
   full file before relying on this list being exhaustive. */
3908 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
3910 if (s
->a
.aligned
&& 0 == ad
->a
.aligned
)
3911 ad
->a
.aligned
= s
->a
.aligned
;
3912 if (s
->f
.func_call
&& 0 == ad
->f
.func_call
)
3913 ad
->f
.func_call
= s
->f
.func_call
;
3914 if (s
->f
.func_type
&& 0 == ad
->f
.func_type
)
3915 ad
->f
.func_type
= s
->f
.func_type
;
3920 /* Add type qualifiers to a type. If the type is an array then the qualifiers
3921 are added to the element type, copied because it could be a typedef. */
3922 static void parse_btype_qualify(CType
*type
, int qualifiers
)
3924 while (type
->t
& VT_ARRAY
) {
3925 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
3926 type
= &type
->ref
->type
;
3928 type
->t
|= qualifiers
;
3931 /* return 0 if no type declaration. otherwise, return the basic type
3934 static int parse_btype(CType
*type
, AttributeDef
*ad
)
3936 int t
, u
, bt
, st
, type_found
, typespec_found
, g
;
3940 memset(ad
, 0, sizeof(AttributeDef
));
3950 /* currently, we really ignore extension */
3960 if (u
== VT_SHORT
|| u
== VT_LONG
) {
3961 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
3962 tmbt
: tcc_error("too many basic types");
3965 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
3970 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
3983 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
3984 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
3985 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
3986 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
3993 #ifdef TCC_TARGET_ARM64
3995 /* GCC's __uint128_t appears in some Linux header files. Make it a
3996 synonym for long double to get the size and alignment right. */
4007 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4008 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4016 struct_decl(&type1
, VT_ENUM
);
4019 type
->ref
= type1
.ref
;
4022 struct_decl(&type1
, VT_STRUCT
);
4025 struct_decl(&type1
, VT_UNION
);
4028 /* type modifiers */
4033 parse_btype_qualify(type
, VT_CONSTANT
);
4041 parse_btype_qualify(type
, VT_VOLATILE
);
4048 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4049 tcc_error("signed and unsigned modifier");
4062 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4063 tcc_error("signed and unsigned modifier");
4064 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4080 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4081 tcc_error("multiple storage classes");
4092 /* GNUC attribute */
4093 case TOK_ATTRIBUTE1
:
4094 case TOK_ATTRIBUTE2
:
4095 parse_attribute(ad
);
4096 if (ad
->attr_mode
) {
4097 u
= ad
->attr_mode
-1;
4098 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4106 parse_expr_type(&type1
);
4107 /* remove all storage modifiers except typedef */
4108 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4110 sym_to_attr(ad
, type1
.ref
);
4116 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4118 t
&= ~(VT_BTYPE
|VT_LONG
);
4119 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4120 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4121 type
->ref
= s
->type
.ref
;
4123 parse_btype_qualify(type
, t
);
4125 /* get attributes from typedef */
4135 if (tcc_state
->char_is_unsigned
) {
4136 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4139 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4140 bt
= t
& (VT_BTYPE
|VT_LONG
);
4142 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4143 #ifdef TCC_TARGET_PE
4144 if (bt
== VT_LDOUBLE
)
4145 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4151 /* convert a function parameter type (array to pointer and function to
4152 function pointer) */
4153 static inline void convert_parameter_type(CType
*pt
)
4155 /* remove const and volatile qualifiers (XXX: const could be used
4156 to indicate a const function parameter */
4157 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4158 /* array must be transformed to pointer according to ANSI C */
4160 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4165 ST_FUNC
void parse_asm_str(CString
*astr
)
4168 parse_mult_str(astr
, "string constant");
4171 /* Parse an asm label and return the token */
4172 static int asm_label_instr(void)
4178 parse_asm_str(&astr
);
4181 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4183 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
4188 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4190 int n
, l
, t1
, arg_size
, align
;
4191 Sym
**plast
, *s
, *first
;
4196 /* function type, or recursive declarator (return if so) */
4198 if (td
&& !(td
& TYPE_ABSTRACT
))
4202 else if (parse_btype(&pt
, &ad1
))
4213 /* read param name and compute offset */
4214 if (l
!= FUNC_OLD
) {
4215 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4217 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4218 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4219 tcc_error("parameter declared as void");
4220 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4224 expect("identifier");
4225 pt
.t
= VT_VOID
; /* invalid type */
4228 convert_parameter_type(&pt
);
4229 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4235 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4240 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4241 tcc_error("invalid type");
4244 /* if no parameters, then old type prototype */
4247 /* NOTE: const is ignored in returned type as it has a special
4248 meaning in gcc / C++ */
4249 type
->t
&= ~VT_CONSTANT
;
4250 /* some ancient pre-K&R C allows a function to return an array
4251 and the array brackets to be put after the arguments, such
4252 that "int c()[]" means something like "int[] c()" */
4255 skip(']'); /* only handle simple "[]" */
4258 /* we push a anonymous symbol which will contain the function prototype */
4259 ad
->f
.func_args
= arg_size
;
4260 ad
->f
.func_type
= l
;
4261 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4267 } else if (tok
== '[') {
4268 int saved_nocode_wanted
= nocode_wanted
;
4269 /* array definition */
4271 if (tok
== TOK_RESTRICT1
)
4276 if (!local_stack
|| (storage
& VT_STATIC
))
4277 vpushi(expr_const());
4279 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4280 length must always be evaluated, even under nocode_wanted,
4281 so that its size slot is initialized (e.g. under sizeof
4286 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4289 tcc_error("invalid array size");
4291 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4292 tcc_error("size of variable length array should be an integer");
4297 /* parse next post type */
4298 post_type(type
, ad
, storage
, 0);
4299 if (type
->t
== VT_FUNC
)
4300 tcc_error("declaration of an array of functions");
4301 t1
|= type
->t
& VT_VLA
;
4304 loc
-= type_size(&int_type
, &align
);
4308 vla_runtime_type_size(type
, &align
);
4310 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4316 nocode_wanted
= saved_nocode_wanted
;
4318 /* we push an anonymous symbol which will contain the array
4320 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4321 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4327 /* Parse a type declarator (except basic type), and return the type
4328 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4329 expected. 'type' should contain the basic type. 'ad' is the
4330 attribute definition of the basic type. It can be modified by
4331 type_decl(). If this (possibly abstract) declarator is a pointer chain
4332 it returns the innermost pointed to type (equals *type, but is a different
4333 pointer), otherwise returns type itself, that's used for recursive calls. */
4334 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4337 int qualifiers
, storage
;
4339 /* recursive type, remove storage bits first, apply them later again */
4340 storage
= type
->t
& VT_STORAGE
;
4341 type
->t
&= ~VT_STORAGE
;
4344 while (tok
== '*') {
4352 qualifiers
|= VT_CONSTANT
;
4357 qualifiers
|= VT_VOLATILE
;
4363 /* XXX: clarify attribute handling */
4364 case TOK_ATTRIBUTE1
:
4365 case TOK_ATTRIBUTE2
:
4366 parse_attribute(ad
);
4370 type
->t
|= qualifiers
;
4372 /* innermost pointed to type is the one for the first derivation */
4373 ret
= pointed_type(type
);
4377 /* This is possibly a parameter type list for abstract declarators
4378 ('int ()'), use post_type for testing this. */
4379 if (!post_type(type
, ad
, 0, td
)) {
4380 /* It's not, so it's a nested declarator, and the post operations
4381 apply to the innermost pointed to type (if any). */
4382 /* XXX: this is not correct to modify 'ad' at this point, but
4383 the syntax is not clear */
4384 parse_attribute(ad
);
4385 post
= type_decl(type
, ad
, v
, td
);
4388 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4389 /* type identifier */
4393 if (!(td
& TYPE_ABSTRACT
))
4394 expect("identifier");
4397 post_type(post
, ad
, storage
, 0);
4398 parse_attribute(ad
);
4403 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
4404 ST_FUNC
int lvalue_type(int t
)
4409 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4411 else if (bt
== VT_SHORT
)
4415 if (t
& VT_UNSIGNED
)
4416 r
|= VT_LVAL_UNSIGNED
;
4420 /* indirection with full error checking and bound check */
4421 ST_FUNC
void indir(void)
4423 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4424 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
4428 if (vtop
->r
& VT_LVAL
)
4430 vtop
->type
= *pointed_type(&vtop
->type
);
4431 /* Arrays and functions are never lvalues */
4432 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4433 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4434 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4435 /* if bound checking, the referenced pointer must be checked */
4436 #ifdef CONFIG_TCC_BCHECK
4437 if (tcc_state
->do_bounds_check
)
4438 vtop
->r
|= VT_MUSTBOUND
;
4443 /* pass a parameter to a function and do type checking and casting */
4444 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4449 func_type
= func
->f
.func_type
;
4450 if (func_type
== FUNC_OLD
||
4451 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4452 /* default casting : only need to convert float to double */
4453 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4454 gen_cast_s(VT_DOUBLE
);
4455 } else if (vtop
->type
.t
& VT_BITFIELD
) {
4456 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4457 type
.ref
= vtop
->type
.ref
;
4460 } else if (arg
== NULL
) {
4461 tcc_error("too many arguments to function");
4464 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4465 gen_assign_cast(&type
);
4469 /* parse an expression and return its type without any side effect. */
4470 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4479 /* parse an expression of the form '(type)' or '(expr)' and return its
4481 static void parse_expr_type(CType
*type
)
4487 if (parse_btype(type
, &ad
)) {
4488 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4490 expr_type(type
, gexpr
);
4495 static void parse_type(CType
*type
)
4500 if (!parse_btype(type
, &ad
)) {
4503 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4506 static void parse_builtin_params(int nc
, const char *args
)
4513 while ((c
= *args
++)) {
4517 case 'e': expr_eq(); continue;
4518 case 't': parse_type(&t
); vpush(&t
); continue;
4519 default: tcc_error("internal error"); break;
4527 ST_FUNC
void unary(void)
4529 int n
, t
, align
, size
, r
, sizeof_caller
;
4534 sizeof_caller
= in_sizeof
;
4537 /* XXX: GCC 2.95.3 does not generate a table although it should be
4545 #ifdef TCC_TARGET_PE
4546 t
= VT_SHORT
|VT_UNSIGNED
;
4554 vsetc(&type
, VT_CONST
, &tokc
);
4558 t
= VT_INT
| VT_UNSIGNED
;
4564 t
= VT_LLONG
| VT_UNSIGNED
;
4576 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4579 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4581 case TOK___FUNCTION__
:
4583 goto tok_identifier
;
4589 /* special function name identifier */
4590 len
= strlen(funcname
) + 1;
4591 /* generate char[len] type */
4596 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4597 if (!NODATA_WANTED
) {
4598 ptr
= section_ptr_add(data_section
, len
);
4599 memcpy(ptr
, funcname
, len
);
4605 #ifdef TCC_TARGET_PE
4606 t
= VT_SHORT
| VT_UNSIGNED
;
4612 /* string parsing */
4614 if (tcc_state
->char_is_unsigned
)
4615 t
= VT_BYTE
| VT_UNSIGNED
;
4617 if (tcc_state
->warn_write_strings
)
4622 memset(&ad
, 0, sizeof(AttributeDef
));
4623 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4628 if (parse_btype(&type
, &ad
)) {
4629 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4631 /* check ISOC99 compound literal */
4633 /* data is allocated locally by default */
4638 /* all except arrays are lvalues */
4639 if (!(type
.t
& VT_ARRAY
))
4640 r
|= lvalue_type(type
.t
);
4641 memset(&ad
, 0, sizeof(AttributeDef
));
4642 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4644 if (sizeof_caller
) {
4651 } else if (tok
== '{') {
4652 int saved_nocode_wanted
= nocode_wanted
;
4654 tcc_error("expected constant");
4655 /* save all registers */
4657 /* statement expression : we do not accept break/continue
4658 inside as GCC does. We do retain the nocode_wanted state,
4659 as statement expressions can't ever be entered from the
4660 outside, so any reactivation of code emission (from labels
4661 or loop heads) can be disabled again after the end of it. */
4662 block(NULL
, NULL
, 1);
4663 nocode_wanted
= saved_nocode_wanted
;
4678 /* functions names must be treated as function pointers,
4679 except for unary '&' and sizeof. Since we consider that
4680 functions are not lvalues, we only have to handle it
4681 there and in function calls. */
4682 /* arrays can also be used although they are not lvalues */
4683 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4684 !(vtop
->type
.t
& VT_ARRAY
))
4686 mk_pointer(&vtop
->type
);
4692 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4693 gen_cast_s(VT_BOOL
);
4694 vtop
->c
.i
= !vtop
->c
.i
;
4695 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4699 vseti(VT_JMP
, gvtst(1, 0));
4711 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4712 tcc_error("pointer not accepted for unary plus");
4713 /* In order to force cast, we add zero, except for floating point
4714 where we really need an noop (otherwise -0.0 will be transformed
4716 if (!is_float(vtop
->type
.t
)) {
4727 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
4728 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
4729 size
= type_size(&type
, &align
);
4730 if (s
&& s
->a
.aligned
)
4731 align
= 1 << (s
->a
.aligned
- 1);
4732 if (t
== TOK_SIZEOF
) {
4733 if (!(type
.t
& VT_VLA
)) {
4735 tcc_error("sizeof applied to an incomplete type");
4738 vla_runtime_type_size(&type
, &align
);
4743 vtop
->type
.t
|= VT_UNSIGNED
;
4746 case TOK_builtin_expect
:
4747 /* __builtin_expect is a no-op for now */
4748 parse_builtin_params(0, "ee");
4751 case TOK_builtin_types_compatible_p
:
4752 parse_builtin_params(0, "tt");
4753 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4754 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4755 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4759 case TOK_builtin_choose_expr
:
4786 case TOK_builtin_constant_p
:
4787 parse_builtin_params(1, "e");
4788 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4792 case TOK_builtin_frame_address
:
4793 case TOK_builtin_return_address
:
4799 if (tok
!= TOK_CINT
) {
4800 tcc_error("%s only takes positive integers",
4801 tok1
== TOK_builtin_return_address
?
4802 "__builtin_return_address" :
4803 "__builtin_frame_address");
4805 level
= (uint32_t)tokc
.i
;
4810 vset(&type
, VT_LOCAL
, 0); /* local frame */
4812 mk_pointer(&vtop
->type
);
4813 indir(); /* -> parent frame */
4815 if (tok1
== TOK_builtin_return_address
) {
4816 // assume return address is just above frame pointer on stack
4819 mk_pointer(&vtop
->type
);
4824 #ifdef TCC_TARGET_X86_64
4825 #ifdef TCC_TARGET_PE
4826 case TOK_builtin_va_start
:
4827 parse_builtin_params(0, "ee");
4828 r
= vtop
->r
& VT_VALMASK
;
4832 tcc_error("__builtin_va_start expects a local variable");
4834 vtop
->type
= char_pointer_type
;
4839 case TOK_builtin_va_arg_types
:
4840 parse_builtin_params(0, "t");
4841 vpushi(classify_x86_64_va_arg(&vtop
->type
));
4848 #ifdef TCC_TARGET_ARM64
4849 case TOK___va_start
: {
4850 parse_builtin_params(0, "ee");
4854 vtop
->type
.t
= VT_VOID
;
4857 case TOK___va_arg
: {
4858 parse_builtin_params(0, "et");
4866 case TOK___arm64_clear_cache
: {
4867 parse_builtin_params(0, "ee");
4870 vtop
->type
.t
= VT_VOID
;
4874 /* pre operations */
4885 t
= vtop
->type
.t
& VT_BTYPE
;
4887 /* In IEEE negate(x) isn't subtract(0,x), but rather
4891 vtop
->c
.f
= -1.0 * 0.0;
4892 else if (t
== VT_DOUBLE
)
4893 vtop
->c
.d
= -1.0 * 0.0;
4895 vtop
->c
.ld
= -1.0 * 0.0;
4903 goto tok_identifier
;
4905 /* allow to take the address of a label */
4906 if (tok
< TOK_UIDENT
)
4907 expect("label identifier");
4908 s
= label_find(tok
);
4910 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
4912 if (s
->r
== LABEL_DECLARED
)
4913 s
->r
= LABEL_FORWARD
;
4916 s
->type
.t
= VT_VOID
;
4917 mk_pointer(&s
->type
);
4918 s
->type
.t
|= VT_STATIC
;
4920 vpushsym(&s
->type
, s
);
4926 CType controlling_type
;
4927 int has_default
= 0;
4930 TokenString
*str
= NULL
;
4934 expr_type(&controlling_type
, expr_eq
);
4935 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
4939 if (tok
== TOK_DEFAULT
) {
4941 tcc_error("too many 'default'");
4947 AttributeDef ad_tmp
;
4950 parse_btype(&cur_type
, &ad_tmp
);
4951 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
4952 if (compare_types(&controlling_type
, &cur_type
, 0)) {
4954 tcc_error("type match twice");
4964 skip_or_save_block(&str
);
4966 skip_or_save_block(NULL
);
4973 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
4974 tcc_error("type '%s' does not match any association", buf
);
4976 begin_macro(str
, 1);
4985 // special qnan , snan and infinity values
4987 vpush64(VT_DOUBLE
, 0x7ff8000000000000ULL
);
4991 vpush64(VT_DOUBLE
, 0x7ff0000000000001ULL
);
4995 vpush64(VT_DOUBLE
, 0x7ff0000000000000ULL
);
5004 expect("identifier");
5007 const char *name
= get_tok_str(t
, NULL
);
5009 tcc_error("'%s' undeclared", name
);
5010 /* for simple function calls, we tolerate undeclared
5011 external reference to int() function */
5012 if (tcc_state
->warn_implicit_function_declaration
5013 #ifdef TCC_TARGET_PE
5014 /* people must be warned about using undeclared WINAPI functions
5015 (which usually start with uppercase letter) */
5016 || (name
[0] >= 'A' && name
[0] <= 'Z')
5019 tcc_warning("implicit declaration of function '%s'", name
);
5020 s
= external_global_sym(t
, &func_old_type
, 0);
5024 /* A symbol that has a register is a local register variable,
5025 which starts out as VT_LOCAL value. */
5026 if ((r
& VT_VALMASK
) < VT_CONST
)
5027 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5029 vset(&s
->type
, r
, s
->c
);
5030 /* Point to s as backpointer (even without r&VT_SYM).
5031 Will be used by at least the x86 inline asm parser for
5037 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5038 vtop
->c
.i
= s
->enum_val
;
5043 /* post operations */
5045 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5048 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5051 if (tok
== TOK_ARROW
)
5053 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5056 /* expect pointer on structure */
5057 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5058 expect("struct or union");
5059 if (tok
== TOK_CDOUBLE
)
5060 expect("field name");
5062 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5063 expect("field name");
5064 s
= find_field(&vtop
->type
, tok
);
5066 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5067 /* add field offset to pointer */
5068 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5071 /* change type to field type, and set to lvalue */
5072 vtop
->type
= s
->type
;
5073 vtop
->type
.t
|= qualifiers
;
5074 /* an array is never an lvalue */
5075 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5076 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5077 #ifdef CONFIG_TCC_BCHECK
5078 /* if bound checking, the referenced pointer must be checked */
5079 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5080 vtop
->r
|= VT_MUSTBOUND
;
5084 } else if (tok
== '[') {
5090 } else if (tok
== '(') {
5093 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5096 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5097 /* pointer test (no array accepted) */
5098 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5099 vtop
->type
= *pointed_type(&vtop
->type
);
5100 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5104 expect("function pointer");
5107 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5109 /* get return type */
5112 sa
= s
->next
; /* first parameter */
5113 nb_args
= regsize
= 0;
5115 /* compute first implicit argument if a structure is returned */
5116 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5117 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5118 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5119 &ret_align
, ®size
);
5121 /* get some space for the returned structure */
5122 size
= type_size(&s
->type
, &align
);
5123 #ifdef TCC_TARGET_ARM64
5124 /* On arm64, a small struct is return in registers.
5125 It is much easier to write it to memory if we know
5126 that we are allowed to write some extra bytes, so
5127 round the allocated space up to a power of 2: */
5129 while (size
& (size
- 1))
5130 size
= (size
| (size
- 1)) + 1;
5132 loc
= (loc
- size
) & -align
;
5134 ret
.r
= VT_LOCAL
| VT_LVAL
;
5135 /* pass it as 'int' to avoid structure arg passing
5137 vseti(VT_LOCAL
, loc
);
5147 /* return in register */
5148 if (is_float(ret
.type
.t
)) {
5149 ret
.r
= reg_fret(ret
.type
.t
);
5150 #ifdef TCC_TARGET_X86_64
5151 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5155 #ifndef TCC_TARGET_ARM64
5156 #ifdef TCC_TARGET_X86_64
5157 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5159 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5170 gfunc_param_typed(s
, sa
);
5180 tcc_error("too few arguments to function");
5182 gfunc_call(nb_args
);
5185 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5186 vsetc(&ret
.type
, r
, &ret
.c
);
5187 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5190 /* handle packed struct return */
5191 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5194 size
= type_size(&s
->type
, &align
);
5195 /* We're writing whole regs often, make sure there's enough
5196 space. Assume register size is power of 2. */
5197 if (regsize
> align
)
5199 loc
= (loc
- size
) & -align
;
5203 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5207 if (--ret_nregs
== 0)
5211 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5219 ST_FUNC
void expr_prod(void)
5224 while (tok
== '*' || tok
== '/' || tok
== '%') {
5232 ST_FUNC
void expr_sum(void)
5237 while (tok
== '+' || tok
== '-') {
5245 static void expr_shift(void)
5250 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5258 static void expr_cmp(void)
5263 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5264 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5272 static void expr_cmpeq(void)
5277 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5285 static void expr_and(void)
5288 while (tok
== '&') {
5295 static void expr_xor(void)
5298 while (tok
== '^') {
5305 static void expr_or(void)
5308 while (tok
== '|') {
5315 static void expr_land(void)
5318 if (tok
== TOK_LAND
) {
5321 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5322 gen_cast_s(VT_BOOL
);
5327 while (tok
== TOK_LAND
) {
5343 if (tok
!= TOK_LAND
) {
5356 static void expr_lor(void)
5359 if (tok
== TOK_LOR
) {
5362 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5363 gen_cast_s(VT_BOOL
);
5368 while (tok
== TOK_LOR
) {
5384 if (tok
!= TOK_LOR
) {
5397 /* Assuming vtop is a value used in a conditional context
5398 (i.e. compared with zero) return 0 if it's false, 1 if
5399 true and -1 if it can't be statically determined. */
5400 static int condition_3way(void)
5403 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5404 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5406 gen_cast_s(VT_BOOL
);
5413 static void expr_cond(void)
5415 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5417 CType type
, type1
, type2
;
5422 c
= condition_3way();
5423 g
= (tok
== ':' && gnu_ext
);
5425 /* needed to avoid having different registers saved in
5427 if (is_float(vtop
->type
.t
)) {
5429 #ifdef TCC_TARGET_X86_64
5430 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5455 sv
= *vtop
; /* save value to handle it later */
5456 vtop
--; /* no vpop so that FP stack is not flushed */
5474 bt1
= t1
& VT_BTYPE
;
5476 bt2
= t2
& VT_BTYPE
;
5479 /* cast operands to correct type according to ISOC rules */
5480 if (is_float(bt1
) || is_float(bt2
)) {
5481 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5482 type
.t
= VT_LDOUBLE
;
5484 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5489 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5490 /* cast to biggest op */
5491 type
.t
= VT_LLONG
| VT_LONG
;
5492 if (bt1
== VT_LLONG
)
5494 if (bt2
== VT_LLONG
)
5496 /* convert to unsigned if it does not fit in a long long */
5497 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5498 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5499 type
.t
|= VT_UNSIGNED
;
5500 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5501 /* If one is a null ptr constant the result type
5503 if (is_null_pointer (vtop
))
5505 else if (is_null_pointer (&sv
))
5507 /* XXX: test pointer compatibility, C99 has more elaborate
5511 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
5512 /* XXX: test function pointer compatibility */
5513 type
= bt1
== VT_FUNC
? type1
: type2
;
5514 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5515 /* XXX: test structure compatibility */
5516 type
= bt1
== VT_STRUCT
? type1
: type2
;
5517 } else if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5518 /* NOTE: as an extension, we accept void on only one side */
5521 /* integer operations */
5522 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5523 /* convert to unsigned if it does not fit in an integer */
5524 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5525 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5526 type
.t
|= VT_UNSIGNED
;
5528 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5529 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5530 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5533 /* now we convert second operand */
5537 mk_pointer(&vtop
->type
);
5539 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5544 if (is_float(type
.t
)) {
5546 #ifdef TCC_TARGET_X86_64
5547 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5551 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5552 /* for long longs, we use fixed registers to avoid having
5553 to handle a complicated move */
5564 /* this is horrible, but we must also convert first
5570 mk_pointer(&vtop
->type
);
5572 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5578 move_reg(r2
, r1
, type
.t
);
5588 static void expr_eq(void)
5594 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5595 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5596 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5611 ST_FUNC
void gexpr(void)
5622 /* parse a constant expression and return value in vtop. */
5623 static void expr_const1(void)
5632 /* parse an integer constant and return its value. */
5633 static inline int64_t expr_const64(void)
5637 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5638 expect("constant expression");
5644 /* parse an integer constant and return its value.
5645 Complain if it doesn't fit 32bit (signed or unsigned). */
5646 ST_FUNC
int expr_const(void)
5649 int64_t wc
= expr_const64();
5651 if (c
!= wc
&& (unsigned)c
!= wc
)
5652 tcc_error("constant exceeds 32 bit");
5656 /* return the label token if current token is a label, otherwise
5658 static int is_label(void)
5662 /* fast test first */
5663 if (tok
< TOK_UIDENT
)
5665 /* no need to save tokc because tok is an identifier */
5671 unget_tok(last_tok
);
5676 #ifndef TCC_TARGET_ARM64
5677 static void gfunc_return(CType
*func_type
)
5679 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5680 CType type
, ret_type
;
5681 int ret_align
, ret_nregs
, regsize
;
5682 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5683 &ret_align
, ®size
);
5684 if (0 == ret_nregs
) {
5685 /* if returning structure, must copy it to implicit
5686 first pointer arg location */
5689 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5692 /* copy structure value to pointer */
5695 /* returning structure packed into registers */
5696 int r
, size
, addr
, align
;
5697 size
= type_size(func_type
,&align
);
5698 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5699 (vtop
->c
.i
& (ret_align
-1)))
5700 && (align
& (ret_align
-1))) {
5701 loc
= (loc
- size
) & -ret_align
;
5704 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5708 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5710 vtop
->type
= ret_type
;
5711 if (is_float(ret_type
.t
))
5712 r
= rc_fret(ret_type
.t
);
5723 if (--ret_nregs
== 0)
5725 /* We assume that when a structure is returned in multiple
5726 registers, their classes are consecutive values of the
5729 vtop
->c
.i
+= regsize
;
5733 } else if (is_float(func_type
->t
)) {
5734 gv(rc_fret(func_type
->t
));
5738 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5742 static int case_cmp(const void *pa
, const void *pb
)
5744 int64_t a
= (*(struct case_t
**) pa
)->v1
;
5745 int64_t b
= (*(struct case_t
**) pb
)->v1
;
5746 return a
< b
? -1 : a
> b
;
5749 static void gcase(struct case_t
**base
, int len
, int *bsym
)
5753 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
5771 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
5773 gcase(base
, len
/2, bsym
);
5774 if (cur_switch
->def_sym
)
5775 gjmp_addr(cur_switch
->def_sym
);
5777 *bsym
= gjmp(*bsym
);
5781 base
+= e
; len
-= e
;
5791 if (p
->v1
== p
->v2
) {
5793 gtst_addr(0, p
->sym
);
5803 gtst_addr(0, p
->sym
);
5809 static void block(int *bsym
, int *csym
, int is_expr
)
5811 int a
, b
, c
, d
, cond
;
5814 /* generate line number info */
5815 if (tcc_state
->do_debug
)
5816 tcc_debug_line(tcc_state
);
5819 /* default return value is (void) */
5821 vtop
->type
.t
= VT_VOID
;
5824 if (tok
== TOK_IF
) {
5826 int saved_nocode_wanted
= nocode_wanted
;
5831 cond
= condition_3way();
5837 nocode_wanted
|= 0x20000000;
5838 block(bsym
, csym
, 0);
5840 nocode_wanted
= saved_nocode_wanted
;
5842 if (c
== TOK_ELSE
) {
5847 nocode_wanted
|= 0x20000000;
5848 block(bsym
, csym
, 0);
5849 gsym(d
); /* patch else jmp */
5851 nocode_wanted
= saved_nocode_wanted
;
5854 } else if (tok
== TOK_WHILE
) {
5855 int saved_nocode_wanted
;
5856 nocode_wanted
&= ~0x20000000;
5866 saved_nocode_wanted
= nocode_wanted
;
5868 nocode_wanted
= saved_nocode_wanted
;
5873 } else if (tok
== '{') {
5875 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
5878 /* record local declaration stack position */
5880 llabel
= local_label_stack
;
5883 /* handle local labels declarations */
5884 if (tok
== TOK_LABEL
) {
5887 if (tok
< TOK_UIDENT
)
5888 expect("label identifier");
5889 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
5899 while (tok
!= '}') {
5900 if ((a
= is_label()))
5907 block(bsym
, csym
, is_expr
);
5910 /* pop locally defined labels */
5911 label_pop(&local_label_stack
, llabel
, is_expr
);
5912 /* pop locally defined symbols */
5914 /* In the is_expr case (a statement expression is finished here),
5915 vtop might refer to symbols on the local_stack. Either via the
5916 type or via vtop->sym. We can't pop those nor any that in turn
5917 might be referred to. To make it easier we don't roll back
5918 any symbols in that case; some upper level call to block() will
5919 do that. We do have to remove such symbols from the lookup
5920 tables, though. sym_pop will do that. */
5921 sym_pop(&local_stack
, s
, is_expr
);
5923 /* Pop VLA frames and restore stack pointer if required */
5924 if (vlas_in_scope
> saved_vlas_in_scope
) {
5925 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
5928 vlas_in_scope
= saved_vlas_in_scope
;
5931 } else if (tok
== TOK_RETURN
) {
5935 gen_assign_cast(&func_vt
);
5936 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
5939 gfunc_return(&func_vt
);
5942 /* jump unless last stmt in top-level block */
5943 if (tok
!= '}' || local_scope
!= 1)
5945 nocode_wanted
|= 0x20000000;
5946 } else if (tok
== TOK_BREAK
) {
5949 tcc_error("cannot break");
5950 *bsym
= gjmp(*bsym
);
5953 nocode_wanted
|= 0x20000000;
5954 } else if (tok
== TOK_CONTINUE
) {
5957 tcc_error("cannot continue");
5958 vla_sp_restore_root();
5959 *csym
= gjmp(*csym
);
5962 } else if (tok
== TOK_FOR
) {
5964 int saved_nocode_wanted
;
5965 nocode_wanted
&= ~0x20000000;
5971 /* c99 for-loop init decl? */
5972 if (!decl0(VT_LOCAL
, 1, NULL
)) {
5973 /* no, regular for-loop init expr */
5999 saved_nocode_wanted
= nocode_wanted
;
6001 nocode_wanted
= saved_nocode_wanted
;
6006 sym_pop(&local_stack
, s
, 0);
6009 if (tok
== TOK_DO
) {
6010 int saved_nocode_wanted
;
6011 nocode_wanted
&= ~0x20000000;
6017 saved_nocode_wanted
= nocode_wanted
;
6025 nocode_wanted
= saved_nocode_wanted
;
6030 if (tok
== TOK_SWITCH
) {
6031 struct switch_t
*saved
, sw
;
6032 int saved_nocode_wanted
= nocode_wanted
;
6038 switchval
= *vtop
--;
6040 b
= gjmp(0); /* jump to first case */
6041 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6045 nocode_wanted
= saved_nocode_wanted
;
6046 a
= gjmp(a
); /* add implicit break */
6049 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6050 for (b
= 1; b
< sw
.n
; b
++)
6051 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6052 tcc_error("duplicate case value");
6053 /* Our switch table sorting is signed, so the compared
6054 value needs to be as well when it's 64bit. */
6055 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6056 switchval
.type
.t
&= ~VT_UNSIGNED
;
6058 gcase(sw
.p
, sw
.n
, &a
);
6061 gjmp_addr(sw
.def_sym
);
6062 dynarray_reset(&sw
.p
, &sw
.n
);
6067 if (tok
== TOK_CASE
) {
6068 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6071 nocode_wanted
&= ~0x20000000;
6073 cr
->v1
= cr
->v2
= expr_const64();
6074 if (gnu_ext
&& tok
== TOK_DOTS
) {
6076 cr
->v2
= expr_const64();
6077 if (cr
->v2
< cr
->v1
)
6078 tcc_warning("empty case range");
6081 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6084 goto block_after_label
;
6086 if (tok
== TOK_DEFAULT
) {
6091 if (cur_switch
->def_sym
)
6092 tcc_error("too many 'default'");
6093 cur_switch
->def_sym
= ind
;
6095 goto block_after_label
;
6097 if (tok
== TOK_GOTO
) {
6099 if (tok
== '*' && gnu_ext
) {
6103 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6106 } else if (tok
>= TOK_UIDENT
) {
6107 s
= label_find(tok
);
6108 /* put forward definition if needed */
6110 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6112 if (s
->r
== LABEL_DECLARED
)
6113 s
->r
= LABEL_FORWARD
;
6115 vla_sp_restore_root();
6116 if (s
->r
& LABEL_FORWARD
)
6117 s
->jnext
= gjmp(s
->jnext
);
6119 gjmp_addr(s
->jnext
);
6122 expect("label identifier");
6125 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
6134 if (s
->r
== LABEL_DEFINED
)
6135 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6137 s
->r
= LABEL_DEFINED
;
6139 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
6143 /* we accept this, but it is a mistake */
6145 nocode_wanted
&= ~0x20000000;
6147 tcc_warning("deprecated use of label at end of compound statement");
6151 block(bsym
, csym
, is_expr
);
6154 /* expression case */
6169 /* This skips over a stream of tokens containing balanced {} and ()
6170 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6171 with a '{'). If STR then allocates and stores the skipped tokens
6172 in *STR. This doesn't check if () and {} are nested correctly,
6173 i.e. "({)}" is accepted. */
6174 static void skip_or_save_block(TokenString
**str
)
6176 int braces
= tok
== '{';
6179 *str
= tok_str_alloc();
6181 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6183 if (tok
== TOK_EOF
) {
6184 if (str
|| level
> 0)
6185 tcc_error("unexpected end of file");
6190 tok_str_add_tok(*str
);
6193 if (t
== '{' || t
== '(') {
6195 } else if (t
== '}' || t
== ')') {
6197 if (level
== 0 && braces
&& t
== '}')
6202 tok_str_add(*str
, -1);
6203 tok_str_add(*str
, 0);
6207 #define EXPR_CONST 1
6210 static void parse_init_elem(int expr_type
)
6212 int saved_global_expr
;
6215 /* compound literals must be allocated globally in this case */
6216 saved_global_expr
= global_expr
;
6219 global_expr
= saved_global_expr
;
6220 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6221 (compound literals). */
6222 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6223 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6224 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6225 #ifdef TCC_TARGET_PE
6226 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6229 tcc_error("initializer element is not constant");
6237 /* put zeros for variable based init */
6238 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6241 /* nothing to do because globals are already set to zero */
6243 vpush_global_sym(&func_old_type
, TOK_memset
);
6245 #ifdef TCC_TARGET_ARM
6256 /* t is the array or struct type. c is the array or struct
6257 address. cur_field is the pointer to the current
6258 field, for arrays the 'c' member contains the current start
6259 index. 'size_only' is true if only size info is needed (only used
6260 in arrays). al contains the already initialized length of the
6261 current container (starting at c). This returns the new length of that. */
6262 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6263 Sym
**cur_field
, int size_only
, int al
)
6266 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6267 unsigned long corig
= c
;
6271 if (gnu_ext
&& (l
= is_label()) != 0)
6273 /* NOTE: we only support ranges for last designator */
6274 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6276 if (!(type
->t
& VT_ARRAY
))
6277 expect("array type");
6279 index
= index_last
= expr_const();
6280 if (tok
== TOK_DOTS
&& gnu_ext
) {
6282 index_last
= expr_const();
6286 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6288 tcc_error("invalid index");
6290 (*cur_field
)->c
= index_last
;
6291 type
= pointed_type(type
);
6292 elem_size
= type_size(type
, &align
);
6293 c
+= index
* elem_size
;
6294 nb_elems
= index_last
- index
+ 1;
6300 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6301 expect("struct/union type");
6302 f
= find_field(type
, l
);
6315 } else if (!gnu_ext
) {
6319 if (type
->t
& VT_ARRAY
) {
6320 index
= (*cur_field
)->c
;
6321 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6322 tcc_error("index too large");
6323 type
= pointed_type(type
);
6324 c
+= index
* type_size(type
, &align
);
6327 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6328 *cur_field
= f
= f
->next
;
6330 tcc_error("too many field init");
6335 /* must put zero in holes (note that doing it that way
6336 ensures that it even works with designators) */
6337 if (!size_only
&& c
- corig
> al
)
6338 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6339 decl_initializer(type
, sec
, c
, 0, size_only
);
6341 /* XXX: make it more general */
6342 if (!size_only
&& nb_elems
> 1) {
6343 unsigned long c_end
;
6348 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6349 for (i
= 1; i
< nb_elems
; i
++) {
6350 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6355 } else if (!NODATA_WANTED
) {
6356 c_end
= c
+ nb_elems
* elem_size
;
6357 if (c_end
> sec
->data_allocated
)
6358 section_realloc(sec
, c_end
);
6359 src
= sec
->data
+ c
;
6361 for(i
= 1; i
< nb_elems
; i
++) {
6363 memcpy(dst
, src
, elem_size
);
6367 c
+= nb_elems
* type_size(type
, &align
);
6373 /* store a value or an expression directly in global data or in local array */
6374 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6381 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6385 /* XXX: not portable */
6386 /* XXX: generate error if incorrect relocation */
6387 gen_assign_cast(&dtype
);
6388 bt
= type
->t
& VT_BTYPE
;
6390 if ((vtop
->r
& VT_SYM
)
6393 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6394 || (type
->t
& VT_BITFIELD
))
6395 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6397 tcc_error("initializer element is not computable at load time");
6399 if (NODATA_WANTED
) {
6404 size
= type_size(type
, &align
);
6405 section_reserve(sec
, c
+ size
);
6406 ptr
= sec
->data
+ c
;
6408 /* XXX: make code faster ? */
6409 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6410 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6411 /* XXX This rejects compound literals like
6412 '(void *){ptr}'. The problem is that '&sym' is
6413 represented the same way, which would be ruled out
6414 by the SYM_FIRST_ANOM check above, but also '"string"'
6415 in 'char *p = "string"' is represented the same
6416 with the type being VT_PTR and the symbol being an
6417 anonymous one. That is, there's no difference in vtop
6418 between '(void *){x}' and '&(void *){x}'. Ignore
6419 pointer typed entities here. Hopefully no real code
6420 will every use compound literals with scalar type. */
6421 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6422 /* These come from compound literals, memcpy stuff over. */
6426 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[vtop
->sym
->c
];
6427 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6428 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6430 /* We need to copy over all memory contents, and that
6431 includes relocations. Use the fact that relocs are
6432 created it order, so look from the end of relocs
6433 until we hit one before the copied region. */
6434 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6435 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6436 while (num_relocs
--) {
6438 if (rel
->r_offset
>= esym
->st_value
+ size
)
6440 if (rel
->r_offset
< esym
->st_value
)
6442 /* Note: if the same fields are initialized multiple
6443 times (possible with designators) then we possibly
6444 add multiple relocations for the same offset here.
6445 That would lead to wrong code, the last reloc needs
6446 to win. We clean this up later after the whole
6447 initializer is parsed. */
6448 put_elf_reloca(symtab_section
, sec
,
6449 c
+ rel
->r_offset
- esym
->st_value
,
6450 ELFW(R_TYPE
)(rel
->r_info
),
6451 ELFW(R_SYM
)(rel
->r_info
),
6461 if (type
->t
& VT_BITFIELD
) {
6462 int bit_pos
, bit_size
, bits
, n
;
6463 unsigned char *p
, v
, m
;
6464 bit_pos
= BIT_POS(vtop
->type
.t
);
6465 bit_size
= BIT_SIZE(vtop
->type
.t
);
6466 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6467 bit_pos
&= 7, bits
= 0;
6472 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6473 m
= ((1 << n
) - 1) << bit_pos
;
6474 *p
= (*p
& ~m
) | (v
& m
);
6475 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6479 /* XXX: when cross-compiling we assume that each type has the
6480 same representation on host and target, which is likely to
6481 be wrong in the case of long double */
6483 vtop
->c
.i
= vtop
->c
.i
!= 0;
6485 *(char *)ptr
|= vtop
->c
.i
;
6488 *(short *)ptr
|= vtop
->c
.i
;
6491 *(float*)ptr
= vtop
->c
.f
;
6494 *(double *)ptr
= vtop
->c
.d
;
6497 #if defined TCC_IS_NATIVE_387
6498 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6499 memcpy(ptr
, &vtop
->c
.ld
, 10);
6501 else if (sizeof (long double) == sizeof (double))
6502 __asm__("fldl %1\nfstpt %0\n" : "=m" (ptr
) : "m" (vtop
->c
.ld
));
6506 if (sizeof(long double) == LDOUBLE_SIZE
)
6507 *(long double*)ptr
= vtop
->c
.ld
;
6508 else if (sizeof(double) == LDOUBLE_SIZE
)
6509 *(double *)ptr
= (double)vtop
->c
.ld
;
6511 tcc_error("can't cross compile long double constants");
6515 *(long long *)ptr
|= vtop
->c
.i
;
6522 addr_t val
= vtop
->c
.i
;
6524 if (vtop
->r
& VT_SYM
)
6525 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6527 *(addr_t
*)ptr
|= val
;
6529 if (vtop
->r
& VT_SYM
)
6530 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6531 *(addr_t
*)ptr
|= val
;
6537 int val
= vtop
->c
.i
;
6539 if (vtop
->r
& VT_SYM
)
6540 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6544 if (vtop
->r
& VT_SYM
)
6545 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6554 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6561 /* 't' contains the type and storage info. 'c' is the offset of the
6562 object in section 'sec'. If 'sec' is NULL, it means stack based
6563 allocation. 'first' is true if array '{' must be read (multi
6564 dimension implicit array init handling). 'size_only' is true if
6565 size only evaluation is wanted (only for arrays). */
6566 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6567 int first
, int size_only
)
6569 int len
, n
, no_oblock
, nb
, i
;
6576 /* If we currently are at an '}' or ',' we have read an initializer
6577 element in one of our callers, and not yet consumed it. */
6578 have_elem
= tok
== '}' || tok
== ',';
6579 if (!have_elem
&& tok
!= '{' &&
6580 /* In case of strings we have special handling for arrays, so
6581 don't consume them as initializer value (which would commit them
6582 to some anonymous symbol). */
6583 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6585 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6590 !(type
->t
& VT_ARRAY
) &&
6591 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6592 The source type might have VT_CONSTANT set, which is
6593 of course assignable to non-const elements. */
6594 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6595 init_putv(type
, sec
, c
);
6596 } else if (type
->t
& VT_ARRAY
) {
6599 t1
= pointed_type(type
);
6600 size1
= type_size(t1
, &align1
);
6603 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6606 tcc_error("character array initializer must be a literal,"
6607 " optionally enclosed in braces");
6612 /* only parse strings here if correct type (otherwise: handle
6613 them as ((w)char *) expressions */
6614 if ((tok
== TOK_LSTR
&&
6615 #ifdef TCC_TARGET_PE
6616 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6618 (t1
->t
& VT_BTYPE
) == VT_INT
6620 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6622 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6625 /* compute maximum number of chars wanted */
6627 cstr_len
= tokc
.str
.size
;
6629 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6632 if (n
>= 0 && nb
> (n
- len
))
6636 tcc_warning("initializer-string for array is too long");
6637 /* in order to go faster for common case (char
6638 string in global variable, we handle it
6640 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6642 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
6646 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6648 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6650 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
6657 /* only add trailing zero if enough storage (no
6658 warning in this case since it is standard) */
6659 if (n
< 0 || len
< n
) {
6662 init_putv(t1
, sec
, c
+ (len
* size1
));
6673 while (tok
!= '}' || have_elem
) {
6674 len
= decl_designator(type
, sec
, c
, &f
, size_only
, len
);
6676 if (type
->t
& VT_ARRAY
) {
6678 /* special test for multi dimensional arrays (may not
6679 be strictly correct if designators are used at the
6681 if (no_oblock
&& len
>= n
*size1
)
6684 if (s
->type
.t
== VT_UNION
)
6688 if (no_oblock
&& f
== NULL
)
6697 /* put zeros at the end */
6698 if (!size_only
&& len
< n
*size1
)
6699 init_putz(sec
, c
+ len
, n
*size1
- len
);
6702 /* patch type size if needed, which happens only for array types */
6704 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
6705 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6708 if (first
|| tok
== '{') {
6716 } else if (tok
== '{') {
6718 decl_initializer(type
, sec
, c
, first
, size_only
);
6720 } else if (size_only
) {
6721 /* If we supported only ISO C we wouldn't have to accept calling
6722 this on anything than an array size_only==1 (and even then
6723 only on the outermost level, so no recursion would be needed),
6724 because initializing a flex array member isn't supported.
6725 But GNU C supports it, so we need to recurse even into
6726 subfields of structs and arrays when size_only is set. */
6727 /* just skip expression */
6728 skip_or_save_block(NULL
);
6731 /* This should happen only when we haven't parsed
6732 the init element above for fear of committing a
6733 string constant to memory too early. */
6734 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
6735 expect("string constant");
6736 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6738 init_putv(type
, sec
, c
);
6742 /* parse an initializer for type 't' if 'has_init' is non zero, and
6743 allocate space in local or global data space ('r' is either
6744 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6745 variable 'v' of scope 'scope' is declared before initializers
6746 are parsed. If 'v' is zero, then a reference to the new object
6747 is put in the value stack. If 'has_init' is 2, a special parsing
6748 is done to handle string constants. */
6749 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
6750 int has_init
, int v
, int scope
)
6752 int size
, align
, addr
;
6753 TokenString
*init_str
= NULL
;
6756 Sym
*flexible_array
;
6758 int saved_nocode_wanted
= nocode_wanted
;
6759 #ifdef CONFIG_TCC_BCHECK
6760 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
6763 if (type
->t
& VT_STATIC
)
6764 nocode_wanted
|= NODATA_WANTED
? 0x40000000 : 0x80000000;
6766 flexible_array
= NULL
;
6767 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6768 Sym
*field
= type
->ref
->next
;
6771 field
= field
->next
;
6772 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
6773 flexible_array
= field
;
6777 size
= type_size(type
, &align
);
6778 /* If unknown size, we must evaluate it before
6779 evaluating initializers because
6780 initializers can generate global data too
6781 (e.g. string pointers or ISOC99 compound
6782 literals). It also simplifies local
6783 initializers handling */
6784 if (size
< 0 || (flexible_array
&& has_init
)) {
6786 tcc_error("unknown type size");
6787 /* get all init string */
6788 if (has_init
== 2) {
6789 init_str
= tok_str_alloc();
6790 /* only get strings */
6791 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6792 tok_str_add_tok(init_str
);
6795 tok_str_add(init_str
, -1);
6796 tok_str_add(init_str
, 0);
6798 skip_or_save_block(&init_str
);
6803 begin_macro(init_str
, 1);
6805 decl_initializer(type
, NULL
, 0, 1, 1);
6806 /* prepare second initializer parsing */
6807 macro_ptr
= init_str
->str
;
6810 /* if still unknown size, error */
6811 size
= type_size(type
, &align
);
6813 tcc_error("unknown type size");
6815 /* If there's a flex member and it was used in the initializer
6817 if (flexible_array
&&
6818 flexible_array
->type
.ref
->c
> 0)
6819 size
+= flexible_array
->type
.ref
->c
6820 * pointed_size(&flexible_array
->type
);
6821 /* take into account specified alignment if bigger */
6822 if (ad
->a
.aligned
) {
6823 int speca
= 1 << (ad
->a
.aligned
- 1);
6826 } else if (ad
->a
.packed
) {
6831 size
= 0, align
= 1;
6833 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
6835 #ifdef CONFIG_TCC_BCHECK
6836 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
6840 loc
= (loc
- size
) & -align
;
6842 #ifdef CONFIG_TCC_BCHECK
6843 /* handles bounds */
6844 /* XXX: currently, since we do only one pass, we cannot track
6845 '&' operators, so we add only arrays */
6846 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
6848 /* add padding between regions */
6850 /* then add local bound info */
6851 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
6852 bounds_ptr
[0] = addr
;
6853 bounds_ptr
[1] = size
;
6857 /* local variable */
6858 #ifdef CONFIG_TCC_ASM
6859 if (ad
->asm_label
) {
6860 int reg
= asm_parse_regvar(ad
->asm_label
);
6862 r
= (r
& ~VT_VALMASK
) | reg
;
6865 sym
= sym_push(v
, type
, r
, addr
);
6868 /* push local reference */
6869 vset(type
, r
, addr
);
6872 if (v
&& scope
== VT_CONST
) {
6873 /* see if the symbol was already defined */
6876 patch_storage(sym
, ad
, type
);
6877 if (sym
->type
.t
& VT_EXTERN
) {
6878 /* if the variable is extern, it was not allocated */
6879 sym
->type
.t
&= ~VT_EXTERN
;
6880 /* set array size if it was omitted in extern
6882 if ((sym
->type
.t
& VT_ARRAY
) &&
6883 sym
->type
.ref
->c
< 0 &&
6885 sym
->type
.ref
->c
= type
->ref
->c
;
6886 } else if (!has_init
) {
6887 /* we accept several definitions of the same
6888 global variable. this is tricky, because we
6889 must play with the SHN_COMMON type of the symbol */
6890 /* no init data, we won't add more to the symbol */
6892 } else if (sym
->c
) {
6894 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
];
6895 if (esym
->st_shndx
== data_section
->sh_num
)
6896 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
6901 /* allocate symbol in corresponding section */
6906 else if (tcc_state
->nocommon
)
6911 addr
= section_add(sec
, size
, align
);
6912 #ifdef CONFIG_TCC_BCHECK
6913 /* add padding if bound check */
6915 section_add(sec
, 1, 1);
6918 addr
= align
; /* SHN_COMMON is special, symbol value is align */
6919 sec
= common_section
;
6924 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
6925 patch_storage(sym
, ad
, NULL
);
6927 /* Local statics have a scope until now (for
6928 warnings), remove it here. */
6930 /* update symbol definition */
6931 put_extern_sym(sym
, sec
, addr
, size
);
6933 /* push global reference */
6934 sym
= get_sym_ref(type
, sec
, addr
, size
);
6935 vpushsym(type
, sym
);
6939 #ifdef CONFIG_TCC_BCHECK
6940 /* handles bounds now because the symbol must be defined
6941 before for the relocation */
6945 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
6946 /* then add global bound info */
6947 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
6948 bounds_ptr
[0] = 0; /* relocated */
6949 bounds_ptr
[1] = size
;
6954 if (type
->t
& VT_VLA
) {
6960 /* save current stack pointer */
6961 if (vlas_in_scope
== 0) {
6962 if (vla_sp_root_loc
== -1)
6963 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
6964 gen_vla_sp_save(vla_sp_root_loc
);
6967 vla_runtime_type_size(type
, &a
);
6968 gen_vla_alloc(type
, a
);
6969 gen_vla_sp_save(addr
);
6973 } else if (has_init
) {
6974 size_t oldreloc_offset
= 0;
6975 if (sec
&& sec
->reloc
)
6976 oldreloc_offset
= sec
->reloc
->data_offset
;
6977 decl_initializer(type
, sec
, addr
, 1, 0);
6978 if (sec
&& sec
->reloc
)
6979 squeeze_multi_relocs(sec
, oldreloc_offset
);
6980 /* patch flexible array member size back to -1, */
6981 /* for possible subsequent similar declarations */
6983 flexible_array
->type
.ref
->c
= -1;
6987 /* restore parse state if needed */
6993 nocode_wanted
= saved_nocode_wanted
;
6996 /* parse a function defined by symbol 'sym' and generate its code in
6997 'cur_text_section' */
6998 static void gen_function(Sym
*sym
)
7001 ind
= cur_text_section
->data_offset
;
7002 /* NOTE: we patch the symbol size later */
7003 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7004 funcname
= get_tok_str(sym
->v
, NULL
);
7006 /* Initialize VLA state */
7008 vla_sp_root_loc
= -1;
7009 /* put debug symbol */
7010 tcc_debug_funcstart(tcc_state
, sym
);
7011 /* push a dummy symbol to enable local sym storage */
7012 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7013 local_scope
= 1; /* for function parameters */
7014 gfunc_prolog(&sym
->type
);
7017 block(NULL
, NULL
, 0);
7021 cur_text_section
->data_offset
= ind
;
7022 label_pop(&global_label_stack
, NULL
, 0);
7023 /* reset local stack */
7025 sym_pop(&local_stack
, NULL
, 0);
7026 /* end of function */
7027 /* patch symbol size */
7028 ((ElfW(Sym
) *)symtab_section
->data
)[sym
->c
].st_size
=
7030 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7031 /* It's better to crash than to generate wrong code */
7032 cur_text_section
= NULL
;
7033 funcname
= ""; /* for safety */
7034 func_vt
.t
= VT_VOID
; /* for safety */
7035 func_var
= 0; /* for safety */
7036 ind
= 0; /* for safety */
7037 nocode_wanted
= 0x80000000;
7041 static void gen_inline_functions(TCCState
*s
)
7044 int inline_generated
, i
, ln
;
7045 struct InlineFunc
*fn
;
7047 ln
= file
->line_num
;
7048 /* iterate while inline function are referenced */
7050 inline_generated
= 0;
7051 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7052 fn
= s
->inline_fns
[i
];
7054 if (sym
&& sym
->c
) {
7055 /* the function was used: generate its code and
7056 convert it to a normal function */
7059 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7060 sym
->type
.t
&= ~VT_INLINE
;
7062 begin_macro(fn
->func_str
, 1);
7064 cur_text_section
= text_section
;
7068 inline_generated
= 1;
7071 } while (inline_generated
);
7072 file
->line_num
= ln
;
7075 ST_FUNC
void free_inline_functions(TCCState
*s
)
7078 /* free tokens of unused inline functions */
7079 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7080 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7082 tok_str_free(fn
->func_str
);
7084 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7087 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7088 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* NOTE(review): this excerpt is line-mangled and incomplete.  The fused
   numbers (7089, 7097, ...) are original tccgen.c line numbers; every gap
   in that numbering (e.g. 7090-7096) marks statements and braces dropped
   by the extraction.  The code below is kept byte-identical; ONLY comments
   were added.  Control flow must be checked against the full file. */
7089 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
/* presumably the body opens with local declarations (v, has_init, r,
   type, btype, sym, ad) and a 'while (1)' over declarations -- the
   opening lines 7090-7096 are missing here; TODO confirm */
7097 if (!parse_btype(&btype
, &ad
)) {
/* no base type parsed: handle for-init termination, stray ';',
   global asm blocks and K&R implicit-int fallback */
7098 if (is_for_loop_init
)
7100 /* skip redundant ';' if not in old parameter decl scope */
7101 if (tok
== ';' && l
!= VT_CMP
) {
7107 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7108 /* global asm block */
7112 if (tok
>= TOK_UIDENT
) {
7113 /* special test for old K&R protos without explicit int
7114 type. Only accepted when defining global data */
7118 expect("declaration");
/* ';' directly after a struct/union/enum specifier: no declarator;
   warn when an anonymous struct/union defines no instance */
7123 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7124 int v
= btype
.ref
->v
;
7125 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7126 tcc_warning("unnamed struct/union that defines no instances");
7130 if (IS_ENUM(btype
.t
)) {
7135 while (1) { /* iterate thru each declaration */
7137 /* If the base type itself was an array type of unspecified
7138 size (like in 'typedef int arr[]; arr x = {1};') then
7139 we will overwrite the unknown size by the real one for
7140 this decl. We need to unshare the ref symbol holding
7142 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7143 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7145 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* NOTE(review): debug dump of the parsed type; upstream keeps this
   under '#if 0' -- the guard lines appear to be among those dropped;
   confirm before assuming it is live code */
7149 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7150 printf("type = '%s'\n", buf
);
/* function declarator: storage-class check and old-style (K&R)
   parameter declaration list handled by a recursive decl0(VT_CMP) */
7153 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7154 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
7155 tcc_error("function without file scope cannot be static");
7157 /* if old style function prototype, we accept a
7160 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7161 decl0(VT_CMP
, 0, sym
);
/* GNU extension: asm("label") rename after the declarator */
7164 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7165 ad
.asm_label
= asm_label_instr();
7166 /* parse one last attribute list, after asm label */
7167 parse_attribute(&ad
);
7172 #ifdef TCC_TARGET_PE
7173 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7174 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7175 tcc_error("cannot have dll linkage with static or typedef");
7176 if (ad
.a
.dllimport
) {
7177 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7180 type
.t
|= VT_EXTERN
;
/* NOTE(review): a function-definition branch starts here; its
   condition (presumably "tok == '{'") and the '#endif' were dropped
   by the extraction */
7186 tcc_error("cannot use local functions");
7187 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7188 expect("function definition");
7190 /* reject abstract declarators in function definition
7191 make old style params without decl have int type */
7193 while ((sym
= sym
->next
) != NULL
) {
7194 if (!(sym
->v
& ~SYM_FIELD
))
7195 expect("identifier");
7196 if (sym
->type
.t
== VT_VOID
)
7197 sym
->type
= int_type
;
7200 /* XXX: cannot do better now: convert extern line to static inline */
7201 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7202 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
/* merge this definition with a previously seen prototype 'sym':
   calling convention, staticness, visibility, dllexport/weak */
7207 if ((sym
->type
.t
& VT_BTYPE
) != VT_FUNC
)
7210 ref
= sym
->type
.ref
;
7212 /* use func_call from prototype if not defined */
7213 if (ref
->f
.func_call
!= FUNC_CDECL
7214 && type
.ref
->f
.func_call
== FUNC_CDECL
)
7215 type
.ref
->f
.func_call
= ref
->f
.func_call
;
7217 /* use static from prototype */
7218 if (sym
->type
.t
& VT_STATIC
)
7219 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7221 /* If the definition has no visibility use the
7222 one from prototype. */
7223 if (!type
.ref
->a
.visibility
)
7224 type
.ref
->a
.visibility
= ref
->a
.visibility
;
7225 /* apply other storage attributes from prototype */
7226 type
.ref
->a
.dllexport
|= ref
->a
.dllexport
;
7227 type
.ref
->a
.weak
|= ref
->a
.weak
;
7229 if (!is_compatible_types(&sym
->type
, &type
)) {
7231 tcc_error("incompatible types for redefinition of '%s'",
7232 get_tok_str(v
, NULL
));
7234 if (ref
->f
.func_body
)
7235 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
7236 /* if symbol is already defined, then put complete type */
7240 /* put function symbol */
7241 sym
= global_identifier_push(v
, type
.t
, 0);
7242 sym
->type
.ref
= type
.ref
;
/* mark defined and make the symbol addressable as a constant */
7245 sym
->type
.ref
->f
.func_body
= 1;
7246 sym
->r
= VT_SYM
| VT_CONST
;
7247 patch_storage(sym
, &ad
, NULL
);
7249 /* static inline functions are just recorded as a kind
7250 of macro. Their code will be emitted at the end of
7251 the compilation unit only if they are used */
7252 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7253 (VT_INLINE
| VT_STATIC
)) {
7254 struct InlineFunc
*fn
;
7255 const char *filename
;
7257 filename
= file
? file
->filename
: "";
/* one allocation: struct plus trailing space for the filename copy */
7258 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7259 strcpy(fn
->filename
, filename
);
7261 skip_or_save_block(&fn
->func_str
);
7262 dynarray_add(&tcc_state
->inline_fns
,
7263 &tcc_state
->nb_inline_fns
, fn
);
7265 /* compute text section */
7266 cur_text_section
= ad
.section
;
7267 if (!cur_text_section
)
7268 cur_text_section
= text_section
;
/* old-style parameter declaration (l == VT_CMP): the declared name
   must match one of func_sym's parameters */
7274 /* find parameter in function parameter list */
7275 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7276 if ((sym
->v
& ~SYM_FIELD
) == v
)
7278 tcc_error("declaration for parameter '%s' but no such parameter",
7279 get_tok_str(v
, NULL
));
7281 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7282 tcc_error("storage class specified for '%s'",
7283 get_tok_str(v
, NULL
));
7284 if (sym
->type
.t
!= VT_VOID
)
7285 tcc_error("redefinition of parameter '%s'",
7286 get_tok_str(v
, NULL
));
7287 convert_parameter_type(&type
);
7289 } else if (type
.t
& VT_TYPEDEF
) {
7290 /* save typedefed type */
7291 /* XXX: test storage specifiers ? */
7293 if (sym
&& sym
->sym_scope
== local_scope
) {
7294 if (!is_compatible_types(&sym
->type
, &type
)
7295 || !(sym
->type
.t
& VT_TYPEDEF
))
7296 tcc_error("incompatible redefinition of '%s'",
7297 get_tok_str(v
, NULL
));
7300 sym
= sym_push(v
, &type
, 0, 0);
/* ordinary declaration: function prototype or variable (no body,
   not a typedef) */
7306 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7307 /* external function definition */
7308 /* specific case for func_call attribute */
7310 } else if (!(type
.t
& VT_ARRAY
)) {
7311 /* not lvalue if array */
7312 r
|= lvalue_type(type
.t
);
7314 has_init
= (tok
== '=');
7315 if (has_init
&& (type
.t
& VT_VLA
))
7316 tcc_error("variable length array cannot be initialized");
7317 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
7318 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7319 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7320 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7321 /* external variable or function */
7322 /* NOTE: as GCC, uninitialized global static
7323 arrays of null size are considered as
7325 sym
= external_sym(v
, &type
, r
, &ad
);
7326 if (ad
.alias_target
) {
/* __attribute__((alias("target"))): emit this symbol at the ELF
   address/size of the (already defined) target symbol */
7330 alias_target
= sym_find(ad
.alias_target
);
7331 if (!alias_target
|| !alias_target
->c
)
7332 tcc_error("unsupported forward __alias__ attribute");
7333 esym
= &((ElfW(Sym
) *)symtab_section
->data
)[alias_target
->c
];
7334 tsec
.sh_num
= esym
->st_shndx
;
7335 /* Local statics have a scope until now (for
7336 warnings), remove it here. */
7338 put_extern_sym2(sym
, &tsec
, esym
->st_value
, esym
->st_size
, 0);
7341 if (type
.t
& VT_STATIC
)
7347 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7351 if (is_for_loop_init
)
7364 static void decl(int l
)
7369 /* ------------------------------------------------------------------------- */