2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 /********************************************************/
23 /* global variables */
25 /* loc : local variable index
26 ind : output code index
28 anon_sym: anonymous symbol index
30 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
32 ST_DATA Sym
*sym_free_first
;
33 ST_DATA
void **sym_pools
;
34 ST_DATA
int nb_sym_pools
;
36 ST_DATA Sym
*global_stack
;
37 ST_DATA Sym
*local_stack
;
38 ST_DATA Sym
*define_stack
;
39 ST_DATA Sym
*global_label_stack
;
40 ST_DATA Sym
*local_label_stack
;
41 static int local_scope
;
43 static int section_sym
;
45 ST_DATA
int vlas_in_scope
; /* number of VLAs that are currently in scope */
46 ST_DATA
int vla_sp_root_loc
; /* vla_sp_loc for SP before any VLAs were pushed */
47 ST_DATA
int vla_sp_loc
; /* Pointer to variable holding location to store stack pointer on the stack when modifying stack pointer */
49 ST_DATA SValue __vstack
[1+VSTACK_SIZE
], *vtop
, *pvtop
;
51 ST_DATA
int const_wanted
; /* true if constant wanted */
52 ST_DATA
int nocode_wanted
; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
55 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
56 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
57 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
59 ST_DATA
int last_line_num
, last_ind
, func_ind
; /* debug last line number and pc */
60 ST_DATA
const char *funcname
;
63 ST_DATA CType char_pointer_type
, func_old_type
, int_type
, size_type
, ptrdiff_type
;
65 ST_DATA
struct switch_t
{
69 } **p
; int n
; /* list of case ranges */
70 int def_sym
; /* default symbol */
71 } *cur_switch
; /* current switch */
73 /* ------------------------------------------------------------------------- */
75 static void gen_cast(CType
*type
);
76 static void gen_cast_s(int t
);
77 static inline CType
*pointed_type(CType
*type
);
78 static int is_compatible_types(CType
*type1
, CType
*type2
);
79 static int parse_btype(CType
*type
, AttributeDef
*ad
);
80 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
81 static void parse_expr_type(CType
*type
);
82 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
83 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int first
, int size_only
);
84 static void block(int *bsym
, int *csym
, int is_expr
);
85 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
86 static void decl(int l
);
87 static int decl0(int l
, int is_for_loop_init
, Sym
*);
88 static void expr_eq(void);
89 static void vla_runtime_type_size(CType
*type
, int *a
);
90 static void vla_sp_restore(void);
91 static void vla_sp_restore_root(void);
92 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
93 static inline int64_t expr_const64(void);
94 static void vpush64(int ty
, unsigned long long v
);
95 static void vpush(CType
*type
);
96 static int gvtst(int inv
, int t
);
97 static void gen_inline_functions(TCCState
*s
);
98 static void skip_or_save_block(TokenString
**str
);
99 static void gv_dup(void);
/* Return nonzero when type 't' is a floating-point type
   (long double, double, float, or the two-register QFLOAT).
   NOTE(review): 'bt' is presumably t's basic type (t & VT_BTYPE);
   the line(s) computing it are missing from this extraction --
   confirm against the full source. */
101 ST_INLN
int is_float(int t
)
105 return bt
== VT_LDOUBLE
|| bt
== VT_DOUBLE
|| bt
== VT_FLOAT
|| bt
== VT_QFLOAT
;
108 /* we use our own 'finite' function to avoid potential problems with
109 non standard math libs */
110 /* XXX: endianness dependent */
/* Return 1 when 'd' is finite, 0 for +/-inf and NaN, without relying on
   the math library's finite()/isfinite().
   How it works (see the visible return expression): the double's bytes are
   copied into the int array 'p' (declaration elided from this extraction);
   p[1] is the word holding the IEEE-754 exponent field on little-endian
   targets ("XXX: endianness dependent" per the file comment).  OR-ing with
   0x800fffff sets every bit except the 11 exponent bits, so adding 1
   carries out to zero exactly when the exponent is all-ones (inf/NaN);
   shifting the surviving bit 31 down yields 1 for finite values. */
111 ST_FUNC
int ieee_finite(double d
)
114 memcpy(p
, &d
, sizeof(double));
115 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
118 /* compiling intel long double natively */
119 #if (defined __i386__ || defined __x86_64__) \
120 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
121 # define TCC_IS_NATIVE_387
/* Verify that the value on top of the value stack is an lvalue
   (VT_LVAL set in vtop->r).
   NOTE(review): the statement taken when the check fails (original
   line ~127, presumably an expect/error call) is missing from this
   extraction -- confirm against the full source. */
124 ST_FUNC
void test_lvalue(void)
126 if (!(vtop
->r
& VT_LVAL
))
/* Internal sanity check: report a value-stack leak, where vtop has not
   returned to the saved stack mark pvtop (the "%d" argument is the
   pointer difference vtop - pvtop, i.e. the number of leaked entries).
   NOTE(review): the guarding condition (original lines 131-132,
   presumably 'if (vtop != pvtop)') is missing from this extraction --
   confirm against the full source. */
130 ST_FUNC
void check_vstack(void)
133 tcc_error("internal compiler error: vstack leak (%d)", vtop
- pvtop
);
136 /* ------------------------------------------------------------------------- */
137 /* vstack debugging aid */
/* Debugging aid: dump 'b' consecutive value-stack entries starting at
   vtop[-a], printing for each its type word, register fields r/r2 and
   constant value c.i, prefixed with the caller-supplied label 'lbl'. */
140 void pv (const char *lbl
, int a
, int b
)
143 for (i
= a
; i
< a
+ b
; ++i
) {
/* p walks downward from the top of the value stack */
144 SValue
*p
= &vtop
[-i
];
145 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
146 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
151 /* ------------------------------------------------------------------------- */
152 /* start of translation unit info */
153 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
158 /* file info: full path + filename */
159 section_sym
= put_elf_sym(symtab_section
, 0, 0,
160 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
161 text_section
->sh_num
, NULL
);
162 getcwd(buf
, sizeof(buf
));
164 normalize_slashes(buf
);
166 pstrcat(buf
, sizeof(buf
), "/");
167 put_stabs_r(buf
, N_SO
, 0, 0,
168 text_section
->data_offset
, text_section
, section_sym
);
169 put_stabs_r(file
->filename
, N_SO
, 0, 0,
170 text_section
->data_offset
, text_section
, section_sym
);
175 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
176 symbols can be safely used */
177 put_elf_sym(symtab_section
, 0, 0,
178 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
179 SHN_ABS
, file
->filename
);
182 /* put end of translation unit info */
183 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
187 put_stabs_r(NULL
, N_SO
, 0, 0,
188 text_section
->data_offset
, text_section
, section_sym
);
192 /* generate line number info */
193 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
197 if ((last_line_num
!= file
->line_num
|| last_ind
!= ind
)) {
198 put_stabn(N_SLINE
, 0, file
->line_num
, ind
- func_ind
);
200 last_line_num
= file
->line_num
;
204 /* put function symbol */
205 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
213 /* XXX: we put here a dummy type */
214 snprintf(buf
, sizeof(buf
), "%s:%c1",
215 funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
216 put_stabs_r(buf
, N_FUN
, 0, file
->line_num
, 0,
217 cur_text_section
, sym
->c
);
218 /* //gr gdb wants a line at the function */
219 put_stabn(N_SLINE
, 0, file
->line_num
, 0);
225 /* put function size */
226 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
230 put_stabn(N_FUN
, 0, 0, size
);
233 /* ------------------------------------------------------------------------- */
234 ST_FUNC
int tccgen_compile(TCCState
*s1
)
236 cur_text_section
= NULL
;
238 anon_sym
= SYM_FIRST_ANOM
;
241 nocode_wanted
= 0x80000000;
243 /* define some often used types */
245 char_pointer_type
.t
= VT_BYTE
;
246 mk_pointer(&char_pointer_type
);
248 size_type
.t
= VT_INT
| VT_UNSIGNED
;
249 ptrdiff_type
.t
= VT_INT
;
251 size_type
.t
= VT_LLONG
| VT_UNSIGNED
;
252 ptrdiff_type
.t
= VT_LLONG
;
254 size_type
.t
= VT_LONG
| VT_LLONG
| VT_UNSIGNED
;
255 ptrdiff_type
.t
= VT_LONG
| VT_LLONG
;
257 func_old_type
.t
= VT_FUNC
;
258 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
259 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
260 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
264 #ifdef TCC_TARGET_ARM
269 printf("%s: **** new file\n", file
->filename
);
272 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
275 gen_inline_functions(s1
);
277 /* end of translation unit info */
282 /* ------------------------------------------------------------------------- */
283 ST_FUNC ElfSym
*elfsym(Sym
*s
)
287 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
290 /* apply storage attributes to Elf symbol */
291 ST_FUNC
void update_storage(Sym
*sym
)
294 int sym_bind
, old_sym_bind
;
300 if (sym
->a
.visibility
)
301 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
304 if (sym
->type
.t
& VT_STATIC
)
305 sym_bind
= STB_LOCAL
;
306 else if (sym
->a
.weak
)
309 sym_bind
= STB_GLOBAL
;
310 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
311 if (sym_bind
!= old_sym_bind
) {
312 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
316 if (sym
->a
.dllimport
)
317 esym
->st_other
|= ST_PE_IMPORT
;
318 if (sym
->a
.dllexport
)
319 esym
->st_other
|= ST_PE_EXPORT
;
323 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
324 get_tok_str(sym
->v
, NULL
),
325 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
333 /* ------------------------------------------------------------------------- */
334 /* update sym->c so that it points to an external symbol in section
335 'section' with value 'value' */
337 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
338 addr_t value
, unsigned long size
,
339 int can_add_underscore
)
341 int sym_type
, sym_bind
, info
, other
, t
;
345 #ifdef CONFIG_TCC_BCHECK
350 name
= get_tok_str(sym
->v
, NULL
);
351 #ifdef CONFIG_TCC_BCHECK
352 if (tcc_state
->do_bounds_check
) {
353 /* XXX: avoid doing that for statics ? */
354 /* if bound checking is activated, we change some function
355 names by adding the "__bound" prefix */
358 /* XXX: we rely only on malloc hooks */
371 strcpy(buf
, "__bound_");
379 if ((t
& VT_BTYPE
) == VT_FUNC
) {
381 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
382 sym_type
= STT_NOTYPE
;
384 sym_type
= STT_OBJECT
;
387 sym_bind
= STB_LOCAL
;
389 sym_bind
= STB_GLOBAL
;
392 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
393 Sym
*ref
= sym
->type
.ref
;
394 if (ref
->a
.nodecorate
) {
395 can_add_underscore
= 0;
397 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
398 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
400 other
|= ST_PE_STDCALL
;
401 can_add_underscore
= 0;
405 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
407 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
411 name
= get_tok_str(sym
->asm_label
, NULL
);
412 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
413 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
416 esym
->st_value
= value
;
417 esym
->st_size
= size
;
418 esym
->st_shndx
= sh_num
;
/* Convenience wrapper around put_extern_sym2(): translate the Section
   pointer into an ELF section number (SHN_UNDEF when 'section' is NULL,
   i.e. an undefined/external symbol) and forward with
   can_add_underscore = 1, allowing target-specific name decoration. */
423 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
424 addr_t value
, unsigned long size
)
426 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
427 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
430 /* add a new relocation entry to symbol 'sym' in section 's' */
431 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
436 if (nocode_wanted
&& s
== cur_text_section
)
441 put_extern_sym(sym
, NULL
, 0, 0);
445 /* now we can add ELF relocation info */
446 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
/* Add a relocation entry for symbol 'sym' at 'offset' in section 's':
   thin wrapper around greloca() with an addend of 0. */
450 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
452 greloca(s
, sym
, offset
, type
, 0);
456 /* ------------------------------------------------------------------------- */
457 /* symbol allocator */
458 static Sym
*__sym_malloc(void)
460 Sym
*sym_pool
, *sym
, *last_sym
;
463 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
464 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
466 last_sym
= sym_free_first
;
468 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
469 sym
->next
= last_sym
;
473 sym_free_first
= last_sym
;
477 static inline Sym
*sym_malloc(void)
481 sym
= sym_free_first
;
483 sym
= __sym_malloc();
484 sym_free_first
= sym
->next
;
487 sym
= tcc_malloc(sizeof(Sym
));
492 ST_INLN
void sym_free(Sym
*sym
)
495 sym
->next
= sym_free_first
;
496 sym_free_first
= sym
;
502 /* push, without hashing */
503 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
508 memset(s
, 0, sizeof *s
);
518 /* find a symbol and return its associated structure. 's' is the top
519 of the symbol stack */
520 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
532 /* structure lookup */
533 ST_INLN Sym
*struct_find(int v
)
536 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
538 return table_ident
[v
]->sym_struct
;
541 /* find an identifier */
542 ST_INLN Sym
*sym_find(int v
)
545 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
547 return table_ident
[v
]->sym_identifier
;
550 /* push a given symbol on the symbol stack */
551 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
560 s
= sym_push2(ps
, v
, type
->t
, c
);
561 s
->type
.ref
= type
->ref
;
563 /* don't record fields or anonymous symbols */
565 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
566 /* record symbol in token array */
567 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
569 ps
= &ts
->sym_struct
;
571 ps
= &ts
->sym_identifier
;
574 s
->sym_scope
= local_scope
;
575 if (s
->prev_tok
&& s
->prev_tok
->sym_scope
== s
->sym_scope
)
576 tcc_error("redeclaration of '%s'",
577 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
582 /* push a global identifier */
583 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
586 s
= sym_push2(&global_stack
, v
, t
, c
);
587 /* don't record anonymous symbol */
588 if (v
< SYM_FIRST_ANOM
) {
589 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
590 /* modify the top most local identifier, so that
591 sym_identifier will point to 's' when popped */
592 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
593 ps
= &(*ps
)->prev_tok
;
600 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
601 pop them yet from the list, but do remove them from the token array. */
602 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
612 /* remove symbol in token array */
614 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
615 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
617 ps
= &ts
->sym_struct
;
619 ps
= &ts
->sym_identifier
;
630 /* ------------------------------------------------------------------------- */
632 static void vsetc(CType
*type
, int r
, CValue
*vc
)
636 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
637 tcc_error("memory full (vstack)");
638 /* cannot let cpu flags if other instruction are generated. Also
639 avoid leaving VT_JMP anywhere except on the top of the stack
640 because it would complicate the code generator.
642 Don't do this when nocode_wanted. vtop might come from
643 !nocode_wanted regions (see 88_codeopt.c) and transforming
644 it to a register without actually generating code is wrong
645 as their value might still be used for real. All values
646 we push under nocode_wanted will eventually be popped
647 again, so that the VT_CMP/VT_JMP value will be in vtop
648 when code is unsuppressed again.
650 Same logic below in vswap(); */
651 if (vtop
>= vstack
&& !nocode_wanted
) {
652 v
= vtop
->r
& VT_VALMASK
;
653 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
665 ST_FUNC
void vswap(void)
668 /* cannot vswap cpu flags. See comment at vsetc() above */
669 if (vtop
>= vstack
&& !nocode_wanted
) {
670 int v
= vtop
->r
& VT_VALMASK
;
671 if (v
== VT_CMP
|| (v
& ~1) == VT_JMP
)
679 /* pop stack value */
680 ST_FUNC
void vpop(void)
683 v
= vtop
->r
& VT_VALMASK
;
684 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
685 /* for x86, we need to pop the FP stack */
687 o(0xd8dd); /* fstp %st(0) */
690 if (v
== VT_JMP
|| v
== VT_JMPI
) {
691 /* need to put correct jump if && or || without test */
697 /* push constant of type "type" with useless value */
698 ST_FUNC
void vpush(CType
*type
)
700 vset(type
, VT_CONST
, 0);
703 /* push integer constant */
704 ST_FUNC
void vpushi(int v
)
708 vsetc(&int_type
, VT_CONST
, &cval
);
711 /* push a pointer sized constant */
712 static void vpushs(addr_t v
)
716 vsetc(&size_type
, VT_CONST
, &cval
);
719 /* push arbitrary 64bit constant */
720 ST_FUNC
void vpush64(int ty
, unsigned long long v
)
727 vsetc(&ctype
, VT_CONST
, &cval
);
730 /* push long long constant */
/* Push a long long constant on the value stack:
   delegates to vpush64() with type VT_LLONG. */
731 static inline void vpushll(long long v
)
733 vpush64(VT_LLONG
, v
);
736 ST_FUNC
void vset(CType
*type
, int r
, int v
)
741 vsetc(type
, r
, &cval
);
744 static void vseti(int r
, int v
)
752 ST_FUNC
void vpushv(SValue
*v
)
754 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
755 tcc_error("memory full (vstack)");
760 static void vdup(void)
765 /* rotate n first stack elements to the bottom
766 I1 ... In -> I2 ... In I1 [top is right]
768 ST_FUNC
void vrotb(int n
)
779 /* rotate the n elements before entry e towards the top
780 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
782 ST_FUNC
void vrote(SValue
*e
, int n
)
788 for(i
= 0;i
< n
- 1; i
++)
793 /* rotate n first stack elements to the top
794 I1 ... In -> In I1 ... I(n-1) [top is right]
796 ST_FUNC
void vrott(int n
)
801 /* push a symbol value of TYPE */
802 static inline void vpushsym(CType
*type
, Sym
*sym
)
806 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
810 /* Return a static symbol pointing to a section */
811 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
817 sym
= global_identifier_push(v
, type
->t
| VT_STATIC
, 0);
818 sym
->type
.ref
= type
->ref
;
819 sym
->r
= VT_CONST
| VT_SYM
;
820 put_extern_sym(sym
, sec
, offset
, size
);
824 /* push a reference to a section offset by adding a dummy symbol */
825 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
827 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
830 /* define a new external reference to a symbol 'v' of type 'u' */
831 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
, int r
)
837 /* push forward reference */
838 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
839 s
->type
.ref
= type
->ref
;
840 s
->r
= r
| VT_CONST
| VT_SYM
;
841 } else if (IS_ASM_SYM(s
)) {
842 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
843 s
->type
.ref
= type
->ref
;
849 /* Merge symbol attributes. */
/* Merge symbol attributes from 'sa1' into 'sa':
   - aligned: taken from sa1 only if sa has no alignment yet;
   - packed / weak / dllexport / nodecorate / dllimport: OR-ed in
     (a flag set on either side stays set);
   - visibility: narrowed to the most restrictive non-default value
     (numerically greater STV_* values are more restrictive).
   NOTE(review): the closing brace of the visibility 'if' block
   (original line 862) is missing from this extraction. */
850 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
852 if (sa1
->aligned
&& !sa
->aligned
)
853 sa
->aligned
= sa1
->aligned
;
854 sa
->packed
|= sa1
->packed
;
855 sa
->weak
|= sa1
->weak
;
856 if (sa1
->visibility
!= STV_DEFAULT
) {
/* keep the more restrictive (numerically smaller) visibility */
857 int vis
= sa
->visibility
;
858 if (vis
== STV_DEFAULT
859 || vis
> sa1
->visibility
)
860 vis
= sa1
->visibility
;
861 sa
->visibility
= vis
;
863 sa
->dllexport
|= sa1
->dllexport
;
864 sa
->nodecorate
|= sa1
->nodecorate
;
865 sa
->dllimport
|= sa1
->dllimport
;
868 /* Merge function attributes. */
/* Merge function attributes from 'fa1' into 'fa'.  Each of
   func_call (calling convention), func_type (prototype kind) and
   func_args (argument count) is copied from fa1 only when fa does
   not already have a value -- existing attributes win. */
869 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
871 if (fa1
->func_call
&& !fa
->func_call
)
872 fa
->func_call
= fa1
->func_call
;
873 if (fa1
->func_type
&& !fa
->func_type
)
874 fa
->func_type
= fa1
->func_type
;
875 if (fa1
->func_args
&& !fa
->func_args
)
876 fa
->func_args
= fa1
->func_args
;
879 /* Merge attributes. */
880 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
882 merge_symattr(&ad
->a
, &ad1
->a
);
883 merge_funcattr(&ad
->f
, &ad1
->f
);
886 ad
->section
= ad1
->section
;
887 if (ad1
->alias_target
)
888 ad
->alias_target
= ad1
->alias_target
;
890 ad
->asm_label
= ad1
->asm_label
;
892 ad
->attr_mode
= ad1
->attr_mode
;
895 /* Merge some type attributes. */
896 static void patch_type(Sym
*sym
, CType
*type
)
898 if (!(type
->t
& VT_EXTERN
)) {
899 if (!(sym
->type
.t
& VT_EXTERN
))
900 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
901 sym
->type
.t
&= ~VT_EXTERN
;
904 if (IS_ASM_SYM(sym
)) {
905 /* stay static if both are static */
906 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
907 sym
->type
.ref
= type
->ref
;
910 if (!is_compatible_types(&sym
->type
, type
)) {
911 tcc_error("incompatible types for redefinition of '%s'",
912 get_tok_str(sym
->v
, NULL
));
914 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
915 int static_proto
= sym
->type
.t
& VT_STATIC
;
916 /* warn if static follows non-static function declaration */
917 if ((type
->t
& VT_STATIC
) && !static_proto
&& !(type
->t
& VT_INLINE
))
918 tcc_warning("static storage ignored for redefinition of '%s'",
919 get_tok_str(sym
->v
, NULL
));
921 if (0 == (type
->t
& VT_EXTERN
)) {
922 /* put complete type, use static from prototype */
923 sym
->type
.t
= (type
->t
& ~VT_STATIC
) | static_proto
;
924 if (type
->t
& VT_INLINE
)
925 sym
->type
.t
= type
->t
;
926 sym
->type
.ref
= type
->ref
;
930 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
931 /* set array size if it was omitted in extern declaration */
932 if (sym
->type
.ref
->c
< 0)
933 sym
->type
.ref
->c
= type
->ref
->c
;
934 else if (sym
->type
.ref
->c
!= type
->ref
->c
)
935 tcc_error("conflicting type for '%s'", get_tok_str(sym
->v
, NULL
));
937 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
938 tcc_warning("storage mismatch for redefinition of '%s'",
939 get_tok_str(sym
->v
, NULL
));
944 /* Merge some storage attributes. */
945 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
948 patch_type(sym
, type
);
951 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
952 tcc_error("incompatible dll linkage for redefinition of '%s'",
953 get_tok_str(sym
->v
, NULL
));
955 merge_symattr(&sym
->a
, &ad
->a
);
957 sym
->asm_label
= ad
->asm_label
;
961 /* define a new external reference to a symbol 'v' */
962 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
967 /* push forward reference */
968 s
= sym_push(v
, type
, r
| VT_CONST
| VT_SYM
, 0);
969 s
->type
.t
|= VT_EXTERN
;
973 if (s
->type
.ref
== func_old_type
.ref
) {
974 s
->type
.ref
= type
->ref
;
975 s
->r
= r
| VT_CONST
| VT_SYM
;
976 s
->type
.t
|= VT_EXTERN
;
978 patch_storage(s
, ad
, type
);
983 /* push a reference to global symbol v */
984 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
986 vpushsym(type
, external_global_sym(v
, type
, 0));
989 /* save registers up to (vtop - n) stack entry */
990 ST_FUNC
void save_regs(int n
)
993 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
997 /* save r to the memory stack, and mark it as being free */
/* Save register 'r' to the memory stack and mark it free:
   wrapper around save_reg_upstack() scanning the whole value stack
   (n = 0). */
998 ST_FUNC
void save_reg(int r
)
1000 save_reg_upstack(r
, 0);
1003 /* save r to the memory stack, and mark it as being free,
1004 if seen up to (vtop - n) stack entry */
1005 ST_FUNC
void save_reg_upstack(int r
, int n
)
1007 int l
, saved
, size
, align
;
1011 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1016 /* modify all stack values */
1019 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1020 if ((p
->r
& VT_VALMASK
) == r
||
1021 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& (p
->r2
& VT_VALMASK
) == r
)) {
1022 /* must save value on stack if not already done */
1024 /* NOTE: must reload 'r' because r might be equal to r2 */
1025 r
= p
->r
& VT_VALMASK
;
1026 /* store register in the stack */
1028 if ((p
->r
& VT_LVAL
) ||
1029 (!is_float(type
->t
) && (type
->t
& VT_BTYPE
) != VT_LLONG
))
1031 type
= &char_pointer_type
;
1035 size
= type_size(type
, &align
);
1036 loc
= (loc
- size
) & -align
;
1037 sv
.type
.t
= type
->t
;
1038 sv
.r
= VT_LOCAL
| VT_LVAL
;
1041 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1042 /* x86 specific: need to pop fp register ST0 if saved */
1043 if (r
== TREG_ST0
) {
1044 o(0xd8dd); /* fstp %st(0) */
1048 /* special long long case */
1049 if ((type
->t
& VT_BTYPE
) == VT_LLONG
) {
1057 /* mark that stack entry as being saved on the stack */
1058 if (p
->r
& VT_LVAL
) {
1059 /* also clear the bounded flag because the
1060 relocation address of the function was stored in
1062 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1064 p
->r
= lvalue_type(p
->type
.t
) | VT_LOCAL
;
1072 #ifdef TCC_TARGET_ARM
1073 /* find a register of class 'rc2' with at most one reference on stack.
1074 * If none, call get_reg(rc) */
1075 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1080 for(r
=0;r
<NB_REGS
;r
++) {
1081 if (reg_classes
[r
] & rc2
) {
1084 for(p
= vstack
; p
<= vtop
; p
++) {
1085 if ((p
->r
& VT_VALMASK
) == r
||
1086 (p
->r2
& VT_VALMASK
) == r
)
1097 /* find a free register of class 'rc'. If none, save one register */
1098 ST_FUNC
int get_reg(int rc
)
1103 /* find a free register */
1104 for(r
=0;r
<NB_REGS
;r
++) {
1105 if (reg_classes
[r
] & rc
) {
1108 for(p
=vstack
;p
<=vtop
;p
++) {
1109 if ((p
->r
& VT_VALMASK
) == r
||
1110 (p
->r2
& VT_VALMASK
) == r
)
1118 /* no register left : free the first one on the stack (VERY
1119 IMPORTANT to start from the bottom to ensure that we don't
1120 spill registers used in gen_opi()) */
1121 for(p
=vstack
;p
<=vtop
;p
++) {
1122 /* look at second register (if long long) */
1123 r
= p
->r2
& VT_VALMASK
;
1124 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1126 r
= p
->r
& VT_VALMASK
;
1127 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1133 /* Should never comes here */
1137 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1139 static void move_reg(int r
, int s
, int t
)
1153 /* get address of vtop (vtop MUST BE an lvalue) */
1154 ST_FUNC
void gaddrof(void)
1156 vtop
->r
&= ~VT_LVAL
;
1157 /* tricky: if saved lvalue, then we can go back to lvalue */
1158 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1159 vtop
->r
= (vtop
->r
& ~(VT_VALMASK
| VT_LVAL_TYPE
)) | VT_LOCAL
| VT_LVAL
;
1164 #ifdef CONFIG_TCC_BCHECK
1165 /* generate lvalue bound code */
1166 static void gbound(void)
1171 vtop
->r
&= ~VT_MUSTBOUND
;
1172 /* if lvalue, then use checking code before dereferencing */
1173 if (vtop
->r
& VT_LVAL
) {
1174 /* if not VT_BOUNDED value, then make one */
1175 if (!(vtop
->r
& VT_BOUNDED
)) {
1176 lval_type
= vtop
->r
& (VT_LVAL_TYPE
| VT_LVAL
);
1177 /* must save type because we must set it to int to get pointer */
1179 vtop
->type
.t
= VT_PTR
;
1182 gen_bounded_ptr_add();
1183 vtop
->r
|= lval_type
;
1186 /* then check for dereferencing */
1187 gen_bounded_ptr_deref();
1192 static void incr_bf_adr(int o
)
1194 vtop
->type
= char_pointer_type
;
1198 vtop
->type
.t
= (vtop
->type
.t
& ~(VT_BTYPE
|VT_DEFSIGN
))
1199 | (VT_BYTE
|VT_UNSIGNED
);
1200 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
1201 | (VT_LVAL_BYTE
|VT_LVAL_UNSIGNED
|VT_LVAL
);
1204 /* single-byte load mode for packed or otherwise unaligned bitfields */
1205 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1208 save_reg_upstack(vtop
->r
, 1);
1209 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1210 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1219 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1221 vpushi((1 << n
) - 1), gen_op('&');
1224 vpushi(bits
), gen_op(TOK_SHL
);
1227 bits
+= n
, bit_size
-= n
, o
= 1;
1230 if (!(type
->t
& VT_UNSIGNED
)) {
1231 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1232 vpushi(n
), gen_op(TOK_SHL
);
1233 vpushi(n
), gen_op(TOK_SAR
);
1237 /* single-byte store mode for packed or otherwise unaligned bitfields */
1238 static void store_packed_bf(int bit_pos
, int bit_size
)
1240 int bits
, n
, o
, m
, c
;
1242 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1244 save_reg_upstack(vtop
->r
, 1);
1245 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1247 incr_bf_adr(o
); // X B
1249 c
? vdup() : gv_dup(); // B V X
1252 vpushi(bits
), gen_op(TOK_SHR
);
1254 vpushi(bit_pos
), gen_op(TOK_SHL
);
1259 m
= ((1 << n
) - 1) << bit_pos
;
1260 vpushi(m
), gen_op('&'); // X B V1
1261 vpushv(vtop
-1); // X B V1 B
1262 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1263 gen_op('&'); // X B V1 B1
1264 gen_op('|'); // X B V2
1266 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1267 vstore(), vpop(); // X B
1268 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1273 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1276 if (0 == sv
->type
.ref
)
1278 t
= sv
->type
.ref
->auxtype
;
1279 if (t
!= -1 && t
!= VT_STRUCT
) {
1280 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
1281 sv
->r
= (sv
->r
& ~VT_LVAL_TYPE
) | lvalue_type(sv
->type
.t
);
1286 /* store vtop a register belonging to class 'rc'. lvalues are
1287 converted to values. Cannot be used if cannot be converted to
1288 register value (such as structures). */
1289 ST_FUNC
int gv(int rc
)
1291 int r
, bit_pos
, bit_size
, size
, align
, rc2
;
1293 /* NOTE: get_reg can modify vstack[] */
1294 if (vtop
->type
.t
& VT_BITFIELD
) {
1297 bit_pos
= BIT_POS(vtop
->type
.t
);
1298 bit_size
= BIT_SIZE(vtop
->type
.t
);
1299 /* remove bit field info to avoid loops */
1300 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1303 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1304 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1305 type
.t
|= VT_UNSIGNED
;
1307 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1309 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1314 if (r
== VT_STRUCT
) {
1315 load_packed_bf(&type
, bit_pos
, bit_size
);
1317 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1318 /* cast to int to propagate signedness in following ops */
1320 /* generate shifts */
1321 vpushi(bits
- (bit_pos
+ bit_size
));
1323 vpushi(bits
- bit_size
);
1324 /* NOTE: transformed to SHR if unsigned */
1329 if (is_float(vtop
->type
.t
) &&
1330 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1331 unsigned long offset
;
1332 /* CPUs usually cannot use float constants, so we store them
1333 generically in data segment */
1334 size
= type_size(&vtop
->type
, &align
);
1336 size
= 0, align
= 1;
1337 offset
= section_add(data_section
, size
, align
);
1338 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
1340 init_putv(&vtop
->type
, data_section
, offset
);
1343 #ifdef CONFIG_TCC_BCHECK
1344 if (vtop
->r
& VT_MUSTBOUND
)
1348 r
= vtop
->r
& VT_VALMASK
;
1349 rc2
= (rc
& RC_FLOAT
) ? RC_FLOAT
: RC_INT
;
1350 #ifndef TCC_TARGET_ARM64
1353 #ifdef TCC_TARGET_X86_64
1354 else if (rc
== RC_FRET
)
1358 /* need to reload if:
1360 - lvalue (need to dereference pointer)
1361 - already a register, but not in the right class */
1363 || (vtop
->r
& VT_LVAL
)
1364 || !(reg_classes
[r
] & rc
)
1366 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1367 || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
&& !(reg_classes
[vtop
->r2
] & rc2
))
1369 || ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
&& !(reg_classes
[vtop
->r2
] & rc2
))
1375 if (((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) || ((vtop
->type
.t
& VT_BTYPE
) == VT_QFLOAT
)) {
1376 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
1378 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
) {
1379 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
1380 unsigned long long ll
;
1382 int r2
, original_type
;
1383 original_type
= vtop
->type
.t
;
1384 /* two register type load : expand to two words
1387 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1390 vtop
->c
.i
= ll
; /* first word */
1392 vtop
->r
= r
; /* save register value */
1393 vpushi(ll
>> 32); /* second word */
1396 if (vtop
->r
& VT_LVAL
) {
1397 /* We do not want to modifier the long long
1398 pointer here, so the safest (and less
1399 efficient) is to save all the other registers
1400 in the stack. XXX: totally inefficient. */
1404 /* lvalue_save: save only if used further down the stack */
1405 save_reg_upstack(vtop
->r
, 1);
1407 /* load from memory */
1408 vtop
->type
.t
= load_type
;
1411 vtop
[-1].r
= r
; /* save register value */
1412 /* increment pointer to get second word */
1413 vtop
->type
.t
= addr_type
;
1418 vtop
->type
.t
= load_type
;
1420 /* move registers */
1423 vtop
[-1].r
= r
; /* save register value */
1424 vtop
->r
= vtop
[-1].r2
;
1426 /* Allocate second register. Here we rely on the fact that
1427 get_reg() tries first to free r2 of an SValue. */
1431 /* write second register */
1433 vtop
->type
.t
= original_type
;
1434 } else if ((vtop
->r
& VT_LVAL
) && !is_float(vtop
->type
.t
)) {
1436 /* lvalue of scalar type : need to use lvalue type
1437 because of possible cast */
1440 /* compute memory access type */
1441 if (vtop
->r
& VT_LVAL_BYTE
)
1443 else if (vtop
->r
& VT_LVAL_SHORT
)
1445 if (vtop
->r
& VT_LVAL_UNSIGNED
)
1449 /* restore wanted type */
1452 /* one register type load */
1457 #ifdef TCC_TARGET_C67
1458 /* uses register pairs for doubles */
1459 if ((vtop
->type
.t
& VT_BTYPE
) == VT_DOUBLE
)
1466 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1467 ST_FUNC
void gv2(int rc1
, int rc2
)
1471 /* generate more generic register first. But VT_JMP or VT_CMP
1472 values must be generated first in all cases to avoid possible
1474 v
= vtop
[0].r
& VT_VALMASK
;
1475 if (v
!= VT_CMP
&& (v
& ~1) != VT_JMP
&& rc1
<= rc2
) {
1480 /* test if reload is needed for first register */
1481 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1491 /* test if reload is needed for first register */
1492 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1498 #ifndef TCC_TARGET_ARM64
1499 /* wrapper around RC_FRET to return a register by type */
1500 static int rc_fret(int t
)
1502 #ifdef TCC_TARGET_X86_64
1503 if (t
== VT_LDOUBLE
) {
1511 /* wrapper around REG_FRET to return a register by type */
1512 static int reg_fret(int t
)
1514 #ifdef TCC_TARGET_X86_64
1515 if (t
== VT_LDOUBLE
) {
1523 /* expand 64bit on stack in two ints */
1524 static void lexpand(void)
1527 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1528 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1529 if (v
== VT_CONST
) {
1532 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1538 vtop
[0].r
= vtop
[-1].r2
;
1539 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
1541 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
1545 #ifdef TCC_TARGET_ARM
1546 /* expand long long on stack */
1547 ST_FUNC
void lexpand_nr(void)
1551 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1553 vtop
->r2
= VT_CONST
;
1554 vtop
->type
.t
= VT_INT
| u
;
1555 v
=vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
);
1556 if (v
== VT_CONST
) {
1557 vtop
[-1].c
.i
= vtop
->c
.i
;
1558 vtop
->c
.i
= vtop
->c
.i
>> 32;
1560 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1562 vtop
->r
= vtop
[-1].r
;
1563 } else if (v
> VT_CONST
) {
1567 vtop
->r
= vtop
[-1].r2
;
1568 vtop
[-1].r2
= VT_CONST
;
1569 vtop
[-1].type
.t
= VT_INT
| u
;
1574 /* build a long long from two ints */
1575 static void lbuild(int t
)
1577 gv2(RC_INT
, RC_INT
);
1578 vtop
[-1].r2
= vtop
[0].r
;
1579 vtop
[-1].type
.t
= t
;
1584 /* convert stack entry to register and duplicate its value in another
1586 static void gv_dup(void)
1593 if ((t
& VT_BTYPE
) == VT_LLONG
) {
1594 if (t
& VT_BITFIELD
) {
1604 /* stack: H L L1 H1 */
1614 /* duplicate value */
1619 #ifdef TCC_TARGET_X86_64
1620 if ((t
& VT_BTYPE
) == VT_LDOUBLE
) {
1630 load(r1
, &sv
); /* move r to r1 */
1632 /* duplicates value */
1638 /* Generate value test
1640 * Generate a test for any value (jump, comparison and integers) */
1641 ST_FUNC
int gvtst(int inv
, int t
)
1643 int v
= vtop
->r
& VT_VALMASK
;
1644 if (v
!= VT_CMP
&& v
!= VT_JMP
&& v
!= VT_JMPI
) {
1648 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1649 /* constant jmp optimization */
1650 if ((vtop
->c
.i
!= 0) != inv
)
1655 return gtst(inv
, t
);
1659 /* generate CPU independent (unsigned) long long operations */
1660 static void gen_opl(int op
)
1662 int t
, a
, b
, op1
, c
, i
;
1664 unsigned short reg_iret
= REG_IRET
;
1665 unsigned short reg_lret
= REG_LRET
;
1671 func
= TOK___divdi3
;
1674 func
= TOK___udivdi3
;
1677 func
= TOK___moddi3
;
1680 func
= TOK___umoddi3
;
1687 /* call generic long long function */
1688 vpush_global_sym(&func_old_type
, func
);
1693 vtop
->r2
= reg_lret
;
1701 //pv("gen_opl A",0,2);
1707 /* stack: L1 H1 L2 H2 */
1712 vtop
[-2] = vtop
[-3];
1715 /* stack: H1 H2 L1 L2 */
1716 //pv("gen_opl B",0,4);
1722 /* stack: H1 H2 L1 L2 ML MH */
1725 /* stack: ML MH H1 H2 L1 L2 */
1729 /* stack: ML MH H1 L2 H2 L1 */
1734 /* stack: ML MH M1 M2 */
1737 } else if (op
== '+' || op
== '-') {
1738 /* XXX: add non carry method too (for MIPS or alpha) */
1744 /* stack: H1 H2 (L1 op L2) */
1747 gen_op(op1
+ 1); /* TOK_xxxC2 */
1750 /* stack: H1 H2 (L1 op L2) */
1753 /* stack: (L1 op L2) H1 H2 */
1755 /* stack: (L1 op L2) (H1 op H2) */
1763 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
1764 t
= vtop
[-1].type
.t
;
1768 /* stack: L H shift */
1770 /* constant: simpler */
1771 /* NOTE: all comments are for SHL. the other cases are
1772 done by swapping words */
1783 if (op
!= TOK_SAR
) {
1816 /* XXX: should provide a faster fallback on x86 ? */
1819 func
= TOK___ashrdi3
;
1822 func
= TOK___lshrdi3
;
1825 func
= TOK___ashldi3
;
1831 /* compare operations */
1837 /* stack: L1 H1 L2 H2 */
1839 vtop
[-1] = vtop
[-2];
1841 /* stack: L1 L2 H1 H2 */
1844 /* when values are equal, we need to compare low words. since
1845 the jump is inverted, we invert the test too. */
1848 else if (op1
== TOK_GT
)
1850 else if (op1
== TOK_ULT
)
1852 else if (op1
== TOK_UGT
)
1862 /* generate non equal test */
1868 /* compare low. Always unsigned */
1872 else if (op1
== TOK_LE
)
1874 else if (op1
== TOK_GT
)
1876 else if (op1
== TOK_GE
)
1887 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
1889 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
1890 return (a
^ b
) >> 63 ? -x
: x
;
1893 static int gen_opic_lt(uint64_t a
, uint64_t b
)
1895 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
1898 /* handle integer constant optimizations and various machine
1900 static void gen_opic(int op
)
1902 SValue
*v1
= vtop
- 1;
1904 int t1
= v1
->type
.t
& VT_BTYPE
;
1905 int t2
= v2
->type
.t
& VT_BTYPE
;
1906 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1907 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1908 uint64_t l1
= c1
? v1
->c
.i
: 0;
1909 uint64_t l2
= c2
? v2
->c
.i
: 0;
1910 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
1912 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1913 l1
= ((uint32_t)l1
|
1914 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1915 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
1916 l2
= ((uint32_t)l2
|
1917 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
1921 case '+': l1
+= l2
; break;
1922 case '-': l1
-= l2
; break;
1923 case '&': l1
&= l2
; break;
1924 case '^': l1
^= l2
; break;
1925 case '|': l1
|= l2
; break;
1926 case '*': l1
*= l2
; break;
1933 /* if division by zero, generate explicit division */
1936 tcc_error("division by zero in constant");
1940 default: l1
= gen_opic_sdiv(l1
, l2
); break;
1941 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
1942 case TOK_UDIV
: l1
= l1
/ l2
; break;
1943 case TOK_UMOD
: l1
= l1
% l2
; break;
1946 case TOK_SHL
: l1
<<= (l2
& shm
); break;
1947 case TOK_SHR
: l1
>>= (l2
& shm
); break;
1949 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
1952 case TOK_ULT
: l1
= l1
< l2
; break;
1953 case TOK_UGE
: l1
= l1
>= l2
; break;
1954 case TOK_EQ
: l1
= l1
== l2
; break;
1955 case TOK_NE
: l1
= l1
!= l2
; break;
1956 case TOK_ULE
: l1
= l1
<= l2
; break;
1957 case TOK_UGT
: l1
= l1
> l2
; break;
1958 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
1959 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
1960 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
1961 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
1963 case TOK_LAND
: l1
= l1
&& l2
; break;
1964 case TOK_LOR
: l1
= l1
|| l2
; break;
1968 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
1969 l1
= ((uint32_t)l1
|
1970 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
1974 /* if commutative ops, put c2 as constant */
1975 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
1976 op
== '|' || op
== '*')) {
1978 c2
= c1
; //c = c1, c1 = c2, c2 = c;
1979 l2
= l1
; //l = l1, l1 = l2, l2 = l;
1981 if (!const_wanted
&&
1983 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
1984 (l1
== -1 && op
== TOK_SAR
))) {
1985 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
1987 } else if (!const_wanted
&&
1988 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
1990 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
1991 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
1992 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
1997 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2000 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2001 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2004 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2005 /* filter out NOP operations like x*1, x-0, x&-1... */
2007 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2008 /* try to use shifts instead of muls or divs */
2009 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2018 else if (op
== TOK_PDIV
)
2024 } else if (c2
&& (op
== '+' || op
== '-') &&
2025 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2026 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2027 /* symbol + constant case */
2031 /* The backends can't always deal with addends to symbols
2032 larger than +-1<<31. Don't construct such. */
2039 /* call low level op generator */
2040 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2041 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2049 /* generate a floating point operation with constant propagation */
2050 static void gen_opif(int op
)
2054 #if defined _MSC_VER && defined _AMD64_
2055 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2062 /* currently, we cannot do computations with forward symbols */
2063 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2064 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2066 if (v1
->type
.t
== VT_FLOAT
) {
2069 } else if (v1
->type
.t
== VT_DOUBLE
) {
2077 /* NOTE: we only do constant propagation if finite number (not
2078 NaN or infinity) (ANSI spec) */
2079 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2083 case '+': f1
+= f2
; break;
2084 case '-': f1
-= f2
; break;
2085 case '*': f1
*= f2
; break;
2088 /* If not in initializer we need to potentially generate
2089 FP exceptions at runtime, otherwise we want to fold. */
2095 /* XXX: also handles tests ? */
2099 /* XXX: overflow test ? */
2100 if (v1
->type
.t
== VT_FLOAT
) {
2102 } else if (v1
->type
.t
== VT_DOUBLE
) {
2114 static int pointed_size(CType
*type
)
2117 return type_size(pointed_type(type
), &align
);
2120 static void vla_runtime_pointed_size(CType
*type
)
2123 vla_runtime_type_size(pointed_type(type
), &align
);
2126 static inline int is_null_pointer(SValue
*p
)
2128 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2130 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2131 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2132 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2133 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2134 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2135 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2138 static inline int is_integer_btype(int bt
)
2140 return (bt
== VT_BYTE
|| bt
== VT_SHORT
||
2141 bt
== VT_INT
|| bt
== VT_LLONG
);
2144 /* check types for comparison or subtraction of pointers */
2145 static void check_comparison_pointer_types(SValue
*p1
, SValue
*p2
, int op
)
2147 CType
*type1
, *type2
, tmp_type1
, tmp_type2
;
2150 /* null pointers are accepted for all comparisons as gcc */
2151 if (is_null_pointer(p1
) || is_null_pointer(p2
))
2155 bt1
= type1
->t
& VT_BTYPE
;
2156 bt2
= type2
->t
& VT_BTYPE
;
2157 /* accept comparison between pointer and integer with a warning */
2158 if ((is_integer_btype(bt1
) || is_integer_btype(bt2
)) && op
!= '-') {
2159 if (op
!= TOK_LOR
&& op
!= TOK_LAND
)
2160 tcc_warning("comparison between pointer and integer");
2164 /* both must be pointers or implicit function pointers */
2165 if (bt1
== VT_PTR
) {
2166 type1
= pointed_type(type1
);
2167 } else if (bt1
!= VT_FUNC
)
2168 goto invalid_operands
;
2170 if (bt2
== VT_PTR
) {
2171 type2
= pointed_type(type2
);
2172 } else if (bt2
!= VT_FUNC
) {
2174 tcc_error("invalid operands to binary %s", get_tok_str(op
, NULL
));
2176 if ((type1
->t
& VT_BTYPE
) == VT_VOID
||
2177 (type2
->t
& VT_BTYPE
) == VT_VOID
)
2181 tmp_type1
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2182 tmp_type2
.t
&= ~(VT_DEFSIGN
| VT_UNSIGNED
| VT_CONSTANT
| VT_VOLATILE
);
2183 if (!is_compatible_types(&tmp_type1
, &tmp_type2
)) {
2184 /* gcc-like error if '-' is used */
2186 goto invalid_operands
;
2188 tcc_warning("comparison of distinct pointer types lacks a cast");
2192 /* generic gen_op: handles types problems */
2193 ST_FUNC
void gen_op(int op
)
2195 int u
, t1
, t2
, bt1
, bt2
, t
;
2199 t1
= vtop
[-1].type
.t
;
2200 t2
= vtop
[0].type
.t
;
2201 bt1
= t1
& VT_BTYPE
;
2202 bt2
= t2
& VT_BTYPE
;
2204 if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2205 tcc_error("operation on a struct");
2206 } else if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2207 if (bt2
== VT_FUNC
) {
2208 mk_pointer(&vtop
->type
);
2211 if (bt1
== VT_FUNC
) {
2213 mk_pointer(&vtop
->type
);
2218 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2219 /* at least one operand is a pointer */
2220 /* relational op: must be both pointers */
2221 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
2222 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2223 /* pointers are handled are unsigned */
2225 t
= VT_LLONG
| VT_UNSIGNED
;
2227 t
= VT_INT
| VT_UNSIGNED
;
2231 /* if both pointers, then it must be the '-' op */
2232 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2234 tcc_error("cannot use pointers here");
2235 check_comparison_pointer_types(vtop
- 1, vtop
, op
);
2236 /* XXX: check that types are compatible */
2237 if (vtop
[-1].type
.t
& VT_VLA
) {
2238 vla_runtime_pointed_size(&vtop
[-1].type
);
2240 vpushi(pointed_size(&vtop
[-1].type
));
2244 vtop
->type
.t
= ptrdiff_type
.t
;
2248 /* exactly one pointer : must be '+' or '-'. */
2249 if (op
!= '-' && op
!= '+')
2250 tcc_error("cannot use pointers here");
2251 /* Put pointer as first operand */
2252 if (bt2
== VT_PTR
) {
2254 t
= t1
, t1
= t2
, t2
= t
;
2257 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
2258 /* XXX: truncate here because gen_opl can't handle ptr + long long */
2261 type1
= vtop
[-1].type
;
2262 type1
.t
&= ~VT_ARRAY
;
2263 if (vtop
[-1].type
.t
& VT_VLA
)
2264 vla_runtime_pointed_size(&vtop
[-1].type
);
2266 u
= pointed_size(&vtop
[-1].type
);
2268 tcc_error("unknown array element size");
2272 /* XXX: cast to int ? (long long case) */
2278 /* #ifdef CONFIG_TCC_BCHECK
2279 The main reason to removing this code:
2286 fprintf(stderr, "v+i-j = %p\n", v+i-j);
2287 fprintf(stderr, "v+(i-j) = %p\n", v+(i-j));
2289 When this code is on. then the output looks like
2291 v+(i-j) = 0xbff84000
2293 /* if evaluating constant expression, no code should be
2294 generated, so no bound check */
2295 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
2296 /* if bounded pointers, we generate a special code to
2303 gen_bounded_ptr_add();
2309 /* put again type if gen_opic() swaped operands */
2312 } else if (is_float(bt1
) || is_float(bt2
)) {
2313 /* compute bigger type and do implicit casts */
2314 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2316 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2321 /* floats can only be used for a few operations */
2322 if (op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/' &&
2323 (op
< TOK_ULT
|| op
> TOK_GT
))
2324 tcc_error("invalid operands for binary operation");
2326 } else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
2327 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
2328 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
2330 t
|= (VT_LONG
& t1
);
2332 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2333 /* cast to biggest op */
2334 t
= VT_LLONG
| VT_LONG
;
2335 if (bt1
== VT_LLONG
)
2337 if (bt2
== VT_LLONG
)
2339 /* convert to unsigned if it does not fit in a long long */
2340 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2341 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2345 /* integer operations */
2346 t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2347 /* convert to unsigned if it does not fit in an integer */
2348 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2349 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2352 /* XXX: currently, some unsigned operations are explicit, so
2353 we modify them here */
2354 if (t
& VT_UNSIGNED
) {
2361 else if (op
== TOK_LT
)
2363 else if (op
== TOK_GT
)
2365 else if (op
== TOK_LE
)
2367 else if (op
== TOK_GE
)
2375 /* special case for shifts and long long: we keep the shift as
2377 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
2384 if (op
>= TOK_ULT
&& op
<= TOK_GT
) {
2385 /* relational op: the result is an int */
2386 vtop
->type
.t
= VT_INT
;
2391 // Make sure that we have converted to an rvalue:
2392 if (vtop
->r
& VT_LVAL
)
2393 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
2396 #ifndef TCC_TARGET_ARM
2397 /* generic itof for unsigned long long case */
2398 static void gen_cvt_itof1(int t
)
2400 #ifdef TCC_TARGET_ARM64
2403 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
2404 (VT_LLONG
| VT_UNSIGNED
)) {
2407 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
2408 #if LDOUBLE_SIZE != 8
2409 else if (t
== VT_LDOUBLE
)
2410 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
2413 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
2417 vtop
->r
= reg_fret(t
);
2425 /* generic ftoi for unsigned long long case */
2426 static void gen_cvt_ftoi1(int t
)
2428 #ifdef TCC_TARGET_ARM64
2433 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
2434 /* not handled natively */
2435 st
= vtop
->type
.t
& VT_BTYPE
;
2437 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
2438 #if LDOUBLE_SIZE != 8
2439 else if (st
== VT_LDOUBLE
)
2440 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
2443 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
2448 vtop
->r2
= REG_LRET
;
2455 /* force char or short cast */
2456 static void force_charshort_cast(int t
)
2460 /* cannot cast static initializers */
2461 if (STATIC_DATA_WANTED
)
2465 /* XXX: add optimization if lvalue : just change type and offset */
2470 if (t
& VT_UNSIGNED
) {
2471 vpushi((1 << bits
) - 1);
2474 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2480 /* result must be signed or the SAR is converted to an SHL
2481 This was not the case when "t" was a signed short
2482 and the last value on the stack was an unsigned int */
2483 vtop
->type
.t
&= ~VT_UNSIGNED
;
2489 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
2490 static void gen_cast_s(int t
)
2498 static void gen_cast(CType
*type
)
2500 int sbt
, dbt
, sf
, df
, c
, p
;
2502 /* special delayed cast for char/short */
2503 /* XXX: in some cases (multiple cascaded casts), it may still
2505 if (vtop
->r
& VT_MUSTCAST
) {
2506 vtop
->r
&= ~VT_MUSTCAST
;
2507 force_charshort_cast(vtop
->type
.t
);
2510 /* bitfields first get cast to ints */
2511 if (vtop
->type
.t
& VT_BITFIELD
) {
2515 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
2516 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
2521 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2522 p
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
);
2523 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
2524 c
&= dbt
!= VT_LDOUBLE
;
2527 /* constant case: we can do it now */
2528 /* XXX: in ISOC, cannot do it if error in convert */
2529 if (sbt
== VT_FLOAT
)
2530 vtop
->c
.ld
= vtop
->c
.f
;
2531 else if (sbt
== VT_DOUBLE
)
2532 vtop
->c
.ld
= vtop
->c
.d
;
2535 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2536 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
2537 vtop
->c
.ld
= vtop
->c
.i
;
2539 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
2541 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
2542 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
2544 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
2547 if (dbt
== VT_FLOAT
)
2548 vtop
->c
.f
= (float)vtop
->c
.ld
;
2549 else if (dbt
== VT_DOUBLE
)
2550 vtop
->c
.d
= (double)vtop
->c
.ld
;
2551 } else if (sf
&& dbt
== (VT_LLONG
|VT_UNSIGNED
)) {
2552 vtop
->c
.i
= vtop
->c
.ld
;
2553 } else if (sf
&& dbt
== VT_BOOL
) {
2554 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
2557 vtop
->c
.i
= vtop
->c
.ld
;
2558 else if (sbt
== (VT_LLONG
|VT_UNSIGNED
))
2560 else if (sbt
& VT_UNSIGNED
)
2561 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
2563 else if (sbt
== VT_PTR
)
2566 else if (sbt
!= VT_LLONG
)
2567 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
|
2568 -(vtop
->c
.i
& 0x80000000));
2570 if (dbt
== (VT_LLONG
|VT_UNSIGNED
))
2572 else if (dbt
== VT_BOOL
)
2573 vtop
->c
.i
= (vtop
->c
.i
!= 0);
2575 else if (dbt
== VT_PTR
)
2578 else if (dbt
!= VT_LLONG
) {
2579 uint32_t m
= ((dbt
& VT_BTYPE
) == VT_BYTE
? 0xff :
2580 (dbt
& VT_BTYPE
) == VT_SHORT
? 0xffff :
2583 if (!(dbt
& VT_UNSIGNED
))
2584 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
2587 } else if (p
&& dbt
== VT_BOOL
) {
2591 /* non constant case: generate code */
2593 /* convert from fp to fp */
2596 /* convert int to fp */
2599 /* convert fp to int */
2600 if (dbt
== VT_BOOL
) {
2604 /* we handle char/short/etc... with generic code */
2605 if (dbt
!= (VT_INT
| VT_UNSIGNED
) &&
2606 dbt
!= (VT_LLONG
| VT_UNSIGNED
) &&
2610 if (dbt
== VT_INT
&& (type
->t
& (VT_BTYPE
| VT_UNSIGNED
)) != dbt
) {
2611 /* additional cast for char/short... */
2617 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
) {
2618 if ((sbt
& VT_BTYPE
) != VT_LLONG
) {
2619 /* scalar to long long */
2620 /* machine independent conversion */
2622 /* generate high word */
2623 if (sbt
== (VT_INT
| VT_UNSIGNED
)) {
2627 if (sbt
== VT_PTR
) {
2628 /* cast from pointer to int before we apply
2629 shift operation, which pointers don't support*/
2636 /* patch second register */
2637 vtop
[-1].r2
= vtop
->r
;
2641 } else if ((dbt
& VT_BTYPE
) == VT_LLONG
||
2642 (dbt
& VT_BTYPE
) == VT_PTR
||
2643 (dbt
& VT_BTYPE
) == VT_FUNC
) {
2644 if ((sbt
& VT_BTYPE
) != VT_LLONG
&&
2645 (sbt
& VT_BTYPE
) != VT_PTR
&&
2646 (sbt
& VT_BTYPE
) != VT_FUNC
) {
2647 /* need to convert from 32bit to 64bit */
2649 if (sbt
!= (VT_INT
| VT_UNSIGNED
)) {
2650 #if defined(TCC_TARGET_ARM64)
2652 #elif defined(TCC_TARGET_X86_64)
2654 /* x86_64 specific: movslq */
2656 o(0xc0 + (REG_VALUE(r
) << 3) + REG_VALUE(r
));
2663 } else if (dbt
== VT_BOOL
) {
2664 /* scalar to bool */
2667 } else if ((dbt
& VT_BTYPE
) == VT_BYTE
||
2668 (dbt
& VT_BTYPE
) == VT_SHORT
) {
2669 if (sbt
== VT_PTR
) {
2670 vtop
->type
.t
= VT_INT
;
2671 tcc_warning("nonportable conversion from pointer to char/short");
2673 force_charshort_cast(dbt
);
2674 } else if ((dbt
& VT_BTYPE
) == VT_INT
) {
2676 if ((sbt
& VT_BTYPE
) == VT_LLONG
) {
2678 /* from long long: just take low order word */
2683 vtop
->type
.t
|= VT_UNSIGNED
;
2687 /* if lvalue and single word type, nothing to do because
2688 the lvalue already contains the real type size (see
2689 VT_LVAL_xxx constants) */
2692 } else if ((dbt
& VT_BTYPE
) == VT_PTR
&& !(vtop
->r
& VT_LVAL
)) {
2693 /* if we are casting between pointer types,
2694 we must update the VT_LVAL_xxx size */
2695 vtop
->r
= (vtop
->r
& ~VT_LVAL_TYPE
)
2696 | (lvalue_type(type
->ref
->type
.t
) & VT_LVAL_TYPE
);
2699 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
2702 /* return type size as known at compile time. Put alignment at 'a' */
2703 ST_FUNC
int type_size(CType
*type
, int *a
)
2708 bt
= type
->t
& VT_BTYPE
;
2709 if (bt
== VT_STRUCT
) {
2714 } else if (bt
== VT_PTR
) {
2715 if (type
->t
& VT_ARRAY
) {
2719 ts
= type_size(&s
->type
, a
);
2721 if (ts
< 0 && s
->c
< 0)
2729 } else if (IS_ENUM(type
->t
) && type
->ref
->c
== -1) {
2730 return -1; /* incomplete enum */
2731 } else if (bt
== VT_LDOUBLE
) {
2733 return LDOUBLE_SIZE
;
2734 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
2735 #ifdef TCC_TARGET_I386
2736 #ifdef TCC_TARGET_PE
2741 #elif defined(TCC_TARGET_ARM)
2751 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
2754 } else if (bt
== VT_SHORT
) {
2757 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
2761 /* char, void, function, _Bool */
2767 /* push type size as known at runtime time on top of value stack. Put
2769 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
2771 if (type
->t
& VT_VLA
) {
2772 type_size(&type
->ref
->type
, a
);
2773 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
2775 vpushi(type_size(type
, a
));
2779 static void vla_sp_restore(void) {
2780 if (vlas_in_scope
) {
2781 gen_vla_sp_restore(vla_sp_loc
);
2785 static void vla_sp_restore_root(void) {
2786 if (vlas_in_scope
) {
2787 gen_vla_sp_restore(vla_sp_root_loc
);
2791 /* return the pointed type of t */
2792 static inline CType
*pointed_type(CType
*type
)
2794 return &type
->ref
->type
;
2797 /* modify type so that its it is a pointer to type. */
2798 ST_FUNC
void mk_pointer(CType
*type
)
2801 s
= sym_push(SYM_FIELD
, type
, 0, -1);
2802 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
2806 /* compare function types. OLD functions match any new functions */
2807 static int is_compatible_func(CType
*type1
, CType
*type2
)
2813 if (!is_compatible_types(&s1
->type
, &s2
->type
))
2815 /* check func_call */
2816 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2818 /* XXX: not complete */
2819 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2821 if (s1
->f
.func_type
!= s2
->f
.func_type
)
2823 while (s1
!= NULL
) {
2826 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2836 /* return true if type1 and type2 are the same. If unqualified is
2837 true, qualifiers on the types are ignored.
2839 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2843 t1
= type1
->t
& VT_TYPE
;
2844 t2
= type2
->t
& VT_TYPE
;
2846 /* strip qualifiers before comparing */
2847 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2848 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2851 /* Default Vs explicit signedness only matters for char */
2852 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2856 /* XXX: bitfields ? */
2859 /* test more complicated cases */
2860 bt1
= t1
& (VT_BTYPE
| VT_ARRAY
);
2861 if (bt1
== VT_PTR
) {
2862 type1
= pointed_type(type1
);
2863 type2
= pointed_type(type2
);
2864 return is_compatible_types(type1
, type2
);
2865 } else if (bt1
& VT_ARRAY
) {
2866 return type1
->ref
->c
< 0 || type2
->ref
->c
< 0
2867 || type1
->ref
->c
== type2
->ref
->c
;
2868 } else if (bt1
== VT_STRUCT
) {
2869 return (type1
->ref
== type2
->ref
);
2870 } else if (bt1
== VT_FUNC
) {
2871 return is_compatible_func(type1
, type2
);
2872 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
2873 return type1
->ref
== type2
->ref
;
2879 /* return true if type1 and type2 are exactly the same (including
2882 static int is_compatible_types(CType
*type1
, CType
*type2
)
2884 return compare_types(type1
,type2
,0);
2887 /* return true if type1 and type2 are the same (ignoring qualifiers).
2889 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
2891 return compare_types(type1
,type2
,1);
2894 /* print a type. If 'varstr' is not NULL, then the variable is also
2895 printed in the type */
2897 /* XXX: add array and function pointers */
2898 static void type_to_str(char *buf
, int buf_size
,
2899 CType
*type
, const char *varstr
)
2911 pstrcat(buf
, buf_size
, "extern ");
2913 pstrcat(buf
, buf_size
, "static ");
2915 pstrcat(buf
, buf_size
, "typedef ");
2917 pstrcat(buf
, buf_size
, "inline ");
2918 if (t
& VT_VOLATILE
)
2919 pstrcat(buf
, buf_size
, "volatile ");
2920 if (t
& VT_CONSTANT
)
2921 pstrcat(buf
, buf_size
, "const ");
2923 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2924 || ((t
& VT_UNSIGNED
)
2925 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2928 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2930 buf_size
-= strlen(buf
);
2965 tstr
= "long double";
2967 pstrcat(buf
, buf_size
, tstr
);
2974 pstrcat(buf
, buf_size
, tstr
);
2975 v
= type
->ref
->v
& ~SYM_STRUCT
;
2976 if (v
>= SYM_FIRST_ANOM
)
2977 pstrcat(buf
, buf_size
, "<anonymous>");
2979 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2984 if (varstr
&& '*' == *varstr
) {
2985 pstrcat(buf1
, sizeof(buf1
), "(");
2986 pstrcat(buf1
, sizeof(buf1
), varstr
);
2987 pstrcat(buf1
, sizeof(buf1
), ")");
2989 pstrcat(buf1
, buf_size
, "(");
2991 while (sa
!= NULL
) {
2993 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2994 pstrcat(buf1
, sizeof(buf1
), buf2
);
2997 pstrcat(buf1
, sizeof(buf1
), ", ");
2999 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3000 pstrcat(buf1
, sizeof(buf1
), ", ...");
3001 pstrcat(buf1
, sizeof(buf1
), ")");
3002 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3007 if (varstr
&& '*' == *varstr
)
3008 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3010 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3011 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3014 pstrcpy(buf1
, sizeof(buf1
), "*");
3015 if (t
& VT_CONSTANT
)
3016 pstrcat(buf1
, buf_size
, "const ");
3017 if (t
& VT_VOLATILE
)
3018 pstrcat(buf1
, buf_size
, "volatile ");
3020 pstrcat(buf1
, sizeof(buf1
), varstr
);
3021 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3025 pstrcat(buf
, buf_size
, " ");
3026 pstrcat(buf
, buf_size
, varstr
);
3031 /* verify type compatibility to store vtop in 'dt' type, and generate
3033 static void gen_assign_cast(CType
*dt
)
3035 CType
*st
, *type1
, *type2
;
3036 char buf1
[256], buf2
[256];
3037 int dbt
, sbt
, qualwarn
, lvl
;
3039 st
= &vtop
->type
; /* source type */
3040 dbt
= dt
->t
& VT_BTYPE
;
3041 sbt
= st
->t
& VT_BTYPE
;
3042 if (sbt
== VT_VOID
|| dbt
== VT_VOID
) {
3043 if (sbt
== VT_VOID
&& dbt
== VT_VOID
)
3044 ; /* It is Ok if both are void */
3046 tcc_error("cannot cast from/to void");
3048 if (dt
->t
& VT_CONSTANT
)
3049 tcc_warning("assignment of read-only location");
3052 /* special cases for pointers */
3053 /* '0' can also be a pointer */
3054 if (is_null_pointer(vtop
))
3056 /* accept implicit pointer to integer cast with warning */
3057 if (is_integer_btype(sbt
)) {
3058 tcc_warning("assignment makes pointer from integer without a cast");
3061 type1
= pointed_type(dt
);
3063 type2
= pointed_type(st
);
3064 else if (sbt
== VT_FUNC
)
3065 type2
= st
; /* a function is implicitly a function pointer */
3068 if (is_compatible_types(type1
, type2
))
3070 for (qualwarn
= lvl
= 0;; ++lvl
) {
3071 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3072 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3074 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3075 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3076 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3078 type1
= pointed_type(type1
);
3079 type2
= pointed_type(type2
);
3081 if (!is_compatible_unqualified_types(type1
, type2
)) {
3082 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3083 /* void * can match anything */
3084 } else if (dbt
== sbt
3085 && is_integer_btype(sbt
& VT_BTYPE
)
3086 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3087 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3088 /* Like GCC don't warn by default for merely changes
3089 in pointer target signedness. Do warn for different
3090 base types, though, in particular for unsigned enums
3091 and signed int targets. */
3093 tcc_warning("assignment from incompatible pointer type");
3098 tcc_warning("assignment discards qualifiers from pointer target type");
3104 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3105 tcc_warning("assignment makes integer from pointer without a cast");
3106 } else if (sbt
== VT_STRUCT
) {
3107 goto case_VT_STRUCT
;
3109 /* XXX: more tests */
3113 if (!is_compatible_unqualified_types(dt
, st
)) {
3115 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3116 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3117 tcc_error("cannot cast '%s' to '%s'", buf1
, buf2
);
3124 /* store vtop in lvalue pushed on stack */
/* vstore: pops the value at vtop and stores it into the lvalue at vtop[-1].
   Handles: char/short store optimization (delayed cast), struct assignment
   via memcpy/memmove, bit-field stores (packed and register paths), bound
   checking, and two-word types (QLONG/QFLOAT/LLONG on 32-bit).
   NOTE(review): this view is a lossy extraction — brace-only lines and
   several interior statements are missing; do not edit from this copy. */
3125 ST_FUNC
void vstore(void)
3127 int sbt
, dbt
, ft
, r
, t
, size
, align
, bit_size
, bit_pos
, rc
, delayed_cast
;
/* ft = full destination type; sbt/dbt = source/destination basic types */
3129 ft
= vtop
[-1].type
.t
;
3130 sbt
= vtop
->type
.t
& VT_BTYPE
;
3131 dbt
= ft
& VT_BTYPE
;
/* narrowing int/short -> byte or int -> short: mark a delayed cast instead
   of generating the truncation now (the store itself truncates) */
3132 if ((((sbt
== VT_INT
|| sbt
== VT_SHORT
) && dbt
== VT_BYTE
) ||
3133 (sbt
== VT_INT
&& dbt
== VT_SHORT
))
3134 && !(vtop
->type
.t
& VT_BITFIELD
)) {
3135 /* optimize char/short casts */
3136 delayed_cast
= VT_MUSTCAST
;
3137 vtop
->type
.t
= ft
& VT_TYPE
;
3138 /* XXX: factorize */
3139 if (ft
& VT_CONSTANT
)
3140 tcc_warning("assignment of read-only location");
/* non-bitfield stores get the usual assignment conversion/diagnostics */
3143 if (!(ft
& VT_BITFIELD
))
3144 gen_assign_cast(&vtop
[-1].type
);
3147 if (sbt
== VT_STRUCT
) {
3148 /* if structure, only generate pointer */
3149 /* structure assignment : generate memcpy */
3150 /* XXX: optimize if small size */
3151 size
= type_size(&vtop
->type
, &align
);
3155 vtop
->type
.t
= VT_PTR
;
3158 /* address of memcpy() */
/* pick the widest copy helper the alignment permits; presumably an
   alignment check for memcpy8 sits on a missing line — TODO confirm */
3161 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3162 else if(!(align
& 3))
3163 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3166 /* Use memmove, rather than memcpy, as dest and src may be same: */
3167 vpush_global_sym(&func_old_type
, TOK_memmove
);
3172 vtop
->type
.t
= VT_PTR
;
3178 /* leave source on stack */
3179 } else if (ft
& VT_BITFIELD
) {
3180 /* bitfield store handling */
3182 /* save lvalue as expression result (example: s.b = s.a = n;) */
3183 vdup(), vtop
[-1] = vtop
[-2];
3185 bit_pos
= BIT_POS(ft
);
3186 bit_size
= BIT_SIZE(ft
);
3187 /* remove bit field info to avoid loops */
3188 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
/* _Bool bit-fields: first normalize the stored value to 0/1 */
3190 if ((ft
& VT_BTYPE
) == VT_BOOL
) {
3191 gen_cast(&vtop
[-1].type
);
3192 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3195 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
/* adjust_bf returning VT_STRUCT means the field straddles units and must
   be stored byte-wise by store_packed_bf */
3196 if (r
== VT_STRUCT
) {
3197 gen_cast_s((ft
& VT_BTYPE
) == VT_LLONG
? VT_LLONG
: VT_INT
);
3198 store_packed_bf(bit_pos
, bit_size
);
/* in-word path: mask the source, then read-modify-write the destination */
3200 unsigned long long mask
= (1ULL << bit_size
) - 1;
3201 if ((ft
& VT_BTYPE
) != VT_BOOL
) {
3203 if ((vtop
[-1].type
.t
& VT_BTYPE
) == VT_LLONG
)
3206 vpushi((unsigned)mask
);
3213 /* duplicate destination */
3216 /* load destination, mask and or with source */
3217 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
3218 vpushll(~(mask
<< bit_pos
));
3220 vpushi(~((unsigned)mask
<< bit_pos
));
3225 /* ... and discard */
3228 } else if (dbt
== VT_VOID
) {
3231 #ifdef CONFIG_TCC_BCHECK
3232 /* bound check case */
3233 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3242 #ifdef TCC_TARGET_X86_64
/* register-class selection for the value; long double / qfloat need
   special classes on x86-64 (details on missing lines) */
3243 if ((ft
& VT_BTYPE
) == VT_LDOUBLE
) {
3245 } else if ((ft
& VT_BTYPE
) == VT_QFLOAT
) {
3250 r
= gv(rc
); /* generate value */
3251 /* if lvalue was saved on stack, must read it */
3252 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3254 t
= get_reg(RC_INT
);
3260 sv
.r
= VT_LOCAL
| VT_LVAL
;
3261 sv
.c
.i
= vtop
[-1].c
.i
;
3263 vtop
[-1].r
= t
| VT_LVAL
;
3265 /* two word case handling : store second register at word + 4 (or +8 for x86-64) */
3267 if (((ft
& VT_BTYPE
) == VT_QLONG
) || ((ft
& VT_BTYPE
) == VT_QFLOAT
)) {
3268 int addr_type
= VT_LLONG
, load_size
= 8, load_type
= ((vtop
->type
.t
& VT_BTYPE
) == VT_QLONG
) ? VT_LLONG
: VT_DOUBLE
;
3270 if ((ft
& VT_BTYPE
) == VT_LLONG
) {
3271 int addr_type
= VT_INT
, load_size
= 4, load_type
= VT_INT
;
3273 vtop
[-1].type
.t
= load_type
;
3276 /* convert to int to increment easily */
3277 vtop
->type
.t
= addr_type
;
3283 vtop
[-1].type
.t
= load_type
;
3284 /* XXX: it works because r2 is spilled last ! */
3285 store(vtop
->r2
, vtop
- 1);
3291 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
/* apply the delayed narrowing cast flag (if any) to the result value */
3292 vtop
->r
|= delayed_cast
;
3296 /* post defines POST/PRE add. c is the token ++ or -- */
/* inc: generate code for ++/-- applied to the lvalue at vtop.
   post != 0 selects post-increment semantics (the pre-modification value
   is kept as the expression result via vpop of the stored result).
   NOTE(review): lossy extraction — the arithmetic op between vpushi and
   vstore is on lines missing from this view. */
3297 ST_FUNC
void inc(int post
, int c
)
3300 vdup(); /* save lvalue */
3302 gv_dup(); /* duplicate value */
/* c - TOK_MID yields +1 for TOK_INC and -1 for TOK_DEC — TODO confirm
   against token definitions */
3307 vpushi(c
- TOK_MID
);
3309 vstore(); /* store value */
3311 vpop(); /* if post op, return saved value */
/* parse_mult_str: concatenate one or more adjacent string-literal tokens
   into 'astr' and NUL-terminate the result. 'msg' is the expected-token
   message used when no string is present (error path on missing lines). */
3314 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3316 /* read the string */
3320 while (tok
== TOK_STR
) {
3321 /* XXX: add \0 handling too ? */
/* -1 length: cstr_cat presumably computes strlen itself — TODO confirm */
3322 cstr_cat(astr
, tokc
.str
.data
, -1);
3325 cstr_ccat(astr
, '\0');
3328 /* If I is >= 1 and a power of two, returns log2(i)+1.
3329 If I is 0 returns 0. */
/* NOTE(review): only the 8-bit stride of the loop is visible here; the
   per-bit refinement and return are on lines missing from this view. */
3330 static int exact_log2p1(int i
)
3335 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3346 /* Parse __attribute__((...)) GNUC extension. */
/* parse_attribute: consume one __attribute__((...)) clause list and record
   the recognized attributes (section, alias, visibility, aligned, calling
   convention, mode, dllexport/dllimport, ...) into 'ad'. Unknown attributes
   warn (if enabled) and their parenthesized arguments are skipped.
   NOTE(review): lossy extraction — the switch dispatch on attribute tokens
   and many case labels are on lines missing from this view. */
3347 static void parse_attribute(AttributeDef
*ad
)
3353 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3358 while (tok
!= ')') {
3359 if (tok
< TOK_IDENT
)
3360 expect("attribute name");
/* section("name") */
3367 parse_mult_str(&astr
, "section name");
3368 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
/* alias("target") */
3375 parse_mult_str(&astr
, "alias(\"target\")");
3376 ad
->alias_target
= /* save string as token, for later */
3377 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
3381 case TOK_VISIBILITY1
:
3382 case TOK_VISIBILITY2
:
3384 parse_mult_str(&astr
,
3385 "visibility(\"default|hidden|internal|protected\")");
3386 if (!strcmp (astr
.data
, "default"))
3387 ad
->a
.visibility
= STV_DEFAULT
;
3388 else if (!strcmp (astr
.data
, "hidden"))
3389 ad
->a
.visibility
= STV_HIDDEN
;
3390 else if (!strcmp (astr
.data
, "internal"))
3391 ad
->a
.visibility
= STV_INTERNAL
;
3392 else if (!strcmp (astr
.data
, "protected"))
3393 ad
->a
.visibility
= STV_PROTECTED
;
3395 expect("visibility(\"default|hidden|internal|protected\")");
/* aligned(n): n must be a power of two; stored as log2(n)+1 */
3404 if (n
<= 0 || (n
& (n
- 1)) != 0)
3405 tcc_error("alignment must be a positive power of two");
3410 ad
->a
.aligned
= exact_log2p1(n
);
/* the bit-field 'aligned' may not be wide enough to round-trip n */
3411 if (n
!= 1 << (ad
->a
.aligned
- 1))
3412 tcc_error("alignment of %d is larger than implemented", n
);
3424 /* currently, no need to handle it because tcc does not
3425 track unused objects */
3429 /* currently, no need to handle it because tcc does not
3430 track unused objects */
/* calling-convention attributes */
3435 ad
->f
.func_call
= FUNC_CDECL
;
3440 ad
->f
.func_call
= FUNC_STDCALL
;
3442 #ifdef TCC_TARGET_I386
3452 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3458 ad
->f
.func_call
= FUNC_FASTCALLW
;
/* __attribute__((mode(...))): stored as basic type + 1 so 0 = unset */
3465 ad
->attr_mode
= VT_LLONG
+ 1;
3468 ad
->attr_mode
= VT_BYTE
+ 1;
3471 ad
->attr_mode
= VT_SHORT
+ 1;
3475 ad
->attr_mode
= VT_INT
+ 1;
3478 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
3485 ad
->a
.dllexport
= 1;
3487 case TOK_NODECORATE
:
3488 ad
->a
.nodecorate
= 1;
3491 ad
->a
.dllimport
= 1;
/* unrecognized attribute: warn and skip any (...) argument list,
   tracking nesting depth in 'parenthesis' */
3494 if (tcc_state
->warn_unsupported
)
3495 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
3496 /* skip parameters */
3498 int parenthesis
= 0;
3502 else if (tok
== ')')
3505 } while (parenthesis
&& tok
!= -1);
/* find_field: look up member 'v' in struct/union 'type'. Recurses into
   anonymous struct/union members (fields whose symbol is anonymous and
   whose type is itself a struct). Returns the member Sym; behavior when
   not found is on lines missing from this view. */
3518 static Sym
* find_field (CType
*type
, int v
)
3522 while ((s
= s
->next
) != NULL
) {
/* anonymous struct/union member: search inside it */
3523 if ((s
->v
& SYM_FIELD
) &&
3524 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3525 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3526 Sym
*ret
= find_field (&s
->type
, v
);
/* struct_add_offset: add 'offset' to the byte offset of every member of
   's', recursing into anonymous struct/union members. Used to fix up
   member offsets of anonymous aggregates after layout (see struct_layout).
   NOTE(review): the actual "s->c += offset" line is missing from this
   lossy view. */
3536 static void struct_add_offset (Sym
*s
, int offset
)
3538 while ((s
= s
->next
) != NULL
) {
3539 if ((s
->v
& SYM_FIELD
) &&
3540 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
3541 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
3542 struct_add_offset(s
->type
.ref
, offset
);
/* struct_layout: assign byte offsets and bit positions to every member of
   the struct/union 'type', honoring attribute packed/aligned, #pragma pack,
   and either PCC/GCC or MS bit-field layout rules (pcc flag). Afterwards it
   fixes anonymous-member offsets and verifies each bit-field can be
   accessed through some integer type, falling back to byte-wise access
   (auxtype = VT_STRUCT) when it cannot.
   NOTE(review): lossy extraction — many statements (offset assignment,
   union maxsize tracking, several braces) are on missing lines. */
3548 static void struct_layout(CType
*type
, AttributeDef
*ad
)
3550 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
3551 int packed
, a
, bt
, prevbt
, prev_bit_size
;
/* pcc: GCC-compatible bit-field layout; otherwise MS layout */
3552 int pcc
= !tcc_state
->ms_bitfields
;
3553 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
3560 prevbt
= VT_STRUCT
; /* make it never match */
3565 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3566 if (f
->type
.t
& VT_BITFIELD
)
3567 bit_size
= BIT_SIZE(f
->type
.t
);
3570 size
= type_size(&f
->type
, &align
);
/* a = explicit per-field alignment (0 if none) */
3571 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
3574 if (pcc
&& bit_size
== 0) {
3575 /* in pcc mode, packing does not affect zero-width bitfields */
3578 /* in pcc mode, attribute packed overrides if set. */
3579 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
3582 /* pragma pack overrides align if lesser and packs bitfields always */
3585 if (pragma_pack
< align
)
3586 align
= pragma_pack
;
3587 /* in pcc mode pragma pack also overrides individual align */
3588 if (pcc
&& pragma_pack
< a
)
3592 /* some individual align was specified */
/* union: every member starts at offset 0; size contributes to max */
3596 if (type
->ref
->type
.t
== VT_UNION
) {
3597 if (pcc
&& bit_size
>= 0)
3598 size
= (bit_size
+ 7) >> 3;
/* bit_size < 0: a plain (non-bit-field) member */
3603 } else if (bit_size
< 0) {
3605 c
+= (bit_pos
+ 7) >> 3;
3606 c
= (c
+ align
- 1) & -align
;
3615 /* A bit-field. Layout is more complicated. There are two
3616 options: PCC (GCC) compatible and MS compatible */
3618 /* In PCC layout a bit-field is placed adjacent to the
3619 preceding bit-fields, except if:
3621 - an individual alignment was given
3622 - it would overflow its base type container and
3623 there is no packing */
3624 if (bit_size
== 0) {
/* zero-width bit-field forces alignment to the base type boundary */
3626 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
3628 } else if (f
->a
.aligned
) {
3630 } else if (!packed
) {
/* would the field cross its container? (a8 = align in bits,
   presumably set on a missing line — TODO confirm) */
3632 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
3633 if (ofs
> size
/ align
)
3637 /* in pcc mode, long long bitfields have type int if they fit */
3638 if (size
== 8 && bit_size
<= 32)
3639 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
/* normalize bit_pos into [0, align*8) by advancing the byte offset */
3641 while (bit_pos
>= align
* 8)
3642 c
+= align
, bit_pos
-= align
* 8;
3645 /* In PCC layout named bit-fields influence the alignment
3646 of the containing struct using the base types alignment,
3647 except for packed fields (which here have correct align). */
3648 if (f
->v
& SYM_FIRST_ANOM
3649 // && bit_size // ??? gcc on ARM/rpi does that
/* MS layout branch */
3654 bt
= f
->type
.t
& VT_BTYPE
;
3655 if ((bit_pos
+ bit_size
> size
* 8)
3656 || (bit_size
> 0) == (bt
!= prevbt
)
3658 c
= (c
+ align
- 1) & -align
;
3661 /* In MS bitfield mode a bit-field run always uses
3662 at least as many bits as the underlying type.
3663 To start a new run it's also required that this
3664 or the last bit-field had non-zero width. */
3665 if (bit_size
|| prev_bit_size
)
3668 /* In MS layout the records alignment is normally
3669 influenced by the field, except for a zero-width
3670 field at the start of a run (but by further zero-width
3671 fields it is again). */
3672 if (bit_size
== 0 && prevbt
!= bt
)
3675 prev_bit_size
= bit_size
;
/* write the final bit position back into the member's type word */
3678 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3679 | (bit_pos
<< VT_STRUCT_SHIFT
);
3680 bit_pos
+= bit_size
;
3682 if (align
> maxalign
)
3686 printf("set field %s offset %-2d size %-2d align %-2d",
3687 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
3688 if (f
->type
.t
& VT_BITFIELD
) {
3689 printf(" pos %-2d bits %-2d",
3697 if (f
->v
& SYM_FIRST_ANOM
&& (f
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
3699 /* An anonymous struct/union. Adjust member offsets
3700 to reflect the real offset of our containing struct.
3701 Also set the offset of this anon member inside
3702 the outer struct to be zero. Via this it
3703 works when accessing the field offset directly
3704 (from base object), as well as when recursing
3705 members in initializer handling. */
3706 int v2
= f
->type
.ref
->v
;
3707 if (!(v2
& SYM_FIELD
) &&
3708 (v2
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3710 /* This happens only with MS extensions. The
3711 anon member has a named struct type, so it
3712 potentially is shared with other references.
3713 We need to unshare members so we can modify
3716 f
->type
.ref
= sym_push(anon_sym
++ | SYM_FIELD
,
3717 &f
->type
.ref
->type
, 0,
/* deep-copy the member list into the fresh anonymous symbol */
3719 pps
= &f
->type
.ref
->next
;
3720 while ((ass
= ass
->next
) != NULL
) {
3721 *pps
= sym_push(ass
->v
, &ass
->type
, 0, ass
->c
);
3722 pps
= &((*pps
)->next
);
3726 struct_add_offset(f
->type
.ref
, offset
);
/* account for trailing bit-field bits */
3736 c
+= (bit_pos
+ 7) >> 3;
3738 /* store size and alignment */
3739 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
3743 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
3744 /* can happen if individual align for some member was given. In
3745 this case MSVC ignores maxalign when aligning the size */
/* round total size up to the struct alignment */
3750 c
= (c
+ a
- 1) & -a
;
3754 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
3757 /* check whether we can access bitfields by their type */
3758 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
3762 if (0 == (f
->type
.t
& VT_BITFIELD
))
3766 bit_size
= BIT_SIZE(f
->type
.t
);
3769 bit_pos
= BIT_POS(f
->type
.t
);
3770 size
= type_size(&f
->type
, &align
);
/* field fully accessible through its own type and within the struct */
3771 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
3774 /* try to access the field using a different type */
3775 c0
= -1, s
= align
= 1;
3777 px
= f
->c
* 8 + bit_pos
;
3778 cx
= (px
>> 3) & -align
;
3779 px
= px
- (cx
<< 3);
3782 s
= (px
+ bit_size
+ 7) >> 3;
3792 s
= type_size(&t
, &align
);
3796 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
3797 /* update offset and bit position */
3800 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
3801 | (bit_pos
<< VT_STRUCT_SHIFT
);
3805 printf("FIX field %s offset %-2d size %-2d align %-2d "
3806 "pos %-2d bits %-2d\n",
3807 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
3808 cx
, s
, align
, px
, bit_size
);
3811 /* fall back to load/store single-byte wise */
3812 f
->auxtype
= VT_STRUCT
;
3814 printf("FIX field %s : load byte-wise\n",
3815 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
3821 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
/* struct_decl: parse a struct/union/enum specifier (tag lookup or fresh
   anonymous symbol, optional body). For enums it parses enumerators,
   deduces the smallest integral type that fits all values, and retypes the
   members; for structs/unions it parses the member declarations (including
   bit-fields and anonymous members) then calls struct_layout().
   NOTE(review): lossy extraction — tag parsing, loop heads and several
   branches are on missing lines; do not edit from this copy. */
3822 static void struct_decl(CType
*type
, int u
)
3824 int v
, c
, size
, align
, flexible
;
3825 int bit_size
, bsize
, bt
;
3827 AttributeDef ad
, ad1
;
3830 memset(&ad
, 0, sizeof ad
);
3832 parse_attribute(&ad
);
3836 /* struct already defined ? return it */
3838 expect("struct/union/enum name");
/* reuse an existing tag when in the same scope or not (re)defining */
3840 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
3843 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
3845 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
3850 /* Record the original enum/struct/union token. */
3851 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
3853 /* we put an undefined size for struct/union */
3854 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
3855 s
->r
= 0; /* default alignment is zero as gcc */
3857 type
->t
= s
->type
.t
;
3863 tcc_error("struct/union/enum already defined");
3864 /* cannot be empty */
3865 /* non empty enums are not allowed */
/* ll = current enumerator value; pl/nl track the largest positive and
   most negative values seen, to size the enum's integral type */
3868 long long ll
= 0, pl
= 0, nl
= 0;
3871 /* enum symbols have static storage */
3872 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
3876 expect("identifier");
3878 if (ss
&& !local_stack
)
3879 tcc_error("redefinition of enumerator '%s'",
3880 get_tok_str(v
, NULL
));
3884 ll
= expr_const64();
3886 ss
= sym_push(v
, &t
, VT_CONST
, 0);
3888 *ps
= ss
, ps
= &ss
->next
;
3897 /* NOTE: we accept a trailing comma */
3902 /* set integral type of the enum */
3905 if (pl
!= (unsigned)pl
)
3906 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3908 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
3909 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
3910 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
3912 /* set type for enum members */
3913 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
3915 if (ll
== (int)ll
) /* default is int if it fits */
3917 if (t
.t
& VT_UNSIGNED
) {
3918 ss
->type
.t
|= VT_UNSIGNED
;
3919 if (ll
== (unsigned)ll
)
/* widen members that do not fit in 32 bits */
3922 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
3923 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
/* struct/union body: member declaration loop */
3928 while (tok
!= '}') {
3929 if (!parse_btype(&btype
, &ad1
)) {
3935 tcc_error("flexible array member '%s' not at the end of struct",
3936 get_tok_str(v
, NULL
));
3942 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
3944 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
3945 expect("identifier");
/* unnamed member with a named struct type: MS extension only */
3947 int v
= btype
.ref
->v
;
3948 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
3949 if (tcc_state
->ms_extensions
== 0)
3950 expect("identifier");
3954 if (type_size(&type1
, &align
) < 0) {
/* incomplete array as last member = flexible array member */
3955 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
3958 tcc_error("field '%s' has incomplete type",
3959 get_tok_str(v
, NULL
));
3961 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
3962 (type1
.t
& VT_BTYPE
) == VT_VOID
||
3963 (type1
.t
& VT_STORAGE
))
3964 tcc_error("invalid type for '%s'",
3965 get_tok_str(v
, NULL
));
/* optional ':' bit-field width */
3969 bit_size
= expr_const();
3970 /* XXX: handle v = 0 case for messages */
3972 tcc_error("negative width in bit-field '%s'",
3973 get_tok_str(v
, NULL
));
3974 if (v
&& bit_size
== 0)
3975 tcc_error("zero width for bit-field '%s'",
3976 get_tok_str(v
, NULL
));
3977 parse_attribute(&ad1
);
3979 size
= type_size(&type1
, &align
);
3980 if (bit_size
>= 0) {
3981 bt
= type1
.t
& VT_BTYPE
;
3987 tcc_error("bitfields must have scalar type");
3989 if (bit_size
> bsize
) {
3990 tcc_error("width of '%s' exceeds its type",
3991 get_tok_str(v
, NULL
));
3992 } else if (bit_size
== bsize
3993 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
3994 /* no need for bit fields */
3996 } else if (bit_size
== 64) {
3997 tcc_error("field width 64 not implemented");
/* encode VT_BITFIELD width into the type word (pos filled later
   by struct_layout) */
3999 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4001 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4004 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4005 /* Remember we've seen a real field to check
4006 for placement of flexible array member. */
4009 /* If member is a struct or bit-field, enforce
4010 placing into the struct (as anonymous). */
4012 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4017 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4022 if (tok
== ';' || tok
== TOK_EOF
)
/* trailing attributes, then compute the final layout */
4029 parse_attribute(&ad
);
4030 struct_layout(type
, &ad
);
/* sym_to_attr: fold the symbol's stored attributes (both generic symbol
   attributes and function attributes) into the AttributeDef being built. */
4035 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4037 merge_symattr(&ad
->a
, &s
->a
);
4038 merge_funcattr(&ad
->f
, &s
->f
);
4041 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4042 are added to the element type, copied because it could be a typedef. */
4043 static void parse_btype_qualify(CType
*type
, int qualifiers
)
/* walk through array levels, duplicating each array Sym so a shared
   typedef's element type is not mutated in place */
4045 while (type
->t
& VT_ARRAY
) {
4046 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4047 type
= &type
->ref
->type
;
4049 type
->t
|= qualifiers
;
4052 /* return 0 if no type declaration. otherwise, return the basic type
/* parse_btype: parse declaration-specifiers (basic types, long/short/
   signed/unsigned combinations, struct/union/enum specifiers, qualifiers,
   storage classes, typedef names, GNU attributes, typeof) into *type and
   *ad. Returns nonzero iff a type was found.
   NOTE(review): lossy extraction — the main token switch, loop head and
   many case labels are on missing lines; do not edit from this copy. */
4055 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4057 int t
, u
, bt
, st
, type_found
, typespec_found
, g
;
4061 memset(ad
, 0, sizeof(AttributeDef
));
4071 /* currently, we really ignore extension */
/* short/long combine with a previously seen basic type; bt/st track the
   basic type and size modifier seen so far (-1 = none) */
4081 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4082 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4083 tmbt
: tcc_error("too many basic types");
4086 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4091 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
/* 'long double' and 'long long' resolution */
4104 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4105 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4106 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4107 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4114 #ifdef TCC_TARGET_ARM64
4116 /* GCC's __uint128_t appears in some Linux header files. Make it a
4117 synonym for long double to get the size and alignment right. */
4128 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4129 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
/* struct/union/enum specifiers */
4137 struct_decl(&type1
, VT_ENUM
);
4140 type
->ref
= type1
.ref
;
4143 struct_decl(&type1
, VT_STRUCT
);
4146 struct_decl(&type1
, VT_UNION
);
4149 /* type modifiers */
4154 parse_btype_qualify(type
, VT_CONSTANT
);
4162 parse_btype_qualify(type
, VT_VOLATILE
);
4169 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4170 tcc_error("signed and unsigned modifier");
4183 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4184 tcc_error("signed and unsigned modifier");
4185 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
/* storage classes: only one of extern/static/typedef allowed */
4201 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4202 tcc_error("multiple storage classes");
4213 /* GNUC attribute */
4214 case TOK_ATTRIBUTE1
:
4215 case TOK_ATTRIBUTE2
:
4216 parse_attribute(ad
);
/* __attribute__((mode(...))) overrides the basic type */
4217 if (ad
->attr_mode
) {
4218 u
= ad
->attr_mode
-1;
4219 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
/* typeof(expr) */
4227 parse_expr_type(&type1
);
4228 /* remove all storage modifiers except typedef */
4229 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4231 sym_to_attr(ad
, type1
.ref
);
/* identifier: possibly a typedef name */
4237 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4239 t
&= ~(VT_BTYPE
|VT_LONG
);
4240 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4241 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4242 type
->ref
= s
->type
.ref
;
/* any const/volatile seen before the typedef name still apply */
4244 parse_btype_qualify(type
, t
);
4246 /* get attributes from typedef */
/* default char signedness per target option */
4256 if (tcc_state
->char_is_unsigned
) {
4257 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4260 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4261 bt
= t
& (VT_BTYPE
|VT_LONG
);
4263 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4264 #ifdef TCC_TARGET_PE
/* on PE targets long double is plain double */
4265 if (bt
== VT_LDOUBLE
)
4266 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_DOUBLE
;
4272 /* convert a function parameter type (array to pointer and function to
4273 function pointer) */
4274 static inline void convert_parameter_type(CType
*pt
)
4276 /* remove const and volatile qualifiers (XXX: const could be used
4277 to indicate a const function parameter */
4278 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4279 /* array must be transformed to pointer according to ANSI C */
/* function parameter decays to pointer-to-function (body on missing line) */
4281 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
/* parse_asm_str: read one or more concatenated string literals (as used in
   asm statements/labels) into *astr; errors with "string constant" if the
   next token is not a string. */
4286 ST_FUNC
void parse_asm_str(CString
*astr
)
4289 parse_mult_str(astr
, "string constant");
4292 /* Parse an asm label and return the token */
/* asm_label_instr: parse asm("name") after a declarator and intern the
   string (minus its NUL) as a token, which is returned to the caller.
   NOTE(review): surrounding skip()/return lines are missing from this
   lossy view. */
4293 static int asm_label_instr(void)
4299 parse_asm_str(&astr
);
4302 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4304 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
/* post_type: parse the suffix part of a declarator — a function parameter
   list '(...)' or array brackets '[...]' (including VLAs) — and wrap *type
   accordingly. Returns whether a postfix was consumed (used by type_decl
   to distinguish 'int ()' from a nested declarator).
   NOTE(review): lossy extraction — parameter-list loop heads, several
   branches and the return statements are on missing lines. */
4309 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4311 int n
, l
, t1
, arg_size
, align
;
4312 Sym
**plast
, *s
, *first
;
4317 /* function type, or recursive declarator (return if so) */
4319 if (td
&& !(td
& TYPE_ABSTRACT
))
4323 else if (parse_btype(&pt
, &ad1
))
4326 merge_attr (ad
, &ad1
);
4335 /* read param name and compute offset */
4336 if (l
!= FUNC_OLD
) {
/* 'void' alone as the entire parameter list */
4337 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4339 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
4340 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4341 tcc_error("parameter declared as void");
4345 expect("identifier");
4346 pt
.t
= VT_VOID
; /* invalid type */
/* decay arrays/functions and accumulate stack size in PTR_SIZE units */
4349 convert_parameter_type(&pt
);
4350 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4351 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
4357 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4362 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
4363 tcc_error("invalid type");
4366 /* if no parameters, then old type prototype */
4369 /* NOTE: const is ignored in returned type as it has a special
4370 meaning in gcc / C++ */
4371 type
->t
&= ~VT_CONSTANT
;
4372 /* some ancient pre-K&R C allows a function to return an array
4373 and the array brackets to be put after the arguments, such
4374 that "int c()[]" means something like "int[] c()" */
4377 skip(']'); /* only handle simple "[]" */
4380 /* we push a anonymous symbol which will contain the function prototype */
4381 ad
->f
.func_args
= arg_size
;
4382 ad
->f
.func_type
= l
;
4383 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4389 } else if (tok
== '[') {
4390 int saved_nocode_wanted
= nocode_wanted
;
4391 /* array definition */
4394 /* XXX The optional type-quals and static should only be accepted
4395 in parameter decls. The '*' as well, and then even only
4396 in prototypes (not function defs). */
4398 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
/* constant array size must be fully evaluated when file-scope/static */
4413 if (!local_stack
|| (storage
& VT_STATIC
))
4414 vpushi(expr_const());
4416 /* VLAs (which can only happen with local_stack && !VT_STATIC)
4417 length must always be evaluated, even under nocode_wanted,
4418 so that its size slot is initialized (e.g. under sizeof
4423 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4426 tcc_error("invalid array size");
4428 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
4429 tcc_error("size of variable length array should be an integer");
4434 /* parse next post type */
/* recurse for multi-dimensional arrays / array-of-function rejection */
4435 post_type(type
, ad
, storage
, 0);
4436 if (type
->t
== VT_FUNC
)
4437 tcc_error("declaration of an array of functions");
4438 t1
|= type
->t
& VT_VLA
;
/* VLA: reserve a local slot holding the runtime size */
4441 loc
-= type_size(&int_type
, &align
);
4445 vla_runtime_type_size(type
, &align
);
4447 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
4453 nocode_wanted
= saved_nocode_wanted
;
4455 /* we push an anonymous symbol which will contain the array
4457 s
= sym_push(SYM_FIELD
, type
, 0, n
);
4458 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
4464 /* Parse a type declarator (except basic type), and return the type
4465 in 'type'. 'td' is a bitmask indicating which kind of type decl is
4466 expected. 'type' should contain the basic type. 'ad' is the
4467 attribute definition of the basic type. It can be modified by
4468 type_decl(). If this (possibly abstract) declarator is a pointer chain
4469 it returns the innermost pointed to type (equals *type, but is a different
4470 pointer), otherwise returns type itself, that's used for recursive calls. */
/* NOTE(review): lossy extraction — mk_pointer call, the '(' branch head
   and the final return are on missing lines. */
4471 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
4474 int qualifiers
, storage
;
4476 /* recursive type, remove storage bits first, apply them later again */
4477 storage
= type
->t
& VT_STORAGE
;
4478 type
->t
&= ~VT_STORAGE
;
/* pointer declarators with their qualifier lists */
4481 while (tok
== '*') {
4489 qualifiers
|= VT_CONSTANT
;
4494 qualifiers
|= VT_VOLATILE
;
4500 /* XXX: clarify attribute handling */
4501 case TOK_ATTRIBUTE1
:
4502 case TOK_ATTRIBUTE2
:
4503 parse_attribute(ad
);
4507 type
->t
|= qualifiers
;
4509 /* innermost pointed to type is the one for the first derivation */
4510 ret
= pointed_type(type
);
4514 /* This is possibly a parameter type list for abstract declarators
4515 ('int ()'), use post_type for testing this. */
4516 if (!post_type(type
, ad
, 0, td
)) {
4517 /* It's not, so it's a nested declarator, and the post operations
4518 apply to the innermost pointed to type (if any). */
4519 /* XXX: this is not correct to modify 'ad' at this point, but
4520 the syntax is not clear */
4521 parse_attribute(ad
);
4522 post
= type_decl(type
, ad
, v
, td
);
4525 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
4526 /* type identifier */
4530 if (!(td
& TYPE_ABSTRACT
))
4531 expect("identifier");
/* apply array/function suffixes to the innermost type, then trailing
   attributes */
4534 post_type(post
, ad
, storage
, 0);
4535 parse_attribute(ad
);
4540 /* compute the lvalue VT_LVAL_xxx needed to match type t. */
/* Returns VT_LVAL plus a size flag (byte/short) and VT_LVAL_UNSIGNED for
   unsigned sub-int types, so loads sign/zero-extend correctly.
   NOTE(review): the r initialization and return are on missing lines. */
4541 ST_FUNC
int lvalue_type(int t
)
4546 if (bt
== VT_BYTE
|| bt
== VT_BOOL
)
4548 else if (bt
== VT_SHORT
)
4552 if (t
& VT_UNSIGNED
)
4553 r
|= VT_LVAL_UNSIGNED
;
4557 /* indirection with full error checking and bound check */
/* indir: apply unary '*' to vtop. Errors on non-pointer operands (but a
   function value is tolerated — branch body on missing line), dereferences
   the pointer, and marks the result as an lvalue unless it is an array,
   VLA or function. Adds VT_MUSTBOUND when bound checking is enabled. */
4558 ST_FUNC
void indir(void)
4560 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
4561 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
/* load the pointer value if it is itself stored in an lvalue */
4565 if (vtop
->r
& VT_LVAL
)
4567 vtop
->type
= *pointed_type(&vtop
->type
);
4568 /* Arrays and functions are never lvalues */
4569 if (!(vtop
->type
.t
& VT_ARRAY
) && !(vtop
->type
.t
& VT_VLA
)
4570 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
4571 vtop
->r
|= lvalue_type(vtop
->type
.t
);
4572 /* if bound checking, the referenced pointer must be checked */
4573 #ifdef CONFIG_TCC_BCHECK
4574 if (tcc_state
->do_bounds_check
)
4575 vtop
->r
|= VT_MUSTBOUND
;
4580 /* pass a parameter to a function and do type checking and casting */
/* gfunc_param_typed: for unprototyped calls and variadic tails apply the
   default argument promotions (float -> double, bit-field widening);
   otherwise check/cast vtop against the declared parameter type in 'arg',
   erroring when more arguments are supplied than declared. */
4581 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
4586 func_type
= func
->f
.func_type
;
4587 if (func_type
== FUNC_OLD
||
4588 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
4589 /* default casting : only need to convert float to double */
4590 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
4591 gen_cast_s(VT_DOUBLE
);
4592 } else if (vtop
->type
.t
& VT_BITFIELD
) {
/* widen a bit-field argument to its declared basic type */
4593 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
4594 type
.ref
= vtop
->type
.ref
;
4597 } else if (arg
== NULL
) {
4598 tcc_error("too many arguments to function");
/* prototyped parameter: convert as if assigning to it */
4601 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
4602 gen_assign_cast(&type
);
4606 /* parse an expression and return its type without any side effect. */
/* expr_type: run 'expr_fn' with code generation suppressed and copy the
   resulting type into *type (body is on lines missing from this view). */
4607 static void expr_type(CType
*type
, void (*expr_fn
)(void))
4616 /* parse an expression of the form '(type)' or '(expr)' and return its
/* parse_expr_type: used by typeof; tries a type-name first, otherwise
   evaluates the parenthesized expression side-effect-free via expr_type. */
4618 static void parse_expr_type(CType
*type
)
4624 if (parse_btype(type
, &ad
)) {
4625 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
4627 expr_type(type
, gexpr
);
/* parse_type: parse a complete type-name (basic type + abstract
   declarator); errors if no type is present (error call on missing line). */
4632 static void parse_type(CType
*type
)
4637 if (!parse_btype(type
, &ad
)) {
4640 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
/* parse_builtin_params: parse a builtin's argument list driven by the
   format string 'args': 'e' = expression, 't' = type-name pushed on the
   value stack. 'nc' presumably controls nocode_wanted around parsing —
   TODO confirm (its use is on lines missing from this view). */
4643 static void parse_builtin_params(int nc
, const char *args
)
4650 while ((c
= *args
++)) {
4654 case 'e': expr_eq(); continue;
4655 case 't': parse_type(&t
); vpush(&t
); continue;
4656 default: tcc_error("internal error"); break;
4664 ST_FUNC
void unary(void)
4666 int n
, t
, align
, size
, r
, sizeof_caller
;
4671 sizeof_caller
= in_sizeof
;
4674 /* XXX: GCC 2.95.3 does not generate a table although it should be
4682 #ifdef TCC_TARGET_PE
4683 t
= VT_SHORT
|VT_UNSIGNED
;
4691 vsetc(&type
, VT_CONST
, &tokc
);
4695 t
= VT_INT
| VT_UNSIGNED
;
4701 t
= VT_LLONG
| VT_UNSIGNED
;
4713 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
4716 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
4718 case TOK___FUNCTION__
:
4720 goto tok_identifier
;
4726 /* special function name identifier */
4727 len
= strlen(funcname
) + 1;
4728 /* generate char[len] type */
4733 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
4734 if (!NODATA_WANTED
) {
4735 ptr
= section_ptr_add(data_section
, len
);
4736 memcpy(ptr
, funcname
, len
);
4742 #ifdef TCC_TARGET_PE
4743 t
= VT_SHORT
| VT_UNSIGNED
;
4749 /* string parsing */
4751 if (tcc_state
->char_is_unsigned
)
4752 t
= VT_BYTE
| VT_UNSIGNED
;
4754 if (tcc_state
->warn_write_strings
)
4759 memset(&ad
, 0, sizeof(AttributeDef
));
4760 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
4765 if (parse_btype(&type
, &ad
)) {
4766 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
4768 /* check ISOC99 compound literal */
4770 /* data is allocated locally by default */
4775 /* all except arrays are lvalues */
4776 if (!(type
.t
& VT_ARRAY
))
4777 r
|= lvalue_type(type
.t
);
4778 memset(&ad
, 0, sizeof(AttributeDef
));
4779 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
4781 if (sizeof_caller
) {
4788 } else if (tok
== '{') {
4789 int saved_nocode_wanted
= nocode_wanted
;
4791 tcc_error("expected constant");
4792 /* save all registers */
4794 /* statement expression : we do not accept break/continue
4795 inside as GCC does. We do retain the nocode_wanted state,
4796 as statement expressions can't ever be entered from the
4797 outside, so any reactivation of code emission (from labels
4798 or loop heads) can be disabled again after the end of it. */
4799 block(NULL
, NULL
, 1);
4800 nocode_wanted
= saved_nocode_wanted
;
4815 /* functions names must be treated as function pointers,
4816 except for unary '&' and sizeof. Since we consider that
4817 functions are not lvalues, we only have to handle it
4818 there and in function calls. */
4819 /* arrays can also be used although they are not lvalues */
4820 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
4821 !(vtop
->type
.t
& VT_ARRAY
))
4823 mk_pointer(&vtop
->type
);
4829 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
4830 gen_cast_s(VT_BOOL
);
4831 vtop
->c
.i
= !vtop
->c
.i
;
4832 } else if ((vtop
->r
& VT_VALMASK
) == VT_CMP
)
4836 vseti(VT_JMP
, gvtst(1, 0));
4848 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
4849 tcc_error("pointer not accepted for unary plus");
4850 /* In order to force cast, we add zero, except for floating point
4851 where we really need an noop (otherwise -0.0 will be transformed
4853 if (!is_float(vtop
->type
.t
)) {
4865 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
4866 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
4867 size
= type_size(&type
, &align
);
4868 if (s
&& s
->a
.aligned
)
4869 align
= 1 << (s
->a
.aligned
- 1);
4870 if (t
== TOK_SIZEOF
) {
4871 if (!(type
.t
& VT_VLA
)) {
4873 tcc_error("sizeof applied to an incomplete type");
4876 vla_runtime_type_size(&type
, &align
);
4881 vtop
->type
.t
|= VT_UNSIGNED
;
4884 case TOK_builtin_expect
:
4885 /* __builtin_expect is a no-op for now */
4886 parse_builtin_params(0, "ee");
4889 case TOK_builtin_types_compatible_p
:
4890 parse_builtin_params(0, "tt");
4891 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4892 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4893 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
4897 case TOK_builtin_choose_expr
:
4924 case TOK_builtin_constant_p
:
4925 parse_builtin_params(1, "e");
4926 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
4930 case TOK_builtin_frame_address
:
4931 case TOK_builtin_return_address
:
4937 if (tok
!= TOK_CINT
) {
4938 tcc_error("%s only takes positive integers",
4939 tok1
== TOK_builtin_return_address
?
4940 "__builtin_return_address" :
4941 "__builtin_frame_address");
4943 level
= (uint32_t)tokc
.i
;
4948 vset(&type
, VT_LOCAL
, 0); /* local frame */
4950 mk_pointer(&vtop
->type
);
4951 indir(); /* -> parent frame */
4953 if (tok1
== TOK_builtin_return_address
) {
4954 // assume return address is just above frame pointer on stack
4957 mk_pointer(&vtop
->type
);
4962 #ifdef TCC_TARGET_X86_64
4963 #ifdef TCC_TARGET_PE
4964 case TOK_builtin_va_start
:
4965 parse_builtin_params(0, "ee");
4966 r
= vtop
->r
& VT_VALMASK
;
4970 tcc_error("__builtin_va_start expects a local variable");
4972 vtop
->type
= char_pointer_type
;
4977 case TOK_builtin_va_arg_types
:
4978 parse_builtin_params(0, "t");
4979 vpushi(classify_x86_64_va_arg(&vtop
->type
));
4986 #ifdef TCC_TARGET_ARM64
4987 case TOK___va_start
: {
4988 parse_builtin_params(0, "ee");
4992 vtop
->type
.t
= VT_VOID
;
4995 case TOK___va_arg
: {
4996 parse_builtin_params(0, "et");
5004 case TOK___arm64_clear_cache
: {
5005 parse_builtin_params(0, "ee");
5008 vtop
->type
.t
= VT_VOID
;
5012 /* pre operations */
5023 t
= vtop
->type
.t
& VT_BTYPE
;
5025 /* In IEEE negate(x) isn't subtract(0,x), but rather
5029 vtop
->c
.f
= -1.0 * 0.0;
5030 else if (t
== VT_DOUBLE
)
5031 vtop
->c
.d
= -1.0 * 0.0;
5033 vtop
->c
.ld
= -1.0 * 0.0;
5041 goto tok_identifier
;
5043 /* allow to take the address of a label */
5044 if (tok
< TOK_UIDENT
)
5045 expect("label identifier");
5046 s
= label_find(tok
);
5048 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5050 if (s
->r
== LABEL_DECLARED
)
5051 s
->r
= LABEL_FORWARD
;
5054 s
->type
.t
= VT_VOID
;
5055 mk_pointer(&s
->type
);
5056 s
->type
.t
|= VT_STATIC
;
5058 vpushsym(&s
->type
, s
);
5064 CType controlling_type
;
5065 int has_default
= 0;
5068 TokenString
*str
= NULL
;
5069 int saved_const_wanted
= const_wanted
;
5074 expr_type(&controlling_type
, expr_eq
);
5075 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5076 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5077 mk_pointer(&controlling_type
);
5078 const_wanted
= saved_const_wanted
;
5082 if (tok
== TOK_DEFAULT
) {
5084 tcc_error("too many 'default'");
5090 AttributeDef ad_tmp
;
5093 parse_btype(&cur_type
, &ad_tmp
);
5094 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5095 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5097 tcc_error("type match twice");
5107 skip_or_save_block(&str
);
5109 skip_or_save_block(NULL
);
5116 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5117 tcc_error("type '%s' does not match any association", buf
);
5119 begin_macro(str
, 1);
5128 // special qnan , snan and infinity values
5133 vtop
->type
.t
= VT_FLOAT
;
5138 goto special_math_val
;
5141 goto special_math_val
;
5148 expect("identifier");
5150 if (!s
|| IS_ASM_SYM(s
)) {
5151 const char *name
= get_tok_str(t
, NULL
);
5153 tcc_error("'%s' undeclared", name
);
5154 /* for simple function calls, we tolerate undeclared
5155 external reference to int() function */
5156 if (tcc_state
->warn_implicit_function_declaration
5157 #ifdef TCC_TARGET_PE
5158 /* people must be warned about using undeclared WINAPI functions
5159 (which usually start with uppercase letter) */
5160 || (name
[0] >= 'A' && name
[0] <= 'Z')
5163 tcc_warning("implicit declaration of function '%s'", name
);
5164 s
= external_global_sym(t
, &func_old_type
, 0);
5168 /* A symbol that has a register is a local register variable,
5169 which starts out as VT_LOCAL value. */
5170 if ((r
& VT_VALMASK
) < VT_CONST
)
5171 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5173 vset(&s
->type
, r
, s
->c
);
5174 /* Point to s as backpointer (even without r&VT_SYM).
5175 Will be used by at least the x86 inline asm parser for
5181 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5182 vtop
->c
.i
= s
->enum_val
;
5187 /* post operations */
5189 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5192 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5195 if (tok
== TOK_ARROW
)
5197 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5200 /* expect pointer on structure */
5201 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5202 expect("struct or union");
5203 if (tok
== TOK_CDOUBLE
)
5204 expect("field name");
5206 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5207 expect("field name");
5208 s
= find_field(&vtop
->type
, tok
);
5210 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5211 /* add field offset to pointer */
5212 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5215 /* change type to field type, and set to lvalue */
5216 vtop
->type
= s
->type
;
5217 vtop
->type
.t
|= qualifiers
;
5218 /* an array is never an lvalue */
5219 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5220 vtop
->r
|= lvalue_type(vtop
->type
.t
);
5221 #ifdef CONFIG_TCC_BCHECK
5222 /* if bound checking, the referenced pointer must be checked */
5223 if (tcc_state
->do_bounds_check
&& (vtop
->r
& VT_VALMASK
) != VT_LOCAL
)
5224 vtop
->r
|= VT_MUSTBOUND
;
5228 } else if (tok
== '[') {
5234 } else if (tok
== '(') {
5237 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5240 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5241 /* pointer test (no array accepted) */
5242 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5243 vtop
->type
= *pointed_type(&vtop
->type
);
5244 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5248 expect("function pointer");
5251 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5253 /* get return type */
5256 sa
= s
->next
; /* first parameter */
5257 nb_args
= regsize
= 0;
5259 /* compute first implicit argument if a structure is returned */
5260 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5261 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5262 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5263 &ret_align
, ®size
);
5265 /* get some space for the returned structure */
5266 size
= type_size(&s
->type
, &align
);
5267 #ifdef TCC_TARGET_ARM64
5268 /* On arm64, a small struct is return in registers.
5269 It is much easier to write it to memory if we know
5270 that we are allowed to write some extra bytes, so
5271 round the allocated space up to a power of 2: */
5273 while (size
& (size
- 1))
5274 size
= (size
| (size
- 1)) + 1;
5276 loc
= (loc
- size
) & -align
;
5278 ret
.r
= VT_LOCAL
| VT_LVAL
;
5279 /* pass it as 'int' to avoid structure arg passing
5281 vseti(VT_LOCAL
, loc
);
5291 /* return in register */
5292 if (is_float(ret
.type
.t
)) {
5293 ret
.r
= reg_fret(ret
.type
.t
);
5294 #ifdef TCC_TARGET_X86_64
5295 if ((ret
.type
.t
& VT_BTYPE
) == VT_QFLOAT
)
5299 #ifndef TCC_TARGET_ARM64
5300 #ifdef TCC_TARGET_X86_64
5301 if ((ret
.type
.t
& VT_BTYPE
) == VT_QLONG
)
5303 if ((ret
.type
.t
& VT_BTYPE
) == VT_LLONG
)
5314 gfunc_param_typed(s
, sa
);
5324 tcc_error("too few arguments to function");
5326 gfunc_call(nb_args
);
5329 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
5330 vsetc(&ret
.type
, r
, &ret
.c
);
5331 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
5334 /* handle packed struct return */
5335 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
5338 size
= type_size(&s
->type
, &align
);
5339 /* We're writing whole regs often, make sure there's enough
5340 space. Assume register size is power of 2. */
5341 if (regsize
> align
)
5343 loc
= (loc
- size
) & -align
;
5347 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
5351 if (--ret_nregs
== 0)
5355 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
5363 ST_FUNC
void expr_prod(void)
5368 while (tok
== '*' || tok
== '/' || tok
== '%') {
5376 ST_FUNC
void expr_sum(void)
5381 while (tok
== '+' || tok
== '-') {
5389 static void expr_shift(void)
5394 while (tok
== TOK_SHL
|| tok
== TOK_SAR
) {
5402 static void expr_cmp(void)
5407 while ((tok
>= TOK_ULE
&& tok
<= TOK_GT
) ||
5408 tok
== TOK_ULT
|| tok
== TOK_UGE
) {
5416 static void expr_cmpeq(void)
5421 while (tok
== TOK_EQ
|| tok
== TOK_NE
) {
5429 static void expr_and(void)
5432 while (tok
== '&') {
5439 static void expr_xor(void)
5442 while (tok
== '^') {
5449 static void expr_or(void)
5452 while (tok
== '|') {
5459 static void expr_land(void)
5462 if (tok
== TOK_LAND
) {
5465 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5466 gen_cast_s(VT_BOOL
);
5471 while (tok
== TOK_LAND
) {
5487 if (tok
!= TOK_LAND
) {
5500 static void expr_lor(void)
5503 if (tok
== TOK_LOR
) {
5506 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5507 gen_cast_s(VT_BOOL
);
5512 while (tok
== TOK_LOR
) {
5528 if (tok
!= TOK_LOR
) {
5541 /* Assuming vtop is a value used in a conditional context
5542 (i.e. compared with zero) return 0 if it's false, 1 if
5543 true and -1 if it can't be statically determined. */
5544 static int condition_3way(void)
5547 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5548 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
5550 gen_cast_s(VT_BOOL
);
5557 static void expr_cond(void)
5559 int tt
, u
, r1
, r2
, rc
, t1
, t2
, bt1
, bt2
, islv
, c
, g
;
5561 CType type
, type1
, type2
;
5566 c
= condition_3way();
5567 g
= (tok
== ':' && gnu_ext
);
5569 /* needed to avoid having different registers saved in
5571 if (is_float(vtop
->type
.t
)) {
5573 #ifdef TCC_TARGET_X86_64
5574 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5598 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5599 mk_pointer(&vtop
->type
);
5601 sv
= *vtop
; /* save value to handle it later */
5602 vtop
--; /* no vpop so that FP stack is not flushed */
5618 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5619 mk_pointer(&vtop
->type
);
5622 bt1
= t1
& VT_BTYPE
;
5624 bt2
= t2
& VT_BTYPE
;
5628 /* cast operands to correct type according to ISOC rules */
5629 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
5630 type
.t
= VT_VOID
; /* NOTE: as an extension, we accept void on only one side */
5631 } else if (is_float(bt1
) || is_float(bt2
)) {
5632 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
5633 type
.t
= VT_LDOUBLE
;
5635 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
5640 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
5641 /* cast to biggest op */
5642 type
.t
= VT_LLONG
| VT_LONG
;
5643 if (bt1
== VT_LLONG
)
5645 if (bt2
== VT_LLONG
)
5647 /* convert to unsigned if it does not fit in a long long */
5648 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
5649 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
5650 type
.t
|= VT_UNSIGNED
;
5651 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
5652 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
5653 /* If one is a null ptr constant the result type
5655 if (is_null_pointer (vtop
)) type
= type1
;
5656 else if (is_null_pointer (&sv
)) type
= type2
;
5657 else if (bt1
!= bt2
)
5658 tcc_error("incompatible types in conditional expressions");
5660 CType
*pt1
= pointed_type(&type1
);
5661 CType
*pt2
= pointed_type(&type2
);
5662 int pbt1
= pt1
->t
& VT_BTYPE
;
5663 int pbt2
= pt2
->t
& VT_BTYPE
;
5664 int newquals
, copied
= 0;
5665 /* pointers to void get preferred, otherwise the
5666 pointed to types minus qualifs should be compatible */
5667 type
= (pbt1
== VT_VOID
) ? type1
: type2
;
5668 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
) {
5669 if(!compare_types(pt1
, pt2
, 1/*unqualif*/))
5670 tcc_warning("pointer type mismatch in conditional expression\n");
5672 /* combine qualifs */
5673 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
5674 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
5677 /* copy the pointer target symbol */
5678 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5681 pointed_type(&type
)->t
|= newquals
;
5683 /* pointers to incomplete arrays get converted to
5684 pointers to completed ones if possible */
5685 if (pt1
->t
& VT_ARRAY
5686 && pt2
->t
& VT_ARRAY
5687 && pointed_type(&type
)->ref
->c
< 0
5688 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
5691 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
5693 pointed_type(&type
)->ref
=
5694 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
5695 0, pointed_type(&type
)->ref
->c
);
5696 pointed_type(&type
)->ref
->c
=
5697 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
5700 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
5701 /* XXX: test structure compatibility */
5702 type
= bt1
== VT_STRUCT
? type1
: type2
;
5704 /* integer operations */
5705 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
5706 /* convert to unsigned if it does not fit in an integer */
5707 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
5708 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
5709 type
.t
|= VT_UNSIGNED
;
5711 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
5712 that `(expr ? a : b).mem` does not error with "lvalue expected" */
5713 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
5715 /* now we convert second operand */
5719 mk_pointer(&vtop
->type
);
5721 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5726 if (is_float(type
.t
)) {
5728 #ifdef TCC_TARGET_X86_64
5729 if ((type
.t
& VT_BTYPE
) == VT_LDOUBLE
) {
5733 } else if ((type
.t
& VT_BTYPE
) == VT_LLONG
) {
5734 /* for long longs, we use fixed registers to avoid having
5735 to handle a complicated move */
5746 /* this is horrible, but we must also convert first
5752 mk_pointer(&vtop
->type
);
5754 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
5758 if (c
< 0 || islv
) {
5760 move_reg(r2
, r1
, type
.t
);
5770 static void expr_eq(void)
5776 (tok
>= TOK_A_MOD
&& tok
<= TOK_A_DIV
) ||
5777 tok
== TOK_A_XOR
|| tok
== TOK_A_OR
||
5778 tok
== TOK_A_SHL
|| tok
== TOK_A_SAR
) {
5793 ST_FUNC
void gexpr(void)
5804 /* parse a constant expression and return value in vtop. */
5805 static void expr_const1(void)
5814 /* parse an integer constant and return its value. */
5815 static inline int64_t expr_const64(void)
5819 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
5820 expect("constant expression");
5826 /* parse an integer constant and return its value.
5827 Complain if it doesn't fit 32bit (signed or unsigned). */
5828 ST_FUNC
int expr_const(void)
5831 int64_t wc
= expr_const64();
5833 if (c
!= wc
&& (unsigned)c
!= wc
)
5834 tcc_error("constant exceeds 32 bit");
5838 /* return the label token if current token is a label, otherwise
5840 static int is_label(void)
5844 /* fast test first */
5845 if (tok
< TOK_UIDENT
)
5847 /* no need to save tokc because tok is an identifier */
5853 unget_tok(last_tok
);
5858 #ifndef TCC_TARGET_ARM64
5859 static void gfunc_return(CType
*func_type
)
5861 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
5862 CType type
, ret_type
;
5863 int ret_align
, ret_nregs
, regsize
;
5864 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
5865 &ret_align
, ®size
);
5866 if (0 == ret_nregs
) {
5867 /* if returning structure, must copy it to implicit
5868 first pointer arg location */
5871 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
5874 /* copy structure value to pointer */
5877 /* returning structure packed into registers */
5878 int r
, size
, addr
, align
;
5879 size
= type_size(func_type
,&align
);
5880 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
5881 (vtop
->c
.i
& (ret_align
-1)))
5882 && (align
& (ret_align
-1))) {
5883 loc
= (loc
- size
) & -ret_align
;
5886 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
5890 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
5892 vtop
->type
= ret_type
;
5893 if (is_float(ret_type
.t
))
5894 r
= rc_fret(ret_type
.t
);
5905 if (--ret_nregs
== 0)
5907 /* We assume that when a structure is returned in multiple
5908 registers, their classes are consecutive values of the
5911 vtop
->c
.i
+= regsize
;
5915 } else if (is_float(func_type
->t
)) {
5916 gv(rc_fret(func_type
->t
));
5920 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
5924 static int case_cmp(const void *pa
, const void *pb
)
5926 int64_t a
= (*(struct case_t
**) pa
)->v1
;
5927 int64_t b
= (*(struct case_t
**) pb
)->v1
;
5928 return a
< b
? -1 : a
> b
;
5931 static void gcase(struct case_t
**base
, int len
, int *bsym
)
5935 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
5953 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
5955 gcase(base
, len
/2, bsym
);
5956 if (cur_switch
->def_sym
)
5957 gjmp_addr(cur_switch
->def_sym
);
5959 *bsym
= gjmp(*bsym
);
5963 base
+= e
; len
-= e
;
5973 if (p
->v1
== p
->v2
) {
5975 gtst_addr(0, p
->sym
);
5985 gtst_addr(0, p
->sym
);
5991 static void block(int *bsym
, int *csym
, int is_expr
)
5993 int a
, b
, c
, d
, cond
;
5996 /* generate line number info */
5997 if (tcc_state
->do_debug
)
5998 tcc_debug_line(tcc_state
);
6001 /* default return value is (void) */
6003 vtop
->type
.t
= VT_VOID
;
6006 if (tok
== TOK_IF
) {
6008 int saved_nocode_wanted
= nocode_wanted
;
6013 cond
= condition_3way();
6019 nocode_wanted
|= 0x20000000;
6020 block(bsym
, csym
, 0);
6022 nocode_wanted
= saved_nocode_wanted
;
6024 if (c
== TOK_ELSE
) {
6029 nocode_wanted
|= 0x20000000;
6030 block(bsym
, csym
, 0);
6031 gsym(d
); /* patch else jmp */
6033 nocode_wanted
= saved_nocode_wanted
;
6036 } else if (tok
== TOK_WHILE
) {
6037 int saved_nocode_wanted
;
6038 nocode_wanted
&= ~0x20000000;
6048 saved_nocode_wanted
= nocode_wanted
;
6050 nocode_wanted
= saved_nocode_wanted
;
6055 } else if (tok
== '{') {
6057 int block_vla_sp_loc
= vla_sp_loc
, saved_vlas_in_scope
= vlas_in_scope
;
6060 /* record local declaration stack position */
6062 llabel
= local_label_stack
;
6065 /* handle local labels declarations */
6066 if (tok
== TOK_LABEL
) {
6069 if (tok
< TOK_UIDENT
)
6070 expect("label identifier");
6071 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6081 while (tok
!= '}') {
6082 if ((a
= is_label()))
6089 block(bsym
, csym
, is_expr
);
6092 /* pop locally defined labels */
6093 label_pop(&local_label_stack
, llabel
, is_expr
);
6094 /* pop locally defined symbols */
6096 /* In the is_expr case (a statement expression is finished here),
6097 vtop might refer to symbols on the local_stack. Either via the
6098 type or via vtop->sym. We can't pop those nor any that in turn
6099 might be referred to. To make it easier we don't roll back
6100 any symbols in that case; some upper level call to block() will
6101 do that. We do have to remove such symbols from the lookup
6102 tables, though. sym_pop will do that. */
6103 sym_pop(&local_stack
, s
, is_expr
);
6105 /* Pop VLA frames and restore stack pointer if required */
6106 if (vlas_in_scope
> saved_vlas_in_scope
) {
6107 vla_sp_loc
= saved_vlas_in_scope
? block_vla_sp_loc
: vla_sp_root_loc
;
6110 vlas_in_scope
= saved_vlas_in_scope
;
6113 } else if (tok
== TOK_RETURN
) {
6117 gen_assign_cast(&func_vt
);
6118 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6121 gfunc_return(&func_vt
);
6124 /* jump unless last stmt in top-level block */
6125 if (tok
!= '}' || local_scope
!= 1)
6127 nocode_wanted
|= 0x20000000;
6128 } else if (tok
== TOK_BREAK
) {
6131 tcc_error("cannot break");
6132 *bsym
= gjmp(*bsym
);
6135 nocode_wanted
|= 0x20000000;
6136 } else if (tok
== TOK_CONTINUE
) {
6139 tcc_error("cannot continue");
6140 vla_sp_restore_root();
6141 *csym
= gjmp(*csym
);
6144 } else if (tok
== TOK_FOR
) {
6146 int saved_nocode_wanted
;
6147 nocode_wanted
&= ~0x20000000;
6153 /* c99 for-loop init decl? */
6154 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6155 /* no, regular for-loop init expr */
6181 saved_nocode_wanted
= nocode_wanted
;
6183 nocode_wanted
= saved_nocode_wanted
;
6188 sym_pop(&local_stack
, s
, 0);
6191 if (tok
== TOK_DO
) {
6192 int saved_nocode_wanted
;
6193 nocode_wanted
&= ~0x20000000;
6199 saved_nocode_wanted
= nocode_wanted
;
6205 nocode_wanted
= saved_nocode_wanted
;
6209 nocode_wanted
= saved_nocode_wanted
;
6214 if (tok
== TOK_SWITCH
) {
6215 struct switch_t
*saved
, sw
;
6216 int saved_nocode_wanted
= nocode_wanted
;
6222 switchval
= *vtop
--;
6224 b
= gjmp(0); /* jump to first case */
6225 sw
.p
= NULL
; sw
.n
= 0; sw
.def_sym
= 0;
6229 nocode_wanted
= saved_nocode_wanted
;
6230 a
= gjmp(a
); /* add implicit break */
6233 qsort(sw
.p
, sw
.n
, sizeof(void*), case_cmp
);
6234 for (b
= 1; b
< sw
.n
; b
++)
6235 if (sw
.p
[b
- 1]->v2
>= sw
.p
[b
]->v1
)
6236 tcc_error("duplicate case value");
6237 /* Our switch table sorting is signed, so the compared
6238 value needs to be as well when it's 64bit. */
6239 if ((switchval
.type
.t
& VT_BTYPE
) == VT_LLONG
)
6240 switchval
.type
.t
&= ~VT_UNSIGNED
;
6242 gcase(sw
.p
, sw
.n
, &a
);
6245 gjmp_addr(sw
.def_sym
);
6246 dynarray_reset(&sw
.p
, &sw
.n
);
6251 if (tok
== TOK_CASE
) {
6252 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
6255 nocode_wanted
&= ~0x20000000;
6257 cr
->v1
= cr
->v2
= expr_const64();
6258 if (gnu_ext
&& tok
== TOK_DOTS
) {
6260 cr
->v2
= expr_const64();
6261 if (cr
->v2
< cr
->v1
)
6262 tcc_warning("empty case range");
6265 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
6268 goto block_after_label
;
6270 if (tok
== TOK_DEFAULT
) {
6275 if (cur_switch
->def_sym
)
6276 tcc_error("too many 'default'");
6277 cur_switch
->def_sym
= ind
;
6279 goto block_after_label
;
6281 if (tok
== TOK_GOTO
) {
6283 if (tok
== '*' && gnu_ext
) {
6287 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
6290 } else if (tok
>= TOK_UIDENT
) {
6291 s
= label_find(tok
);
6292 /* put forward definition if needed */
6294 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6296 if (s
->r
== LABEL_DECLARED
)
6297 s
->r
= LABEL_FORWARD
;
6299 vla_sp_restore_root();
6300 if (s
->r
& LABEL_FORWARD
)
6301 s
->jnext
= gjmp(s
->jnext
);
6303 gjmp_addr(s
->jnext
);
6306 expect("label identifier");
6309 } else if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
6318 if (s
->r
== LABEL_DEFINED
)
6319 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
6321 s
->r
= LABEL_DEFINED
;
6323 s
= label_push(&global_label_stack
, b
, LABEL_DEFINED
);
6327 /* we accept this, but it is a mistake */
6329 nocode_wanted
&= ~0x20000000;
6331 tcc_warning("deprecated use of label at end of compound statement");
6335 block(bsym
, csym
, is_expr
);
6338 /* expression case */
6353 /* This skips over a stream of tokens containing balanced {} and ()
6354 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
6355 with a '{'). If STR then allocates and stores the skipped tokens
6356 in *STR. This doesn't check if () and {} are nested correctly,
6357 i.e. "({)}" is accepted. */
6358 static void skip_or_save_block(TokenString
**str
)
6360 int braces
= tok
== '{';
6363 *str
= tok_str_alloc();
6365 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
6367 if (tok
== TOK_EOF
) {
6368 if (str
|| level
> 0)
6369 tcc_error("unexpected end of file");
6374 tok_str_add_tok(*str
);
6377 if (t
== '{' || t
== '(') {
6379 } else if (t
== '}' || t
== ')') {
6381 if (level
== 0 && braces
&& t
== '}')
6386 tok_str_add(*str
, -1);
6387 tok_str_add(*str
, 0);
6391 #define EXPR_CONST 1
6394 static void parse_init_elem(int expr_type
)
6396 int saved_global_expr
;
6399 /* compound literals must be allocated globally in this case */
6400 saved_global_expr
= global_expr
;
6403 global_expr
= saved_global_expr
;
6404 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
6405 (compound literals). */
6406 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
6407 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
6408 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
6409 #ifdef TCC_TARGET_PE
6410 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
6413 tcc_error("initializer element is not constant");
6421 /* put zeros for variable based init */
6422 static void init_putz(Section
*sec
, unsigned long c
, int size
)
6425 /* nothing to do because globals are already set to zero */
6427 vpush_global_sym(&func_old_type
, TOK_memset
);
6429 #ifdef TCC_TARGET_ARM
6440 /* t is the array or struct type. c is the array or struct
6441 address. cur_field is the pointer to the current
6442 field, for arrays the 'c' member contains the current start
6443 index. 'size_only' is true if only size info is needed (only used
6444 in arrays). al contains the already initialized length of the
6445 current container (starting at c). This returns the new length of that. */
6446 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
6447 Sym
**cur_field
, int size_only
, int al
)
6450 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
6451 unsigned long corig
= c
;
6455 if (gnu_ext
&& (l
= is_label()) != 0)
6457 /* NOTE: we only support ranges for last designator */
6458 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
6460 if (!(type
->t
& VT_ARRAY
))
6461 expect("array type");
6463 index
= index_last
= expr_const();
6464 if (tok
== TOK_DOTS
&& gnu_ext
) {
6466 index_last
= expr_const();
6470 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
6472 tcc_error("invalid index");
6474 (*cur_field
)->c
= index_last
;
6475 type
= pointed_type(type
);
6476 elem_size
= type_size(type
, &align
);
6477 c
+= index
* elem_size
;
6478 nb_elems
= index_last
- index
+ 1;
6484 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
6485 expect("struct/union type");
6486 f
= find_field(type
, l
);
6499 } else if (!gnu_ext
) {
6503 if (type
->t
& VT_ARRAY
) {
6504 index
= (*cur_field
)->c
;
6505 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
6506 tcc_error("index too large");
6507 type
= pointed_type(type
);
6508 c
+= index
* type_size(type
, &align
);
6511 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
6512 *cur_field
= f
= f
->next
;
6514 tcc_error("too many field init");
6519 /* must put zero in holes (note that doing it that way
6520 ensures that it even works with designators) */
6521 if (!size_only
&& c
- corig
> al
)
6522 init_putz(sec
, corig
+ al
, c
- corig
- al
);
6523 decl_initializer(type
, sec
, c
, 0, size_only
);
6525 /* XXX: make it more general */
6526 if (!size_only
&& nb_elems
> 1) {
6527 unsigned long c_end
;
6532 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
6533 for (i
= 1; i
< nb_elems
; i
++) {
6534 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
6539 } else if (!NODATA_WANTED
) {
6540 c_end
= c
+ nb_elems
* elem_size
;
6541 if (c_end
> sec
->data_allocated
)
6542 section_realloc(sec
, c_end
);
6543 src
= sec
->data
+ c
;
6545 for(i
= 1; i
< nb_elems
; i
++) {
6547 memcpy(dst
, src
, elem_size
);
6551 c
+= nb_elems
* type_size(type
, &align
);
6557 /* store a value or an expression directly in global data or in local array */
6558 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
6565 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
6569 /* XXX: not portable */
6570 /* XXX: generate error if incorrect relocation */
6571 gen_assign_cast(&dtype
);
6572 bt
= type
->t
& VT_BTYPE
;
6574 if ((vtop
->r
& VT_SYM
)
6577 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
6578 || (type
->t
& VT_BITFIELD
))
6579 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
6581 tcc_error("initializer element is not computable at load time");
6583 if (NODATA_WANTED
) {
6588 size
= type_size(type
, &align
);
6589 section_reserve(sec
, c
+ size
);
6590 ptr
= sec
->data
+ c
;
6592 /* XXX: make code faster ? */
6593 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
6594 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
6595 /* XXX This rejects compound literals like
6596 '(void *){ptr}'. The problem is that '&sym' is
6597 represented the same way, which would be ruled out
6598 by the SYM_FIRST_ANOM check above, but also '"string"'
6599 in 'char *p = "string"' is represented the same
6600 with the type being VT_PTR and the symbol being an
6601 anonymous one. That is, there's no difference in vtop
6602 between '(void *){x}' and '&(void *){x}'. Ignore
6603 pointer typed entities here. Hopefully no real code
6604 will every use compound literals with scalar type. */
6605 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
6606 /* These come from compound literals, memcpy stuff over. */
6610 esym
= elfsym(vtop
->sym
);
6611 ssec
= tcc_state
->sections
[esym
->st_shndx
];
6612 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
6614 /* We need to copy over all memory contents, and that
6615 includes relocations. Use the fact that relocs are
6616 created it order, so look from the end of relocs
6617 until we hit one before the copied region. */
6618 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
6619 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
6620 while (num_relocs
--) {
6622 if (rel
->r_offset
>= esym
->st_value
+ size
)
6624 if (rel
->r_offset
< esym
->st_value
)
6626 /* Note: if the same fields are initialized multiple
6627 times (possible with designators) then we possibly
6628 add multiple relocations for the same offset here.
6629 That would lead to wrong code, the last reloc needs
6630 to win. We clean this up later after the whole
6631 initializer is parsed. */
6632 put_elf_reloca(symtab_section
, sec
,
6633 c
+ rel
->r_offset
- esym
->st_value
,
6634 ELFW(R_TYPE
)(rel
->r_info
),
6635 ELFW(R_SYM
)(rel
->r_info
),
6645 if (type
->t
& VT_BITFIELD
) {
6646 int bit_pos
, bit_size
, bits
, n
;
6647 unsigned char *p
, v
, m
;
6648 bit_pos
= BIT_POS(vtop
->type
.t
);
6649 bit_size
= BIT_SIZE(vtop
->type
.t
);
6650 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
6651 bit_pos
&= 7, bits
= 0;
6656 v
= vtop
->c
.i
>> bits
<< bit_pos
;
6657 m
= ((1 << n
) - 1) << bit_pos
;
6658 *p
= (*p
& ~m
) | (v
& m
);
6659 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
6663 /* XXX: when cross-compiling we assume that each type has the
6664 same representation on host and target, which is likely to
6665 be wrong in the case of long double */
6667 vtop
->c
.i
= vtop
->c
.i
!= 0;
6669 *(char *)ptr
|= vtop
->c
.i
;
6672 *(short *)ptr
|= vtop
->c
.i
;
6675 *(float*)ptr
= vtop
->c
.f
;
6678 *(double *)ptr
= vtop
->c
.d
;
6681 #if defined TCC_IS_NATIVE_387
6682 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
6683 memcpy(ptr
, &vtop
->c
.ld
, 10);
6685 else if (sizeof (long double) == sizeof (double))
6686 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
6688 else if (vtop
->c
.ld
== 0.0)
6692 if (sizeof(long double) == LDOUBLE_SIZE
)
6693 *(long double*)ptr
= vtop
->c
.ld
;
6694 else if (sizeof(double) == LDOUBLE_SIZE
)
6695 *(double *)ptr
= (double)vtop
->c
.ld
;
6697 tcc_error("can't cross compile long double constants");
6701 *(long long *)ptr
|= vtop
->c
.i
;
6708 addr_t val
= vtop
->c
.i
;
6710 if (vtop
->r
& VT_SYM
)
6711 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6713 *(addr_t
*)ptr
|= val
;
6715 if (vtop
->r
& VT_SYM
)
6716 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6717 *(addr_t
*)ptr
|= val
;
6723 int val
= vtop
->c
.i
;
6725 if (vtop
->r
& VT_SYM
)
6726 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
6730 if (vtop
->r
& VT_SYM
)
6731 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
6740 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
6747 /* 't' contains the type and storage info. 'c' is the offset of the
6748 object in section 'sec'. If 'sec' is NULL, it means stack based
6749 allocation. 'first' is true if array '{' must be read (multi
6750 dimension implicit array init handling). 'size_only' is true if
6751 size only evaluation is wanted (only for arrays). */
6752 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
6753 int first
, int size_only
)
6755 int len
, n
, no_oblock
, nb
, i
;
6762 /* If we currently are at an '}' or ',' we have read an initializer
6763 element in one of our callers, and not yet consumed it. */
6764 have_elem
= tok
== '}' || tok
== ',';
6765 if (!have_elem
&& tok
!= '{' &&
6766 /* In case of strings we have special handling for arrays, so
6767 don't consume them as initializer value (which would commit them
6768 to some anonymous symbol). */
6769 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
6771 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6776 !(type
->t
& VT_ARRAY
) &&
6777 /* Use i_c_parameter_t, to strip toplevel qualifiers.
6778 The source type might have VT_CONSTANT set, which is
6779 of course assignable to non-const elements. */
6780 is_compatible_unqualified_types(type
, &vtop
->type
)) {
6781 init_putv(type
, sec
, c
);
6782 } else if (type
->t
& VT_ARRAY
) {
6785 t1
= pointed_type(type
);
6786 size1
= type_size(t1
, &align1
);
6789 if ((first
&& tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
6792 tcc_error("character array initializer must be a literal,"
6793 " optionally enclosed in braces");
6798 /* only parse strings here if correct type (otherwise: handle
6799 them as ((w)char *) expressions */
6800 if ((tok
== TOK_LSTR
&&
6801 #ifdef TCC_TARGET_PE
6802 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
6804 (t1
->t
& VT_BTYPE
) == VT_INT
6806 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
6808 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6811 /* compute maximum number of chars wanted */
6813 cstr_len
= tokc
.str
.size
;
6815 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
6818 if (n
>= 0 && nb
> (n
- len
))
6822 tcc_warning("initializer-string for array is too long");
6823 /* in order to go faster for common case (char
6824 string in global variable, we handle it
6826 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
6828 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
6832 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
6834 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
6836 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
6843 /* only add trailing zero if enough storage (no
6844 warning in this case since it is standard) */
6845 if (n
< 0 || len
< n
) {
6848 init_putv(t1
, sec
, c
+ (len
* size1
));
6859 while (tok
!= '}' || have_elem
) {
6860 len
= decl_designator(type
, sec
, c
, &f
, size_only
, len
);
6862 if (type
->t
& VT_ARRAY
) {
6864 /* special test for multi dimensional arrays (may not
6865 be strictly correct if designators are used at the
6867 if (no_oblock
&& len
>= n
*size1
)
6870 if (s
->type
.t
== VT_UNION
)
6874 if (no_oblock
&& f
== NULL
)
6883 /* put zeros at the end */
6884 if (!size_only
&& len
< n
*size1
)
6885 init_putz(sec
, c
+ len
, n
*size1
- len
);
6888 /* patch type size if needed, which happens only for array types */
6890 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
6891 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6894 if (first
|| tok
== '{') {
6902 } else if (tok
== '{') {
6904 decl_initializer(type
, sec
, c
, first
, size_only
);
6906 } else if (size_only
) {
6907 /* If we supported only ISO C we wouldn't have to accept calling
6908 this on anything than an array size_only==1 (and even then
6909 only on the outermost level, so no recursion would be needed),
6910 because initializing a flex array member isn't supported.
6911 But GNU C supports it, so we need to recurse even into
6912 subfields of structs and arrays when size_only is set. */
6913 /* just skip expression */
6914 skip_or_save_block(NULL
);
6917 /* This should happen only when we haven't parsed
6918 the init element above for fear of committing a
6919 string constant to memory too early. */
6920 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
6921 expect("string constant");
6922 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
6924 init_putv(type
, sec
, c
);
6928 /* parse an initializer for type 't' if 'has_init' is non zero, and
6929 allocate space in local or global data space ('r' is either
6930 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
6931 variable 'v' of scope 'scope' is declared before initializers
6932 are parsed. If 'v' is zero, then a reference to the new object
6933 is put in the value stack. If 'has_init' is 2, a special parsing
6934 is done to handle string constants. */
6935 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
6936 int has_init
, int v
, int scope
)
6938 int size
, align
, addr
;
6939 TokenString
*init_str
= NULL
;
6942 Sym
*flexible_array
;
6944 int saved_nocode_wanted
= nocode_wanted
;
6945 #ifdef CONFIG_TCC_BCHECK
6949 /* Always allocate static or global variables */
6950 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
6951 nocode_wanted
|= 0x80000000;
6953 #ifdef CONFIG_TCC_BCHECK
6954 bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
6957 flexible_array
= NULL
;
6958 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6959 Sym
*field
= type
->ref
->next
;
6962 field
= field
->next
;
6963 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
6964 flexible_array
= field
;
6968 size
= type_size(type
, &align
);
6969 /* If unknown size, we must evaluate it before
6970 evaluating initializers because
6971 initializers can generate global data too
6972 (e.g. string pointers or ISOC99 compound
6973 literals). It also simplifies local
6974 initializers handling */
6975 if (size
< 0 || (flexible_array
&& has_init
)) {
6977 tcc_error("unknown type size");
6978 /* get all init string */
6979 if (has_init
== 2) {
6980 init_str
= tok_str_alloc();
6981 /* only get strings */
6982 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
6983 tok_str_add_tok(init_str
);
6986 tok_str_add(init_str
, -1);
6987 tok_str_add(init_str
, 0);
6989 skip_or_save_block(&init_str
);
6994 begin_macro(init_str
, 1);
6996 decl_initializer(type
, NULL
, 0, 1, 1);
6997 /* prepare second initializer parsing */
6998 macro_ptr
= init_str
->str
;
7001 /* if still unknown size, error */
7002 size
= type_size(type
, &align
);
7004 tcc_error("unknown type size");
7006 /* If there's a flex member and it was used in the initializer
7008 if (flexible_array
&&
7009 flexible_array
->type
.ref
->c
> 0)
7010 size
+= flexible_array
->type
.ref
->c
7011 * pointed_size(&flexible_array
->type
);
7012 /* take into account specified alignment if bigger */
7013 if (ad
->a
.aligned
) {
7014 int speca
= 1 << (ad
->a
.aligned
- 1);
7017 } else if (ad
->a
.packed
) {
7021 if (!v
&& NODATA_WANTED
)
7022 size
= 0, align
= 1;
7024 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7026 #ifdef CONFIG_TCC_BCHECK
7027 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7031 loc
= (loc
- size
) & -align
;
7033 #ifdef CONFIG_TCC_BCHECK
7034 /* handles bounds */
7035 /* XXX: currently, since we do only one pass, we cannot track
7036 '&' operators, so we add only arrays */
7037 if (bcheck
&& (type
->t
& VT_ARRAY
)) {
7039 /* add padding between regions */
7041 /* then add local bound info */
7042 bounds_ptr
= section_ptr_add(lbounds_section
, 2 * sizeof(addr_t
));
7043 bounds_ptr
[0] = addr
;
7044 bounds_ptr
[1] = size
;
7048 /* local variable */
7049 #ifdef CONFIG_TCC_ASM
7050 if (ad
->asm_label
) {
7051 int reg
= asm_parse_regvar(ad
->asm_label
);
7053 r
= (r
& ~VT_VALMASK
) | reg
;
7056 sym
= sym_push(v
, type
, r
, addr
);
7059 /* push local reference */
7060 vset(type
, r
, addr
);
7063 if (v
&& scope
== VT_CONST
) {
7064 /* see if the symbol was already defined */
7067 patch_storage(sym
, ad
, type
);
7068 /* we accept several definitions of the same global variable. */
7069 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7074 /* allocate symbol in corresponding section */
7079 else if (tcc_state
->nocommon
)
7084 addr
= section_add(sec
, size
, align
);
7085 #ifdef CONFIG_TCC_BCHECK
7086 /* add padding if bound check */
7088 section_add(sec
, 1, 1);
7091 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7092 sec
= common_section
;
7097 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7098 patch_storage(sym
, ad
, NULL
);
7100 /* Local statics have a scope until now (for
7101 warnings), remove it here. */
7103 /* update symbol definition */
7104 put_extern_sym(sym
, sec
, addr
, size
);
7106 /* push global reference */
7107 sym
= get_sym_ref(type
, sec
, addr
, size
);
7108 vpushsym(type
, sym
);
7112 #ifdef CONFIG_TCC_BCHECK
7113 /* handles bounds now because the symbol must be defined
7114 before for the relocation */
7118 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7119 /* then add global bound info */
7120 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7121 bounds_ptr
[0] = 0; /* relocated */
7122 bounds_ptr
[1] = size
;
7127 if (type
->t
& VT_VLA
) {
7133 /* save current stack pointer */
7134 if (vlas_in_scope
== 0) {
7135 if (vla_sp_root_loc
== -1)
7136 vla_sp_root_loc
= (loc
-= PTR_SIZE
);
7137 gen_vla_sp_save(vla_sp_root_loc
);
7140 vla_runtime_type_size(type
, &a
);
7141 gen_vla_alloc(type
, a
);
7142 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7143 /* on _WIN64, because of the function args scratch area, the
7144 result of alloca differs from RSP and is returned in RAX. */
7145 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7147 gen_vla_sp_save(addr
);
7151 } else if (has_init
) {
7152 size_t oldreloc_offset
= 0;
7153 if (sec
&& sec
->reloc
)
7154 oldreloc_offset
= sec
->reloc
->data_offset
;
7155 decl_initializer(type
, sec
, addr
, 1, 0);
7156 if (sec
&& sec
->reloc
)
7157 squeeze_multi_relocs(sec
, oldreloc_offset
);
7158 /* patch flexible array member size back to -1, */
7159 /* for possible subsequent similar declarations */
7161 flexible_array
->type
.ref
->c
= -1;
7165 /* restore parse state if needed */
7171 nocode_wanted
= saved_nocode_wanted
;
7174 /* parse a function defined by symbol 'sym' and generate its code in
7175 'cur_text_section' */
7176 static void gen_function(Sym
*sym
)
7179 ind
= cur_text_section
->data_offset
;
7180 if (sym
->a
.aligned
) {
7181 size_t newoff
= section_add(cur_text_section
, 0,
7182 1 << (sym
->a
.aligned
- 1));
7183 gen_fill_nops(newoff
- ind
);
7185 /* NOTE: we patch the symbol size later */
7186 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7187 funcname
= get_tok_str(sym
->v
, NULL
);
7189 /* Initialize VLA state */
7191 vla_sp_root_loc
= -1;
7192 /* put debug symbol */
7193 tcc_debug_funcstart(tcc_state
, sym
);
7194 /* push a dummy symbol to enable local sym storage */
7195 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
7196 local_scope
= 1; /* for function parameters */
7197 gfunc_prolog(&sym
->type
);
7200 block(NULL
, NULL
, 0);
7201 if (!(nocode_wanted
& 0x20000000)
7202 && ((func_vt
.t
& VT_BTYPE
) == VT_INT
)
7203 && !strcmp (funcname
, "main"))
7207 gen_assign_cast(&func_vt
);
7208 gfunc_return(&func_vt
);
7213 cur_text_section
->data_offset
= ind
;
7214 label_pop(&global_label_stack
, NULL
, 0);
7215 /* reset local stack */
7217 sym_pop(&local_stack
, NULL
, 0);
7218 /* end of function */
7219 /* patch symbol size */
7220 elfsym(sym
)->st_size
= ind
- func_ind
;
7221 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
7222 /* It's better to crash than to generate wrong code */
7223 cur_text_section
= NULL
;
7224 funcname
= ""; /* for safety */
7225 func_vt
.t
= VT_VOID
; /* for safety */
7226 func_var
= 0; /* for safety */
7227 ind
= 0; /* for safety */
7228 nocode_wanted
= 0x80000000;
7232 static void gen_inline_functions(TCCState
*s
)
7235 int inline_generated
, i
, ln
;
7236 struct InlineFunc
*fn
;
7238 ln
= file
->line_num
;
7239 /* iterate while inline function are referenced */
7241 inline_generated
= 0;
7242 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7243 fn
= s
->inline_fns
[i
];
7245 if (sym
&& sym
->c
) {
7246 /* the function was used: generate its code and
7247 convert it to a normal function */
7250 pstrcpy(file
->filename
, sizeof file
->filename
, fn
->filename
);
7251 sym
->type
.t
&= ~VT_INLINE
;
7253 begin_macro(fn
->func_str
, 1);
7255 cur_text_section
= text_section
;
7259 inline_generated
= 1;
7262 } while (inline_generated
);
7263 file
->line_num
= ln
;
7266 ST_FUNC
void free_inline_functions(TCCState
*s
)
7269 /* free tokens of unused inline functions */
7270 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
7271 struct InlineFunc
*fn
= s
->inline_fns
[i
];
7273 tok_str_free(fn
->func_str
);
7275 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
7278 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
7279 if parsing old style parameter decl list (and FUNC_SYM is set then) */
7280 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
7288 if (!parse_btype(&btype
, &ad
)) {
7289 if (is_for_loop_init
)
7291 /* skip redundant ';' if not in old parameter decl scope */
7292 if (tok
== ';' && l
!= VT_CMP
) {
7298 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
7299 /* global asm block */
7303 if (tok
>= TOK_UIDENT
) {
7304 /* special test for old K&R protos without explicit int
7305 type. Only accepted when defining global data */
7309 expect("declaration");
7314 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
7315 int v
= btype
.ref
->v
;
7316 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
7317 tcc_warning("unnamed struct/union that defines no instances");
7321 if (IS_ENUM(btype
.t
)) {
7326 while (1) { /* iterate thru each declaration */
7328 /* If the base type itself was an array type of unspecified
7329 size (like in 'typedef int arr[]; arr x = {1};') then
7330 we will overwrite the unknown size by the real one for
7331 this decl. We need to unshare the ref symbol holding
7333 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
7334 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
7336 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
7340 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
7341 printf("type = '%s'\n", buf
);
7344 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7345 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
)) {
7346 tcc_error("function without file scope cannot be static");
7348 /* if old style function prototype, we accept a
7351 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
7352 decl0(VT_CMP
, 0, sym
);
7355 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
7356 ad
.asm_label
= asm_label_instr();
7357 /* parse one last attribute list, after asm label */
7358 parse_attribute(&ad
);
7363 #ifdef TCC_TARGET_PE
7364 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
7365 if (type
.t
& (VT_STATIC
|VT_TYPEDEF
))
7366 tcc_error("cannot have dll linkage with static or typedef");
7367 if (ad
.a
.dllimport
) {
7368 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
7371 type
.t
|= VT_EXTERN
;
7377 tcc_error("cannot use local functions");
7378 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
7379 expect("function definition");
7381 /* reject abstract declarators in function definition
7382 make old style params without decl have int type */
7384 while ((sym
= sym
->next
) != NULL
) {
7385 if (!(sym
->v
& ~SYM_FIELD
))
7386 expect("identifier");
7387 if (sym
->type
.t
== VT_VOID
)
7388 sym
->type
= int_type
;
7391 /* XXX: cannot do better now: convert extern line to static inline */
7392 if ((type
.t
& (VT_EXTERN
| VT_INLINE
)) == (VT_EXTERN
| VT_INLINE
))
7393 type
.t
= (type
.t
& ~VT_EXTERN
) | VT_STATIC
;
7395 /* put function symbol */
7396 sym
= external_global_sym(v
, &type
, 0);
7397 type
.t
&= ~VT_EXTERN
;
7398 patch_storage(sym
, &ad
, &type
);
7400 /* static inline functions are just recorded as a kind
7401 of macro. Their code will be emitted at the end of
7402 the compilation unit only if they are used */
7403 if ((type
.t
& (VT_INLINE
| VT_STATIC
)) ==
7404 (VT_INLINE
| VT_STATIC
)) {
7405 struct InlineFunc
*fn
;
7406 const char *filename
;
7408 filename
= file
? file
->filename
: "";
7409 fn
= tcc_malloc(sizeof *fn
+ strlen(filename
));
7410 strcpy(fn
->filename
, filename
);
7412 skip_or_save_block(&fn
->func_str
);
7413 dynarray_add(&tcc_state
->inline_fns
,
7414 &tcc_state
->nb_inline_fns
, fn
);
7416 /* compute text section */
7417 cur_text_section
= ad
.section
;
7418 if (!cur_text_section
)
7419 cur_text_section
= text_section
;
7425 /* find parameter in function parameter list */
7426 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
7427 if ((sym
->v
& ~SYM_FIELD
) == v
)
7429 tcc_error("declaration for parameter '%s' but no such parameter",
7430 get_tok_str(v
, NULL
));
7432 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
7433 tcc_error("storage class specified for '%s'",
7434 get_tok_str(v
, NULL
));
7435 if (sym
->type
.t
!= VT_VOID
)
7436 tcc_error("redefinition of parameter '%s'",
7437 get_tok_str(v
, NULL
));
7438 convert_parameter_type(&type
);
7440 } else if (type
.t
& VT_TYPEDEF
) {
7441 /* save typedefed type */
7442 /* XXX: test storage specifiers ? */
7444 if (sym
&& sym
->sym_scope
== local_scope
) {
7445 if (!is_compatible_types(&sym
->type
, &type
)
7446 || !(sym
->type
.t
& VT_TYPEDEF
))
7447 tcc_error("incompatible redefinition of '%s'",
7448 get_tok_str(v
, NULL
));
7451 sym
= sym_push(v
, &type
, 0, 0);
7455 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
7456 && !(type
.t
& VT_EXTERN
)) {
7457 tcc_error("declaration of void object");
7460 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
7461 /* external function definition */
7462 /* specific case for func_call attribute */
7464 } else if (!(type
.t
& VT_ARRAY
)) {
7465 /* not lvalue if array */
7466 r
|= lvalue_type(type
.t
);
7468 has_init
= (tok
== '=');
7469 if (has_init
&& (type
.t
& VT_VLA
))
7470 tcc_error("variable length array cannot be initialized");
7471 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
)) ||
7472 ((type
.t
& VT_BTYPE
) == VT_FUNC
) ||
7473 ((type
.t
& VT_ARRAY
) && (type
.t
& VT_STATIC
) &&
7474 !has_init
&& l
== VT_CONST
&& type
.ref
->c
< 0)) {
7475 /* external variable or function */
7476 /* NOTE: as GCC, uninitialized global static
7477 arrays of null size are considered as
7479 type
.t
|= VT_EXTERN
;
7480 sym
= external_sym(v
, &type
, r
, &ad
);
7481 if (ad
.alias_target
) {
7484 alias_target
= sym_find(ad
.alias_target
);
7485 esym
= elfsym(alias_target
);
7487 tcc_error("unsupported forward __alias__ attribute");
7488 /* Local statics have a scope until now (for
7489 warnings), remove it here. */
7491 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
7494 if (type
.t
& VT_STATIC
)
7500 else if (l
== VT_CONST
)
7501 /* uninitialized global variables may be overridden */
7502 type
.t
|= VT_EXTERN
;
7503 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
7507 if (is_for_loop_init
)
7520 static void decl(int l
)
7525 /* ------------------------------------------------------------------------- */