2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
/* rsym = return label, anon_sym = anonymous symbol counter,
   ind = output code index, loc = local variable frame offset */
ST_DATA int rsym, anon_sym, ind, loc;

ST_DATA Sym *global_stack;        /* symbols visible at file scope */
ST_DATA Sym *local_stack;         /* symbols of the function being compiled */
ST_DATA Sym *define_stack;        /* preprocessor macro definitions */
ST_DATA Sym *global_label_stack;  /* labels with file scope */
ST_DATA Sym *local_label_stack;   /* labels of the current function */

static Sym *sym_free_first;       /* head of the Sym free-list */
static void **sym_pools;          /* allocated Sym pool chunks */
static int nb_sym_pools;          /* number of entries in sym_pools */

static Sym *all_cleanups, *pending_gotos;
static int local_scope;           /* current block-scope nesting depth */
ST_DATA char debug_modes;         /* bit 0: debug info, bit 1: test coverage
                                     (set from TCCState in tccgen_compile) */

/* value stack; one extra slot below index 0 so that an empty stack is
   represented by vtop == vstack - 1 (see check_vstack) */
static SValue _vstack[1 + VSTACK_SIZE];
#define vstack (_vstack + 1)
52 ST_DATA
int nocode_wanted
; /* no code generation wanted */
53 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
54 #define DATA_ONLY_WANTED 0x80000000 /* ON outside of functions and for static initializers */
/* no code output after unconditional jumps such as with if (0) ... */
#define CODE_OFF_BIT 0x20000000
/* wrapped in do { } while (0) so the macro behaves as a single statement;
   the previous bare `if` could capture a following `else` at the call site */
#define CODE_OFF() do { if (!nocode_wanted) nocode_wanted |= CODE_OFF_BIT; } while (0)
#define CODE_ON() (nocode_wanted &= ~CODE_OFF_BIT)
61 /* no code output when parsing sizeof()/typeof() etc. (using nocode_wanted++/--) */
62 #define NOEVAL_MASK 0x0000FFFF
63 #define NOEVAL_WANTED (nocode_wanted & NOEVAL_MASK)
65 /* no code output when parsing constant expressions */
66 #define CONST_WANTED_BIT 0x00010000
67 #define CONST_WANTED_MASK 0x0FFF0000
68 #define CONST_WANTED (nocode_wanted & CONST_WANTED_MASK)
70 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
71 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
72 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
75 ST_DATA
const char *funcname
;
76 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
77 static CString initstr
;
80 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
81 #define VT_PTRDIFF_T VT_INT
83 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
84 #define VT_PTRDIFF_T VT_LLONG
86 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
87 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
90 static struct switch_t
{
94 } **p
; int n
; /* list of case ranges */
95 int def_sym
; /* default symbol */
99 struct switch_t
*prev
;
101 } *cur_switch
; /* current switch */
103 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
104 /*list of temporary local variables on the stack in current function. */
105 static struct temp_local_variable
{
106 int location
; //offset on stack. Svalue.c.i
109 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
110 static int nb_temp_local_vars
;
112 static struct scope
{
114 struct { int loc
, locorig
, num
; } vla
;
115 struct { Sym
*s
; int n
; } cl
;
118 } *cur_scope
, *loop_scope
, *root_scope
;
127 #define precedence_parser
128 static void init_prec(void);
131 static void block(int flags
);
133 #define STMT_COMPOUND 2
135 static void gen_cast(CType
*type
);
136 static void gen_cast_s(int t
);
137 static inline CType
*pointed_type(CType
*type
);
138 static int is_compatible_types(CType
*type1
, CType
*type2
);
139 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
);
140 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
141 static void parse_expr_type(CType
*type
);
142 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
143 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
144 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
145 static int decl(int l
);
146 static void expr_eq(void);
147 static void vpush_type_size(CType
*type
, int *a
);
148 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
149 static inline int64_t expr_const64(void);
150 static void vpush64(int ty
, unsigned long long v
);
151 static void vpush(CType
*type
);
152 static int gvtst(int inv
, int t
);
153 static void gen_inline_functions(TCCState
*s
);
154 static void free_inline_functions(TCCState
*s
);
155 static void skip_or_save_block(TokenString
**str
);
156 static void gv_dup(void);
157 static int get_temp_local_var(int size
,int align
);
158 static void clear_temp_local_var_list();
159 static void cast_error(CType
*st
, CType
*dt
);
160 static void end_switch(void);
162 /* ------------------------------------------------------------------------- */
163 /* Automagical code suppression */
165 /* Clear 'nocode_wanted' at forward label if it was used */
166 ST_FUNC
void gsym(int t
)
174 /* Clear 'nocode_wanted' if current pc is a label */
180 tcc_tcov_block_begin(tcc_state
);
184 /* Set 'nocode_wanted' after unconditional (backwards) jump */
185 static void gjmp_addr_acs(int t
)
191 /* Set 'nocode_wanted' after unconditional (forwards) jump */
192 static int gjmp_acs(int t
)
199 /* These are #undef'd at the end of this file */
200 #define gjmp_addr gjmp_addr_acs
201 #define gjmp gjmp_acs
202 /* ------------------------------------------------------------------------- */
204 ST_INLN
int is_float(int t
)
206 int bt
= t
& VT_BTYPE
;
207 return bt
== VT_LDOUBLE
213 static inline int is_integer_btype(int bt
)
222 static int btype_size(int bt
)
224 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
228 bt
== VT_PTR
? PTR_SIZE
: 0;
231 /* returns function return register from type */
232 static int R_RET(int t
)
236 #ifdef TCC_TARGET_X86_64
237 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
239 #elif defined TCC_TARGET_RISCV64
240 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
246 /* returns 2nd function return register, if any */
247 static int R2_RET(int t
)
253 #elif defined TCC_TARGET_X86_64
258 #elif defined TCC_TARGET_RISCV64
265 /* returns true for two-word types */
266 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
/* put function return registers to stack value */
static void PUT_R_RET(SValue *sv, int t)
{
    /* r2 is VT_CONST when type t is returned in a single register */
    sv->r = R_RET(t), sv->r2 = R2_RET(t);
}
/* returns function return register class for type t */
static int RC_RET(int t)
{
    /* keep only the target-specific return-register class bit,
       stripping the generic float/int class bits */
    return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
}
280 /* returns generic register class for type t */
281 static int RC_TYPE(int t
)
285 #ifdef TCC_TARGET_X86_64
286 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
288 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
290 #elif defined TCC_TARGET_RISCV64
291 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
297 /* returns 2nd register class corresponding to t and rc */
298 static int RC2_TYPE(int t
, int rc
)
300 if (!USING_TWO_WORDS(t
))
315 /* we use our own 'finite' function to avoid potential problems with
316 non standard math libs */
317 /* XXX: endianness dependent */
318 ST_FUNC
int ieee_finite(double d
)
321 memcpy(p
, &d
, sizeof(double));
322 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
325 /* compiling intel long double natively */
326 #if (defined __i386__ || defined __x86_64__) \
327 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
328 # define TCC_IS_NATIVE_387
331 ST_FUNC
void test_lvalue(void)
333 if (!(vtop
->r
& VT_LVAL
))
/* verify that the value stack is empty (vtop == vstack - 1);
   a leftover entry indicates an internal push/pop imbalance */
ST_FUNC void check_vstack(void)
{
    if (vtop != vstack - 1)
        tcc_error("internal compiler error: vstack leak (%d)",
                  (int)(vtop - vstack + 1));
}
344 /* vstack debugging aid */
346 void pv (const char *lbl
, int a
, int b
)
349 for (i
= a
; i
< a
+ b
; ++i
) {
350 SValue
*p
= &vtop
[-i
];
351 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
352 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
357 /* ------------------------------------------------------------------------- */
358 /* initialize vstack and types. This must be done also for tcc -E */
359 ST_FUNC
void tccgen_init(TCCState
*s1
)
362 memset(vtop
, 0, sizeof *vtop
);
364 /* define some often used types */
367 char_type
.t
= VT_BYTE
;
368 if (s1
->char_is_unsigned
)
369 char_type
.t
|= VT_UNSIGNED
;
370 char_pointer_type
= char_type
;
371 mk_pointer(&char_pointer_type
);
373 func_old_type
.t
= VT_FUNC
;
374 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
375 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
376 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
377 #ifdef precedence_parser
383 ST_FUNC
int tccgen_compile(TCCState
*s1
)
387 anon_sym
= SYM_FIRST_ANOM
;
388 nocode_wanted
= DATA_ONLY_WANTED
; /* no code outside of functions */
389 debug_modes
= (s1
->do_debug
? 1 : 0) | s1
->test_coverage
<< 1;
393 #ifdef TCC_TARGET_ARM
397 printf("%s: **** new file\n", file
->filename
);
399 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
402 gen_inline_functions(s1
);
404 /* end of translation unit info */
410 ST_FUNC
void tccgen_finish(TCCState
*s1
)
412 tcc_debug_end(s1
); /* just in case of errors: free memory */
413 free_inline_functions(s1
);
414 sym_pop(&global_stack
, NULL
, 0);
415 sym_pop(&local_stack
, NULL
, 0);
416 /* free preprocessor macros */
419 dynarray_reset(&sym_pools
, &nb_sym_pools
);
421 dynarray_reset(&stk_data
, &nb_stk_data
);
427 pending_gotos
= NULL
;
428 nb_temp_local_vars
= 0;
429 global_label_stack
= NULL
;
430 local_label_stack
= NULL
;
431 cur_text_section
= NULL
;
432 sym_free_first
= NULL
;
435 /* ------------------------------------------------------------------------- */
436 ST_FUNC ElfSym
*elfsym(Sym
*s
)
440 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
443 /* apply storage attributes to Elf symbol */
444 ST_FUNC
void update_storage(Sym
*sym
)
447 int sym_bind
, old_sym_bind
;
453 if (sym
->a
.visibility
)
454 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
457 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
458 sym_bind
= STB_LOCAL
;
459 else if (sym
->a
.weak
)
462 sym_bind
= STB_GLOBAL
;
463 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
464 if (sym_bind
!= old_sym_bind
) {
465 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
469 if (sym
->a
.dllimport
)
470 esym
->st_other
|= ST_PE_IMPORT
;
471 if (sym
->a
.dllexport
)
472 esym
->st_other
|= ST_PE_EXPORT
;
476 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
477 get_tok_str(sym
->v
, NULL
),
478 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
486 /* ------------------------------------------------------------------------- */
487 /* update sym->c so that it points to an external symbol in section
488 'section' with value 'value' */
490 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
491 addr_t value
, unsigned long size
,
492 int can_add_underscore
)
494 int sym_type
, sym_bind
, info
, other
, t
;
500 name
= get_tok_str(sym
->v
, NULL
);
502 if ((t
& VT_BTYPE
) == VT_FUNC
) {
504 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
505 sym_type
= STT_NOTYPE
;
506 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
509 sym_type
= STT_OBJECT
;
511 if (t
& (VT_STATIC
| VT_INLINE
))
512 sym_bind
= STB_LOCAL
;
514 sym_bind
= STB_GLOBAL
;
518 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
519 Sym
*ref
= sym
->type
.ref
;
520 if (ref
->a
.nodecorate
) {
521 can_add_underscore
= 0;
523 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
524 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
526 other
|= ST_PE_STDCALL
;
527 can_add_underscore
= 0;
532 if (sym
->asm_label
) {
533 name
= get_tok_str(sym
->asm_label
, NULL
);
534 can_add_underscore
= 0;
537 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
539 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
543 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
544 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
547 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
551 esym
->st_value
= value
;
552 esym
->st_size
= size
;
553 esym
->st_shndx
= sh_num
;
558 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*s
, addr_t value
, unsigned long size
)
560 if (nocode_wanted
&& (NODATA_WANTED
|| (s
&& s
== cur_text_section
)))
562 put_extern_sym2(sym
, s
? s
->sh_num
: SHN_UNDEF
, value
, size
, 1);
565 /* add a new relocation entry to symbol 'sym' in section 's' */
566 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
571 if (nocode_wanted
&& s
== cur_text_section
)
576 put_extern_sym(sym
, NULL
, 0, 0);
580 /* now we can add ELF relocation info */
581 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
/* add a relocation entry for symbol 'sym' in section 's';
   convenience wrapper around greloca() with a zero addend */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
591 /* ------------------------------------------------------------------------- */
592 /* symbol allocator */
593 static Sym
*__sym_malloc(void)
595 Sym
*sym_pool
, *sym
, *last_sym
;
598 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
599 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
601 last_sym
= sym_free_first
;
603 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
604 sym
->next
= last_sym
;
608 sym_free_first
= last_sym
;
612 static inline Sym
*sym_malloc(void)
616 sym
= sym_free_first
;
618 sym
= __sym_malloc();
619 sym_free_first
= sym
->next
;
622 sym
= tcc_malloc(sizeof(Sym
));
627 ST_INLN
void sym_free(Sym
*sym
)
630 sym
->next
= sym_free_first
;
631 sym_free_first
= sym
;
637 /* push, without hashing */
638 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
643 memset(s
, 0, sizeof *s
);
653 /* find a symbol and return its associated structure. 's' is the top
654 of the symbol stack */
655 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
667 /* structure lookup */
668 ST_INLN Sym
*struct_find(int v
)
671 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
673 return table_ident
[v
]->sym_struct
;
676 /* find an identifier */
677 ST_INLN Sym
*sym_find(int v
)
680 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
682 return table_ident
[v
]->sym_identifier
;
685 static int sym_scope(Sym
*s
)
687 if (IS_ENUM_VAL (s
->type
.t
))
688 return s
->type
.ref
->sym_scope
;
693 /* push a given symbol on the symbol stack */
694 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
703 s
= sym_push2(ps
, v
, type
->t
, c
);
704 s
->type
.ref
= type
->ref
;
706 /* don't record fields or anonymous symbols */
708 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
709 /* record symbol in token array */
710 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
712 ps
= &ts
->sym_struct
;
714 ps
= &ts
->sym_identifier
;
717 s
->sym_scope
= local_scope
;
718 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
719 tcc_error("redeclaration of '%s'",
720 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
725 /* push a global identifier */
726 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
729 s
= sym_push2(&global_stack
, v
, t
, c
);
730 s
->r
= VT_CONST
| VT_SYM
;
731 /* don't record anonymous symbol */
732 if (v
< SYM_FIRST_ANOM
) {
733 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
734 /* modify the top most local identifier, so that sym_identifier will
735 point to 's' when popped; happens when called from inline asm */
736 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
737 ps
= &(*ps
)->prev_tok
;
744 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
745 pop them yet from the list, but do remove them from the token array. */
746 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
756 /* remove symbol in token array */
758 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
759 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
761 ps
= &ts
->sym_struct
;
763 ps
= &ts
->sym_identifier
;
775 ST_FUNC Sym
*label_find(int v
)
778 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
780 return table_ident
[v
]->sym_label
;
783 ST_FUNC Sym
*label_push(Sym
**ptop
, int v
, int flags
)
786 s
= sym_push2(ptop
, v
, VT_STATIC
, 0);
788 ps
= &table_ident
[v
- TOK_IDENT
]->sym_label
;
789 if (ptop
== &global_label_stack
) {
790 /* modify the top most local identifier, so that
791 sym_identifier will point to 's' when popped */
793 ps
= &(*ps
)->prev_tok
;
800 /* pop labels until element last is reached. Look if any labels are
801 undefined. Define symbols if '&&label' was used. */
802 ST_FUNC
void label_pop(Sym
**ptop
, Sym
*slast
, int keep
)
805 for(s
= *ptop
; s
!= slast
; s
= s1
) {
807 if (s
->r
== LABEL_DECLARED
) {
808 tcc_warning_c(warn_all
)("label '%s' declared but not used", get_tok_str(s
->v
, NULL
));
809 } else if (s
->r
== LABEL_FORWARD
) {
810 tcc_error("label '%s' used but not defined",
811 get_tok_str(s
->v
, NULL
));
814 /* define corresponding symbol. A size of
816 put_extern_sym(s
, cur_text_section
, s
->jnext
, 1);
820 if (s
->r
!= LABEL_GONE
)
821 table_ident
[s
->v
- TOK_IDENT
]->sym_label
= s
->prev_tok
;
831 /* ------------------------------------------------------------------------- */
832 static void vcheck_cmp(void)
834 /* cannot let cpu flags if other instruction are generated. Also
835 avoid leaving VT_JMP anywhere except on the top of the stack
836 because it would complicate the code generator.
838 Don't do this when nocode_wanted. vtop might come from
839 !nocode_wanted regions (see 88_codeopt.c) and transforming
840 it to a register without actually generating code is wrong
841 as their value might still be used for real. All values
842 we push under nocode_wanted will eventually be popped
843 again, so that the VT_CMP/VT_JMP value will be in vtop
844 when code is unsuppressed again. */
846 /* However if it's just automatic suppression via CODE_OFF/ON()
847 then it seems that we better let things work undisturbed.
848 How can it work at all under nocode_wanted? Well, gv() will
849 actually clear it at the gsym() in load()/VT_JMP in the
850 generator backends */
852 if (vtop
->r
== VT_CMP
&& 0 == (nocode_wanted
& ~CODE_OFF_BIT
))
856 static void vsetc(CType
*type
, int r
, CValue
*vc
)
858 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
859 tcc_error("memory full (vstack)");
869 ST_FUNC
void vswap(void)
879 /* pop stack value */
880 ST_FUNC
void vpop(void)
883 v
= vtop
->r
& VT_VALMASK
;
884 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
885 /* for x86, we need to pop the FP stack */
887 o(0xd8dd); /* fstp %st(0) */
891 /* need to put correct jump if && or || without test */
/* push constant of type "type" with useless value */
static void vpush(CType *type)
{
    /* value 0 is a placeholder; callers only care about the type slot */
    vset(type, VT_CONST, 0);
}
904 /* push arbitrary 64bit constant */
905 static void vpush64(int ty
, unsigned long long v
)
912 vsetc(&ctype
, VT_CONST
, &cval
);
915 /* push integer constant */
916 ST_FUNC
void vpushi(int v
)
/* push a pointer sized constant */
static void vpushs(addr_t v)
{
    /* VT_SIZE_T is defined per target to match the pointer width */
    vpush64(VT_SIZE_T, v);
}
/* push long long constant */
static inline void vpushll(long long v)
{
    vpush64(VT_LLONG, v);
}
933 ST_FUNC
void vset(CType
*type
, int r
, int v
)
937 vsetc(type
, r
, &cval
);
940 static void vseti(int r
, int v
)
948 ST_FUNC
void vpushv(SValue
*v
)
950 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
951 tcc_error("memory full (vstack)");
956 static void vdup(void)
961 /* rotate n first stack elements to the bottom
962 I1 ... In -> I2 ... In I1 [top is right]
964 ST_FUNC
void vrotb(int n
)
976 /* rotate the n elements before entry e towards the top
977 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
979 ST_FUNC
void vrote(SValue
*e
, int n
)
986 for(i
= 0;i
< n
- 1; i
++)
991 /* rotate n first stack elements to the top
992 I1 ... In -> In I1 ... I(n-1) [top is right]
994 ST_FUNC
void vrott(int n
)
999 /* ------------------------------------------------------------------------- */
1000 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1002 /* called from generators to set the result from relational ops */
1003 ST_FUNC
void vset_VT_CMP(int op
)
1011 /* called once before asking generators to load VT_CMP to a register */
1012 static void vset_VT_JMP(void)
1014 int op
= vtop
->cmp_op
;
1016 if (vtop
->jtrue
|| vtop
->jfalse
) {
1017 int origt
= vtop
->type
.t
;
1018 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1019 int inv
= op
& (op
< 2); /* small optimization */
1020 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1021 vtop
->type
.t
|= origt
& (VT_UNSIGNED
| VT_DEFSIGN
);
1023 /* otherwise convert flags (rsp. 0/1) to register */
1025 if (op
< 2) /* doesn't seem to happen */
1030 /* Set CPU Flags, doesn't yet jump */
1031 static void gvtst_set(int inv
, int t
)
1035 if (vtop
->r
!= VT_CMP
) {
1038 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1039 vset_VT_CMP(vtop
->c
.i
!= 0);
1042 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1043 *p
= gjmp_append(*p
, t
);
1046 /* Generate value test
1048 * Generate a test for any value (jump, comparison and integers) */
1049 static int gvtst(int inv
, int t
)
1054 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1056 x
= u
, u
= t
, t
= x
;
1059 /* jump to the wanted target */
1061 t
= gjmp_cond(op
^ inv
, t
);
1064 /* resolve complementary jumps to here */
1071 /* generate a zero or nozero test */
1072 static void gen_test_zero(int op
)
1074 if (vtop
->r
== VT_CMP
) {
1078 vtop
->jfalse
= vtop
->jtrue
;
1088 /* ------------------------------------------------------------------------- */
1089 /* push a symbol value of TYPE */
1090 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1094 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1098 /* Return a static symbol pointing to a section */
1099 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1105 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1106 sym
->type
.t
|= VT_STATIC
;
1107 put_extern_sym(sym
, sec
, offset
, size
);
/* push a reference to a section offset by adding a dummy symbol */
static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    vpushsym(type, get_sym_ref(type, sec, offset, size));
}
1117 /* define a new external reference to a symbol 'v' of type 'u' */
1118 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1124 /* push forward reference */
1125 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1126 s
->type
.ref
= type
->ref
;
1127 } else if (IS_ASM_SYM(s
)) {
1128 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1129 s
->type
.ref
= type
->ref
;
/* create an external reference with no specific type similar to asm labels.
   This avoids type conflicts if the symbol is used from C too */
ST_FUNC Sym *external_helper_sym(int v)
{
    /* VT_ASM_FUNC: untyped, like an asm-declared label */
    CType ct = { VT_ASM_FUNC, NULL };
    return external_global_sym(v, &ct);
}
/* push a reference to an helper function (such as memmove) */
ST_FUNC void vpush_helper_func(int v)
{
    /* helpers use the old-style (unprototyped) function type */
    vpushsym(&func_old_type, external_helper_sym(v));
}
/* Merge symbol attributes: fold attributes from *sa1 into *sa.
   Flags are OR-ed; alignment is taken from sa1 only when sa has none. */
static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
{
    if (sa1->aligned && !sa->aligned)
        sa->aligned = sa1->aligned;
    sa->packed |= sa1->packed;
    sa->weak |= sa1->weak;
    sa->nodebug |= sa1->nodebug;
    if (sa1->visibility != STV_DEFAULT) {
        /* keep the numerically smaller non-default visibility */
        int vis = sa->visibility;
        if (vis == STV_DEFAULT
            || vis > sa1->visibility)
            vis = sa1->visibility;
        sa->visibility = vis;
    }
    sa->dllexport |= sa1->dllexport;
    sa->nodecorate |= sa1->nodecorate;
    sa->dllimport |= sa1->dllimport;
}
1169 /* Merge function attributes. */
1170 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1172 if (fa1
->func_call
&& !fa
->func_call
)
1173 fa
->func_call
= fa1
->func_call
;
1174 if (fa1
->func_type
&& !fa
->func_type
)
1175 fa
->func_type
= fa1
->func_type
;
1176 if (fa1
->func_args
&& !fa
->func_args
)
1177 fa
->func_args
= fa1
->func_args
;
1178 if (fa1
->func_noreturn
)
1179 fa
->func_noreturn
= 1;
1186 /* Merge attributes. */
1187 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1189 merge_symattr(&ad
->a
, &ad1
->a
);
1190 merge_funcattr(&ad
->f
, &ad1
->f
);
1193 ad
->section
= ad1
->section
;
1194 if (ad1
->alias_target
)
1195 ad
->alias_target
= ad1
->alias_target
;
1197 ad
->asm_label
= ad1
->asm_label
;
1199 ad
->attr_mode
= ad1
->attr_mode
;
1202 /* Merge some type attributes. */
1203 static void patch_type(Sym
*sym
, CType
*type
)
1205 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1206 if (!(sym
->type
.t
& VT_EXTERN
))
1207 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1208 sym
->type
.t
&= ~VT_EXTERN
;
1211 if (IS_ASM_SYM(sym
)) {
1212 /* stay static if both are static */
1213 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1214 sym
->type
.ref
= type
->ref
;
1215 if ((type
->t
& VT_BTYPE
) != VT_FUNC
&& !(type
->t
& VT_ARRAY
))
1219 if (!is_compatible_types(&sym
->type
, type
)) {
1220 tcc_error("incompatible types for redefinition of '%s'",
1221 get_tok_str(sym
->v
, NULL
));
1223 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1224 int static_proto
= sym
->type
.t
& VT_STATIC
;
1225 /* warn if static follows non-static function declaration */
1226 if ((type
->t
& VT_STATIC
) && !static_proto
1227 /* XXX this test for inline shouldn't be here. Until we
1228 implement gnu-inline mode again it silences a warning for
1229 mingw caused by our workarounds. */
1230 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1231 tcc_warning("static storage ignored for redefinition of '%s'",
1232 get_tok_str(sym
->v
, NULL
));
1234 /* set 'inline' if both agree or if one has static */
1235 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1236 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1237 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1238 static_proto
|= VT_INLINE
;
1241 if (0 == (type
->t
& VT_EXTERN
)) {
1242 struct FuncAttr f
= sym
->type
.ref
->f
;
1243 /* put complete type, use static from prototype */
1244 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1245 sym
->type
.ref
= type
->ref
;
1246 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1248 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1251 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1252 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1253 sym
->type
.ref
= type
->ref
;
1257 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1258 /* set array size if it was omitted in extern declaration */
1259 sym
->type
.ref
->c
= type
->ref
->c
;
1261 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1262 tcc_warning("storage mismatch for redefinition of '%s'",
1263 get_tok_str(sym
->v
, NULL
));
1267 /* Merge some storage attributes. */
1268 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1271 patch_type(sym
, type
);
1273 #ifdef TCC_TARGET_PE
1274 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1275 tcc_error("incompatible dll linkage for redefinition of '%s'",
1276 get_tok_str(sym
->v
, NULL
));
1278 merge_symattr(&sym
->a
, &ad
->a
);
1280 sym
->asm_label
= ad
->asm_label
;
1281 update_storage(sym
);
1284 /* copy sym to other stack */
1285 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1288 s
= sym_malloc(), *s
= *s0
;
1289 s
->prev
= *ps
, *ps
= s
;
1290 if (s
->v
< SYM_FIRST_ANOM
) {
1291 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1292 s
->prev_tok
= *ps
, *ps
= s
;
/* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR
   (and scoped VT_STRUCT), recursing into nested types */
static void sym_copy_ref(Sym *s, Sym **ps)
{
    int bt = s->type.t & VT_BTYPE;
    if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
        Sym **sp = &s->type.ref;
        /* duplicate the whole ref chain, relinking each copy in place */
        for (s = *sp, *sp = NULL; s; s = s->next) {
            Sym *s2 = sym_copy(s, ps);
            sp = &(*sp = s2)->next;
            sym_copy_ref(s2, ps);
        }
    }
}
1311 /* define a new external reference to a symbol 'v' */
1312 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1316 /* look for global symbol */
1318 while (s
&& s
->sym_scope
)
1322 /* push forward reference */
1323 s
= global_identifier_push(v
, type
->t
, 0);
1326 s
->asm_label
= ad
->asm_label
;
1327 s
->type
.ref
= type
->ref
;
1328 /* copy type to the global stack */
1330 sym_copy_ref(s
, &global_stack
);
1332 patch_storage(s
, ad
, type
);
1334 /* push variables on local_stack if any */
1335 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1336 s
= sym_copy(s
, &local_stack
);
1340 /* save registers up to (vtop - n) stack entry */
1341 ST_FUNC
void save_regs(int n
)
1344 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
/* save r to the memory stack, and mark it as being free */
ST_FUNC void save_reg(int r)
{
    /* n == 0: scan the whole value stack */
    save_reg_upstack(r, 0);
}
1354 /* save r to the memory stack, and mark it as being free,
1355 if seen up to (vtop - n) stack entry */
1356 ST_FUNC
void save_reg_upstack(int r
, int n
)
1358 int l
, size
, align
, bt
;
1361 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1366 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1367 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1368 /* must save value on stack if not already done */
1370 bt
= p
->type
.t
& VT_BTYPE
;
1373 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1376 size
= type_size(&sv
.type
, &align
);
1377 l
= get_temp_local_var(size
,align
);
1378 sv
.r
= VT_LOCAL
| VT_LVAL
;
1380 store(p
->r
& VT_VALMASK
, &sv
);
1381 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1382 /* x86 specific: need to pop fp register ST0 if saved */
1383 if (r
== TREG_ST0
) {
1384 o(0xd8dd); /* fstp %st(0) */
1387 /* special long long case */
1388 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1393 /* mark that stack entry as being saved on the stack */
1394 if (p
->r
& VT_LVAL
) {
1395 /* also clear the bounded flag because the
1396 relocation address of the function was stored in
1398 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1400 p
->r
= VT_LVAL
| VT_LOCAL
;
1409 #ifdef TCC_TARGET_ARM
1410 /* find a register of class 'rc2' with at most one reference on stack.
1411 * If none, call get_reg(rc) */
1412 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1417 for(r
=0;r
<NB_REGS
;r
++) {
1418 if (reg_classes
[r
] & rc2
) {
1421 for(p
= vstack
; p
<= vtop
; p
++) {
1422 if ((p
->r
& VT_VALMASK
) == r
||
1434 /* find a free register of class 'rc'. If none, save one register */
1435 ST_FUNC
int get_reg(int rc
)
1440 /* find a free register */
1441 for(r
=0;r
<NB_REGS
;r
++) {
1442 if (reg_classes
[r
] & rc
) {
1445 for(p
=vstack
;p
<=vtop
;p
++) {
1446 if ((p
->r
& VT_VALMASK
) == r
||
1455 /* no register left : free the first one on the stack (VERY
1456 IMPORTANT to start from the bottom to ensure that we don't
1457 spill registers used in gen_opi()) */
1458 for(p
=vstack
;p
<=vtop
;p
++) {
1459 /* look at second register (if long long) */
1461 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1463 r
= p
->r
& VT_VALMASK
;
1464 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1470 /* Should never comes here */
1474 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1475 static int get_temp_local_var(int size
,int align
){
1477 struct temp_local_variable
*temp_var
;
1484 for(i
=0;i
<nb_temp_local_vars
;i
++){
1485 temp_var
=&arr_temp_local_vars
[i
];
1486 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1489 /*check if temp_var is free*/
1491 for(p
=vstack
;p
<=vtop
;p
++) {
1493 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1494 if(p
->c
.i
==temp_var
->location
){
1501 found_var
=temp_var
->location
;
1507 loc
= (loc
- size
) & -align
;
1508 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1509 temp_var
=&arr_temp_local_vars
[i
];
1510 temp_var
->location
=loc
;
1511 temp_var
->size
=size
;
1512 temp_var
->align
=align
;
1513 nb_temp_local_vars
++;
1520 static void clear_temp_local_var_list(){
1521 nb_temp_local_vars
=0;
1524 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1526 static void move_reg(int r
, int s
, int t
)
/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
}
1549 #ifdef CONFIG_TCC_BCHECK
1550 /* generate a bounded pointer addition */
1551 static void gen_bounded_ptr_add(void)
1553 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1558 vpush_helper_func(TOK___bound_ptr_add
);
1563 /* returned pointer is in REG_IRET */
1564 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1567 /* relocation offset of the bounding function call point */
1568 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1571 /* patch pointer addition in vtop so that pointer dereferencing is
1573 static void gen_bounded_ptr_deref(void)
1583 size
= type_size(&vtop
->type
, &align
);
1585 case 1: func
= TOK___bound_ptr_indir1
; break;
1586 case 2: func
= TOK___bound_ptr_indir2
; break;
1587 case 4: func
= TOK___bound_ptr_indir4
; break;
1588 case 8: func
= TOK___bound_ptr_indir8
; break;
1589 case 12: func
= TOK___bound_ptr_indir12
; break;
1590 case 16: func
= TOK___bound_ptr_indir16
; break;
1592 /* may happen with struct member access */
1595 sym
= external_helper_sym(func
);
1597 put_extern_sym(sym
, NULL
, 0, 0);
1598 /* patch relocation */
1599 /* XXX: find a better solution ? */
1600 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1601 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1604 /* generate lvalue bound code */
1605 static void gbound(void)
1609 vtop
->r
&= ~VT_MUSTBOUND
;
1610 /* if lvalue, then use checking code before dereferencing */
1611 if (vtop
->r
& VT_LVAL
) {
1612 /* if not VT_BOUNDED value, then make one */
1613 if (!(vtop
->r
& VT_BOUNDED
)) {
1614 /* must save type because we must set it to int to get pointer */
1616 vtop
->type
.t
= VT_PTR
;
1619 gen_bounded_ptr_add();
1623 /* then check for dereferencing */
1624 gen_bounded_ptr_deref();
1628 /* we need to call __bound_ptr_add before we start to load function
1629 args into registers */
1630 ST_FUNC
void gbound_args(int nb_args
)
1635 for (i
= 1; i
<= nb_args
; ++i
)
1636 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1642 sv
= vtop
- nb_args
;
1643 if (sv
->r
& VT_SYM
) {
1647 #ifndef TCC_TARGET_PE
1648 || v
== TOK_sigsetjmp
1649 || v
== TOK___sigsetjmp
1652 vpush_helper_func(TOK___bound_setjmp
);
1655 func_bound_add_epilog
= 1;
1657 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1658 if (v
== TOK_alloca
)
1659 func_bound_add_epilog
= 1;
1662 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
1663 sv
->sym
->asm_label
= TOK___bound_longjmp
;
1668 /* Add bounds for local symbols from S to E (via ->prev) */
1669 static void add_local_bounds(Sym
*s
, Sym
*e
)
1671 for (; s
!= e
; s
= s
->prev
) {
1672 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1674 /* Add arrays/structs/unions because we always take address */
1675 if ((s
->type
.t
& VT_ARRAY
)
1676 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1677 || s
->a
.addrtaken
) {
1678 /* add local bound info */
1679 int align
, size
= type_size(&s
->type
, &align
);
1680 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1681 2 * sizeof(addr_t
));
1682 bounds_ptr
[0] = s
->c
;
1683 bounds_ptr
[1] = size
;
1689 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1690 static void pop_local_syms(Sym
*b
, int keep
)
1692 #ifdef CONFIG_TCC_BCHECK
1693 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
1694 add_local_bounds(local_stack
, b
);
1697 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
1698 sym_pop(&local_stack
, b
, keep
);
1701 /* increment an lvalue pointer */
1702 static void incr_offset(int offset
)
1704 int t
= vtop
->type
.t
;
1705 gaddrof(); /* remove VT_LVAL */
1706 vtop
->type
.t
= VT_PTRDIFF_T
; /* set scalar type */
1713 static void incr_bf_adr(int o
)
1715 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1719 /* single-byte load mode for packed or otherwise unaligned bitfields */
1720 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1723 save_reg_upstack(vtop
->r
, 1);
1724 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1725 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1734 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1736 vpushi((1 << n
) - 1), gen_op('&');
1739 vpushi(bits
), gen_op(TOK_SHL
);
1742 bits
+= n
, bit_size
-= n
, o
= 1;
1745 if (!(type
->t
& VT_UNSIGNED
)) {
1746 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1747 vpushi(n
), gen_op(TOK_SHL
);
1748 vpushi(n
), gen_op(TOK_SAR
);
1752 /* single-byte store mode for packed or otherwise unaligned bitfields */
1753 static void store_packed_bf(int bit_pos
, int bit_size
)
1755 int bits
, n
, o
, m
, c
;
1756 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
1758 save_reg_upstack(vtop
->r
, 1);
1759 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1761 incr_bf_adr(o
); // X B
1763 c
? vdup() : gv_dup(); // B V X
1766 vpushi(bits
), gen_op(TOK_SHR
);
1768 vpushi(bit_pos
), gen_op(TOK_SHL
);
1773 m
= ((1 << n
) - 1) << bit_pos
;
1774 vpushi(m
), gen_op('&'); // X B V1
1775 vpushv(vtop
-1); // X B V1 B
1776 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
1777 gen_op('&'); // X B V1 B1
1778 gen_op('|'); // X B V2
1780 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
1781 vstore(), vpop(); // X B
1782 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
1787 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
1790 if (0 == sv
->type
.ref
)
1792 t
= sv
->type
.ref
->auxtype
;
1793 if (t
!= -1 && t
!= VT_STRUCT
) {
1794 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
1800 /* store vtop a register belonging to class 'rc'. lvalues are
1801 converted to values. Cannot be used if cannot be converted to
1802 register value (such as structures). */
1803 ST_FUNC
int gv(int rc
)
1805 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
1806 int bit_pos
, bit_size
, size
, align
;
1808 /* NOTE: get_reg can modify vstack[] */
1809 if (vtop
->type
.t
& VT_BITFIELD
) {
1812 bit_pos
= BIT_POS(vtop
->type
.t
);
1813 bit_size
= BIT_SIZE(vtop
->type
.t
);
1814 /* remove bit field info to avoid loops */
1815 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
1818 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
1819 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
1820 type
.t
|= VT_UNSIGNED
;
1822 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
1824 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
1829 if (r
== VT_STRUCT
) {
1830 load_packed_bf(&type
, bit_pos
, bit_size
);
1832 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
1833 /* cast to int to propagate signedness in following ops */
1835 /* generate shifts */
1836 vpushi(bits
- (bit_pos
+ bit_size
));
1838 vpushi(bits
- bit_size
);
1839 /* NOTE: transformed to SHR if unsigned */
1844 if (is_float(vtop
->type
.t
) &&
1845 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1846 /* CPUs usually cannot use float constants, so we store them
1847 generically in data segment */
1848 init_params p
= { rodata_section
};
1849 unsigned long offset
;
1850 size
= type_size(&vtop
->type
, &align
);
1852 size
= 0, align
= 1;
1853 offset
= section_add(p
.sec
, size
, align
);
1854 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
1856 init_putv(&p
, &vtop
->type
, offset
);
1859 #ifdef CONFIG_TCC_BCHECK
1860 if (vtop
->r
& VT_MUSTBOUND
)
1864 bt
= vtop
->type
.t
& VT_BTYPE
;
1866 #ifdef TCC_TARGET_RISCV64
1868 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
1871 rc2
= RC2_TYPE(bt
, rc
);
1873 /* need to reload if:
1875 - lvalue (need to dereference pointer)
1876 - already a register, but not in the right class */
1877 r
= vtop
->r
& VT_VALMASK
;
1878 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
1879 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
1881 if (!r_ok
|| !r2_ok
) {
1884 if (1 /* we can 'mov (r),r' in cases */
1886 && (reg_classes
[r
] & rc
)
1889 save_reg_upstack(r
, 1);
1895 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
1896 int original_type
= vtop
->type
.t
;
1898 /* two register type load :
1899 expand to two words temporarily */
1900 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
1902 unsigned long long ll
= vtop
->c
.i
;
1903 vtop
->c
.i
= ll
; /* first word */
1905 vtop
->r
= r
; /* save register value */
1906 vpushi(ll
>> 32); /* second word */
1907 } else if (vtop
->r
& VT_LVAL
) {
1908 /* We do not want to modifier the long long pointer here.
1909 So we save any other instances down the stack */
1910 save_reg_upstack(vtop
->r
, 1);
1911 /* load from memory */
1912 vtop
->type
.t
= load_type
;
1915 vtop
[-1].r
= r
; /* save register value */
1916 /* increment pointer to get second word */
1917 incr_offset(PTR_SIZE
);
1919 /* move registers */
1922 if (r2_ok
&& vtop
->r2
< VT_CONST
)
1925 vtop
[-1].r
= r
; /* save register value */
1926 vtop
->r
= vtop
[-1].r2
;
1928 /* Allocate second register. Here we rely on the fact that
1929 get_reg() tries first to free r2 of an SValue. */
1933 /* write second register */
1936 vtop
->type
.t
= original_type
;
1938 if (vtop
->r
== VT_CMP
)
1940 /* one register type load */
1945 #ifdef TCC_TARGET_C67
1946 /* uses register pairs for doubles */
1947 if (bt
== VT_DOUBLE
)
1954 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
1955 ST_FUNC
void gv2(int rc1
, int rc2
)
1957 /* generate more generic register first. But VT_JMP or VT_CMP
1958 values must be generated first in all cases to avoid possible
1960 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
1965 /* test if reload is needed for first register */
1966 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
1976 /* test if reload is needed for first register */
1977 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
1984 /* expand 64bit on stack in two ints */
1985 ST_FUNC
void lexpand(void)
1988 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
1989 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
1990 if (v
== VT_CONST
) {
1993 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
1999 vtop
[0].r
= vtop
[-1].r2
;
2000 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2002 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2007 /* build a long long from two ints */
2008 static void lbuild(int t
)
2010 gv2(RC_INT
, RC_INT
);
2011 vtop
[-1].r2
= vtop
[0].r
;
2012 vtop
[-1].type
.t
= t
;
2017 /* convert stack entry to register and duplicate its value in another
2019 static void gv_dup(void)
2025 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2026 if (t
& VT_BITFIELD
) {
2036 /* stack: H L L1 H1 */
2046 /* duplicate value */
2056 /* generate CPU independent (unsigned) long long operations */
2057 static void gen_opl(int op
)
2059 int t
, a
, b
, op1
, c
, i
;
2061 unsigned short reg_iret
= REG_IRET
;
2062 unsigned short reg_lret
= REG_IRE2
;
2068 func
= TOK___divdi3
;
2071 func
= TOK___udivdi3
;
2074 func
= TOK___moddi3
;
2077 func
= TOK___umoddi3
;
2084 /* call generic long long function */
2085 vpush_helper_func(func
);
2090 vtop
->r2
= reg_lret
;
2098 //pv("gen_opl A",0,2);
2104 /* stack: L1 H1 L2 H2 */
2109 vtop
[-2] = vtop
[-3];
2112 /* stack: H1 H2 L1 L2 */
2113 //pv("gen_opl B",0,4);
2119 /* stack: H1 H2 L1 L2 ML MH */
2122 /* stack: ML MH H1 H2 L1 L2 */
2126 /* stack: ML MH H1 L2 H2 L1 */
2131 /* stack: ML MH M1 M2 */
2134 } else if (op
== '+' || op
== '-') {
2135 /* XXX: add non carry method too (for MIPS or alpha) */
2141 /* stack: H1 H2 (L1 op L2) */
2144 gen_op(op1
+ 1); /* TOK_xxxC2 */
2147 /* stack: H1 H2 (L1 op L2) */
2150 /* stack: (L1 op L2) H1 H2 */
2152 /* stack: (L1 op L2) (H1 op H2) */
2160 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2161 t
= vtop
[-1].type
.t
;
2165 /* stack: L H shift */
2167 /* constant: simpler */
2168 /* NOTE: all comments are for SHL. the other cases are
2169 done by swapping words */
2180 if (op
!= TOK_SAR
) {
2213 /* XXX: should provide a faster fallback on x86 ? */
2216 func
= TOK___ashrdi3
;
2219 func
= TOK___lshrdi3
;
2222 func
= TOK___ashldi3
;
2228 /* compare operations */
2234 /* stack: L1 H1 L2 H2 */
2236 vtop
[-1] = vtop
[-2];
2238 /* stack: L1 L2 H1 H2 */
2242 /* when values are equal, we need to compare low words. since
2243 the jump is inverted, we invert the test too. */
2246 else if (op1
== TOK_GT
)
2248 else if (op1
== TOK_ULT
)
2250 else if (op1
== TOK_UGT
)
2260 /* generate non equal test */
2262 vset_VT_CMP(TOK_NE
);
2266 /* compare low. Always unsigned */
2270 else if (op1
== TOK_LE
)
2272 else if (op1
== TOK_GT
)
2274 else if (op1
== TOK_GE
)
2277 #if 0//def TCC_TARGET_I386
2278 if (op
== TOK_NE
) { gsym(b
); break; }
2279 if (op
== TOK_EQ
) { gsym(a
); break; }
2288 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
2290 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
2291 return (a
^ b
) >> 63 ? -x
: x
;
2294 static int gen_opic_lt(uint64_t a
, uint64_t b
)
2296 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
2299 /* handle integer constant optimizations and various machine
2301 static void gen_opic(int op
)
2303 SValue
*v1
= vtop
- 1;
2305 int t1
= v1
->type
.t
& VT_BTYPE
;
2306 int t2
= v2
->type
.t
& VT_BTYPE
;
2307 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2308 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2309 uint64_t l1
= c1
? v1
->c
.i
: 0;
2310 uint64_t l2
= c2
? v2
->c
.i
: 0;
2311 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2314 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2315 l1
= ((uint32_t)l1
|
2316 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2317 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2318 l2
= ((uint32_t)l2
|
2319 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2323 case '+': l1
+= l2
; break;
2324 case '-': l1
-= l2
; break;
2325 case '&': l1
&= l2
; break;
2326 case '^': l1
^= l2
; break;
2327 case '|': l1
|= l2
; break;
2328 case '*': l1
*= l2
; break;
2335 /* if division by zero, generate explicit division */
2337 if (CONST_WANTED
&& !NOEVAL_WANTED
)
2338 tcc_error("division by zero in constant");
2342 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2343 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2344 case TOK_UDIV
: l1
= l1
/ l2
; break;
2345 case TOK_UMOD
: l1
= l1
% l2
; break;
2348 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2349 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2351 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2354 case TOK_ULT
: l1
= l1
< l2
; break;
2355 case TOK_UGE
: l1
= l1
>= l2
; break;
2356 case TOK_EQ
: l1
= l1
== l2
; break;
2357 case TOK_NE
: l1
= l1
!= l2
; break;
2358 case TOK_ULE
: l1
= l1
<= l2
; break;
2359 case TOK_UGT
: l1
= l1
> l2
; break;
2360 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2361 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2362 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2363 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2365 case TOK_LAND
: l1
= l1
&& l2
; break;
2366 case TOK_LOR
: l1
= l1
|| l2
; break;
2370 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2371 l1
= ((uint32_t)l1
|
2372 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2374 v1
->r
|= v2
->r
& VT_NONCONST
;
2377 /* if commutative ops, put c2 as constant */
2378 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2379 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2381 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2382 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2384 if (c1
&& ((l1
== 0 &&
2385 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2386 (l1
== -1 && op
== TOK_SAR
))) {
2387 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2389 } else if (c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2391 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2392 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2393 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2398 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2401 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2402 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2405 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2406 /* filter out NOP operations like x*1, x-0, x&-1... */
2408 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2409 /* try to use shifts instead of muls or divs */
2410 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2419 else if (op
== TOK_PDIV
)
2425 } else if (c2
&& (op
== '+' || op
== '-') &&
2426 (r
= vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
),
2427 r
== (VT_CONST
| VT_SYM
) || r
== VT_LOCAL
)) {
2428 /* symbol + constant case */
2432 /* The backends can't always deal with addends to symbols
2433 larger than +-1<<31. Don't construct such. */
2440 /* call low level op generator */
2441 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2442 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2447 if (vtop
->r
== VT_CONST
)
2448 vtop
->r
|= VT_NONCONST
; /* is const, but only by optimization */
2452 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2453 # define gen_negf gen_opf
2454 #elif defined TCC_TARGET_ARM
2455 void gen_negf(int op
)
2457 /* arm will detect 0-x and replace by vneg */
2458 vpushi(0), vswap(), gen_op('-');
2461 /* XXX: implement in gen_opf() for other backends too */
2462 void gen_negf(int op
)
2464 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2465 subtract(-0, x), but with them it's really a sign flip
2466 operation. We implement this with bit manipulation and have
2467 to do some type reinterpretation for this, which TCC can do
2470 int align
, size
, bt
;
2472 size
= type_size(&vtop
->type
, &align
);
2473 bt
= vtop
->type
.t
& VT_BTYPE
;
2474 save_reg(gv(RC_TYPE(bt
)));
2476 incr_bf_adr(size
- 1);
2478 vpushi(0x80); /* flip sign */
2485 /* generate a floating point operation with constant propagation */
2486 static void gen_opif(int op
)
2490 #if defined _MSC_VER && defined __x86_64__
2491 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2500 bt
= v1
->type
.t
& VT_BTYPE
;
2502 /* currently, we cannot do computations with forward symbols */
2503 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2504 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2506 if (bt
== VT_FLOAT
) {
2509 } else if (bt
== VT_DOUBLE
) {
2516 /* NOTE: we only do constant propagation if finite number (not
2517 NaN or infinity) (ANSI spec) */
2518 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !CONST_WANTED
)
2521 case '+': f1
+= f2
; break;
2522 case '-': f1
-= f2
; break;
2523 case '*': f1
*= f2
; break;
2526 union { float f
; unsigned u
; } x1
, x2
, y
;
2527 /* If not in initializer we need to potentially generate
2528 FP exceptions at runtime, otherwise we want to fold. */
2531 /* the run-time result of 0.0/0.0 on x87, also of other compilers
2532 when used to compile the f1 /= f2 below, would be -nan */
2533 x1
.f
= f1
, x2
.f
= f2
;
2535 y
.u
= 0x7fc00000; /* nan */
2537 y
.u
= 0x7f800000; /* infinity */
2538 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
2573 /* XXX: overflow test ? */
2574 if (bt
== VT_FLOAT
) {
2576 } else if (bt
== VT_DOUBLE
) {
2583 if (op
== TOK_NEG
) {
2591 /* print a type. If 'varstr' is not NULL, then the variable is also
2592 printed in the type */
2594 /* XXX: add array and function pointers */
2595 static void type_to_str(char *buf
, int buf_size
,
2596 CType
*type
, const char *varstr
)
2608 pstrcat(buf
, buf_size
, "extern ");
2610 pstrcat(buf
, buf_size
, "static ");
2612 pstrcat(buf
, buf_size
, "typedef ");
2614 pstrcat(buf
, buf_size
, "inline ");
2616 if (t
& VT_VOLATILE
)
2617 pstrcat(buf
, buf_size
, "volatile ");
2618 if (t
& VT_CONSTANT
)
2619 pstrcat(buf
, buf_size
, "const ");
2621 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2622 || ((t
& VT_UNSIGNED
)
2623 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2626 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2628 buf_size
-= strlen(buf
);
2664 tstr
= "long double";
2666 pstrcat(buf
, buf_size
, tstr
);
2673 pstrcat(buf
, buf_size
, tstr
);
2674 v
= type
->ref
->v
& ~SYM_STRUCT
;
2675 if (v
>= SYM_FIRST_ANOM
)
2676 pstrcat(buf
, buf_size
, "<anonymous>");
2678 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2683 if (varstr
&& '*' == *varstr
) {
2684 pstrcat(buf1
, sizeof(buf1
), "(");
2685 pstrcat(buf1
, sizeof(buf1
), varstr
);
2686 pstrcat(buf1
, sizeof(buf1
), ")");
2688 pstrcat(buf1
, buf_size
, "(");
2690 while (sa
!= NULL
) {
2692 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2693 pstrcat(buf1
, sizeof(buf1
), buf2
);
2696 pstrcat(buf1
, sizeof(buf1
), ", ");
2698 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2699 pstrcat(buf1
, sizeof(buf1
), ", ...");
2700 pstrcat(buf1
, sizeof(buf1
), ")");
2701 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2705 if (t
& (VT_ARRAY
|VT_VLA
)) {
2706 if (varstr
&& '*' == *varstr
)
2707 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2709 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2710 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2713 pstrcpy(buf1
, sizeof(buf1
), "*");
2714 if (t
& VT_CONSTANT
)
2715 pstrcat(buf1
, buf_size
, "const ");
2716 if (t
& VT_VOLATILE
)
2717 pstrcat(buf1
, buf_size
, "volatile ");
2719 pstrcat(buf1
, sizeof(buf1
), varstr
);
2720 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2724 pstrcat(buf
, buf_size
, " ");
2725 pstrcat(buf
, buf_size
, varstr
);
2730 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2732 char buf1
[256], buf2
[256];
2733 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2734 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2735 tcc_error(fmt
, buf1
, buf2
);
2738 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2740 char buf1
[256], buf2
[256];
2741 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2742 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2743 tcc_warning(fmt
, buf1
, buf2
);
2746 static int pointed_size(CType
*type
)
2749 return type_size(pointed_type(type
), &align
);
2752 static inline int is_null_pointer(SValue
*p
)
2754 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
| VT_NONCONST
)) != VT_CONST
)
2756 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2757 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2758 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2759 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2760 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2761 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2765 /* compare function types. OLD functions match any new functions */
2766 static int is_compatible_func(CType
*type1
, CType
*type2
)
2772 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2774 if (s1
->f
.func_type
!= s2
->f
.func_type
2775 && s1
->f
.func_type
!= FUNC_OLD
2776 && s2
->f
.func_type
!= FUNC_OLD
)
2779 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2781 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
2792 /* return true if type1 and type2 are the same. If unqualified is
2793 true, qualifiers on the types are ignored.
2795 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2799 t1
= type1
->t
& VT_TYPE
;
2800 t2
= type2
->t
& VT_TYPE
;
2802 /* strip qualifiers before comparing */
2803 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2804 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2807 /* Default Vs explicit signedness only matters for char */
2808 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2812 /* XXX: bitfields ? */
2817 && !(type1
->ref
->c
< 0
2818 || type2
->ref
->c
< 0
2819 || type1
->ref
->c
== type2
->ref
->c
))
2822 /* test more complicated cases */
2823 bt1
= t1
& VT_BTYPE
;
2824 if (bt1
== VT_PTR
) {
2825 type1
= pointed_type(type1
);
2826 type2
= pointed_type(type2
);
2827 return is_compatible_types(type1
, type2
);
2828 } else if (bt1
== VT_STRUCT
) {
2829 return (type1
->ref
== type2
->ref
);
2830 } else if (bt1
== VT_FUNC
) {
2831 return is_compatible_func(type1
, type2
);
2832 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
2833 /* If both are enums then they must be the same, if only one is then
2834 t1 and t2 must be equal, which was checked above already. */
2835 return type1
->ref
== type2
->ref
;
2841 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2842 type is stored in DEST if non-null (except for pointer plus/minus) . */
2843 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
2845 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
2846 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
2852 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
2853 ret
= op
== '?' ? 1 : 0;
2854 /* NOTE: as an extension, we accept void on only one side */
2856 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2857 if (op
== '+') ; /* Handled in caller */
2858 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
2859 /* If one is a null ptr constant the result type is the other. */
2860 else if (is_null_pointer (op2
)) type
= *type1
;
2861 else if (is_null_pointer (op1
)) type
= *type2
;
2862 else if (bt1
!= bt2
) {
2863 /* accept comparison or cond-expr between pointer and integer
2865 if ((op
== '?' || TOK_ISCOND(op
))
2866 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
2867 tcc_warning("pointer/integer mismatch in %s",
2868 op
== '?' ? "conditional expression" : "comparison");
2869 else if (op
!= '-' || !is_integer_btype(bt2
))
2871 type
= *(bt1
== VT_PTR
? type1
: type2
);
2873 CType
*pt1
= pointed_type(type1
);
2874 CType
*pt2
= pointed_type(type2
);
2875 int pbt1
= pt1
->t
& VT_BTYPE
;
2876 int pbt2
= pt2
->t
& VT_BTYPE
;
2877 int newquals
, copied
= 0;
2878 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
2879 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
2880 if (op
!= '?' && !TOK_ISCOND(op
))
2883 type_incompatibility_warning(type1
, type2
,
2885 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
2886 : "pointer type mismatch in comparison('%s' and '%s')");
2889 /* pointers to void get preferred, otherwise the
2890 pointed to types minus qualifs should be compatible */
2891 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
2892 /* combine qualifs */
2893 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
2894 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2897 /* copy the pointer target symbol */
2898 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2901 pointed_type(&type
)->t
|= newquals
;
2903 /* pointers to incomplete arrays get converted to
2904 pointers to completed ones if possible */
2905 if (pt1
->t
& VT_ARRAY
2906 && pt2
->t
& VT_ARRAY
2907 && pointed_type(&type
)->ref
->c
< 0
2908 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
2911 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
2913 pointed_type(&type
)->ref
=
2914 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
2915 0, pointed_type(&type
)->ref
->c
);
2916 pointed_type(&type
)->ref
->c
=
2917 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
2923 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
2924 if (op
!= '?' || !compare_types(type1
, type2
, 1))
2927 } else if (is_float(bt1
) || is_float(bt2
)) {
2928 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
2929 type
.t
= VT_LDOUBLE
;
2930 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
2935 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
2936 /* cast to biggest op */
2937 type
.t
= VT_LLONG
| VT_LONG
;
2938 if (bt1
== VT_LLONG
)
2940 if (bt2
== VT_LLONG
)
2942 /* convert to unsigned if it does not fit in a long long */
2943 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
2944 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
2945 type
.t
|= VT_UNSIGNED
;
2947 /* integer operations */
2948 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
2949 /* convert to unsigned if it does not fit in an integer */
2950 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
2951 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
2952 type
.t
|= VT_UNSIGNED
;
2959 /* generic gen_op: handles types problems */
2960 ST_FUNC
void gen_op(int op
)
2962 int t1
, t2
, bt1
, bt2
, t
;
2963 CType type1
, combtype
;
2966 t1
= vtop
[-1].type
.t
;
2967 t2
= vtop
[0].type
.t
;
2968 bt1
= t1
& VT_BTYPE
;
2969 bt2
= t2
& VT_BTYPE
;
2971 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
2972 if (bt2
== VT_FUNC
) {
2973 mk_pointer(&vtop
->type
);
2976 if (bt1
== VT_FUNC
) {
2978 mk_pointer(&vtop
->type
);
2983 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
2984 tcc_error("invalid operand types for binary operation");
2985 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
2986 /* at least one operand is a pointer */
2987 /* relational op: must be both pointers */
2991 /* if both pointers, then it must be the '-' op */
2992 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
2994 tcc_error("cannot use pointers here");
2995 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
2996 vtop
->type
.t
&= ~VT_UNSIGNED
;
2999 vtop
->type
.t
= VT_PTRDIFF_T
;
3003 /* exactly one pointer : must be '+' or '-'. */
3004 if (op
!= '-' && op
!= '+')
3005 tcc_error("cannot use pointers here");
3006 /* Put pointer as first operand */
3007 if (bt2
== VT_PTR
) {
3009 t
= t1
, t1
= t2
, t2
= t
;
3012 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
3013 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3016 type1
= vtop
[-1].type
;
3017 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
3019 #ifdef CONFIG_TCC_BCHECK
3020 if (tcc_state
->do_bounds_check
&& !CONST_WANTED
) {
3021 /* if bounded pointers, we generate a special code to
3028 gen_bounded_ptr_add();
3034 type1
.t
&= ~(VT_ARRAY
|VT_VLA
);
3035 /* put again type if gen_opic() swaped operands */
3039 /* floats can only be used for a few operations */
3040 if (is_float(combtype
.t
)
3041 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3043 tcc_error("invalid operands for binary operation");
3044 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
3045 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
3046 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
3048 t
|= (VT_LONG
& t1
);
3052 t
= t2
= combtype
.t
;
3053 /* XXX: currently, some unsigned operations are explicit, so
3054 we modify them here */
3055 if (t
& VT_UNSIGNED
) {
3062 else if (op
== TOK_LT
)
3064 else if (op
== TOK_GT
)
3066 else if (op
== TOK_LE
)
3068 else if (op
== TOK_GE
)
3074 /* special case for shifts and long long: we keep the shift as
3076 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
3083 if (TOK_ISCOND(op
)) {
3084 /* relational op: the result is an int */
3085 vtop
->type
.t
= VT_INT
;
3090 // Make sure that we have converted to an rvalue:
3091 if (vtop
->r
& VT_LVAL
)
3092 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3095 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3096 #define gen_cvt_itof1 gen_cvt_itof
3098 /* generic itof for unsigned long long case */
3099 static void gen_cvt_itof1(int t
)
3101 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3102 (VT_LLONG
| VT_UNSIGNED
)) {
3105 vpush_helper_func(TOK___floatundisf
);
3106 #if LDOUBLE_SIZE != 8
3107 else if (t
== VT_LDOUBLE
)
3108 vpush_helper_func(TOK___floatundixf
);
3111 vpush_helper_func(TOK___floatundidf
);
3122 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3123 #define gen_cvt_ftoi1 gen_cvt_ftoi
3125 /* generic ftoi for unsigned long long case */
3126 static void gen_cvt_ftoi1(int t
)
3129 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3130 /* not handled natively */
3131 st
= vtop
->type
.t
& VT_BTYPE
;
3133 vpush_helper_func(TOK___fixunssfdi
);
3134 #if LDOUBLE_SIZE != 8
3135 else if (st
== VT_LDOUBLE
)
3136 vpush_helper_func(TOK___fixunsxfdi
);
3139 vpush_helper_func(TOK___fixunsdfdi
);
3150 /* special delayed cast for char/short */
3151 static void force_charshort_cast(void)
3153 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3154 int dbt
= vtop
->type
.t
;
3155 vtop
->r
&= ~VT_MUSTCAST
;
3157 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3161 static void gen_cast_s(int t
)
3169 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3170 static void gen_cast(CType
*type
)
3172 int sbt
, dbt
, sf
, df
, c
;
3173 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3175 /* special delayed cast for char/short */
3176 if (vtop
->r
& VT_MUSTCAST
)
3177 force_charshort_cast();
3179 /* bitfields first get cast to ints */
3180 if (vtop
->type
.t
& VT_BITFIELD
)
3183 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3184 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3192 dbt_bt
= dbt
& VT_BTYPE
;
3193 sbt_bt
= sbt
& VT_BTYPE
;
3194 if (dbt_bt
== VT_VOID
)
3196 if (sbt_bt
== VT_VOID
) {
3198 cast_error(&vtop
->type
, type
);
3201 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3202 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3203 /* don't try to convert to ldouble when cross-compiling
3204 (except when it's '0' which is needed for arm:gen_negf()) */
3205 if (dbt_bt
== VT_LDOUBLE
&& !nocode_wanted
&& (sf
|| vtop
->c
.i
!= 0))
3209 /* constant case: we can do it now */
3210 /* XXX: in ISOC, cannot do it if error in convert */
3211 if (sbt
== VT_FLOAT
)
3212 vtop
->c
.ld
= vtop
->c
.f
;
3213 else if (sbt
== VT_DOUBLE
)
3214 vtop
->c
.ld
= vtop
->c
.d
;
3217 if (sbt_bt
== VT_LLONG
) {
3218 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3219 vtop
->c
.ld
= vtop
->c
.i
;
3221 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3223 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3224 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3226 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3229 if (dbt
== VT_FLOAT
)
3230 vtop
->c
.f
= (float)vtop
->c
.ld
;
3231 else if (dbt
== VT_DOUBLE
)
3232 vtop
->c
.d
= (double)vtop
->c
.ld
;
3233 } else if (sf
&& dbt
== VT_BOOL
) {
3234 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3237 vtop
->c
.i
= vtop
->c
.ld
;
3238 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3240 else if (sbt
& VT_UNSIGNED
)
3241 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3243 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3245 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3247 else if (dbt
== VT_BOOL
)
3248 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3250 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3251 dbt_bt
== VT_SHORT
? 0xffff :
3254 if (!(dbt
& VT_UNSIGNED
))
3255 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3260 } else if (dbt
== VT_BOOL
3261 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3262 == (VT_CONST
| VT_SYM
)) {
3263 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3269 /* cannot generate code for global or static initializers */
3270 if (nocode_wanted
& DATA_ONLY_WANTED
)
3273 /* non constant case: generate code */
3274 if (dbt
== VT_BOOL
) {
3275 gen_test_zero(TOK_NE
);
3281 /* convert from fp to fp */
3284 /* convert int to fp */
3287 /* convert fp to int */
3289 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3292 goto again
; /* may need char/short cast */
3297 ds
= btype_size(dbt_bt
);
3298 ss
= btype_size(sbt_bt
);
3299 if (ds
== 0 || ss
== 0)
3302 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3303 tcc_error("cast to incomplete type");
3305 /* same size and no sign conversion needed */
3306 if (ds
== ss
&& ds
>= 4)
3308 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3309 tcc_warning("cast between pointer and integer of different size");
3310 if (sbt_bt
== VT_PTR
) {
3311 /* put integer type to allow logical operations below */
3312 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3316 /* processor allows { int a = 0, b = *(char*)&a; }
3317 That means that if we cast to less width, we can just
3318 change the type and read it still later. */
3319 #define ALLOW_SUBTYPE_ACCESS 1
3321 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3322 /* value still in memory */
3326 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3328 goto done
; /* no 64bit envolved */
3336 /* generate high word */
3337 if (sbt
& VT_UNSIGNED
) {
3346 } else if (ss
== 8) {
3347 /* from long long: just take low order word */
3355 /* need to convert from 32bit to 64bit */
3356 if (sbt
& VT_UNSIGNED
) {
3357 #if defined(TCC_TARGET_RISCV64)
3358 /* RISC-V keeps 32bit vals in registers sign-extended.
3359 So here we need a zero-extension. */
3368 ss
= ds
, ds
= 4, dbt
= sbt
;
3369 } else if (ss
== 8) {
3370 /* RISC-V keeps 32bit vals in registers sign-extended.
3371 So here we need a sign-extension for signed types and
3372 zero-extension. for unsigned types. */
3373 #if !defined(TCC_TARGET_RISCV64)
3374 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3383 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3389 bits
= (ss
- ds
) * 8;
3390 /* for unsigned, gen_op will convert SAR to SHR */
3391 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3394 vpushi(bits
- trunc
);
3401 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3404 /* return type size as known at compile time. Put alignment at 'a' */
3405 ST_FUNC
int type_size(CType
*type
, int *a
)
3410 bt
= type
->t
& VT_BTYPE
;
3411 if (bt
== VT_STRUCT
) {
3416 } else if (bt
== VT_PTR
) {
3417 if (type
->t
& VT_ARRAY
) {
3421 ts
= type_size(&s
->type
, a
);
3423 if (ts
< 0 && s
->c
< 0)
3431 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3433 return -1; /* incomplete enum */
3434 } else if (bt
== VT_LDOUBLE
) {
3436 return LDOUBLE_SIZE
;
3437 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3438 #ifdef TCC_TARGET_I386
3439 #ifdef TCC_TARGET_PE
3444 #elif defined(TCC_TARGET_ARM)
3454 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3457 } else if (bt
== VT_SHORT
) {
3460 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3464 /* char, void, function, _Bool */
3470 /* push type size as known at runtime time on top of value stack. Put
3472 static void vpush_type_size(CType
*type
, int *a
)
3474 if (type
->t
& VT_VLA
) {
3475 type_size(&type
->ref
->type
, a
);
3476 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3478 int size
= type_size(type
, a
);
3480 tcc_error("unknown type size");
3485 /* return the pointed type of t */
3486 static inline CType
*pointed_type(CType
*type
)
3488 return &type
->ref
->type
;
3491 /* modify type so that its it is a pointer to type. */
3492 ST_FUNC
void mk_pointer(CType
*type
)
3495 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3496 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3500 /* return true if type1 and type2 are exactly the same (including
3503 static int is_compatible_types(CType
*type1
, CType
*type2
)
3505 return compare_types(type1
,type2
,0);
3508 /* return true if type1 and type2 are the same (ignoring qualifiers).
3510 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3512 return compare_types(type1
,type2
,1);
3515 static void cast_error(CType
*st
, CType
*dt
)
3517 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3520 /* verify type compatibility to store vtop in 'dt' type */
3521 static void verify_assign_cast(CType
*dt
)
3523 CType
*st
, *type1
, *type2
;
3524 int dbt
, sbt
, qualwarn
, lvl
;
3526 st
= &vtop
->type
; /* source type */
3527 dbt
= dt
->t
& VT_BTYPE
;
3528 sbt
= st
->t
& VT_BTYPE
;
3529 if (dt
->t
& VT_CONSTANT
)
3530 tcc_warning("assignment of read-only location");
3534 tcc_error("assignment to void expression");
3537 /* special cases for pointers */
3538 /* '0' can also be a pointer */
3539 if (is_null_pointer(vtop
))
3541 /* accept implicit pointer to integer cast with warning */
3542 if (is_integer_btype(sbt
)) {
3543 tcc_warning("assignment makes pointer from integer without a cast");
3546 type1
= pointed_type(dt
);
3548 type2
= pointed_type(st
);
3549 else if (sbt
== VT_FUNC
)
3550 type2
= st
; /* a function is implicitly a function pointer */
3553 if (is_compatible_types(type1
, type2
))
3555 for (qualwarn
= lvl
= 0;; ++lvl
) {
3556 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3557 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3559 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3560 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3561 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3563 type1
= pointed_type(type1
);
3564 type2
= pointed_type(type2
);
3566 if (!is_compatible_unqualified_types(type1
, type2
)) {
3567 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3568 /* void * can match anything */
3569 } else if (dbt
== sbt
3570 && is_integer_btype(sbt
& VT_BTYPE
)
3571 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3572 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3573 /* Like GCC don't warn by default for merely changes
3574 in pointer target signedness. Do warn for different
3575 base types, though, in particular for unsigned enums
3576 and signed int targets. */
3578 tcc_warning("assignment from incompatible pointer type");
3583 tcc_warning_c(warn_discarded_qualifiers
)("assignment discards qualifiers from pointer target type");
3589 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3590 tcc_warning("assignment makes integer from pointer without a cast");
3591 } else if (sbt
== VT_STRUCT
) {
3592 goto case_VT_STRUCT
;
3594 /* XXX: more tests */
3598 if (!is_compatible_unqualified_types(dt
, st
)) {
3606 static void gen_assign_cast(CType
*dt
)
3608 verify_assign_cast(dt
);
3612 /* store vtop in lvalue pushed on stack */
3613 ST_FUNC
void vstore(void)
3615 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3617 ft
= vtop
[-1].type
.t
;
3618 sbt
= vtop
->type
.t
& VT_BTYPE
;
3619 dbt
= ft
& VT_BTYPE
;
3620 verify_assign_cast(&vtop
[-1].type
);
3622 if (sbt
== VT_STRUCT
) {
3623 /* if structure, only generate pointer */
3624 /* structure assignment : generate memcpy */
3625 size
= type_size(&vtop
->type
, &align
);
3626 /* destination, keep on stack() as result */
3628 #ifdef CONFIG_TCC_BCHECK
3629 if (vtop
->r
& VT_MUSTBOUND
)
3630 gbound(); /* check would be wrong after gaddrof() */
3632 vtop
->type
.t
= VT_PTR
;
3636 #ifdef CONFIG_TCC_BCHECK
3637 if (vtop
->r
& VT_MUSTBOUND
)
3640 vtop
->type
.t
= VT_PTR
;
3643 #ifdef TCC_TARGET_NATIVE_STRUCT_COPY
3645 #ifdef CONFIG_TCC_BCHECK
3646 && !tcc_state
->do_bounds_check
3649 gen_struct_copy(size
);
3655 /* Use memmove, rather than memcpy, as dest and src may be same: */
3658 vpush_helper_func(TOK_memmove8
);
3659 else if(!(align
& 3))
3660 vpush_helper_func(TOK_memmove4
);
3663 vpush_helper_func(TOK_memmove
);
3668 } else if (ft
& VT_BITFIELD
) {
3669 /* bitfield store handling */
3671 /* save lvalue as expression result (example: s.b = s.a = n;) */
3672 vdup(), vtop
[-1] = vtop
[-2];
3674 bit_pos
= BIT_POS(ft
);
3675 bit_size
= BIT_SIZE(ft
);
3676 /* remove bit field info to avoid loops */
3677 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3679 if (dbt
== VT_BOOL
) {
3680 gen_cast(&vtop
[-1].type
);
3681 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3683 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3684 if (dbt
!= VT_BOOL
) {
3685 gen_cast(&vtop
[-1].type
);
3686 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3688 if (r
== VT_STRUCT
) {
3689 store_packed_bf(bit_pos
, bit_size
);
3691 unsigned long long mask
= (1ULL << bit_size
) - 1;
3692 if (dbt
!= VT_BOOL
) {
3694 if (dbt
== VT_LLONG
)
3697 vpushi((unsigned)mask
);
3704 /* duplicate destination */
3707 /* load destination, mask and or with source */
3708 if (dbt
== VT_LLONG
)
3709 vpushll(~(mask
<< bit_pos
));
3711 vpushi(~((unsigned)mask
<< bit_pos
));
3716 /* ... and discard */
3719 } else if (dbt
== VT_VOID
) {
3722 /* optimize char/short casts */
3724 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3725 && is_integer_btype(sbt
)
3727 if ((vtop
->r
& VT_MUSTCAST
)
3728 && btype_size(dbt
) > btype_size(sbt
)
3730 force_charshort_cast();
3733 gen_cast(&vtop
[-1].type
);
3736 #ifdef CONFIG_TCC_BCHECK
3737 /* bound check case */
3738 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3744 gv(RC_TYPE(dbt
)); /* generate value */
3747 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3748 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3749 vtop
->type
.t
= ft
& VT_TYPE
;
3752 /* if lvalue was saved on stack, must read it */
3753 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3755 r
= get_reg(RC_INT
);
3756 sv
.type
.t
= VT_PTRDIFF_T
;
3757 sv
.r
= VT_LOCAL
| VT_LVAL
;
3758 sv
.c
.i
= vtop
[-1].c
.i
;
3760 vtop
[-1].r
= r
| VT_LVAL
;
3763 r
= vtop
->r
& VT_VALMASK
;
3764 /* two word case handling :
3765 store second register at word + 4 (or +8 for x86-64) */
3766 if (USING_TWO_WORDS(dbt
)) {
3767 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3768 vtop
[-1].type
.t
= load_type
;
3771 incr_offset(PTR_SIZE
);
3773 /* XXX: it works because r2 is spilled last ! */
3774 store(vtop
->r2
, vtop
- 1);
3780 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3784 /* post defines POST/PRE add. c is the token ++ or -- */
3785 ST_FUNC
void inc(int post
, int c
)
3788 vdup(); /* save lvalue */
3790 gv_dup(); /* duplicate value */
3795 vpushi(c
- TOK_MID
);
3797 vstore(); /* store value */
3799 vpop(); /* if post op, return saved value */
3802 ST_FUNC CString
* parse_mult_str (const char *msg
)
3804 /* read the string */
3807 cstr_reset(&initstr
);
3808 while (tok
== TOK_STR
) {
3809 /* XXX: add \0 handling too ? */
3810 cstr_cat(&initstr
, tokc
.str
.data
, -1);
3813 cstr_ccat(&initstr
, '\0');
3817 /* If I is >= 1 and a power of two, returns log2(i)+1.
3818 If I is 0 returns 0. */
3819 ST_FUNC
int exact_log2p1(int i
)
3824 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
3835 /* Parse __attribute__((...)) GNUC extension. */
3836 static void parse_attribute(AttributeDef
*ad
)
3842 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
3847 while (tok
!= ')') {
3848 if (tok
< TOK_IDENT
)
3849 expect("attribute name");
3861 tcc_warning_c(warn_implicit_function_declaration
)(
3862 "implicit declaration of function '%s'", get_tok_str(tok
, &tokc
));
3863 s
= external_global_sym(tok
, &func_old_type
);
3864 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
3865 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
3866 ad
->cleanup_func
= s
;
3871 case TOK_CONSTRUCTOR1
:
3872 case TOK_CONSTRUCTOR2
:
3873 ad
->f
.func_ctor
= 1;
3875 case TOK_DESTRUCTOR1
:
3876 case TOK_DESTRUCTOR2
:
3877 ad
->f
.func_dtor
= 1;
3879 case TOK_ALWAYS_INLINE1
:
3880 case TOK_ALWAYS_INLINE2
:
3881 ad
->f
.func_alwinl
= 1;
3886 astr
= parse_mult_str("section name")->data
;
3887 ad
->section
= find_section(tcc_state
, astr
);
3893 astr
= parse_mult_str("alias(\"target\")")->data
;
3894 /* save string as token, for later */
3895 ad
->alias_target
= tok_alloc_const(astr
);
3898 case TOK_VISIBILITY1
:
3899 case TOK_VISIBILITY2
:
3901 astr
= parse_mult_str("visibility(\"default|hidden|internal|protected\")")->data
;
3902 if (!strcmp (astr
, "default"))
3903 ad
->a
.visibility
= STV_DEFAULT
;
3904 else if (!strcmp (astr
, "hidden"))
3905 ad
->a
.visibility
= STV_HIDDEN
;
3906 else if (!strcmp (astr
, "internal"))
3907 ad
->a
.visibility
= STV_INTERNAL
;
3908 else if (!strcmp (astr
, "protected"))
3909 ad
->a
.visibility
= STV_PROTECTED
;
3911 expect("visibility(\"default|hidden|internal|protected\")");
3919 if (n
<= 0 || (n
& (n
- 1)) != 0)
3920 tcc_error("alignment must be a positive power of two");
3925 ad
->a
.aligned
= exact_log2p1(n
);
3926 if (n
!= 1 << (ad
->a
.aligned
- 1))
3927 tcc_error("alignment of %d is larger than implemented", n
);
3943 /* currently, no need to handle it because tcc does not
3944 track unused objects */
3948 ad
->f
.func_noreturn
= 1;
3953 ad
->f
.func_call
= FUNC_CDECL
;
3958 ad
->f
.func_call
= FUNC_STDCALL
;
3960 #ifdef TCC_TARGET_I386
3970 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
3976 ad
->f
.func_call
= FUNC_FASTCALLW
;
3983 ad
->attr_mode
= VT_LLONG
+ 1;
3986 ad
->attr_mode
= VT_BYTE
+ 1;
3989 ad
->attr_mode
= VT_SHORT
+ 1;
3993 ad
->attr_mode
= VT_INT
+ 1;
3996 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4003 ad
->a
.dllexport
= 1;
4005 case TOK_NODECORATE
:
4006 ad
->a
.nodecorate
= 1;
4009 ad
->a
.dllimport
= 1;
4012 tcc_warning_c(warn_unsupported
)("'%s' attribute ignored", get_tok_str(t
, NULL
));
4013 /* skip parameters */
4015 int parenthesis
= 0;
4019 else if (tok
== ')')
4022 } while (parenthesis
&& tok
!= -1);
4035 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
4038 int v1
= v
| SYM_FIELD
;
4039 if (!(v
& SYM_FIELD
)) { /* top-level call */
4040 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
4041 expect("struct or union");
4043 expect("field name");
4045 tcc_error("dereferencing incomplete type '%s'",
4046 get_tok_str(s
->v
& ~SYM_STRUCT
, 0));
4048 while ((s
= s
->next
) != NULL
) {
4053 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
4054 && s
->v
>= (SYM_FIRST_ANOM
| SYM_FIELD
)) {
4055 /* try to find field in anonymous sub-struct/union */
4056 Sym
*ret
= find_field (&s
->type
, v1
, cumofs
);
4063 if (!(v
& SYM_FIELD
))
4064 tcc_error("field not found: %s", get_tok_str(v
, NULL
));
4068 static void check_fields (CType
*type
, int check
)
4072 while ((s
= s
->next
) != NULL
) {
4073 int v
= s
->v
& ~SYM_FIELD
;
4074 if (v
< SYM_FIRST_ANOM
) {
4075 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
4076 if (check
&& (ts
->tok
& SYM_FIELD
))
4077 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
4078 ts
->tok
^= SYM_FIELD
;
4079 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
4080 check_fields (&s
->type
, check
);
4084 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4086 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4087 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4088 int pcc
= !tcc_state
->ms_bitfields
;
4089 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4096 prevbt
= VT_STRUCT
; /* make it never match */
4101 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4102 if (f
->type
.t
& VT_BITFIELD
)
4103 bit_size
= BIT_SIZE(f
->type
.t
);
4106 size
= type_size(&f
->type
, &align
);
4107 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4110 if (pcc
&& bit_size
== 0) {
4111 /* in pcc mode, packing does not affect zero-width bitfields */
4114 /* in pcc mode, attribute packed overrides if set. */
4115 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4118 /* pragma pack overrides align if lesser and packs bitfields always */
4121 if (pragma_pack
< align
)
4122 align
= pragma_pack
;
4123 /* in pcc mode pragma pack also overrides individual align */
4124 if (pcc
&& pragma_pack
< a
)
4128 /* some individual align was specified */
4132 if (type
->ref
->type
.t
== VT_UNION
) {
4133 if (pcc
&& bit_size
>= 0)
4134 size
= (bit_size
+ 7) >> 3;
4139 } else if (bit_size
< 0) {
4141 c
+= (bit_pos
+ 7) >> 3;
4142 c
= (c
+ align
- 1) & -align
;
4151 /* A bit-field. Layout is more complicated. There are two
4152 options: PCC (GCC) compatible and MS compatible */
4154 /* In PCC layout a bit-field is placed adjacent to the
4155 preceding bit-fields, except if:
4157 - an individual alignment was given
4158 - it would overflow its base type container and
4159 there is no packing */
4160 if (bit_size
== 0) {
4162 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4164 } else if (f
->a
.aligned
) {
4166 } else if (!packed
) {
4168 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4169 if (ofs
> size
/ align
)
4173 /* in pcc mode, long long bitfields have type int if they fit */
4174 if (size
== 8 && bit_size
<= 32)
4175 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4177 while (bit_pos
>= align
* 8)
4178 c
+= align
, bit_pos
-= align
* 8;
4181 /* In PCC layout named bit-fields influence the alignment
4182 of the containing struct using the base types alignment,
4183 except for packed fields (which here have correct align). */
4184 if (f
->v
& SYM_FIRST_ANOM
4185 // && bit_size // ??? gcc on ARM/rpi does that
4190 bt
= f
->type
.t
& VT_BTYPE
;
4191 if ((bit_pos
+ bit_size
> size
* 8)
4192 || (bit_size
> 0) == (bt
!= prevbt
)
4194 c
= (c
+ align
- 1) & -align
;
4197 /* In MS bitfield mode a bit-field run always uses
4198 at least as many bits as the underlying type.
4199 To start a new run it's also required that this
4200 or the last bit-field had non-zero width. */
4201 if (bit_size
|| prev_bit_size
)
4204 /* In MS layout the records alignment is normally
4205 influenced by the field, except for a zero-width
4206 field at the start of a run (but by further zero-width
4207 fields it is again). */
4208 if (bit_size
== 0 && prevbt
!= bt
)
4211 prev_bit_size
= bit_size
;
4214 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4215 | (bit_pos
<< VT_STRUCT_SHIFT
);
4216 bit_pos
+= bit_size
;
4218 if (align
> maxalign
)
4222 printf("set field %s offset %-2d size %-2d align %-2d",
4223 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4224 if (f
->type
.t
& VT_BITFIELD
) {
4225 printf(" pos %-2d bits %-2d",
4238 c
+= (bit_pos
+ 7) >> 3;
4240 /* store size and alignment */
4241 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4245 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4246 /* can happen if individual align for some member was given. In
4247 this case MSVC ignores maxalign when aligning the size */
4252 c
= (c
+ a
- 1) & -a
;
4256 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4259 /* check whether we can access bitfields by their type */
4260 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4264 if (0 == (f
->type
.t
& VT_BITFIELD
))
4268 bit_size
= BIT_SIZE(f
->type
.t
);
4271 bit_pos
= BIT_POS(f
->type
.t
);
4272 size
= type_size(&f
->type
, &align
);
4274 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4275 #ifdef TCC_TARGET_ARM
4276 && !(f
->c
& (align
- 1))
4281 /* try to access the field using a different type */
4282 c0
= -1, s
= align
= 1;
4285 px
= f
->c
* 8 + bit_pos
;
4286 cx
= (px
>> 3) & -align
;
4287 px
= px
- (cx
<< 3);
4290 s
= (px
+ bit_size
+ 7) >> 3;
4300 s
= type_size(&t
, &align
);
4304 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4305 #ifdef TCC_TARGET_ARM
4306 && !(cx
& (align
- 1))
4309 /* update offset and bit position */
4312 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4313 | (bit_pos
<< VT_STRUCT_SHIFT
);
4317 printf("FIX field %s offset %-2d size %-2d align %-2d "
4318 "pos %-2d bits %-2d\n",
4319 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4320 cx
, s
, align
, px
, bit_size
);
4323 /* fall back to load/store single-byte wise */
4324 f
->auxtype
= VT_STRUCT
;
4326 printf("FIX field %s : load byte-wise\n",
4327 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4333 static void do_Static_assert(void);
4335 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4336 static void struct_decl(CType
*type
, int u
)
4338 int v
, c
, size
, align
, flexible
;
4339 int bit_size
, bsize
, bt
;
4341 AttributeDef ad
, ad1
;
4344 memset(&ad
, 0, sizeof ad
);
4346 parse_attribute(&ad
);
4350 /* struct already defined ? return it */
4352 expect("struct/union/enum name");
4354 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4357 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4359 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4364 /* Record the original enum/struct/union token. */
4365 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4367 /* we put an undefined size for struct/union */
4368 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4369 s
->r
= 0; /* default alignment is zero as gcc */
4371 type
->t
= s
->type
.t
;
4377 tcc_error("struct/union/enum already defined");
4379 /* cannot be empty */
4380 /* non empty enums are not allowed */
4383 long long ll
= 0, pl
= 0, nl
= 0;
4386 /* enum symbols have static storage */
4387 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4391 expect("identifier");
4393 if (ss
&& !local_stack
)
4394 tcc_error("redefinition of enumerator '%s'",
4395 get_tok_str(v
, NULL
));
4399 ll
= expr_const64();
4401 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4403 *ps
= ss
, ps
= &ss
->next
;
4412 /* NOTE: we accept a trailing comma */
4417 /* set integral type of the enum */
4420 if (pl
!= (unsigned)pl
)
4421 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4423 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4424 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4425 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4427 /* set type for enum members */
4428 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4430 if (ll
== (int)ll
) /* default is int if it fits */
4432 if (t
.t
& VT_UNSIGNED
) {
4433 ss
->type
.t
|= VT_UNSIGNED
;
4434 if (ll
== (unsigned)ll
)
4437 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4438 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4443 while (tok
!= '}') {
4444 if (!parse_btype(&btype
, &ad1
, 0)) {
4445 if (tok
== TOK_STATIC_ASSERT
) {
4454 tcc_error("flexible array member '%s' not at the end of struct",
4455 get_tok_str(v
, NULL
));
4461 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4463 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4464 expect("identifier");
4466 int v
= btype
.ref
->v
;
4467 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4468 if (tcc_state
->ms_extensions
== 0)
4469 expect("identifier");
4473 if (type_size(&type1
, &align
) < 0) {
4474 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4477 tcc_error("field '%s' has incomplete type",
4478 get_tok_str(v
, NULL
));
4480 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4481 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4482 (type1
.t
& VT_STORAGE
))
4483 tcc_error("invalid type for '%s'",
4484 get_tok_str(v
, NULL
));
4488 bit_size
= expr_const();
4489 /* XXX: handle v = 0 case for messages */
4491 tcc_error("negative width in bit-field '%s'",
4492 get_tok_str(v
, NULL
));
4493 if (v
&& bit_size
== 0)
4494 tcc_error("zero width for bit-field '%s'",
4495 get_tok_str(v
, NULL
));
4496 parse_attribute(&ad1
);
4498 size
= type_size(&type1
, &align
);
4499 if (bit_size
>= 0) {
4500 bt
= type1
.t
& VT_BTYPE
;
4506 tcc_error("bitfields must have scalar type");
4508 if (bit_size
> bsize
) {
4509 tcc_error("width of '%s' exceeds its type",
4510 get_tok_str(v
, NULL
));
4511 } else if (bit_size
== bsize
4512 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4513 /* no need for bit fields */
4515 } else if (bit_size
== 64) {
4516 tcc_error("field width 64 not implemented");
4518 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4520 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4523 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4524 /* Remember we've seen a real field to check
4525 for placement of flexible array member. */
4528 /* If member is a struct or bit-field, enforce
4529 placing into the struct (as anonymous). */
4531 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4536 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4541 if (tok
== ';' || tok
== TOK_EOF
)
4548 parse_attribute(&ad
);
4549 if (ad
.cleanup_func
) {
4550 tcc_warning("attribute '__cleanup__' ignored on type");
4552 check_fields(type
, 1);
4553 check_fields(type
, 0);
4554 struct_layout(type
, &ad
);
4556 tcc_debug_fix_anon(tcc_state
, type
);
4561 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4563 merge_symattr(&ad
->a
, &s
->a
);
4564 merge_funcattr(&ad
->f
, &s
->f
);
4567 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4568 are added to the element type, copied because it could be a typedef. */
4569 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4571 while (type
->t
& VT_ARRAY
) {
4572 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4573 type
= &type
->ref
->type
;
4575 type
->t
|= qualifiers
;
4578 /* return 0 if no type declaration. otherwise, return the basic type
4581 static int parse_btype(CType
*type
, AttributeDef
*ad
, int ignore_label
)
4583 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4587 memset(ad
, 0, sizeof(AttributeDef
));
4597 /* currently, we really ignore extension */
4607 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4608 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4609 tmbt
: tcc_error("too many basic types");
4612 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4617 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4634 memset(&ad1
, 0, sizeof(AttributeDef
));
4635 if (parse_btype(&type1
, &ad1
, 0)) {
4636 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4638 n
= 1 << (ad1
.a
.aligned
- 1);
4640 type_size(&type1
, &n
);
4643 if (n
< 0 || (n
& (n
- 1)) != 0)
4644 tcc_error("alignment must be a positive power of two");
4647 ad
->a
.aligned
= exact_log2p1(n
);
4651 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4652 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4653 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4654 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4661 #ifdef TCC_TARGET_ARM64
4663 /* GCC's __uint128_t appears in some Linux header files. Make it a
4664 synonym for long double to get the size and alignment right. */
4672 tcc_error("_Complex is not yet supported");
4677 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4678 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4686 struct_decl(&type1
, VT_ENUM
);
4689 type
->ref
= type1
.ref
;
4692 struct_decl(&type1
, VT_STRUCT
);
4695 struct_decl(&type1
, VT_UNION
);
4698 /* type modifiers */
4702 parse_btype_qualify(type
, VT_ATOMIC
);
4705 parse_expr_type(&type1
);
4706 /* remove all storage modifiers except typedef */
4707 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4709 sym_to_attr(ad
, type1
.ref
);
4717 parse_btype_qualify(type
, VT_CONSTANT
);
4725 parse_btype_qualify(type
, VT_VOLATILE
);
4732 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4733 tcc_error("signed and unsigned modifier");
4746 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4747 tcc_error("signed and unsigned modifier");
4748 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4764 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4765 tcc_error("multiple storage classes");
4777 ad
->f
.func_noreturn
= 1;
4779 /* GNUC attribute */
4780 case TOK_ATTRIBUTE1
:
4781 case TOK_ATTRIBUTE2
:
4782 parse_attribute(ad
);
4783 if (ad
->attr_mode
) {
4784 u
= ad
->attr_mode
-1;
4785 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4793 parse_expr_type(&type1
);
4794 /* remove all storage modifiers except typedef */
4795 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4797 sym_to_attr(ad
, type1
.ref
);
4799 case TOK_THREAD_LOCAL
:
4800 tcc_error("_Thread_local is not implemented");
4805 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4809 if (tok
== ':' && ignore_label
) {
4810 /* ignore if it's a label */
4815 t
&= ~(VT_BTYPE
|VT_LONG
);
4816 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4817 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4818 type
->ref
= s
->type
.ref
;
4820 parse_btype_qualify(type
, t
);
4822 /* get attributes from typedef */
4831 if (tcc_state
->char_is_unsigned
) {
4832 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4835 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4836 bt
= t
& (VT_BTYPE
|VT_LONG
);
4838 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4839 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
4840 if (bt
== VT_LDOUBLE
)
4841 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4847 /* convert a function parameter type (array to pointer and function to
4848 function pointer) */
4849 static inline void convert_parameter_type(CType
*pt
)
4851 /* remove const and volatile qualifiers (XXX: const could be used
4852 to indicate a const function parameter */
4853 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4854 /* array must be transformed to pointer according to ANSI C */
4855 pt
->t
&= ~(VT_ARRAY
| VT_VLA
);
4856 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4861 ST_FUNC CString
* parse_asm_str(void)
4864 return parse_mult_str("string constant");
4867 /* Parse an asm label and return the token */
4868 static int asm_label_instr(void)
4874 astr
= parse_asm_str()->data
;
4877 printf("asm_alias: \"%s\"\n", astr
);
4879 v
= tok_alloc_const(astr
);
4883 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
4885 int n
, l
, t1
, arg_size
, align
;
4886 Sym
**plast
, *s
, *first
;
4889 TokenString
*vla_array_tok
= NULL
;
4890 int *vla_array_str
= NULL
;
4893 /* function type, or recursive declarator (return if so) */
4895 if (TYPE_DIRECT
== (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)))
4899 else if (parse_btype(&pt
, &ad1
, 0))
4901 else if (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)) {
4902 merge_attr (ad
, &ad1
);
4913 /* read param name and compute offset */
4914 if (l
!= FUNC_OLD
) {
4915 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
4917 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
| TYPE_PARAM
);
4918 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
4919 tcc_error("parameter declared as void");
4924 pt
.t
= VT_VOID
; /* invalid type */
4929 expect("identifier");
4930 convert_parameter_type(&pt
);
4931 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
4932 /* these symbols may be evaluated for VLArrays (see below, under
4933 nocode_wanted) which is why we push them here as normal symbols
4934 temporarily. Example: int func(int a, int b[++a]); */
4935 s
= sym_push(n
, &pt
, VT_LOCAL
|VT_LVAL
, 0);
4941 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
4946 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
, 0))
4947 tcc_error("invalid type");
4950 /* if no parameters, then old type prototype */
4953 /* remove parameter symbols from token table, keep on stack */
4955 sym_pop(local_stack
? &local_stack
: &global_stack
, first
->prev
, 1);
4956 for (s
= first
; s
; s
= s
->next
)
4960 /* NOTE: const is ignored in returned type as it has a special
4961 meaning in gcc / C++ */
4962 type
->t
&= ~VT_CONSTANT
;
4963 /* some ancient pre-K&R C allows a function to return an array
4964 and the array brackets to be put after the arguments, such
4965 that "int c()[]" means something like "int[] c()" */
4968 skip(']'); /* only handle simple "[]" */
4971 /* we push a anonymous symbol which will contain the function prototype */
4972 ad
->f
.func_args
= arg_size
;
4973 ad
->f
.func_type
= l
;
4974 s
= sym_push(SYM_FIELD
, type
, 0, 0);
4980 } else if (tok
== '[') {
4981 int saved_nocode_wanted
= nocode_wanted
;
4982 /* array definition */
4986 if (td
& TYPE_PARAM
) while (1) {
4987 /* XXX The optional type-quals and static should only be accepted
4988 in parameter decls. The '*' as well, and then even only
4989 in prototypes (not function defs). */
4991 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5002 /* Code generation is not done now but has to be done
5003 at start of function. Save code here for later use. */
5005 skip_or_save_block(&vla_array_tok
);
5007 vla_array_str
= vla_array_tok
->str
;
5008 begin_macro(vla_array_tok
, 2);
5017 } else if (tok
!= ']') {
5018 if (!local_stack
|| (storage
& VT_STATIC
))
5019 vpushi(expr_const());
5021 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5022 length must always be evaluated, even under nocode_wanted,
5023 so that its size slot is initialized (e.g. under sizeof
5029 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5032 tcc_error("invalid array size");
5034 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5035 tcc_error("size of variable length array should be an integer");
5041 /* parse next post type */
5042 post_type(type
, ad
, storage
, (td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
)) | TYPE_NEST
);
5044 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5045 tcc_error("declaration of an array of functions");
5046 if ((type
->t
& VT_BTYPE
) == VT_VOID
5047 || type_size(type
, &align
) < 0)
5048 tcc_error("declaration of an array of incomplete type elements");
5050 t1
|= type
->t
& VT_VLA
;
5055 tcc_error("need explicit inner array size in VLAs");
5058 loc
-= type_size(&int_type
, &align
);
5062 vpush_type_size(type
, &align
);
5064 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5071 nocode_wanted
= saved_nocode_wanted
;
5073 /* we push an anonymous symbol which will contain the array
5075 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5076 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5079 if (vla_array_str
) {
5080 /* for function args, the top dimension is converted to pointer */
5081 if ((t1
& VT_VLA
) && (td
& TYPE_NEST
))
5082 s
->vla_array_str
= vla_array_str
;
5084 tok_str_free_str(vla_array_str
);
5090 /* Parse a type declarator (except basic type), and return the type
5091 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5092 expected. 'type' should contain the basic type. 'ad' is the
5093 attribute definition of the basic type. It can be modified by
5094 type_decl(). If this (possibly abstract) declarator is a pointer chain
5095 it returns the innermost pointed to type (equals *type, but is a different
5096 pointer), otherwise returns type itself, that's used for recursive calls. */
5097 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5100 int qualifiers
, storage
;
5102 /* recursive type, remove storage bits first, apply them later again */
5103 storage
= type
->t
& VT_STORAGE
;
5104 type
->t
&= ~VT_STORAGE
;
5107 while (tok
== '*') {
5113 qualifiers
|= VT_ATOMIC
;
5118 qualifiers
|= VT_CONSTANT
;
5123 qualifiers
|= VT_VOLATILE
;
5129 /* XXX: clarify attribute handling */
5130 case TOK_ATTRIBUTE1
:
5131 case TOK_ATTRIBUTE2
:
5132 parse_attribute(ad
);
5136 type
->t
|= qualifiers
;
5138 /* innermost pointed to type is the one for the first derivation */
5139 ret
= pointed_type(type
);
5143 /* This is possibly a parameter type list for abstract declarators
5144 ('int ()'), use post_type for testing this. */
5145 if (!post_type(type
, ad
, 0, td
)) {
5146 /* It's not, so it's a nested declarator, and the post operations
5147 apply to the innermost pointed to type (if any). */
5148 /* XXX: this is not correct to modify 'ad' at this point, but
5149 the syntax is not clear */
5150 parse_attribute(ad
);
5151 post
= type_decl(type
, ad
, v
, td
);
5155 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5156 /* type identifier */
5161 if (!(td
& TYPE_ABSTRACT
))
5162 expect("identifier");
5165 post_type(post
, ad
, post
!= ret
? 0 : storage
,
5166 td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5167 parse_attribute(ad
);
5172 /* indirection with full error checking and bound check */
5173 ST_FUNC
void indir(void)
5175 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5176 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5180 if (vtop
->r
& VT_LVAL
)
5182 vtop
->type
= *pointed_type(&vtop
->type
);
5183 /* Arrays and functions are never lvalues */
5184 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5185 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5187 /* if bound checking, the referenced pointer must be checked */
5188 #ifdef CONFIG_TCC_BCHECK
5189 if (tcc_state
->do_bounds_check
)
5190 vtop
->r
|= VT_MUSTBOUND
;
5195 /* pass a parameter to a function and do type checking and casting */
5196 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5201 func_type
= func
->f
.func_type
;
5202 if (func_type
== FUNC_OLD
||
5203 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5204 /* default casting : only need to convert float to double */
5205 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5206 gen_cast_s(VT_DOUBLE
);
5207 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5208 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5209 type
.ref
= vtop
->type
.ref
;
5211 } else if (vtop
->r
& VT_MUSTCAST
) {
5212 force_charshort_cast();
5214 } else if (arg
== NULL
) {
5215 tcc_error("too many arguments to function");
5218 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5219 gen_assign_cast(&type
);
5223 /* parse an expression and return its type without any side effect. */
5224 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5233 /* parse an expression of the form '(type)' or '(expr)' and return its
5235 static void parse_expr_type(CType
*type
)
5241 if (parse_btype(type
, &ad
, 0)) {
5242 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5244 expr_type(type
, gexpr
);
5249 static void parse_type(CType
*type
)
5254 if (!parse_btype(type
, &ad
, 0)) {
5257 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5260 static void parse_builtin_params(int nc
, const char *args
)
5269 while ((c
= *args
++)) {
5284 type
.t
= VT_CONSTANT
;
5290 type
.t
= VT_CONSTANT
;
5292 type
.t
|= char_type
.t
;
5304 gen_assign_cast(&type
);
5311 static void parse_atomic(int atok
)
5313 int size
, align
, arg
, t
, save
= 0;
5314 CType
*atom
, *atom_ptr
, ct
= {0};
5317 static const char *const templates
[] = {
5319 * Each entry consists of callback and function template.
5320 * The template represents argument types and return type.
5322 * ? void (return-only)
5325 * A read-only atomic
5326 * p pointer to memory
5333 /* keep in order of appearance in tcctok.h: */
5334 /* __atomic_store */ "alm.?",
5335 /* __atomic_load */ "Asm.v",
5336 /* __atomic_exchange */ "alsm.v",
5337 /* __atomic_compare_exchange */ "aplbmm.b",
5338 /* __atomic_fetch_add */ "avm.v",
5339 /* __atomic_fetch_sub */ "avm.v",
5340 /* __atomic_fetch_or */ "avm.v",
5341 /* __atomic_fetch_xor */ "avm.v",
5342 /* __atomic_fetch_and */ "avm.v",
5343 /* __atomic_fetch_nand */ "avm.v",
5344 /* __atomic_and_fetch */ "avm.v",
5345 /* __atomic_sub_fetch */ "avm.v",
5346 /* __atomic_or_fetch */ "avm.v",
5347 /* __atomic_xor_fetch */ "avm.v",
5348 /* __atomic_and_fetch */ "avm.v",
5349 /* __atomic_nand_fetch */ "avm.v"
5351 const char *template = templates
[(atok
- TOK___atomic_store
)];
5353 atom
= atom_ptr
= NULL
;
5354 size
= 0; /* pacify compiler */
5359 switch (template[arg
]) {
5362 atom_ptr
= &vtop
->type
;
5363 if ((atom_ptr
->t
& VT_BTYPE
) != VT_PTR
)
5365 atom
= pointed_type(atom_ptr
);
5366 size
= type_size(atom
, &align
);
5368 || (size
& (size
- 1))
5369 || (atok
> TOK___atomic_compare_exchange
5370 && (0 == btype_size(atom
->t
& VT_BTYPE
)
5371 || (atom
->t
& VT_BTYPE
) == VT_PTR
)))
5372 expect("integral or integer-sized pointer target type");
5373 /* GCC does not care either: */
5374 /* if (!(atom->t & VT_ATOMIC))
5375 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5379 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
5380 || type_size(pointed_type(&vtop
->type
), &align
) != size
)
5381 tcc_error("pointer target type mismatch in argument %d", arg
+ 1);
5382 gen_assign_cast(atom_ptr
);
5385 gen_assign_cast(atom
);
5389 gen_assign_cast(atom
);
5398 gen_assign_cast(&int_type
);
5402 gen_assign_cast(&ct
);
5405 if ('.' == template[++arg
])
5412 switch (template[arg
+ 1]) {
5421 sprintf(buf
, "%s_%d", get_tok_str(atok
, 0), size
);
5422 vpush_helper_func(tok_alloc_const(buf
));
5423 vrott(arg
- save
+ 1);
5424 gfunc_call(arg
- save
);
5427 PUT_R_RET(vtop
, ct
.t
);
5428 t
= ct
.t
& VT_BTYPE
;
5429 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
5431 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5433 vtop
->type
.t
= VT_INT
;
5445 ST_FUNC
void unary(void)
5447 int n
, t
, align
, size
, r
;
5452 /* generate line number info */
5454 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
5457 /* XXX: GCC 2.95.3 does not generate a table although it should be
5465 #ifdef TCC_TARGET_PE
5466 t
= VT_SHORT
|VT_UNSIGNED
;
5474 vsetc(&type
, VT_CONST
, &tokc
);
5478 t
= VT_INT
| VT_UNSIGNED
;
5484 t
= VT_LLONG
| VT_UNSIGNED
;
5493 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5494 t
= VT_DOUBLE
| VT_LONG
;
5500 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5503 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5505 case TOK___FUNCTION__
:
5507 goto tok_identifier
;
5511 cstr_reset(&tokcstr
);
5512 cstr_cat(&tokcstr
, funcname
, 0);
5513 tokc
.str
.size
= tokcstr
.size
;
5514 tokc
.str
.data
= tokcstr
.data
;
5517 #ifdef TCC_TARGET_PE
5518 t
= VT_SHORT
| VT_UNSIGNED
;
5525 /* string parsing */
5528 if (tcc_state
->warn_write_strings
& WARN_ON
)
5533 memset(&ad
, 0, sizeof(AttributeDef
));
5534 ad
.section
= rodata_section
;
5535 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5542 if (parse_btype(&type
, &ad
, 0)) {
5543 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5545 /* check ISOC99 compound literal */
5547 /* data is allocated locally by default */
5552 /* all except arrays are lvalues */
5553 if (!(type
.t
& VT_ARRAY
))
5555 memset(&ad
, 0, sizeof(AttributeDef
));
5556 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5557 } else if (t
== TOK_SOTYPE
) { /* from sizeof/alignof (...) */
5564 } else if (tok
== '{') {
5565 int saved_nocode_wanted
= nocode_wanted
;
5566 if (CONST_WANTED
&& !NOEVAL_WANTED
)
5568 if (0 == local_scope
)
5569 tcc_error("statement expression outside of function");
5570 /* save all registers */
5572 /* statement expression : we do not accept break/continue
5573 inside as GCC does. We do retain the nocode_wanted state,
5574 as statement expressions can't ever be entered from the
5575 outside, so any reactivation of code emission (from labels
5576 or loop heads) can be disabled again after the end of it. */
5578 /* If the statement expr can be entered, then we retain the current
5579 nocode_wanted state (from e.g. a 'return 0;' in the stmt-expr).
5580 If it can't be entered then the state is that from before the
5581 statement expression. */
5582 if (saved_nocode_wanted
)
5583 nocode_wanted
= saved_nocode_wanted
;
5598 /* functions names must be treated as function pointers,
5599 except for unary '&' and sizeof. Since we consider that
5600 functions are not lvalues, we only have to handle it
5601 there and in function calls. */
5602 /* arrays can also be used although they are not lvalues */
5603 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5604 !(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
)))
5607 vtop
->sym
->a
.addrtaken
= 1;
5608 mk_pointer(&vtop
->type
);
5614 gen_test_zero(TOK_EQ
);
5625 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5626 tcc_error("pointer not accepted for unary plus");
5627 /* In order to force cast, we add zero, except for floating point
5628 where we really need an noop (otherwise -0.0 will be transformed
5630 if (!is_float(vtop
->type
.t
)) {
5643 expr_type(&type
, unary
);
5644 if (t
== TOK_SIZEOF
) {
5645 vpush_type_size(&type
, &align
);
5646 gen_cast_s(VT_SIZE_T
);
5648 type_size(&type
, &align
);
5650 if (vtop
[1].r
& VT_SYM
)
5651 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5652 if (s
&& s
->a
.aligned
)
5653 align
= 1 << (s
->a
.aligned
- 1);
5658 case TOK_builtin_expect
:
5659 /* __builtin_expect is a no-op for now */
5660 parse_builtin_params(0, "ee");
5663 case TOK_builtin_types_compatible_p
:
5664 parse_builtin_params(0, "tt");
5665 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5666 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5667 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5671 case TOK_builtin_choose_expr
:
5698 case TOK_builtin_constant_p
:
5699 parse_builtin_params(1, "e");
5701 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
5702 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
)
5708 case TOK_builtin_frame_address
:
5709 case TOK_builtin_return_address
:
5715 level
= expr_const64();
5717 tcc_error("%s only takes positive integers",
5718 tok1
== TOK_builtin_return_address
?
5719 "__builtin_return_address" :
5720 "__builtin_frame_address");
5725 vset(&type
, VT_LOCAL
, 0); /* local frame */
5727 #ifdef TCC_TARGET_RISCV64
5731 mk_pointer(&vtop
->type
);
5732 indir(); /* -> parent frame */
5734 if (tok1
== TOK_builtin_return_address
) {
5735 // assume return address is just above frame pointer on stack
5736 #ifdef TCC_TARGET_ARM
5739 #elif defined TCC_TARGET_RISCV64
5746 mk_pointer(&vtop
->type
);
5751 #ifdef TCC_TARGET_RISCV64
5752 case TOK_builtin_va_start
:
5753 parse_builtin_params(0, "ee");
5754 r
= vtop
->r
& VT_VALMASK
;
5758 tcc_error("__builtin_va_start expects a local variable");
5763 #ifdef TCC_TARGET_X86_64
5764 #ifdef TCC_TARGET_PE
5765 case TOK_builtin_va_start
:
5766 parse_builtin_params(0, "ee");
5767 r
= vtop
->r
& VT_VALMASK
;
5771 tcc_error("__builtin_va_start expects a local variable");
5773 vtop
->type
= char_pointer_type
;
5778 case TOK_builtin_va_arg_types
:
5779 parse_builtin_params(0, "t");
5780 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5787 #ifdef TCC_TARGET_ARM64
5788 case TOK_builtin_va_start
: {
5789 parse_builtin_params(0, "ee");
5793 vtop
->type
.t
= VT_VOID
;
5796 case TOK_builtin_va_arg
: {
5797 parse_builtin_params(0, "et");
5805 case TOK___arm64_clear_cache
: {
5806 parse_builtin_params(0, "ee");
5809 vtop
->type
.t
= VT_VOID
;
5814 /* atomic operations */
5815 case TOK___atomic_store
:
5816 case TOK___atomic_load
:
5817 case TOK___atomic_exchange
:
5818 case TOK___atomic_compare_exchange
:
5819 case TOK___atomic_fetch_add
:
5820 case TOK___atomic_fetch_sub
:
5821 case TOK___atomic_fetch_or
:
5822 case TOK___atomic_fetch_xor
:
5823 case TOK___atomic_fetch_and
:
5824 case TOK___atomic_fetch_nand
:
5825 case TOK___atomic_add_fetch
:
5826 case TOK___atomic_sub_fetch
:
5827 case TOK___atomic_or_fetch
:
5828 case TOK___atomic_xor_fetch
:
5829 case TOK___atomic_and_fetch
:
5830 case TOK___atomic_nand_fetch
:
5834 /* pre operations */
5845 if (is_float(vtop
->type
.t
)) {
5855 goto tok_identifier
;
5857 /* allow to take the address of a label */
5858 if (tok
< TOK_UIDENT
)
5859 expect("label identifier");
5860 s
= label_find(tok
);
5862 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5864 if (s
->r
== LABEL_DECLARED
)
5865 s
->r
= LABEL_FORWARD
;
5867 if ((s
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5868 s
->type
.t
= VT_VOID
;
5869 mk_pointer(&s
->type
);
5870 s
->type
.t
|= VT_STATIC
;
5872 vpushsym(&s
->type
, s
);
5878 CType controlling_type
;
5879 int has_default
= 0;
5882 TokenString
*str
= NULL
;
5883 int saved_nocode_wanted
= nocode_wanted
;
5884 nocode_wanted
&= ~CONST_WANTED_MASK
;
5888 expr_type(&controlling_type
, expr_eq
);
5889 convert_parameter_type (&controlling_type
);
5891 nocode_wanted
= saved_nocode_wanted
;
5896 if (tok
== TOK_DEFAULT
) {
5898 tcc_error("too many 'default'");
5904 AttributeDef ad_tmp
;
5908 parse_btype(&cur_type
, &ad_tmp
, 0);
5909 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5910 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5912 tcc_error("type match twice");
5922 skip_or_save_block(&str
);
5924 skip_or_save_block(NULL
);
5931 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5932 tcc_error("type '%s' does not match any association", buf
);
5934 begin_macro(str
, 1);
5943 // special qnan , snan and infinity values
5948 vtop
->type
.t
= VT_FLOAT
;
5953 goto special_math_val
;
5956 goto special_math_val
;
5960 if (tok
< TOK_UIDENT
)
5961 tcc_error("expression expected before '%s'", get_tok_str(tok
, &tokc
));
5965 if (!s
|| IS_ASM_SYM(s
)) {
5966 const char *name
= get_tok_str(t
, NULL
);
5968 tcc_error("'%s' undeclared", name
);
5969 /* for simple function calls, we tolerate undeclared
5970 external reference to int() function */
5971 tcc_warning_c(warn_implicit_function_declaration
)(
5972 "implicit declaration of function '%s'", name
);
5973 s
= external_global_sym(t
, &func_old_type
);
5977 /* A symbol that has a register is a local register variable,
5978 which starts out as VT_LOCAL value. */
5979 if ((r
& VT_VALMASK
) < VT_CONST
)
5980 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5982 vset(&s
->type
, r
, s
->c
);
5983 /* Point to s as backpointer (even without r&VT_SYM).
5984 Will be used by at least the x86 inline asm parser for
5990 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5991 vtop
->c
.i
= s
->enum_val
;
5996 /* post operations */
5998 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
6001 } else if (tok
== '.' || tok
== TOK_ARROW
) {
6002 int qualifiers
, cumofs
;
6004 if (tok
== TOK_ARROW
)
6006 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
6008 /* expect pointer on structure */
6010 s
= find_field(&vtop
->type
, tok
, &cumofs
);
6011 /* add field offset to pointer */
6013 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
6016 /* change type to field type, and set to lvalue */
6017 vtop
->type
= s
->type
;
6018 vtop
->type
.t
|= qualifiers
;
6019 /* an array is never an lvalue */
6020 if (!(vtop
->type
.t
& VT_ARRAY
)) {
6022 #ifdef CONFIG_TCC_BCHECK
6023 /* if bound checking, the referenced pointer must be checked */
6024 if (tcc_state
->do_bounds_check
)
6025 vtop
->r
|= VT_MUSTBOUND
;
6029 } else if (tok
== '[') {
6035 } else if (tok
== '(') {
6038 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
6041 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
6042 /* pointer test (no array accepted) */
6043 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
6044 vtop
->type
= *pointed_type(&vtop
->type
);
6045 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6049 expect("function pointer");
6052 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
6054 /* get return type */
6057 sa
= s
->next
; /* first parameter */
6058 nb_args
= regsize
= 0;
6060 /* compute first implicit argument if a structure is returned */
6061 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
6062 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
6063 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6064 &ret_align
, ®size
);
6065 if (ret_nregs
<= 0) {
6066 /* get some space for the returned structure */
6067 size
= type_size(&s
->type
, &align
);
6068 #ifdef TCC_TARGET_ARM64
6069 /* On arm64, a small struct is return in registers.
6070 It is much easier to write it to memory if we know
6071 that we are allowed to write some extra bytes, so
6072 round the allocated space up to a power of 2: */
6074 while (size
& (size
- 1))
6075 size
= (size
| (size
- 1)) + 1;
6077 loc
= (loc
- size
) & -align
;
6079 ret
.r
= VT_LOCAL
| VT_LVAL
;
6080 /* pass it as 'int' to avoid structure arg passing
6082 vseti(VT_LOCAL
, loc
);
6083 #ifdef CONFIG_TCC_BCHECK
6084 if (tcc_state
->do_bounds_check
)
6098 if (ret_nregs
> 0) {
6099 /* return in register */
6101 PUT_R_RET(&ret
, ret
.type
.t
);
6106 gfunc_param_typed(s
, sa
);
6116 tcc_error("too few arguments to function");
6118 gfunc_call(nb_args
);
6120 if (ret_nregs
< 0) {
6121 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6122 #ifdef TCC_TARGET_RISCV64
6123 arch_transfer_ret_regs(1);
6129 int rc
= reg_classes
[ret
.r
] & ~(RC_INT
| RC_FLOAT
);
6130 /* We assume that when a structure is returned in multiple
6131 registers, their classes are consecutive values of the
6134 for (r
= 0; r
< NB_REGS
; ++r
)
6135 if (reg_classes
[r
] & rc
)
6137 vsetc(&ret
.type
, r
, &ret
.c
);
6139 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6142 /* handle packed struct return */
6143 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6146 size
= type_size(&s
->type
, &align
);
6147 /* We're writing whole regs often, make sure there's enough
6148 space. Assume register size is power of 2. */
6149 size
= (size
+ regsize
- 1) & -regsize
;
6150 if (ret_align
> align
)
6152 loc
= (loc
- size
) & -align
;
6156 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6160 if (--ret_nregs
== 0)
6164 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6167 /* Promote char/short return values. This is matters only
6168 for calling function that were not compiled by TCC and
6169 only on some architectures. For those where it doesn't
6170 matter we expect things to be already promoted to int,
6172 t
= s
->type
.t
& VT_BTYPE
;
6173 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6175 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6177 vtop
->type
.t
= VT_INT
;
6181 if (s
->f
.func_noreturn
) {
6183 tcc_tcov_block_end(tcc_state
, -1);
6192 #ifndef precedence_parser /* original top-down parser */
6194 static void expr_prod(void)
6199 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6206 static void expr_sum(void)
6211 while ((t
= tok
) == '+' || t
== '-') {
6218 static void expr_shift(void)
6223 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6230 static void expr_cmp(void)
6235 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6236 t
== TOK_ULT
|| t
== TOK_UGE
) {
6243 static void expr_cmpeq(void)
6248 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6255 static void expr_and(void)
6258 while (tok
== '&') {
6265 static void expr_xor(void)
6268 while (tok
== '^') {
6275 static void expr_or(void)
6278 while (tok
== '|') {
6285 static void expr_landor(int op
);
6287 static void expr_land(void)
6290 if (tok
== TOK_LAND
)
6294 static void expr_lor(void)
6301 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6302 #else /* defined precedence_parser */
6303 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6304 # define expr_lor() unary(), expr_infix(1)
6306 static int precedence(int tok
)
6309 case TOK_LOR
: return 1;
6310 case TOK_LAND
: return 2;
6314 case TOK_EQ
: case TOK_NE
: return 6;
6315 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6316 case TOK_SHL
: case TOK_SAR
: return 8;
6317 case '+': case '-': return 9;
6318 case '*': case '/': case '%': return 10;
6320 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
/* Precomputed operator-precedence table, indexed by token value
   (tokens >= 256 fall back to precedence 0 via the precedence() macro). */
static unsigned char prec[256];

/* Fill the precedence lookup table once, from the precedence() function,
   so the expression parser can use a cheap array access per token. */
static void init_prec(void)
{
    int i;
    for (i = 0; i < 256; i++)
        prec[i] = precedence(i);
}
6332 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6334 static void expr_landor(int op
);
6336 static void expr_infix(int p
)
6339 while ((p2
= precedence(t
)) >= p
) {
6340 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6345 if (precedence(tok
) > p2
)
6354 /* Assuming vtop is a value used in a conditional context
6355 (i.e. compared with zero) return 0 if it's false, 1 if
6356 true and -1 if it can't be statically determined. */
6357 static int condition_3way(void)
6360 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6361 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6363 gen_cast_s(VT_BOOL
);
6370 static void expr_landor(int op
)
6372 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6374 c
= f
? i
: condition_3way();
6376 save_regs(1), cc
= 0;
6378 nocode_wanted
++, f
= 1;
6386 expr_landor_next(op
);
6398 static int is_cond_bool(SValue
*sv
)
6400 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6401 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6402 return (unsigned)sv
->c
.i
< 2;
6403 if (sv
->r
== VT_CMP
)
6408 static void expr_cond(void)
6410 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6417 c
= condition_3way();
6418 g
= (tok
== ':' && gnu_ext
);
6428 /* needed to avoid having different registers saved in
6440 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6441 mk_pointer(&vtop
->type
);
6442 sv
= *vtop
; /* save value to handle it later */
6443 vtop
--; /* no vpop so that FP stack is not flushed */
6460 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6461 mk_pointer(&vtop
->type
);
6463 /* cast operands to correct type according to ISOC rules */
6464 if (!combine_types(&type
, &sv
, vtop
, '?'))
6465 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6466 "type mismatch in conditional expression (have '%s' and '%s')");
6468 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6469 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6470 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6471 this code jumps directly to the if's then/else branches. */
6476 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6480 // tcc_warning("two conditions expr_cond");
6484 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6485 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6486 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6488 /* now we convert second operand */
6492 mk_pointer(&vtop
->type
);
6494 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6498 rc
= RC_TYPE(type
.t
);
6499 /* for long longs, we use fixed registers to avoid having
6500 to handle a complicated move */
6501 if (USING_TWO_WORDS(type
.t
))
6502 rc
= RC_RET(type
.t
);
6513 /* this is horrible, but we must also convert first
6519 mk_pointer(&vtop
->type
);
6521 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6527 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6537 static void expr_eq(void)
6542 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6550 gen_op(TOK_ASSIGN_OP(t
));
6556 ST_FUNC
void gexpr(void)
6564 } while (tok
== ',');
6566 /* convert array & function to pointer */
6567 convert_parameter_type(&vtop
->type
);
6569 /* make builtin_constant_p((1,2)) return 0 (like on gcc) */
6570 if ((vtop
->r
& VT_VALMASK
) == VT_CONST
&& nocode_wanted
&& !CONST_WANTED
)
6571 gv(RC_TYPE(vtop
->type
.t
));
6575 /* parse a constant expression and return value in vtop. */
6576 static void expr_const1(void)
6578 nocode_wanted
+= CONST_WANTED_BIT
;
6580 nocode_wanted
-= CONST_WANTED_BIT
;
6583 /* parse an integer constant and return its value. */
6584 static inline int64_t expr_const64(void)
6588 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
| VT_NONCONST
)) != VT_CONST
)
6589 expect("constant expression");
6595 /* parse an integer constant and return its value.
6596 Complain if it doesn't fit 32bit (signed or unsigned). */
6597 ST_FUNC
int expr_const(void)
6600 int64_t wc
= expr_const64();
6602 if (c
!= wc
&& (unsigned)c
!= wc
)
6603 tcc_error("constant exceeds 32 bit");
6607 /* ------------------------------------------------------------------------- */
6608 /* return from function */
6610 #ifndef TCC_TARGET_ARM64
6611 static void gfunc_return(CType
*func_type
)
6613 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6614 CType type
, ret_type
;
6615 int ret_align
, ret_nregs
, regsize
;
6616 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6617 &ret_align
, ®size
);
6618 if (ret_nregs
< 0) {
6619 #ifdef TCC_TARGET_RISCV64
6620 arch_transfer_ret_regs(0);
6622 } else if (0 == ret_nregs
) {
6623 /* if returning structure, must copy it to implicit
6624 first pointer arg location */
6627 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6630 /* copy structure value to pointer */
6633 /* returning structure packed into registers */
6634 int size
, addr
, align
, rc
, n
;
6635 size
= type_size(func_type
,&align
);
6636 if ((align
& (ret_align
- 1))
6637 && ((vtop
->r
& VT_VALMASK
) < VT_CONST
/* pointer to struct */
6638 || (vtop
->c
.i
& (ret_align
- 1))
6640 loc
= (loc
- size
) & -ret_align
;
6643 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6647 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6649 vtop
->type
= ret_type
;
6650 rc
= RC_RET(ret_type
.t
);
6651 //printf("struct return: n:%d t:%02x rc:%02x\n", ret_nregs, ret_type.t, rc);
6652 for (n
= ret_nregs
; --n
> 0;) {
6656 incr_offset(regsize
);
6657 /* We assume that when a structure is returned in multiple
6658 registers, their classes are consecutive values of the
6663 vtop
-= ret_nregs
- 1;
6666 gv(RC_RET(func_type
->t
));
6668 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6672 static void check_func_return(void)
6674 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6676 if (!strcmp (funcname
, "main")
6677 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6678 /* main returns 0 by default */
6680 gen_assign_cast(&func_vt
);
6681 gfunc_return(&func_vt
);
6683 tcc_warning("function might return no value: '%s'", funcname
);
6687 /* ------------------------------------------------------------------------- */
6690 static int case_cmpi(const void *pa
, const void *pb
)
6692 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6693 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6694 return a
< b
? -1 : a
> b
;
6697 static int case_cmpu(const void *pa
, const void *pb
)
6699 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
6700 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
6701 return a
< b
? -1 : a
> b
;
6704 static void gtst_addr(int t
, int a
)
6706 gsym_addr(gvtst(0, t
), a
);
6709 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6713 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6730 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6732 gcase(base
, len
/2, bsym
);
6736 base
+= e
; len
-= e
;
6746 if (p
->v1
== p
->v2
) {
6748 gtst_addr(0, p
->sym
);
6758 gtst_addr(0, p
->sym
);
6762 *bsym
= gjmp(*bsym
);
6765 static void end_switch(void)
6767 struct switch_t
*sw
= cur_switch
;
6768 dynarray_reset(&sw
->p
, &sw
->n
);
6769 cur_switch
= sw
->prev
;
6773 /* ------------------------------------------------------------------------- */
6774 /* __attribute__((cleanup(fn))) */
6776 static void try_call_scope_cleanup(Sym
*stop
)
6778 Sym
*cls
= cur_scope
->cl
.s
;
6780 for (; cls
!= stop
; cls
= cls
->ncl
) {
6781 Sym
*fs
= cls
->next
;
6782 Sym
*vs
= cls
->prev_tok
;
6784 vpushsym(&fs
->type
, fs
);
6785 vset(&vs
->type
, vs
->r
, vs
->c
);
6787 mk_pointer(&vtop
->type
);
6793 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6798 if (!cur_scope
->cl
.s
)
6801 /* search NCA of both cleanup chains given parents and initial depth */
6802 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6803 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6805 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6807 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6810 try_call_scope_cleanup(cc
);
6813 /* call 'func' for each __attribute__((cleanup(func))) */
6814 static void block_cleanup(struct scope
*o
)
6818 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6819 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6824 try_call_scope_cleanup(o
->cl
.s
);
6825 pcl
->jnext
= gjmp(0);
6827 goto remove_pending
;
6837 try_call_scope_cleanup(o
->cl
.s
);
6840 /* ------------------------------------------------------------------------- */
6843 static void vla_restore(int loc
)
6846 gen_vla_sp_restore(loc
);
6849 static void vla_leave(struct scope
*o
)
6851 struct scope
*c
= cur_scope
, *v
= NULL
;
6852 for (; c
!= o
&& c
; c
= c
->prev
)
6856 vla_restore(v
->vla
.locorig
);
6859 /* ------------------------------------------------------------------------- */
6862 static void new_scope(struct scope
*o
)
6864 /* copy and link previous scope */
6866 o
->prev
= cur_scope
;
6868 cur_scope
->vla
.num
= 0;
6870 /* record local declaration stack position */
6871 o
->lstk
= local_stack
;
6872 o
->llstk
= local_label_stack
;
6876 static void prev_scope(struct scope
*o
, int is_expr
)
6880 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6881 block_cleanup(o
->prev
);
6883 /* pop locally defined labels */
6884 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6886 /* In the is_expr case (a statement expression is finished here),
6887 vtop might refer to symbols on the local_stack. Either via the
6888 type or via vtop->sym. We can't pop those nor any that in turn
6889 might be referred to. To make it easier we don't roll back
6890 any symbols in that case; some upper level call to block() will
6891 do that. We do have to remove such symbols from the lookup
6892 tables, though. sym_pop will do that. */
6894 /* pop locally defined symbols */
6895 pop_local_syms(o
->lstk
, is_expr
);
6896 cur_scope
= o
->prev
;
6900 /* leave a scope via break/continue(/goto) */
6901 static void leave_scope(struct scope
*o
)
6905 try_call_scope_cleanup(o
->cl
.s
);
6909 /* short versiona for scopes with 'if/do/while/switch' which can
6910 declare only types (of struct/union/enum) */
6911 static void new_scope_s(struct scope
*o
)
6913 o
->lstk
= local_stack
;
6917 static void prev_scope_s(struct scope
*o
)
6919 sym_pop(&local_stack
, o
->lstk
, 0);
6923 /* ------------------------------------------------------------------------- */
6924 /* call block from 'for do while' loops */
6926 static void lblock(int *bsym
, int *csym
)
6928 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6929 int *b
= co
->bsym
, *c
= co
->csym
;
6943 static void block(int flags
)
6945 int a
, b
, c
, d
, e
, t
;
6949 if (flags
& STMT_EXPR
) {
6950 /* default return value is (void) */
6952 vtop
->type
.t
= VT_VOID
;
6957 /* If the token carries a value, next() might destroy it. Only with
6958 invalid code such as f(){"123"4;} */
6959 if (TOK_HAS_VALUE(t
))
6964 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_begin (tcc_state
);
6973 if (tok
== TOK_ELSE
) {
6978 gsym(d
); /* patch else jmp */
6984 } else if (t
== TOK_WHILE
) {
6998 } else if (t
== '{') {
7000 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
7003 /* handle local labels declarations */
7004 while (tok
== TOK_LABEL
) {
7007 if (tok
< TOK_UIDENT
)
7008 expect("label identifier");
7009 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
7011 } while (tok
== ',');
7015 while (tok
!= '}') {
7018 if (flags
& STMT_EXPR
)
7020 block(flags
| STMT_COMPOUND
);
7024 prev_scope(&o
, flags
& STMT_EXPR
);
7026 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
7029 else if (!nocode_wanted
)
7030 check_func_return();
7032 } else if (t
== TOK_RETURN
) {
7033 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
7037 gen_assign_cast(&func_vt
);
7039 if (vtop
->type
.t
!= VT_VOID
)
7040 tcc_warning("void function returns a value");
7044 tcc_warning("'return' with no value");
7047 leave_scope(root_scope
);
7049 gfunc_return(&func_vt
);
7051 /* jump unless last stmt in top-level block */
7052 if (tok
!= '}' || local_scope
!= 1)
7055 tcc_tcov_block_end (tcc_state
, -1);
7058 } else if (t
== TOK_BREAK
) {
7060 if (!cur_scope
->bsym
)
7061 tcc_error("cannot break");
7062 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
7063 leave_scope(cur_switch
->scope
);
7065 leave_scope(loop_scope
);
7066 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
7069 } else if (t
== TOK_CONTINUE
) {
7071 if (!cur_scope
->csym
)
7072 tcc_error("cannot continue");
7073 leave_scope(loop_scope
);
7074 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
7077 } else if (t
== TOK_FOR
) {
7082 /* c99 for-loop init decl? */
7083 if (!decl(VT_JMP
)) {
7084 /* no, regular for-loop init expr */
7112 } else if (t
== TOK_DO
) {
7128 } else if (t
== TOK_SWITCH
) {
7129 struct switch_t
*sw
;
7131 sw
= tcc_mallocz(sizeof *sw
);
7133 sw
->scope
= cur_scope
;
7134 sw
->prev
= cur_switch
;
7135 sw
->nocode_wanted
= nocode_wanted
;
7142 sw
->sv
= *vtop
--; /* save switch value */
7144 b
= gjmp(0); /* jump to first case */
7146 a
= gjmp(a
); /* add implicit break */
7151 if (sw
->nocode_wanted
)
7153 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7154 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7156 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7157 for (b
= 1; b
< sw
->n
; b
++)
7158 if (sw
->sv
.type
.t
& VT_UNSIGNED
7159 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7160 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7161 tcc_error("duplicate case value");
7164 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7167 gsym_addr(d
, sw
->def_sym
);
7175 } else if (t
== TOK_CASE
) {
7179 cr
= tcc_malloc(sizeof(struct case_t
));
7180 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7181 cr
->v1
= cr
->v2
= expr_const64();
7182 if (gnu_ext
&& tok
== TOK_DOTS
) {
7184 cr
->v2
= expr_const64();
7185 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7186 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7187 tcc_warning("empty case range");
7189 /* case and default are unreachable from a switch under nocode_wanted */
7190 if (!cur_switch
->nocode_wanted
)
7193 goto block_after_label
;
7195 } else if (t
== TOK_DEFAULT
) {
7198 if (cur_switch
->def_sym
)
7199 tcc_error("too many 'default'");
7200 cur_switch
->def_sym
= cur_switch
->nocode_wanted
? 1 : gind();
7202 goto block_after_label
;
7204 } else if (t
== TOK_GOTO
) {
7205 vla_restore(cur_scope
->vla
.locorig
);
7206 if (tok
== '*' && gnu_ext
) {
7210 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7214 } else if (tok
>= TOK_UIDENT
) {
7215 s
= label_find(tok
);
7216 /* put forward definition if needed */
7218 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7219 else if (s
->r
== LABEL_DECLARED
)
7220 s
->r
= LABEL_FORWARD
;
7222 if (s
->r
& LABEL_FORWARD
) {
7223 /* start new goto chain for cleanups, linked via label->next */
7224 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7225 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7226 pending_gotos
->prev_tok
= s
;
7227 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7228 pending_gotos
->next
= s
;
7230 s
->jnext
= gjmp(s
->jnext
);
7232 try_call_cleanup_goto(s
->cleanupstate
);
7233 gjmp_addr(s
->jnext
);
7238 expect("label identifier");
7242 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7246 if (tok
== ':' && t
>= TOK_UIDENT
) {
7251 if (s
->r
== LABEL_DEFINED
)
7252 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7253 s
->r
= LABEL_DEFINED
;
7255 Sym
*pcl
; /* pending cleanup goto */
7256 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7258 sym_pop(&s
->next
, NULL
, 0);
7262 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7265 s
->cleanupstate
= cur_scope
->cl
.s
;
7269 /* Accept attributes after labels (e.g. 'unused') */
7270 AttributeDef ad_tmp
;
7271 parse_attribute(&ad_tmp
);
7274 tcc_tcov_reset_ind(tcc_state
);
7275 vla_restore(cur_scope
->vla
.loc
);
7278 if (0 == (flags
& STMT_COMPOUND
))
7280 /* C23: insert implicit null-statement whithin compound statement */
7282 /* we accept this, but it is a mistake */
7283 tcc_warning_c(warn_all
)("deprecated use of label at end of compound statement");
7286 /* expression case */
7290 if (flags
& STMT_EXPR
) {
7303 tcc_tcov_check_line (tcc_state
, 0), tcc_tcov_block_end (tcc_state
, 0);
7306 /* This skips over a stream of tokens containing balanced {} and ()
7307 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7308 with a '{'). If STR then allocates and stores the skipped tokens
7309 in *STR. This doesn't check if () and {} are nested correctly,
7310 i.e. "({)}" is accepted. */
7311 static void skip_or_save_block(TokenString
**str
)
7313 int braces
= tok
== '{';
7316 *str
= tok_str_alloc();
7328 if (str
|| level
> 0)
7329 tcc_error("unexpected end of file");
7334 tok_str_add_tok(*str
);
7336 if (t
== '{' || t
== '(' || t
== '[') {
7338 } else if (t
== '}' || t
== ')' || t
== ']') {
7340 if (level
== 0 && braces
&& t
== '}')
7345 tok_str_add(*str
, TOK_EOF
);
7348 #define EXPR_CONST 1
7351 static void parse_init_elem(int expr_type
)
7353 int saved_global_expr
;
7356 /* compound literals must be allocated globally in this case */
7357 saved_global_expr
= global_expr
;
7360 global_expr
= saved_global_expr
;
7361 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7362 (compound literals). */
7363 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7364 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7365 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7366 #ifdef TCC_TARGET_PE
7367 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7370 tcc_error("initializer element is not constant");
7379 static void init_assert(init_params
*p
, int offset
)
7381 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7382 : !nocode_wanted
&& offset
> p
->local_offset
)
7383 tcc_internal_error("initializer overflow");
7386 #define init_assert(sec, offset)
7389 /* put zeros for variable based init */
7390 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7392 init_assert(p
, c
+ size
);
7394 /* nothing to do because globals are already set to zero */
7396 vpush_helper_func(TOK_memset
);
7398 #ifdef TCC_TARGET_ARM
7410 #define DIF_SIZE_ONLY 2
7411 #define DIF_HAVE_ELEM 4
7414 /* delete relocations for specified range c ... c + size. Unfortunatly
7415 in very special cases, relocations may occur unordered */
7416 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7418 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7419 if (!sec
|| !sec
->reloc
)
7421 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7422 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7423 while (rel
< rel_end
) {
7424 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7425 sec
->reloc
->data_offset
-= sizeof *rel
;
7428 memcpy(rel2
, rel
, sizeof *rel
);
7435 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7437 if (ref
== p
->flex_array_ref
) {
7438 if (index
>= ref
->c
)
7440 } else if (ref
->c
< 0)
7441 tcc_error("flexible array has zero size in this context");
7444 /* t is the array or struct type. c is the array or struct
7445 address. cur_field is the pointer to the current
7446 field, for arrays the 'c' member contains the current start
7447 index. 'flags' is as in decl_initializer.
7448 'al' contains the already initialized length of the
7449 current container (starting at c). This returns the new length of that. */
7450 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7451 Sym
**cur_field
, int flags
, int al
)
7454 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7455 unsigned long corig
= c
;
7460 if (flags
& DIF_HAVE_ELEM
)
7463 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7470 /* NOTE: we only support ranges for last designator */
7471 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7473 if (!(type
->t
& VT_ARRAY
))
7474 expect("array type");
7476 index
= index_last
= expr_const();
7477 if (tok
== TOK_DOTS
&& gnu_ext
) {
7479 index_last
= expr_const();
7483 decl_design_flex(p
, s
, index_last
);
7484 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7485 tcc_error("index exceeds array bounds or range is empty");
7487 (*cur_field
)->c
= index_last
;
7488 type
= pointed_type(type
);
7489 elem_size
= type_size(type
, &align
);
7490 c
+= index
* elem_size
;
7491 nb_elems
= index_last
- index
+ 1;
7498 f
= find_field(type
, l
, &cumofs
);
7509 } else if (!gnu_ext
) {
7514 if (type
->t
& VT_ARRAY
) {
7515 index
= (*cur_field
)->c
;
7517 decl_design_flex(p
, s
, index
);
7519 tcc_error("too many initializers");
7520 type
= pointed_type(type
);
7521 elem_size
= type_size(type
, &align
);
7522 c
+= index
* elem_size
;
7525 /* Skip bitfield padding. Also with size 32 and 64. */
7526 while (f
&& (f
->v
& SYM_FIRST_ANOM
) &&
7527 is_integer_btype(f
->type
.t
& VT_BTYPE
))
7528 *cur_field
= f
= f
->next
;
7530 tcc_error("too many initializers");
7536 if (!elem_size
) /* for structs */
7537 elem_size
= type_size(type
, &align
);
7539 /* Using designators the same element can be initialized more
7540 than once. In that case we need to delete possibly already
7541 existing relocations. */
7542 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7543 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7544 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7547 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7549 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7553 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7554 /* make init_putv/vstore believe it were a struct */
7556 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7560 vpush_ref(type
, p
->sec
, c
, elem_size
);
7562 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7563 for (i
= 1; i
< nb_elems
; i
++) {
7565 init_putv(p
, type
, c
+ elem_size
* i
);
7570 c
+= nb_elems
* elem_size
;
7576 /* store a value or an expression directly in global data or in local array */
7577 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7583 Section
*sec
= p
->sec
;
7587 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7589 size
= type_size(type
, &align
);
7590 if (type
->t
& VT_BITFIELD
)
7591 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7592 init_assert(p
, c
+ size
);
7595 /* XXX: not portable */
7596 /* XXX: generate error if incorrect relocation */
7597 gen_assign_cast(&dtype
);
7598 bt
= type
->t
& VT_BTYPE
;
7600 if ((vtop
->r
& VT_SYM
)
7602 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7603 || (type
->t
& VT_BITFIELD
))
7604 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7606 tcc_error("initializer element is not computable at load time");
7608 if (NODATA_WANTED
) {
7613 ptr
= sec
->data
+ c
;
7616 /* XXX: make code faster ? */
7617 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7618 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7619 /* XXX This rejects compound literals like
7620 '(void *){ptr}'. The problem is that '&sym' is
7621 represented the same way, which would be ruled out
7622 by the SYM_FIRST_ANOM check above, but also '"string"'
7623 in 'char *p = "string"' is represented the same
7624 with the type being VT_PTR and the symbol being an
7625 anonymous one. That is, there's no difference in vtop
7626 between '(void *){x}' and '&(void *){x}'. Ignore
7627 pointer typed entities here. Hopefully no real code
7628 will ever use compound literals with scalar type. */
7629 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7630 /* These come from compound literals, memcpy stuff over. */
7634 esym
= elfsym(vtop
->sym
);
7635 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7636 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7638 /* We need to copy over all memory contents, and that
7639 includes relocations. Use the fact that relocs are
7640 created it order, so look from the end of relocs
7641 until we hit one before the copied region. */
7642 unsigned long relofs
= ssec
->reloc
->data_offset
;
7643 while (relofs
>= sizeof(*rel
)) {
7644 relofs
-= sizeof(*rel
);
7645 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ relofs
);
7646 if (rel
->r_offset
>= esym
->st_value
+ size
)
7648 if (rel
->r_offset
< esym
->st_value
)
7650 put_elf_reloca(symtab_section
, sec
,
7651 c
+ rel
->r_offset
- esym
->st_value
,
7652 ELFW(R_TYPE
)(rel
->r_info
),
7653 ELFW(R_SYM
)(rel
->r_info
),
7663 if (type
->t
& VT_BITFIELD
) {
7664 int bit_pos
, bit_size
, bits
, n
;
7665 unsigned char *p
, v
, m
;
7666 bit_pos
= BIT_POS(vtop
->type
.t
);
7667 bit_size
= BIT_SIZE(vtop
->type
.t
);
7668 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7669 bit_pos
&= 7, bits
= 0;
7674 v
= val
>> bits
<< bit_pos
;
7675 m
= ((1 << n
) - 1) << bit_pos
;
7676 *p
= (*p
& ~m
) | (v
& m
);
7677 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7682 *(char *)ptr
= val
!= 0;
7688 write16le(ptr
, val
);
7691 write32le(ptr
, val
);
7694 write64le(ptr
, val
);
7697 #if defined TCC_IS_NATIVE_387
7698 /* Host and target platform may be different but both have x87.
7699 On windows, tcc does not use VT_LDOUBLE, except when it is a
7700 cross compiler. In this case a mingw gcc as host compiler
7701 comes here with 10-byte long doubles, while msvc or tcc won't.
7702 tcc itself can still translate by asm.
7703 In any case we avoid possibly random bytes 11 and 12.
7705 if (sizeof (long double) >= 10)
7706 memcpy(ptr
, &vtop
->c
.ld
, 10);
7708 else if (sizeof (long double) == sizeof (double))
7709 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7713 /* For other platforms it should work natively, but may not work
7714 for cross compilers */
7715 if (sizeof(long double) == LDOUBLE_SIZE
)
7716 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
7717 else if (sizeof(double) == LDOUBLE_SIZE
)
7718 *(double*)ptr
= (double)vtop
->c
.ld
;
7719 else if (0 == memcmp(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
))
7720 ; /* nothing to do for 0.0 */
7721 #ifndef TCC_CROSS_TEST
7723 tcc_error("can't cross compile long double constants");
7728 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
7731 if (vtop
->r
& VT_SYM
)
7732 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7734 write64le(ptr
, val
);
7737 write32le(ptr
, val
);
7741 write64le(ptr
, val
);
7745 if (vtop
->r
& VT_SYM
)
7746 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7747 write32le(ptr
, val
);
7751 //tcc_internal_error("unexpected type");
7757 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7764 /* 't' contains the type and storage info. 'c' is the offset of the
7765 object in section 'sec'. If 'sec' is NULL, it means stack based
7766 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7767 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7768 size only evaluation is wanted (only for arrays). */
7769 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
7771 int len
, n
, no_oblock
, i
;
7777 /* generate line number info */
7778 if (debug_modes
&& !(flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7779 tcc_debug_line(tcc_state
), tcc_tcov_check_line (tcc_state
, 1);
7781 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7782 /* In case of strings we have special handling for arrays, so
7783 don't consume them as initializer value (which would commit them
7784 to some anonymous symbol). */
7785 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7786 (!(flags
& DIF_SIZE_ONLY
)
7787 /* a struct may be initialized from a struct of same type, as in
7788 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
7789 In that case we need to parse the element in order to check
7790 it for compatibility below */
7791 || (type
->t
& VT_BTYPE
) == VT_STRUCT
)
7793 int ncw_prev
= nocode_wanted
;
7794 if ((flags
& DIF_SIZE_ONLY
) && !p
->sec
)
7796 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7797 nocode_wanted
= ncw_prev
;
7798 flags
|= DIF_HAVE_ELEM
;
7801 if (type
->t
& VT_ARRAY
) {
7803 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7811 t1
= pointed_type(type
);
7812 size1
= type_size(t1
, &align1
);
7814 /* only parse strings here if correct type (otherwise: handle
7815 them as ((w)char *) expressions */
7816 if ((tok
== TOK_LSTR
&&
7817 #ifdef TCC_TARGET_PE
7818 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7820 (t1
->t
& VT_BTYPE
) == VT_INT
7822 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7824 cstr_reset(&initstr
);
7825 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7826 tcc_error("unhandled string literal merging");
7827 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7829 initstr
.size
-= size1
;
7831 len
+= tokc
.str
.size
;
7833 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7835 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7838 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7839 && tok
!= TOK_EOF
) {
7840 /* Not a lone literal but part of a bigger expression. */
7841 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7842 tokc
.str
.size
= initstr
.size
;
7843 tokc
.str
.data
= initstr
.data
;
7847 decl_design_flex(p
, s
, len
);
7848 if (!(flags
& DIF_SIZE_ONLY
)) {
7853 tcc_warning("initializer-string for array is too long");
7854 /* in order to go faster for common case (char
7855 string in global variable, we handle it
7857 if (p
->sec
&& size1
== 1) {
7858 init_assert(p
, c
+ nb
);
7860 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
7864 /* only add trailing zero if enough storage (no
7865 warning in this case since it is standard) */
7866 if (flags
& DIF_CLEAR
)
7869 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
7873 } else if (size1
== 1)
7874 ch
= ((unsigned char *)initstr
.data
)[i
];
7876 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7878 init_putv(p
, t1
, c
+ i
* size1
);
7889 /* zero memory once in advance */
7890 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
7891 init_putz(p
, c
, n
*size1
);
7896 /* GNU extension: if the initializer is empty for a flex array,
7897 it's size is zero. We won't enter the loop, so set the size
7899 decl_design_flex(p
, s
, len
);
7900 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7901 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
7902 flags
&= ~DIF_HAVE_ELEM
;
7903 if (type
->t
& VT_ARRAY
) {
7905 /* special test for multi dimensional arrays (may not
7906 be strictly correct if designators are used at the
7908 if (no_oblock
&& len
>= n
*size1
)
7911 if (s
->type
.t
== VT_UNION
)
7915 if (no_oblock
&& f
== NULL
)
7927 } else if ((flags
& DIF_HAVE_ELEM
)
7928 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7929 The source type might have VT_CONSTANT set, which is
7930 of course assignable to non-const elements. */
7931 && is_compatible_unqualified_types(type
, &vtop
->type
)) {
7934 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7936 if ((flags
& DIF_FIRST
) || tok
== '{') {
7946 } else if (tok
== '{') {
7947 if (flags
& DIF_HAVE_ELEM
)
7950 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
7953 } else one_elem
: if ((flags
& DIF_SIZE_ONLY
)) {
7954 /* If we supported only ISO C we wouldn't have to accept calling
7955 this on anything than an array if DIF_SIZE_ONLY (and even then
7956 only on the outermost level, so no recursion would be needed),
7957 because initializing a flex array member isn't supported.
7958 But GNU C supports it, so we need to recurse even into
7959 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7960 /* just skip expression */
7961 if (flags
& DIF_HAVE_ELEM
)
7964 skip_or_save_block(NULL
);
7967 if (!(flags
& DIF_HAVE_ELEM
)) {
7968 /* This should happen only when we haven't parsed
7969 the init element above for fear of committing a
7970 string constant to memory too early. */
7971 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7972 expect("string constant");
7973 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7975 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
7976 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
7978 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
7982 init_putv(p
, type
, c
);
7986 /* parse an initializer for type 't' if 'has_init' is non zero, and
7987 allocate space in local or global data space ('r' is either
7988 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7989 variable 'v' of scope 'scope' is declared before initializers
7990 are parsed. If 'v' is zero, then a reference to the new object
7991 is put in the value stack. If 'has_init' is 2, a special parsing
7992 is done to handle string constants. */
7993 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7994 int has_init
, int v
, int global
)
7996 int size
, align
, addr
;
7997 TokenString
*init_str
= NULL
;
8000 Sym
*flexible_array
;
8002 int saved_nocode_wanted
= nocode_wanted
;
8003 #ifdef CONFIG_TCC_BCHECK
8004 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
8006 init_params p
= {0};
8008 /* Always allocate static or global variables */
8009 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
8010 nocode_wanted
|= DATA_ONLY_WANTED
;
8012 flexible_array
= NULL
;
8013 size
= type_size(type
, &align
);
8015 /* exactly one flexible array may be initialized, either the
8016 toplevel array or the last member of the toplevel struct */
8019 // error out except for top-level incomplete arrays
8020 // (arrays of incomplete types are handled in array parsing)
8021 if (!(type
->t
& VT_ARRAY
))
8022 tcc_error("initialization of incomplete type");
8024 /* If the base type itself was an array type of unspecified size
8025 (like in 'typedef int arr[]; arr x = {1};') then we will
8026 overwrite the unknown size by the real one for this decl.
8027 We need to unshare the ref symbol holding that size. */
8028 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
8029 p
.flex_array_ref
= type
->ref
;
8031 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
8032 Sym
*field
= type
->ref
->next
;
8035 field
= field
->next
;
8036 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
8037 flexible_array
= field
;
8038 p
.flex_array_ref
= field
->type
.ref
;
8045 /* If unknown size, do a dry-run 1st pass */
8047 tcc_error("unknown type size");
8048 if (has_init
== 2) {
8049 /* only get strings */
8050 init_str
= tok_str_alloc();
8051 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8052 tok_str_add_tok(init_str
);
8055 tok_str_add(init_str
, TOK_EOF
);
8057 skip_or_save_block(&init_str
);
8061 begin_macro(init_str
, 1);
8063 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
8064 /* prepare second initializer parsing */
8065 macro_ptr
= init_str
->str
;
8068 /* if still unknown size, error */
8069 size
= type_size(type
, &align
);
8071 tcc_error("unknown type size");
8073 /* If there's a flex member and it was used in the initializer
8075 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
8076 size
+= flexible_array
->type
.ref
->c
8077 * pointed_size(&flexible_array
->type
);
8080 /* take into account specified alignment if bigger */
8081 if (ad
->a
.aligned
) {
8082 int speca
= 1 << (ad
->a
.aligned
- 1);
8085 } else if (ad
->a
.packed
) {
8089 if (!v
&& NODATA_WANTED
)
8090 size
= 0, align
= 1;
8092 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
8094 #ifdef CONFIG_TCC_BCHECK
8096 /* add padding between stack variables for bound checking */
8100 loc
= (loc
- size
) & -align
;
8102 p
.local_offset
= addr
+ size
;
8103 #ifdef CONFIG_TCC_BCHECK
8105 /* add padding between stack variables for bound checking */
8110 /* local variable */
8111 #ifdef CONFIG_TCC_ASM
8112 if (ad
->asm_label
) {
8113 int reg
= asm_parse_regvar(ad
->asm_label
);
8115 r
= (r
& ~VT_VALMASK
) | reg
;
8118 sym
= sym_push(v
, type
, r
, addr
);
8119 if (ad
->cleanup_func
) {
8120 Sym
*cls
= sym_push2(&all_cleanups
,
8121 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
8122 cls
->prev_tok
= sym
;
8123 cls
->next
= ad
->cleanup_func
;
8124 cls
->ncl
= cur_scope
->cl
.s
;
8125 cur_scope
->cl
.s
= cls
;
8130 /* push local reference */
8131 vset(type
, r
, addr
);
8136 /* see if the symbol was already defined */
8139 if (p
.flex_array_ref
&& (sym
->type
.t
& type
->t
& VT_ARRAY
)
8140 && sym
->type
.ref
->c
> type
->ref
->c
) {
8141 /* flex array was already declared with explicit size
8143 int arr[] = { 1,2,3 }; */
8144 type
->ref
->c
= sym
->type
.ref
->c
;
8145 size
= type_size(type
, &align
);
8147 patch_storage(sym
, ad
, type
);
8148 /* we accept several definitions of the same global variable. */
8149 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8154 /* allocate symbol in corresponding section */
8158 while ((tp
->t
& (VT_BTYPE
|VT_ARRAY
)) == (VT_PTR
|VT_ARRAY
))
8159 tp
= &tp
->ref
->type
;
8160 if (tp
->t
& VT_CONSTANT
) {
8161 sec
= rodata_section
;
8162 } else if (has_init
) {
8164 /*if (tcc_state->g_debug & 4)
8165 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8166 } else if (tcc_state
->nocommon
)
8171 addr
= section_add(sec
, size
, align
);
8172 #ifdef CONFIG_TCC_BCHECK
8173 /* add padding if bound check */
8175 section_add(sec
, 1, 1);
8178 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8179 sec
= common_section
;
8184 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8185 patch_storage(sym
, ad
, NULL
);
8187 /* update symbol definition */
8188 put_extern_sym(sym
, sec
, addr
, size
);
8190 /* push global reference */
8191 vpush_ref(type
, sec
, addr
, size
);
8196 #ifdef CONFIG_TCC_BCHECK
8197 /* handles bounds now because the symbol must be defined
8198 before for the relocation */
8202 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8203 /* then add global bound info */
8204 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8205 bounds_ptr
[0] = 0; /* relocated */
8206 bounds_ptr
[1] = size
;
8211 if (type
->t
& VT_VLA
) {
8217 /* save before-VLA stack pointer if needed */
8218 if (cur_scope
->vla
.num
== 0) {
8219 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
8220 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
8222 gen_vla_sp_save(loc
-= PTR_SIZE
);
8223 cur_scope
->vla
.locorig
= loc
;
8227 vpush_type_size(type
, &a
);
8228 gen_vla_alloc(type
, a
);
8229 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8230 /* on _WIN64, because of the function args scratch area, the
8231 result of alloca differs from RSP and is returned in RAX. */
8232 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8234 gen_vla_sp_save(addr
);
8235 cur_scope
->vla
.loc
= addr
;
8236 cur_scope
->vla
.num
++;
8237 } else if (has_init
) {
8239 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8240 /* patch flexible array member size back to -1, */
8241 /* for possible subsequent similar declarations */
8243 flexible_array
->type
.ref
->c
= -1;
8247 /* restore parse state if needed */
8253 nocode_wanted
= saved_nocode_wanted
;
8256 /* generate vla code saved in post_type() */
/* NOTE(review): this chunk is a line-exploded extract; the original source
   numbering (8256, 8257, 8260, ...) shows that several lines — including
   the opening brace and some statements — are missing from this view. */
8257 static void func_vla_arg_code(Sym
*arg
)
/* token string used to replay the VLA size expression saved at parse time */
8260 TokenString
*vla_array_tok
= NULL
;
/* recurse into the referenced type first, so nested VLA dimensions are
   handled before this one */
8263 func_vla_arg_code(arg
->type
.ref
);
/* only act on VLA types that carry a saved size-expression token string */
8265 if ((arg
->type
.t
& VT_VLA
) && arg
->type
.ref
->vla_array_str
) {
/* reserve int-sized local storage; its offset is recorded in ref->c
   (presumably to hold the computed dimension — TODO confirm against the
   full tccgen.c, intermediate lines are missing here) */
8266 loc
-= type_size(&int_type
, &align
);
8268 arg
->type
.ref
->c
= loc
;
/* replay the saved size-expression tokens via the macro machinery */
8271 vla_array_tok
= tok_str_alloc();
8272 vla_array_tok
->str
= arg
->type
.ref
->vla_array_str
;
8273 begin_macro(vla_array_tok
, 1);
/* push the element type's size, then an int lvalue at the reserved local
   slot; the combining/store code between these calls is not visible here */
8278 vpush_type_size(&arg
->type
.ref
->type
, &align
);
8280 vset(&int_type
, VT_LOCAL
|VT_LVAL
, arg
->type
.ref
->c
);
/* Emit runtime size code for every pointer-to-VLA parameter of function
   'sym' by delegating to func_vla_arg_code().
   NOTE(review): 'arg' is declared on an original line (8288-8290) not
   visible in this extract. */
8287 static void func_vla_arg(Sym
*sym
)
/* walk the parameter list hanging off the function type's ref symbol */
8291 for (arg
= sym
->type
.ref
->next
; arg
; arg
= arg
->next
)
/* parameters of type "pointer to VLA" need their dimension evaluated */
8292 if ((arg
->type
.t
& VT_BTYPE
) == VT_PTR
&& (arg
->type
.ref
->type
.t
& VT_VLA
))
8293 func_vla_arg_code(arg
->type
.ref
);
8296 /* parse a function defined by symbol 'sym' and generate its code in
8297 'cur_text_section' */
/* NOTE(review): extract with missing interior lines (original numbering
   jumps, e.g. 8308->8311, 8331->8335); body parsing/codegen calls between
   the visible statements are not shown here. */
8298 static void gen_function(Sym
*sym
)
/* fresh root scope for the whole function body */
8300 struct scope f
= { 0 };
8301 cur_scope
= root_scope
= &f
;
/* start emitting at the current end of the text section */
8304 ind
= cur_text_section
->data_offset
;
/* honor an explicit alignment attribute by padding with nops */
8305 if (sym
->a
.aligned
) {
8306 size_t newoff
= section_add(cur_text_section
, 0,
8307 1 << (sym
->a
.aligned
- 1));
8308 gen_fill_nops(newoff
- ind
);
/* record the function's name, return type and variadic-ness in the
   globals used during body compilation */
8311 funcname
= get_tok_str(sym
->v
, NULL
);
8313 func_vt
= sym
->type
.ref
->type
;
8314 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8316 /* NOTE: we patch the symbol size later */
8317 put_extern_sym(sym
, cur_text_section
, ind
, 0);
/* constructors/destructors get registered in .init_array/.fini_array */
8319 if (sym
->type
.ref
->f
.func_ctor
)
8320 add_array (tcc_state
, ".init_array", sym
->c
);
8321 if (sym
->type
.ref
->f
.func_dtor
)
8322 add_array (tcc_state
, ".fini_array", sym
->c
);
8324 /* put debug symbol */
8325 tcc_debug_funcstart(tcc_state
, sym
);
8327 /* push a dummy symbol to enable local sym storage */
8328 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8329 local_scope
= 1; /* for function parameters */
/* debug marker for prolog; the actual body compilation occurs in lines
   not visible in this extract */
8331 tcc_debug_prolog_epilog(tcc_state
, 0);
8335 clear_temp_local_var_list();
8341 /* reset local stack */
8342 pop_local_syms(NULL
, 0);
8343 tcc_debug_prolog_epilog(tcc_state
, 1);
8346 /* end of function */
8347 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8349 /* patch symbol size */
8350 elfsym(sym
)->st_size
= ind
- func_ind
;
/* commit the emitted bytes back to the section */
8352 cur_text_section
->data_offset
= ind
;
/* discard labels and pending cleanups accumulated during the body */
8354 label_pop(&global_label_stack
, NULL
, 0);
8355 sym_pop(&all_cleanups
, NULL
, 0);
8357 /* It's better to crash than to generate wrong code */
8358 cur_text_section
= NULL
;
8359 funcname
= ""; /* for safety */
8360 func_vt
.t
= VT_VOID
; /* for safety */
8361 func_var
= 0; /* for safety */
8362 ind
= 0; /* for safety */
/* outside of functions only static data may be emitted */
8364 nocode_wanted
= DATA_ONLY_WANTED
;
8367 /* do this after funcend debug info */
/* Emit code for every referenced inline function recorded during parsing.
   Iterates to a fixed point because generating one inline function may
   reference (and thus force) another.
   NOTE(review): extract with missing interior lines — 'sym' is looked up
   and the do-loop opened on original lines not visible here. */
8371 static void gen_inline_functions(TCCState
*s
)
8374 int inline_generated
, i
;
8375 struct InlineFunc
*fn
;
/* pseudo input file so diagnostics/debug have a name while replaying */
8377 tcc_open_bf(s
, ":inline:", 0);
8378 /* iterate while inline function are referenced */
8380 inline_generated
= 0;
8381 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8382 fn
= s
->inline_fns
[i
];
/* sym->c set (symbol emitted/used) or no longer marked inline */
8384 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8385 /* the function was used or forced (and then not internal):
8386 generate its code and convert it to a normal function */
8388 tcc_debug_putfile(s
, fn
->filename
);
/* replay the saved token string of the function body */
8389 begin_macro(fn
->func_str
, 1);
8391 cur_text_section
= text_section
;
/* remember that progress was made so the outer loop runs again */
8395 inline_generated
= 1;
8398 } while (inline_generated
);
/* Release the saved token strings of inline functions that were never
   emitted, then drop the whole inline_fns array.
   NOTE(review): 'i' is declared on an original line (8403-8404) not
   visible in this extract; a guard around tok_str_free may also be
   missing (original line 8408 absent). */
8402 static void free_inline_functions(TCCState
*s
)
8405 /* free tokens of unused inline functions */
8406 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8407 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8409 tok_str_free(fn
->func_str
);
/* free the array of InlineFunc pointers itself and reset the count */
8411 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
/* Handle a _Static_assert declaration: evaluate the condition (on original
   lines not visible in this extract) and report failure with either the
   default message or the user-supplied string constant. */
8414 static void do_Static_assert(void)
/* default diagnostic when no message string is given */
8422 msg
= "_Static_assert fail";
/* optional ", \"message\"" clause: concatenated string constants */
8425 msg
= parse_mult_str("string constant")->data
;
8429 tcc_error("%s", msg
);
8433 /* 'l' is VT_LOCAL or VT_CONST to define default storage type
8434 or VT_CMP if parsing old style parameter list
8435 or VT_JMP if parsing c99 for decl: for (int i = 0, ...) */
/* NOTE(review): line-exploded extract with many original lines missing
   (numbering jumps such as 8441->8446, 8556->8562, 8689->8704); several
   control-flow constructs visible below open or close on lines that are
   not shown here. Code bytes are untouched; only comments were added. */
8436 static int decl(int l
)
8438 int v
, has_init
, r
, oldint
;
8441 AttributeDef ad
, adbase
;
/* try to read a declaration-specifier list; on failure fall through to
   the special cases below */
8446 if (!parse_btype(&btype
, &adbase
, l
== VT_LOCAL
)) {
8449 /* skip redundant ';' if not in old parameter decl scope */
8450 if (tok
== ';' && l
!= VT_CMP
) {
8454 if (tok
== TOK_STATIC_ASSERT
) {
8460 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8461 /* global asm block */
8465 if (tok
>= TOK_UIDENT
) {
8466 /* special test for old K&R protos without explicit int
8467 type. Only accepted when defining global data */
8472 expect("declaration");
/* bare struct/union declaration with no declarator */
8478 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8480 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8481 tcc_warning("unnamed struct/union that defines no instances");
8485 if (IS_ENUM(btype
.t
)) {
8491 while (1) { /* iterate thru each declaration */
8494 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed declarator (enabled by code not visible) */
8498 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8499 printf("type = '%s'\n", buf
);
8502 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8503 if ((type
.t
& VT_STATIC
) && (l
!= VT_CONST
))
8504 tcc_error("function without file scope cannot be static");
8505 /* if old style function prototype, we accept a
8508 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
) {
8512 #if defined TCC_TARGET_MACHO || defined TARGETOS_ANDROID
8513 if (sym
->f
.func_alwinl
8514 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8515 == (VT_EXTERN
| VT_INLINE
))) {
8516 /* always_inline functions must be handled as if they
8517 don't generate multiple global defs, even if extern
8518 inline, i.e. GNU inline semantics for those. Rewrite
8519 them into static inline. */
8520 type
.t
&= ~VT_EXTERN
;
8521 type
.t
|= VT_STATIC
;
8524 /* always compile 'extern inline' */
8525 if (type
.t
& VT_EXTERN
)
8526 type
.t
&= ~VT_INLINE
;
8528 } else if (oldint
) {
8529 tcc_warning("type defaults to int");
/* optional GNU asm("label") rename after the declarator */
8532 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8533 ad
.asm_label
= asm_label_instr();
8534 /* parse one last attribute list, after asm label */
8535 parse_attribute(&ad
);
8537 /* gcc does not allow __asm__("label") with function definition,
8544 #ifdef TCC_TARGET_PE
/* PE targets: sanity-check dll linkage attributes */
8545 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8546 if (type
.t
& VT_STATIC
)
8547 tcc_error("cannot have dll linkage with static");
8548 if (type
.t
& VT_TYPEDEF
) {
8549 tcc_warning("'%s' attribute ignored for typedef",
8550 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8551 (ad
.a
.dllexport
= 0, "dllexport"));
8552 } else if (ad
.a
.dllimport
) {
8553 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8556 type
.t
|= VT_EXTERN
;
/* function definition branch (the test leading here is on lines not
   visible in this extract) */
8562 tcc_error("cannot use local functions");
8563 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8564 expect("function definition");
8566 /* reject abstract declarators in function definition
8567 make old style params without decl have int type */
8569 while ((sym
= sym
->next
) != NULL
) {
8570 if (!(sym
->v
& ~SYM_FIELD
))
8571 expect("identifier");
8572 if (sym
->type
.t
== VT_VOID
)
8573 sym
->type
= int_type
;
8576 /* apply post-declaraton attributes */
8577 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8579 /* put function symbol */
8580 type
.t
&= ~VT_EXTERN
;
8581 sym
= external_sym(v
, &type
, 0, &ad
);
8583 /* static inline functions are just recorded as a kind
8584 of macro. Their code will be emitted at the end of
8585 the compilation unit only if they are used */
8586 if (sym
->type
.t
& VT_INLINE
) {
8587 struct InlineFunc
*fn
;
/* InlineFunc has a trailing filename array: allocate header + string */
8588 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8589 strcpy(fn
->filename
, file
->filename
);
8591 dynarray_add(&tcc_state
->inline_fns
,
8592 &tcc_state
->nb_inline_fns
, fn
);
/* save the body tokens instead of compiling now */
8593 skip_or_save_block(&fn
->func_str
);
8595 /* compute text section */
8596 cur_text_section
= ad
.section
;
8597 if (!cur_text_section
)
8598 cur_text_section
= text_section
;
/* old-style (K&R) parameter declaration branch */
8604 /* find parameter in function parameter list */
8605 for (sym
= func_vt
.ref
->next
; sym
; sym
= sym
->next
)
8606 if ((sym
->v
& ~SYM_FIELD
) == v
)
8608 tcc_error("declaration for parameter '%s' but no such parameter",
8609 get_tok_str(v
, NULL
));
8611 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8612 tcc_error("storage class specified for '%s'",
8613 get_tok_str(v
, NULL
));
8614 if (sym
->type
.t
!= VT_VOID
)
8615 tcc_error("redefinition of parameter '%s'",
8616 get_tok_str(v
, NULL
));
8617 convert_parameter_type(&type
);
8619 } else if (type
.t
& VT_TYPEDEF
) {
8620 /* save typedefed type */
8621 /* XXX: test storage specifiers ? */
8623 if (sym
&& sym
->sym_scope
== local_scope
) {
8624 if (!is_compatible_types(&sym
->type
, &type
)
8625 || !(sym
->type
.t
& VT_TYPEDEF
))
8626 tcc_error("incompatible redefinition of '%s'",
8627 get_tok_str(v
, NULL
));
8630 sym
= sym_push(v
, &type
, 0, 0);
8633 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8634 merge_funcattr(&sym
->type
.ref
->f
, &ad
.f
);
8636 tcc_debug_typedef (tcc_state
, sym
);
8637 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8638 && !(type
.t
& VT_EXTERN
)) {
8639 tcc_error("declaration of void object");
/* plain variable / function declaration branch */
8642 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8643 /* external function definition */
8644 /* specific case for func_call attribute */
8645 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8646 } else if (!(type
.t
& VT_ARRAY
)) {
8647 /* not lvalue if array */
8650 has_init
= (tok
== '=');
8651 if (has_init
&& (type
.t
& VT_VLA
))
8652 tcc_error("variable length array cannot be initialized");
8654 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8655 || (type
.t
& VT_BTYPE
) == VT_FUNC
8656 /* as with GCC, uninitialized global arrays with no size
8657 are considered extern: */
8658 || ((type
.t
& VT_ARRAY
) && !has_init
8659 && l
== VT_CONST
&& type
.ref
->c
< 0)
8661 /* external variable or function */
8662 type
.t
|= VT_EXTERN
;
8663 sym
= external_sym(v
, &type
, r
, &ad
);
8665 if (l
== VT_CONST
|| (type
.t
& VT_STATIC
))
8671 else if (l
== VT_CONST
)
8672 /* uninitialized global variables may be overridden */
8673 type
.t
|= VT_EXTERN
;
/* allocate storage and parse the initializer, if any */
8674 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
== VT_CONST
);
8677 if (ad
.alias_target
&& l
== VT_CONST
) {
8678 /* Aliases need to be emitted when their target symbol
8679 is emitted, even if perhaps unreferenced.
8680 We only support the case where the base is already
8681 defined, otherwise we would need deferring to emit
8682 the aliases until the end of the compile unit. */
8683 Sym
*alias_target
= sym_find(ad
.alias_target
);
8684 ElfSym
*esym
= elfsym(alias_target
);
8686 tcc_error("unsupported forward __alias__ attribute");
/* clone the target's section/value/size onto the alias symbol */
8687 put_extern_sym2(sym_find(v
), esym
->st_shndx
,
8688 esym
->st_value
, esym
->st_size
, 1);
8704 /* ------------------------------------------------------------------------- */
8707 /* ------------------------------------------------------------------------- */