2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
51 static SValue _vstack
[1 + VSTACK_SIZE
];
52 #define vstack (_vstack + 1)
54 ST_DATA
int const_wanted
; /* true if constant wanted */
55 ST_DATA
int nocode_wanted
; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
60 /* Automagical code suppression ----> */
61 #define CODE_OFF() (nocode_wanted |= 0x20000000)
62 #define CODE_ON() (nocode_wanted &= ~0x20000000)
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
66 static int gind(void) { CODE_ON(); return ind
; }
68 /* Set 'nocode_wanted' after unconditional jumps */
/* Emit an unconditional jump to the known address 't', then suppress
   code generation until the next reachable label ("acs" = automatic
   code suppression). */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit an unconditional forward jump chained onto 't' and suppress
   code generation afterwards; returns the updated jump-chain handle. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
77 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializer parsing) */
78 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
79 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
81 static int last_line_num
, new_file
, func_ind
; /* debug info control */
82 ST_DATA
const char *funcname
;
83 ST_DATA CType int_type
, func_old_type
, char_pointer_type
;
86 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
87 #define VT_PTRDIFF_T VT_INT
89 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
90 #define VT_PTRDIFF_T VT_LLONG
92 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
93 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
96 ST_DATA
struct switch_t
{
100 } **p
; int n
; /* list of case ranges */
101 int def_sym
; /* default symbol */
104 struct switch_t
*prev
;
106 } *cur_switch
; /* current switch */
108 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
109 /*list of temporary local variables on the stack in current function. */
110 ST_DATA
struct temp_local_variable
{
111 int location
; //offset on stack. Svalue.c.i
114 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
115 short nb_temp_local_vars
;
117 static struct scope
{
119 struct { int loc
, num
; } vla
;
120 struct { Sym
*s
; int n
; } cl
;
123 } *cur_scope
, *loop_scope
, *root_scope
;
125 /********************************************************/
126 /* stab debug support */
128 static const struct {
131 } default_debug
[] = {
132 { VT_INT
, "int:t1=r1;-2147483648;2147483647;", },
133 { VT_BYTE
, "char:t2=r2;0;127;", },
135 { VT_LONG
| VT_INT
, "long int:t3=r3;-2147483648;2147483647;", },
137 { VT_LLONG
| VT_LONG
, "long int:t3=r3;-9223372036854775808;9223372036854775807;", },
139 { VT_INT
| VT_UNSIGNED
, "unsigned int:t4=r4;0;4294967295;", },
141 { VT_LONG
| VT_INT
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;4294967295;", },
143 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
144 { VT_LLONG
| VT_LONG
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;01777777777777777777777;", },
146 { VT_QLONG
, "__int128:t6=r6;0;-1;", },
147 { VT_QLONG
| VT_UNSIGNED
, "__int128 unsigned:t7=r7;0;-1;", },
148 { VT_LLONG
, "long long int:t8=r8;-9223372036854775808;9223372036854775807;", },
149 { VT_LLONG
| VT_UNSIGNED
, "long long unsigned int:t9=r9;0;01777777777777777777777;", },
150 { VT_SHORT
, "short int:t10=r10;-32768;32767;", },
151 { VT_SHORT
| VT_UNSIGNED
, "short unsigned int:t11=r11;0;65535;", },
152 { VT_BYTE
| VT_DEFSIGN
, "signed char:t12=r12;-128;127;", },
153 { VT_BYTE
| VT_DEFSIGN
| VT_UNSIGNED
, "unsigned char:t13=r13;0;255;", },
154 { VT_FLOAT
, "float:t14=r1;4;0;", },
155 { VT_DOUBLE
, "double:t15=r1;8;0;", },
156 { VT_LDOUBLE
, "long double:t16=r1;16;0;", },
157 { -1, "_Float32:t17=r1;4;0;", },
158 { -1, "_Float64:t18=r1;8;0;", },
159 { -1, "_Float128:t19=r1;16;0;", },
160 { -1, "_Float32x:t20=r1;8;0;", },
161 { -1, "_Float64x:t21=r1;16;0;", },
162 { -1, "_Decimal32:t22=r1;4;0;", },
163 { -1, "_Decimal64:t23=r1;8;0;", },
164 { -1, "_Decimal128:t24=r1;16;0;", },
165 { VT_VOID
, "void:t25=25", },
168 static int debug_next_type
;
170 static struct debug_hash
{
175 static int n_debug_hash
;
177 static struct debug_info
{
188 struct debug_info
*child
, *next
, *last
, *parent
;
189 } *debug_info
, *debug_info_root
;
191 /********************************************************/
193 #define precedence_parser
194 static void init_prec(void);
196 /********************************************************/
197 #ifndef CONFIG_TCC_ASM
198 ST_FUNC
void asm_instr(void)
200 tcc_error("inline asm() not supported");
202 ST_FUNC
void asm_global_instr(void)
204 tcc_error("inline asm() not supported");
208 /* ------------------------------------------------------------------------- */
209 static void gen_cast(CType
*type
);
210 static void gen_cast_s(int t
);
211 static inline CType
*pointed_type(CType
*type
);
212 static int is_compatible_types(CType
*type1
, CType
*type2
);
213 static int parse_btype(CType
*type
, AttributeDef
*ad
);
214 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
215 static void parse_expr_type(CType
*type
);
216 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
217 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
218 static void block(int is_expr
);
219 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
220 static void decl(int l
);
221 static int decl0(int l
, int is_for_loop_init
, Sym
*);
222 static void expr_eq(void);
223 static void vla_runtime_type_size(CType
*type
, int *a
);
224 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
225 static inline int64_t expr_const64(void);
226 static void vpush64(int ty
, unsigned long long v
);
227 static void vpush(CType
*type
);
228 static int gvtst(int inv
, int t
);
229 static void gen_inline_functions(TCCState
*s
);
230 static void free_inline_functions(TCCState
*s
);
231 static void skip_or_save_block(TokenString
**str
);
232 static void gv_dup(void);
233 static int get_temp_local_var(int size
,int align
);
234 static void clear_temp_local_var_list();
235 static void cast_error(CType
*st
, CType
*dt
);
237 ST_INLN
int is_float(int t
)
239 int bt
= t
& VT_BTYPE
;
240 return bt
== VT_LDOUBLE
246 static inline int is_integer_btype(int bt
)
255 static int btype_size(int bt
)
257 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
261 bt
== VT_PTR
? PTR_SIZE
: 0;
264 /* returns function return register from type */
265 static int R_RET(int t
)
269 #ifdef TCC_TARGET_X86_64
270 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
272 #elif defined TCC_TARGET_RISCV64
273 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
279 /* returns 2nd function return register, if any */
280 static int R2_RET(int t
)
286 #elif defined TCC_TARGET_X86_64
291 #elif defined TCC_TARGET_RISCV64
298 /* returns true for two-word types */
299 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
301 /* put function return registers to stack value */
302 static void PUT_R_RET(SValue
*sv
, int t
)
304 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
307 /* returns function return register class for type t */
308 static int RC_RET(int t
)
310 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
313 /* returns generic register class for type t */
314 static int RC_TYPE(int t
)
318 #ifdef TCC_TARGET_X86_64
319 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
321 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
323 #elif defined TCC_TARGET_RISCV64
324 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
330 /* returns 2nd register class corresponding to t and rc */
331 static int RC2_TYPE(int t
, int rc
)
333 if (!USING_TWO_WORDS(t
))
348 /* we use our own 'finite' function to avoid potential problems with
349 non standard math libs */
350 /* XXX: endianness dependent */
351 ST_FUNC
int ieee_finite(double d
)
354 memcpy(p
, &d
, sizeof(double));
355 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
358 /* compiling intel long double natively */
359 #if (defined __i386__ || defined __x86_64__) \
360 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
361 # define TCC_IS_NATIVE_387
364 ST_FUNC
void test_lvalue(void)
366 if (!(vtop
->r
& VT_LVAL
))
370 ST_FUNC
void check_vstack(void)
372 if (vtop
!= vstack
- 1)
373 tcc_error("internal compiler error: vstack leak (%d)",
374 (int)(vtop
- vstack
+ 1));
377 /* ------------------------------------------------------------------------- */
378 /* vstack debugging aid */
381 void pv (const char *lbl
, int a
, int b
)
384 for (i
= a
; i
< a
+ b
; ++i
) {
385 SValue
*p
= &vtop
[-i
];
386 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
387 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
392 /* ------------------------------------------------------------------------- */
393 /* start of translation unit info */
394 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
400 /* file info: full path + filename */
401 section_sym
= put_elf_sym(symtab_section
, 0, 0,
402 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
403 text_section
->sh_num
, NULL
);
404 getcwd(buf
, sizeof(buf
));
406 normalize_slashes(buf
);
408 pstrcat(buf
, sizeof(buf
), "/");
409 put_stabs_r(s1
, buf
, N_SO
, 0, 0,
410 text_section
->data_offset
, text_section
, section_sym
);
411 put_stabs_r(s1
, file
->prev
->filename
, N_SO
, 0, 0,
412 text_section
->data_offset
, text_section
, section_sym
);
413 for (i
= 0; i
< sizeof (default_debug
) / sizeof (default_debug
[0]); i
++)
414 put_stabs(s1
, default_debug
[i
].name
, N_LSYM
, 0, 0, 0);
416 new_file
= last_line_num
= 0;
418 debug_next_type
= sizeof(default_debug
) / sizeof(default_debug
[0]);
422 /* we're currently 'including' the <command line> */
426 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
427 symbols can be safely used */
428 put_elf_sym(symtab_section
, 0, 0,
429 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
430 SHN_ABS
, file
->filename
);
433 static void tcc_debug_stabs (TCCState
*s1
, const char *str
, int type
, unsigned long value
,
434 Section
*sec
, int sym_index
)
440 (struct debug_sym
*)tcc_realloc (debug_info
->sym
,
441 sizeof(struct debug_sym
) *
442 (debug_info
->n_sym
+ 1));
443 s
= debug_info
->sym
+ debug_info
->n_sym
++;
446 s
->str
= tcc_strdup(str
);
448 s
->sym_index
= sym_index
;
451 put_stabs_r (s1
, str
, type
, 0, 0, value
, sec
, sym_index
);
453 put_stabs (s1
, str
, type
, 0, 0, value
);
456 static void tcc_debug_stabn(int type
, int value
)
458 if (type
== N_LBRAC
) {
459 struct debug_info
*info
=
460 (struct debug_info
*) tcc_mallocz(sizeof (*info
));
463 info
->parent
= debug_info
;
465 if (debug_info
->child
) {
466 if (debug_info
->child
->last
)
467 debug_info
->child
->last
->next
= info
;
469 debug_info
->child
->next
= info
;
470 debug_info
->child
->last
= info
;
473 debug_info
->child
= info
;
476 debug_info_root
= info
;
480 debug_info
->end
= value
;
481 debug_info
= debug_info
->parent
;
485 static void tcc_get_debug_info(TCCState
*s1
, Sym
*s
, CString
*result
)
494 type
= t
->type
.t
& ~(VT_EXTERN
| VT_STATIC
| VT_CONSTANT
| VT_VOLATILE
);
495 if ((type
& VT_BTYPE
) != VT_BYTE
)
497 if (type
== VT_PTR
|| type
== (VT_PTR
| VT_ARRAY
))
498 n
++, t
= t
->type
.ref
;
502 if ((type
& VT_BTYPE
) == VT_STRUCT
) {
506 for (i
= 0; i
< n_debug_hash
; i
++) {
507 if (t
== debug_hash
[i
].type
) {
508 debug_type
= debug_hash
[i
].debug_type
;
512 if (debug_type
== -1) {
513 debug_type
= ++debug_next_type
;
514 debug_hash
= (struct debug_hash
*)
515 tcc_realloc (debug_hash
,
516 (n_debug_hash
+ 1) * sizeof(*debug_hash
));
517 debug_hash
[n_debug_hash
].debug_type
= debug_type
;
518 debug_hash
[n_debug_hash
++].type
= t
;
520 cstr_printf (&str
, "%s:T%d=%c%d",
521 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
522 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
524 IS_UNION (t
->type
.t
) ? 'u' : 's',
527 int pos
, size
, align
;
530 cstr_printf (&str
, "%s:",
531 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
532 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
533 tcc_get_debug_info (s1
, t
, &str
);
534 if (t
->type
.t
& VT_BITFIELD
) {
535 pos
= t
->c
* 8 + BIT_POS(t
->type
.t
);
536 size
= BIT_SIZE(t
->type
.t
);
540 size
= type_size(&t
->type
, &align
) * 8;
542 cstr_printf (&str
, ",%d,%d;", pos
, size
);
544 cstr_printf (&str
, ";");
545 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
549 else if (IS_ENUM(type
)) {
550 Sym
*e
= t
= t
->type
.ref
;
552 debug_type
= ++debug_next_type
;
554 cstr_printf (&str
, "%s:T%d=e",
555 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
556 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
560 cstr_printf (&str
, "%s:",
561 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
562 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
563 cstr_printf (&str
, e
->type
.t
& VT_UNSIGNED
? "%u," : "%d,",
566 cstr_printf (&str
, ";");
567 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
571 type
&= ~VT_STRUCT_MASK
;
573 debug_type
<= sizeof(default_debug
) / sizeof(default_debug
[0]);
575 if (default_debug
[debug_type
- 1].type
== type
)
577 if (debug_type
> sizeof(default_debug
) / sizeof(default_debug
[0]))
581 cstr_printf (result
, "%d=", ++debug_next_type
);
584 type
= t
->type
.t
& ~(VT_EXTERN
| VT_STATIC
| VT_CONSTANT
| VT_VOLATILE
);
585 if ((type
& VT_BTYPE
) != VT_BYTE
)
588 cstr_printf (result
, "%d=*", ++debug_next_type
);
589 else if (type
== (VT_PTR
| VT_ARRAY
))
590 cstr_printf (result
, "%d=ar1;0;%d;",
591 ++debug_next_type
, t
->type
.ref
->c
- 1);
596 cstr_printf (result
, "%d", debug_type
);
599 static void tcc_debug_finish (TCCState
*s1
, struct debug_info
*cur
)
603 struct debug_info
*next
= cur
->next
;
605 for (i
= 0; i
< cur
->n_sym
; i
++) {
606 struct debug_sym
*s
= &cur
->sym
[i
];
609 put_stabs_r(s1
, s
->str
, s
->type
, 0, 0, s
->value
,
610 s
->sec
, s
->sym_index
);
612 put_stabs(s1
, s
->str
, s
->type
, 0, 0, s
->value
);
616 put_stabn(s1
, N_LBRAC
, 0, 0, cur
->start
);
617 tcc_debug_finish (s1
, cur
->child
);
618 put_stabn(s1
, N_RBRAC
, 0, 0, cur
->end
);
624 static void tcc_add_debug_info(TCCState
*s1
, int param
, Sym
*s
, Sym
*e
)
627 cstr_new (&debug_str
);
628 for (; s
!= e
; s
= s
->prev
) {
629 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
631 cstr_reset (&debug_str
);
632 cstr_printf (&debug_str
, "%s:%s", get_tok_str(s
->v
, NULL
), param
? "p" : "");
633 tcc_get_debug_info(s1
, s
, &debug_str
);
634 tcc_debug_stabs(s1
, debug_str
.data
, param
? N_PSYM
: N_LSYM
, s
->c
, NULL
, 0);
636 cstr_free (&debug_str
);
639 static void tcc_debug_extern_sym(TCCState
*s1
, Sym
*sym
, int sh_num
, int sym_bind
)
641 Section
*s
= s1
->sections
[sh_num
];
645 cstr_printf (&str
, "%s:%c",
646 get_tok_str(sym
->v
, NULL
),
647 sym_bind
== STB_GLOBAL
? 'G' : local_scope
? 'V' : 'S'
649 tcc_get_debug_info(s1
, sym
, &str
);
650 if (sym_bind
== STB_GLOBAL
)
651 tcc_debug_stabs(s1
, str
.data
, N_GSYM
, 0, NULL
, 0);
653 tcc_debug_stabs(s1
, str
.data
,
654 (sym
->type
.t
& VT_STATIC
) && data_section
== s
655 ? N_STSYM
: N_LCSYM
, 0, s
, sym
->c
);
659 /* put end of translation unit info */
660 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
664 put_stabs_r(s1
, NULL
, N_SO
, 0, 0,
665 text_section
->data_offset
, text_section
, section_sym
);
666 tcc_free(debug_hash
);
669 static BufferedFile
* put_new_file(TCCState
*s1
)
671 BufferedFile
*f
= file
;
672 /* use upper file if from inline ":asm:" */
673 if (f
->filename
[0] == ':')
676 put_stabs_r(s1
, f
->filename
, N_SOL
, 0, 0, ind
, text_section
, section_sym
);
677 new_file
= last_line_num
= 0;
682 /* generate line number info */
683 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
687 || cur_text_section
!= text_section
688 || !(f
= put_new_file(s1
))
689 || last_line_num
== f
->line_num
)
691 if (func_ind
!= -1) {
692 put_stabn(s1
, N_SLINE
, 0, f
->line_num
, ind
- func_ind
);
694 /* from tcc_assemble */
695 put_stabs_r(s1
, NULL
, N_SLINE
, 0, f
->line_num
, ind
, text_section
, section_sym
);
697 last_line_num
= f
->line_num
;
700 /* put function symbol */
701 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
707 debug_info_root
= NULL
;
709 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
710 if (!(f
= put_new_file(s1
)))
712 cstr_new (&debug_str
);
713 cstr_printf(&debug_str
, "%s:%c", funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
714 tcc_get_debug_info(s1
, sym
->type
.ref
, &debug_str
);
715 put_stabs_r(s1
, debug_str
.data
, N_FUN
, 0, f
->line_num
, 0, cur_text_section
, sym
->c
);
716 cstr_free (&debug_str
);
721 /* put function size */
722 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
726 tcc_debug_stabn(N_RBRAC
, size
);
727 tcc_debug_finish (s1
, debug_info_root
);
730 /* put alternative filename */
731 ST_FUNC
void tcc_debug_putfile(TCCState
*s1
, const char *filename
)
733 if (0 == strcmp(file
->filename
, filename
))
735 pstrcpy(file
->filename
, sizeof(file
->filename
), filename
);
739 /* begin of #include */
740 ST_FUNC
void tcc_debug_bincl(TCCState
*s1
)
744 put_stabs(s1
, file
->filename
, N_BINCL
, 0, 0, 0);
748 /* end of #include */
749 ST_FUNC
void tcc_debug_eincl(TCCState
*s1
)
753 put_stabn(s1
, N_EINCL
, 0, 0, 0);
757 /* ------------------------------------------------------------------------- */
758 /* initialize vstack and types. This must be done also for tcc -E */
759 ST_FUNC
void tccgen_init(TCCState
*s1
)
762 memset(vtop
, 0, sizeof *vtop
);
764 /* define some often used types */
766 char_pointer_type
.t
= VT_BYTE
;
767 mk_pointer(&char_pointer_type
);
768 func_old_type
.t
= VT_FUNC
;
769 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
770 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
771 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
772 #ifdef precedence_parser
777 ST_FUNC
int tccgen_compile(TCCState
*s1
)
779 cur_text_section
= NULL
;
781 anon_sym
= SYM_FIRST_ANOM
;
784 nocode_wanted
= 0x80000000;
788 #ifdef TCC_TARGET_ARM
792 printf("%s: **** new file\n", file
->filename
);
794 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
797 gen_inline_functions(s1
);
799 /* end of translation unit info */
804 ST_FUNC
void tccgen_finish(TCCState
*s1
)
806 free_inline_functions(s1
);
807 sym_pop(&global_stack
, NULL
, 0);
808 sym_pop(&local_stack
, NULL
, 0);
809 /* free preprocessor macros */
812 dynarray_reset(&sym_pools
, &nb_sym_pools
);
813 sym_free_first
= NULL
;
816 /* ------------------------------------------------------------------------- */
817 ST_FUNC ElfSym
*elfsym(Sym
*s
)
821 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
824 /* apply storage attributes to Elf symbol */
825 ST_FUNC
void update_storage(Sym
*sym
)
828 int sym_bind
, old_sym_bind
;
834 if (sym
->a
.visibility
)
835 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
838 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
839 sym_bind
= STB_LOCAL
;
840 else if (sym
->a
.weak
)
843 sym_bind
= STB_GLOBAL
;
844 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
845 if (sym_bind
!= old_sym_bind
) {
846 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
850 if (sym
->a
.dllimport
)
851 esym
->st_other
|= ST_PE_IMPORT
;
852 if (sym
->a
.dllexport
)
853 esym
->st_other
|= ST_PE_EXPORT
;
857 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
858 get_tok_str(sym
->v
, NULL
),
859 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
867 /* ------------------------------------------------------------------------- */
868 /* update sym->c so that it points to an external symbol in section
869 'section' with value 'value' */
871 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
872 addr_t value
, unsigned long size
,
873 int can_add_underscore
)
875 int sym_type
, sym_bind
, info
, other
, t
;
879 #ifdef CONFIG_TCC_BCHECK
883 name
= get_tok_str(sym
->v
, NULL
);
884 #ifdef CONFIG_TCC_BCHECK
885 if (tcc_state
->do_bounds_check
) {
886 /* XXX: avoid doing that for statics ? */
887 /* if bound checking is activated, we change some function
888 names by adding the "__bound" prefix */
891 /* XXX: we rely only on malloc hooks */
914 #ifndef TCC_TARGET_PE
917 strcpy(buf
, "__bound_");
925 if ((t
& VT_BTYPE
) == VT_FUNC
) {
927 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
928 sym_type
= STT_NOTYPE
;
930 sym_type
= STT_OBJECT
;
932 if (t
& (VT_STATIC
| VT_INLINE
))
933 sym_bind
= STB_LOCAL
;
935 sym_bind
= STB_GLOBAL
;
938 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
939 Sym
*ref
= sym
->type
.ref
;
940 if (ref
->a
.nodecorate
) {
941 can_add_underscore
= 0;
943 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
944 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
946 other
|= ST_PE_STDCALL
;
947 can_add_underscore
= 0;
951 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
953 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
957 name
= get_tok_str(sym
->asm_label
, NULL
);
958 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
959 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
961 if (tcc_state
->do_debug
962 && sym_type
!= STT_FUNC
963 && sym
->v
< SYM_FIRST_ANOM
)
964 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
);
968 esym
->st_value
= value
;
969 esym
->st_size
= size
;
970 esym
->st_shndx
= sh_num
;
975 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
976 addr_t value
, unsigned long size
)
978 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
979 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
982 /* add a new relocation entry to symbol 'sym' in section 's' */
983 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
988 if (nocode_wanted
&& s
== cur_text_section
)
993 put_extern_sym(sym
, NULL
, 0, 0);
997 /* now we can add ELF relocation info */
998 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
1002 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
1004 greloca(s
, sym
, offset
, type
, 0);
1008 /* ------------------------------------------------------------------------- */
1009 /* symbol allocator */
1010 static Sym
*__sym_malloc(void)
1012 Sym
*sym_pool
, *sym
, *last_sym
;
1015 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
1016 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
1018 last_sym
= sym_free_first
;
1020 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
1021 sym
->next
= last_sym
;
1025 sym_free_first
= last_sym
;
1029 static inline Sym
*sym_malloc(void)
1033 sym
= sym_free_first
;
1035 sym
= __sym_malloc();
1036 sym_free_first
= sym
->next
;
1039 sym
= tcc_malloc(sizeof(Sym
));
1044 ST_INLN
void sym_free(Sym
*sym
)
1047 sym
->next
= sym_free_first
;
1048 sym_free_first
= sym
;
1054 /* push, without hashing */
1055 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
1060 memset(s
, 0, sizeof *s
);
1070 /* find a symbol and return its associated structure. 's' is the top
1071 of the symbol stack */
1072 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
1077 else if (s
->v
== -1)
1084 /* structure lookup */
1085 ST_INLN Sym
*struct_find(int v
)
1088 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1090 return table_ident
[v
]->sym_struct
;
1093 /* find an identifier */
1094 ST_INLN Sym
*sym_find(int v
)
1097 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1099 return table_ident
[v
]->sym_identifier
;
1102 static int sym_scope(Sym
*s
)
1104 if (IS_ENUM_VAL (s
->type
.t
))
1105 return s
->type
.ref
->sym_scope
;
1107 return s
->sym_scope
;
1110 /* push a given symbol on the symbol stack */
1111 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
1120 s
= sym_push2(ps
, v
, type
->t
, c
);
1121 s
->type
.ref
= type
->ref
;
1123 /* don't record fields or anonymous symbols */
1125 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1126 /* record symbol in token array */
1127 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1129 ps
= &ts
->sym_struct
;
1131 ps
= &ts
->sym_identifier
;
1134 s
->sym_scope
= local_scope
;
1135 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
1136 tcc_error("redeclaration of '%s'",
1137 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
1142 /* push a global identifier */
1143 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
1146 s
= sym_push2(&global_stack
, v
, t
, c
);
1147 s
->r
= VT_CONST
| VT_SYM
;
1148 /* don't record anonymous symbol */
1149 if (v
< SYM_FIRST_ANOM
) {
1150 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
1151 /* modify the top most local identifier, so that sym_identifier will
1152 point to 's' when popped; happens when called from inline asm */
1153 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
1154 ps
= &(*ps
)->prev_tok
;
1161 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1162 pop them yet from the list, but do remove them from the token array. */
1163 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
1173 /* remove symbol in token array */
1175 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1176 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1178 ps
= &ts
->sym_struct
;
1180 ps
= &ts
->sym_identifier
;
1191 /* ------------------------------------------------------------------------- */
1192 static void vcheck_cmp(void)
1194 /* cannot let cpu flags if other instruction are generated. Also
1195 avoid leaving VT_JMP anywhere except on the top of the stack
1196 because it would complicate the code generator.
1198 Don't do this when nocode_wanted. vtop might come from
1199 !nocode_wanted regions (see 88_codeopt.c) and transforming
1200 it to a register without actually generating code is wrong
1201 as their value might still be used for real. All values
1202 we push under nocode_wanted will eventually be popped
1203 again, so that the VT_CMP/VT_JMP value will be in vtop
1204 when code is unsuppressed again. */
1206 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
1210 static void vsetc(CType
*type
, int r
, CValue
*vc
)
1212 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1213 tcc_error("memory full (vstack)");
1218 vtop
->r2
= VT_CONST
;
1223 ST_FUNC
void vswap(void)
1233 /* pop stack value */
1234 ST_FUNC
void vpop(void)
1237 v
= vtop
->r
& VT_VALMASK
;
1238 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1239 /* for x86, we need to pop the FP stack */
1240 if (v
== TREG_ST0
) {
1241 o(0xd8dd); /* fstp %st(0) */
1245 /* need to put correct jump if && or || without test */
1252 /* push constant of type "type" with useless value */
1253 static void vpush(CType
*type
)
1255 vset(type
, VT_CONST
, 0);
1258 /* push arbitrary 64bit constant */
1259 static void vpush64(int ty
, unsigned long long v
)
1266 vsetc(&ctype
, VT_CONST
, &cval
);
1269 /* push integer constant */
1270 ST_FUNC
void vpushi(int v
)
1275 /* push a pointer sized constant */
1276 static void vpushs(addr_t v
)
1278 vpush64(VT_SIZE_T
, v
);
1281 /* push long long constant */
1282 static inline void vpushll(long long v
)
1284 vpush64(VT_LLONG
, v
);
1287 ST_FUNC
void vset(CType
*type
, int r
, int v
)
1291 vsetc(type
, r
, &cval
);
1294 static void vseti(int r
, int v
)
1302 ST_FUNC
void vpushv(SValue
*v
)
1304 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1305 tcc_error("memory full (vstack)");
1310 static void vdup(void)
1315 /* rotate n first stack elements to the bottom
1316 I1 ... In -> I2 ... In I1 [top is right]
1318 ST_FUNC
void vrotb(int n
)
1325 for(i
=-n
+1;i
!=0;i
++)
1326 vtop
[i
] = vtop
[i
+1];
1330 /* rotate the n elements before entry e towards the top
1331 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1333 ST_FUNC
void vrote(SValue
*e
, int n
)
1340 for(i
= 0;i
< n
- 1; i
++)
1345 /* rotate n first stack elements to the top
1346 I1 ... In -> In I1 ... I(n-1) [top is right]
1348 ST_FUNC
void vrott(int n
)
1353 /* ------------------------------------------------------------------------- */
1354 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1356 /* called from generators to set the result from relational ops */
1357 ST_FUNC
void vset_VT_CMP(int op
)
1365 /* called once before asking generators to load VT_CMP to a register */
1366 static void vset_VT_JMP(void)
1368 int op
= vtop
->cmp_op
;
1370 if (vtop
->jtrue
|| vtop
->jfalse
) {
1371 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1372 int inv
= op
& (op
< 2); /* small optimization */
1373 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1375 /* otherwise convert flags (rsp. 0/1) to register */
1377 if (op
< 2) /* doesn't seem to happen */
1382 /* Set CPU Flags, doesn't yet jump */
1383 static void gvtst_set(int inv
, int t
)
1387 if (vtop
->r
!= VT_CMP
) {
1390 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1391 vset_VT_CMP(vtop
->c
.i
!= 0);
1394 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1395 *p
= gjmp_append(*p
, t
);
1398 /* Generate value test
1400 * Generate a test for any value (jump, comparison and integers) */
1401 static int gvtst(int inv
, int t
)
1406 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1408 x
= u
, u
= t
, t
= x
;
1411 /* jump to the wanted target */
1413 t
= gjmp_cond(op
^ inv
, t
);
1416 /* resolve complementary jumps to here */
1423 /* generate a zero or nozero test */
1424 static void gen_test_zero(int op
)
1426 if (vtop
->r
== VT_CMP
) {
1430 vtop
->jfalse
= vtop
->jtrue
;
1440 /* ------------------------------------------------------------------------- */
1441 /* push a symbol value of TYPE */
1442 static inline void vpushsym(CType
*type
, Sym
*sym
)
1446 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1450 /* Return a static symbol pointing to a section */
1451 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1457 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1458 sym
->type
.t
|= VT_STATIC
;
1459 put_extern_sym(sym
, sec
, offset
, size
);
1463 /* push a reference to a section offset by adding a dummy symbol */
1464 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1466 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1469 /* define a new external reference to a symbol 'v' of type 'u' */
1470 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1476 /* push forward reference */
1477 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1478 s
->type
.ref
= type
->ref
;
1479 } else if (IS_ASM_SYM(s
)) {
1480 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1481 s
->type
.ref
= type
->ref
;
1487 /* Merge symbol attributes. */
1488 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1490 if (sa1
->aligned
&& !sa
->aligned
)
1491 sa
->aligned
= sa1
->aligned
;
1492 sa
->packed
|= sa1
->packed
;
1493 sa
->weak
|= sa1
->weak
;
1494 if (sa1
->visibility
!= STV_DEFAULT
) {
1495 int vis
= sa
->visibility
;
1496 if (vis
== STV_DEFAULT
1497 || vis
> sa1
->visibility
)
1498 vis
= sa1
->visibility
;
1499 sa
->visibility
= vis
;
1501 sa
->dllexport
|= sa1
->dllexport
;
1502 sa
->nodecorate
|= sa1
->nodecorate
;
1503 sa
->dllimport
|= sa1
->dllimport
;
1506 /* Merge function attributes. */
1507 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1509 if (fa1
->func_call
&& !fa
->func_call
)
1510 fa
->func_call
= fa1
->func_call
;
1511 if (fa1
->func_type
&& !fa
->func_type
)
1512 fa
->func_type
= fa1
->func_type
;
1513 if (fa1
->func_args
&& !fa
->func_args
)
1514 fa
->func_args
= fa1
->func_args
;
1515 if (fa1
->func_noreturn
)
1516 fa
->func_noreturn
= 1;
1523 /* Merge attributes. */
1524 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1526 merge_symattr(&ad
->a
, &ad1
->a
);
1527 merge_funcattr(&ad
->f
, &ad1
->f
);
1530 ad
->section
= ad1
->section
;
1531 if (ad1
->alias_target
)
1532 ad
->alias_target
= ad1
->alias_target
;
1534 ad
->asm_label
= ad1
->asm_label
;
1536 ad
->attr_mode
= ad1
->attr_mode
;
1539 /* Merge some type attributes. */
1540 static void patch_type(Sym
*sym
, CType
*type
)
1542 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1543 if (!(sym
->type
.t
& VT_EXTERN
))
1544 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1545 sym
->type
.t
&= ~VT_EXTERN
;
1548 if (IS_ASM_SYM(sym
)) {
1549 /* stay static if both are static */
1550 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1551 sym
->type
.ref
= type
->ref
;
1554 if (!is_compatible_types(&sym
->type
, type
)) {
1555 tcc_error("incompatible types for redefinition of '%s'",
1556 get_tok_str(sym
->v
, NULL
));
1558 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1559 int static_proto
= sym
->type
.t
& VT_STATIC
;
1560 /* warn if static follows non-static function declaration */
1561 if ((type
->t
& VT_STATIC
) && !static_proto
1562 /* XXX this test for inline shouldn't be here. Until we
1563 implement gnu-inline mode again it silences a warning for
1564 mingw caused by our workarounds. */
1565 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1566 tcc_warning("static storage ignored for redefinition of '%s'",
1567 get_tok_str(sym
->v
, NULL
));
1569 /* set 'inline' if both agree or if one has static */
1570 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1571 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1572 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1573 static_proto
|= VT_INLINE
;
1576 if (0 == (type
->t
& VT_EXTERN
)) {
1577 struct FuncAttr f
= sym
->type
.ref
->f
;
1578 /* put complete type, use static from prototype */
1579 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1580 sym
->type
.ref
= type
->ref
;
1581 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1583 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1586 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1587 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1588 sym
->type
.ref
= type
->ref
;
1592 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1593 /* set array size if it was omitted in extern declaration */
1594 sym
->type
.ref
->c
= type
->ref
->c
;
1596 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1597 tcc_warning("storage mismatch for redefinition of '%s'",
1598 get_tok_str(sym
->v
, NULL
));
1602 /* Merge some storage attributes. */
1603 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1606 patch_type(sym
, type
);
1608 #ifdef TCC_TARGET_PE
1609 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1610 tcc_error("incompatible dll linkage for redefinition of '%s'",
1611 get_tok_str(sym
->v
, NULL
));
1613 merge_symattr(&sym
->a
, &ad
->a
);
1615 sym
->asm_label
= ad
->asm_label
;
1616 update_storage(sym
);
1619 /* copy sym to other stack */
1620 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1623 s
= sym_malloc(), *s
= *s0
;
1624 s
->prev
= *ps
, *ps
= s
;
1625 if (s
->v
< SYM_FIRST_ANOM
) {
1626 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1627 s
->prev_tok
= *ps
, *ps
= s
;
1632 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1633 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1635 int bt
= s
->type
.t
& VT_BTYPE
;
1636 if (bt
== VT_FUNC
|| bt
== VT_PTR
) {
1637 Sym
**sp
= &s
->type
.ref
;
1638 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1639 Sym
*s2
= sym_copy(s
, ps
);
1640 sp
= &(*sp
= s2
)->next
;
1641 sym_copy_ref(s2
, ps
);
1646 /* define a new external reference to a symbol 'v' */
1647 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1651 /* look for global symbol */
1653 while (s
&& s
->sym_scope
)
1657 /* push forward reference */
1658 s
= global_identifier_push(v
, type
->t
, 0);
1661 s
->asm_label
= ad
->asm_label
;
1662 s
->type
.ref
= type
->ref
;
1663 /* copy type to the global stack */
1665 sym_copy_ref(s
, &global_stack
);
1667 patch_storage(s
, ad
, type
);
1669 /* push variables on local_stack if any */
1670 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1671 s
= sym_copy(s
, &local_stack
);
1675 /* push a reference to global symbol v */
1676 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1678 vpushsym(type
, external_global_sym(v
, type
));
1681 /* save registers up to (vtop - n) stack entry */
1682 ST_FUNC
void save_regs(int n
)
1685 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1689 /* save r to the memory stack, and mark it as being free */
1690 ST_FUNC
void save_reg(int r
)
1692 save_reg_upstack(r
, 0);
1695 /* save r to the memory stack, and mark it as being free,
1696 if seen up to (vtop - n) stack entry */
1697 ST_FUNC
void save_reg_upstack(int r
, int n
)
1699 int l
, size
, align
, bt
;
1702 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1707 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1708 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1709 /* must save value on stack if not already done */
1711 bt
= p
->type
.t
& VT_BTYPE
;
1714 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1717 size
= type_size(&sv
.type
, &align
);
1718 l
= get_temp_local_var(size
,align
);
1719 sv
.r
= VT_LOCAL
| VT_LVAL
;
1721 store(p
->r
& VT_VALMASK
, &sv
);
1722 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1723 /* x86 specific: need to pop fp register ST0 if saved */
1724 if (r
== TREG_ST0
) {
1725 o(0xd8dd); /* fstp %st(0) */
1728 /* special long long case */
1729 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1734 /* mark that stack entry as being saved on the stack */
1735 if (p
->r
& VT_LVAL
) {
1736 /* also clear the bounded flag because the
1737 relocation address of the function was stored in
1739 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1741 p
->r
= VT_LVAL
| VT_LOCAL
;
1749 #ifdef TCC_TARGET_ARM
1750 /* find a register of class 'rc2' with at most one reference on stack.
1751 * If none, call get_reg(rc) */
1752 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1757 for(r
=0;r
<NB_REGS
;r
++) {
1758 if (reg_classes
[r
] & rc2
) {
1761 for(p
= vstack
; p
<= vtop
; p
++) {
1762 if ((p
->r
& VT_VALMASK
) == r
||
1774 /* find a free register of class 'rc'. If none, save one register */
1775 ST_FUNC
int get_reg(int rc
)
1780 /* find a free register */
1781 for(r
=0;r
<NB_REGS
;r
++) {
1782 if (reg_classes
[r
] & rc
) {
1785 for(p
=vstack
;p
<=vtop
;p
++) {
1786 if ((p
->r
& VT_VALMASK
) == r
||
1795 /* no register left : free the first one on the stack (VERY
1796 IMPORTANT to start from the bottom to ensure that we don't
1797 spill registers used in gen_opi()) */
1798 for(p
=vstack
;p
<=vtop
;p
++) {
1799 /* look at second register (if long long) */
1801 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1803 r
= p
->r
& VT_VALMASK
;
1804 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1810 /* Should never comes here */
1814 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1815 static int get_temp_local_var(int size
,int align
){
1817 struct temp_local_variable
*temp_var
;
1824 for(i
=0;i
<nb_temp_local_vars
;i
++){
1825 temp_var
=&arr_temp_local_vars
[i
];
1826 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1829 /*check if temp_var is free*/
1831 for(p
=vstack
;p
<=vtop
;p
++) {
1833 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1834 if(p
->c
.i
==temp_var
->location
){
1841 found_var
=temp_var
->location
;
1847 loc
= (loc
- size
) & -align
;
1848 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1849 temp_var
=&arr_temp_local_vars
[i
];
1850 temp_var
->location
=loc
;
1851 temp_var
->size
=size
;
1852 temp_var
->align
=align
;
1853 nb_temp_local_vars
++;
1860 static void clear_temp_local_var_list(){
1861 nb_temp_local_vars
=0;
1864 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1866 static void move_reg(int r
, int s
, int t
)
1880 /* get address of vtop (vtop MUST BE an lvalue) */
1881 ST_FUNC
void gaddrof(void)
1883 vtop
->r
&= ~VT_LVAL
;
1884 /* tricky: if saved lvalue, then we can go back to lvalue */
1885 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1886 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1889 #ifdef CONFIG_TCC_BCHECK
1890 /* generate lvalue bound code */
1891 static void gbound(void)
1895 vtop
->r
&= ~VT_MUSTBOUND
;
1896 /* if lvalue, then use checking code before dereferencing */
1897 if (vtop
->r
& VT_LVAL
) {
1898 /* if not VT_BOUNDED value, then make one */
1899 if (!(vtop
->r
& VT_BOUNDED
)) {
1900 /* must save type because we must set it to int to get pointer */
1902 vtop
->type
.t
= VT_PTR
;
1905 gen_bounded_ptr_add();
1909 /* then check for dereferencing */
1910 gen_bounded_ptr_deref();
1914 /* we need to call __bound_ptr_add before we start to load function
1915 args into registers */
1916 ST_FUNC
void gbound_args(int nb_args
)
1919 for (i
= 1; i
<= nb_args
; ++i
)
1920 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1927 /* Add bounds for local symbols from S to E (via ->prev) */
1928 static void add_local_bounds(Sym
*s
, Sym
*e
)
1930 for (; s
!= e
; s
= s
->prev
) {
1931 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1933 /* Add arrays/structs/unions because we always take address */
1934 if ((s
->type
.t
& VT_ARRAY
)
1935 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1936 || s
->a
.addrtaken
) {
1937 /* add local bound info */
1938 int align
, size
= type_size(&s
->type
, &align
);
1939 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1940 2 * sizeof(addr_t
));
1941 bounds_ptr
[0] = s
->c
;
1942 bounds_ptr
[1] = size
;
1948 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1949 static void pop_local_syms(Sym
**ptop
, Sym
*b
, int keep
, int ellipsis
)
1951 #ifdef CONFIG_TCC_BCHECK
1952 if (tcc_state
->do_bounds_check
&& !ellipsis
&& !keep
)
1953 add_local_bounds(*ptop
, b
);
1955 if (tcc_state
->do_debug
)
1956 tcc_add_debug_info (tcc_state
, !local_scope
, *ptop
, b
);
1957 sym_pop(ptop
, b
, keep
);
1960 static void incr_bf_adr(int o
)
1962 vtop
->type
= char_pointer_type
;
1966 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1970 /* single-byte load mode for packed or otherwise unaligned bitfields */
1971 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1974 save_reg_upstack(vtop
->r
, 1);
1975 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1976 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1985 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1987 vpushi((1 << n
) - 1), gen_op('&');
1990 vpushi(bits
), gen_op(TOK_SHL
);
1993 bits
+= n
, bit_size
-= n
, o
= 1;
1996 if (!(type
->t
& VT_UNSIGNED
)) {
1997 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
1998 vpushi(n
), gen_op(TOK_SHL
);
1999 vpushi(n
), gen_op(TOK_SAR
);
2003 /* single-byte store mode for packed or otherwise unaligned bitfields */
2004 static void store_packed_bf(int bit_pos
, int bit_size
)
2006 int bits
, n
, o
, m
, c
;
2008 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2010 save_reg_upstack(vtop
->r
, 1);
2011 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2013 incr_bf_adr(o
); // X B
2015 c
? vdup() : gv_dup(); // B V X
2018 vpushi(bits
), gen_op(TOK_SHR
);
2020 vpushi(bit_pos
), gen_op(TOK_SHL
);
2025 m
= ((1 << n
) - 1) << bit_pos
;
2026 vpushi(m
), gen_op('&'); // X B V1
2027 vpushv(vtop
-1); // X B V1 B
2028 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
2029 gen_op('&'); // X B V1 B1
2030 gen_op('|'); // X B V2
2032 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
2033 vstore(), vpop(); // X B
2034 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
2039 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
2042 if (0 == sv
->type
.ref
)
2044 t
= sv
->type
.ref
->auxtype
;
2045 if (t
!= -1 && t
!= VT_STRUCT
) {
2046 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
2052 /* store vtop a register belonging to class 'rc'. lvalues are
2053 converted to values. Cannot be used if cannot be converted to
2054 register value (such as structures). */
2055 ST_FUNC
int gv(int rc
)
2057 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
2058 int bit_pos
, bit_size
, size
, align
;
2060 /* NOTE: get_reg can modify vstack[] */
2061 if (vtop
->type
.t
& VT_BITFIELD
) {
2064 bit_pos
= BIT_POS(vtop
->type
.t
);
2065 bit_size
= BIT_SIZE(vtop
->type
.t
);
2066 /* remove bit field info to avoid loops */
2067 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
2070 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
2071 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
2072 type
.t
|= VT_UNSIGNED
;
2074 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
2076 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2081 if (r
== VT_STRUCT
) {
2082 load_packed_bf(&type
, bit_pos
, bit_size
);
2084 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
2085 /* cast to int to propagate signedness in following ops */
2087 /* generate shifts */
2088 vpushi(bits
- (bit_pos
+ bit_size
));
2090 vpushi(bits
- bit_size
);
2091 /* NOTE: transformed to SHR if unsigned */
2096 if (is_float(vtop
->type
.t
) &&
2097 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2098 unsigned long offset
;
2099 /* CPUs usually cannot use float constants, so we store them
2100 generically in data segment */
2101 size
= type_size(&vtop
->type
, &align
);
2103 size
= 0, align
= 1;
2104 offset
= section_add(data_section
, size
, align
);
2105 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
2107 init_putv(&vtop
->type
, data_section
, offset
);
2110 #ifdef CONFIG_TCC_BCHECK
2111 if (vtop
->r
& VT_MUSTBOUND
)
2115 bt
= vtop
->type
.t
& VT_BTYPE
;
2117 #ifdef TCC_TARGET_RISCV64
2119 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
2122 rc2
= RC2_TYPE(bt
, rc
);
2124 /* need to reload if:
2126 - lvalue (need to dereference pointer)
2127 - already a register, but not in the right class */
2128 r
= vtop
->r
& VT_VALMASK
;
2129 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
2130 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
2132 if (!r_ok
|| !r2_ok
) {
2136 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
2137 int original_type
= vtop
->type
.t
;
2139 /* two register type load :
2140 expand to two words temporarily */
2141 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2143 unsigned long long ll
= vtop
->c
.i
;
2144 vtop
->c
.i
= ll
; /* first word */
2146 vtop
->r
= r
; /* save register value */
2147 vpushi(ll
>> 32); /* second word */
2148 } else if (vtop
->r
& VT_LVAL
) {
2149 /* We do not want to modifier the long long pointer here.
2150 So we save any other instances down the stack */
2151 save_reg_upstack(vtop
->r
, 1);
2152 /* load from memory */
2153 vtop
->type
.t
= load_type
;
2156 vtop
[-1].r
= r
; /* save register value */
2157 /* increment pointer to get second word */
2158 vtop
->type
.t
= VT_PTRDIFF_T
;
2163 vtop
->type
.t
= load_type
;
2165 /* move registers */
2168 if (r2_ok
&& vtop
->r2
< VT_CONST
)
2171 vtop
[-1].r
= r
; /* save register value */
2172 vtop
->r
= vtop
[-1].r2
;
2174 /* Allocate second register. Here we rely on the fact that
2175 get_reg() tries first to free r2 of an SValue. */
2179 /* write second register */
2182 vtop
->type
.t
= original_type
;
2184 if (vtop
->r
== VT_CMP
)
2186 /* one register type load */
2191 #ifdef TCC_TARGET_C67
2192 /* uses register pairs for doubles */
2193 if (bt
== VT_DOUBLE
)
2200 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2201 ST_FUNC
void gv2(int rc1
, int rc2
)
2203 /* generate more generic register first. But VT_JMP or VT_CMP
2204 values must be generated first in all cases to avoid possible
2206 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
2211 /* test if reload is needed for first register */
2212 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
2222 /* test if reload is needed for first register */
2223 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
2230 /* expand 64bit on stack in two ints */
2231 ST_FUNC
void lexpand(void)
2234 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
2235 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
2236 if (v
== VT_CONST
) {
2239 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
2245 vtop
[0].r
= vtop
[-1].r2
;
2246 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2248 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2253 /* build a long long from two ints */
2254 static void lbuild(int t
)
2256 gv2(RC_INT
, RC_INT
);
2257 vtop
[-1].r2
= vtop
[0].r
;
2258 vtop
[-1].type
.t
= t
;
2263 /* convert stack entry to register and duplicate its value in another
2265 static void gv_dup(void)
2271 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2272 if (t
& VT_BITFIELD
) {
2282 /* stack: H L L1 H1 */
2292 /* duplicate value */
2302 /* generate CPU independent (unsigned) long long operations */
2303 static void gen_opl(int op
)
2305 int t
, a
, b
, op1
, c
, i
;
2307 unsigned short reg_iret
= REG_IRET
;
2308 unsigned short reg_lret
= REG_IRE2
;
2314 func
= TOK___divdi3
;
2317 func
= TOK___udivdi3
;
2320 func
= TOK___moddi3
;
2323 func
= TOK___umoddi3
;
2330 /* call generic long long function */
2331 vpush_global_sym(&func_old_type
, func
);
2336 vtop
->r2
= reg_lret
;
2344 //pv("gen_opl A",0,2);
2350 /* stack: L1 H1 L2 H2 */
2355 vtop
[-2] = vtop
[-3];
2358 /* stack: H1 H2 L1 L2 */
2359 //pv("gen_opl B",0,4);
2365 /* stack: H1 H2 L1 L2 ML MH */
2368 /* stack: ML MH H1 H2 L1 L2 */
2372 /* stack: ML MH H1 L2 H2 L1 */
2377 /* stack: ML MH M1 M2 */
2380 } else if (op
== '+' || op
== '-') {
2381 /* XXX: add non carry method too (for MIPS or alpha) */
2387 /* stack: H1 H2 (L1 op L2) */
2390 gen_op(op1
+ 1); /* TOK_xxxC2 */
2393 /* stack: H1 H2 (L1 op L2) */
2396 /* stack: (L1 op L2) H1 H2 */
2398 /* stack: (L1 op L2) (H1 op H2) */
2406 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2407 t
= vtop
[-1].type
.t
;
2411 /* stack: L H shift */
2413 /* constant: simpler */
2414 /* NOTE: all comments are for SHL. the other cases are
2415 done by swapping words */
2426 if (op
!= TOK_SAR
) {
2459 /* XXX: should provide a faster fallback on x86 ? */
2462 func
= TOK___ashrdi3
;
2465 func
= TOK___lshrdi3
;
2468 func
= TOK___ashldi3
;
2474 /* compare operations */
2480 /* stack: L1 H1 L2 H2 */
2482 vtop
[-1] = vtop
[-2];
2484 /* stack: L1 L2 H1 H2 */
2488 /* when values are equal, we need to compare low words. since
2489 the jump is inverted, we invert the test too. */
2492 else if (op1
== TOK_GT
)
2494 else if (op1
== TOK_ULT
)
2496 else if (op1
== TOK_UGT
)
2506 /* generate non equal test */
2508 vset_VT_CMP(TOK_NE
);
2512 /* compare low. Always unsigned */
2516 else if (op1
== TOK_LE
)
2518 else if (op1
== TOK_GT
)
2520 else if (op1
== TOK_GE
)
2523 #if 0//def TCC_TARGET_I386
2524 if (op
== TOK_NE
) { gsym(b
); break; }
2525 if (op
== TOK_EQ
) { gsym(a
); break; }
/* Signed 64-bit division performed on the unsigned representation:
   divide the magnitudes, then restore the quotient's sign.  All
   arithmetic is unsigned, so negating an INT64_MIN-patterned value
   wraps instead of invoking signed-overflow UB. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = a >> 63 ? (uint64_t)0 - a : a;
    uint64_t ub = b >> 63 ? (uint64_t)0 - b : b;
    uint64_t q = ua / ub;

    /* quotient is negative iff exactly one operand is negative */
    if ((a ^ b) >> 63)
        q = (uint64_t)0 - q;
    return q;
}
/* Signed 64-bit '<' computed on unsigned operands: XOR-ing the sign
   bit maps the signed ordering onto the unsigned ordering. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;

    return (a ^ sign_bit) < (b ^ sign_bit);
}
2545 /* handle integer constant optimizations and various machine
2547 static void gen_opic(int op
)
2549 SValue
*v1
= vtop
- 1;
2551 int t1
= v1
->type
.t
& VT_BTYPE
;
2552 int t2
= v2
->type
.t
& VT_BTYPE
;
2553 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2554 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2555 uint64_t l1
= c1
? v1
->c
.i
: 0;
2556 uint64_t l2
= c2
? v2
->c
.i
: 0;
2557 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2559 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2560 l1
= ((uint32_t)l1
|
2561 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2562 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2563 l2
= ((uint32_t)l2
|
2564 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2568 case '+': l1
+= l2
; break;
2569 case '-': l1
-= l2
; break;
2570 case '&': l1
&= l2
; break;
2571 case '^': l1
^= l2
; break;
2572 case '|': l1
|= l2
; break;
2573 case '*': l1
*= l2
; break;
2580 /* if division by zero, generate explicit division */
2582 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2583 tcc_error("division by zero in constant");
2587 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2588 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2589 case TOK_UDIV
: l1
= l1
/ l2
; break;
2590 case TOK_UMOD
: l1
= l1
% l2
; break;
2593 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2594 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2596 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2599 case TOK_ULT
: l1
= l1
< l2
; break;
2600 case TOK_UGE
: l1
= l1
>= l2
; break;
2601 case TOK_EQ
: l1
= l1
== l2
; break;
2602 case TOK_NE
: l1
= l1
!= l2
; break;
2603 case TOK_ULE
: l1
= l1
<= l2
; break;
2604 case TOK_UGT
: l1
= l1
> l2
; break;
2605 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2606 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2607 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2608 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2610 case TOK_LAND
: l1
= l1
&& l2
; break;
2611 case TOK_LOR
: l1
= l1
|| l2
; break;
2615 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2616 l1
= ((uint32_t)l1
|
2617 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2621 /* if commutative ops, put c2 as constant */
2622 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2623 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2625 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2626 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2628 if (!const_wanted
&&
2630 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2631 (l1
== -1 && op
== TOK_SAR
))) {
2632 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2634 } else if (!const_wanted
&&
2635 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2637 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2638 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2639 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2644 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2647 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2648 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2651 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2652 /* filter out NOP operations like x*1, x-0, x&-1... */
2654 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2655 /* try to use shifts instead of muls or divs */
2656 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2665 else if (op
== TOK_PDIV
)
2671 } else if (c2
&& (op
== '+' || op
== '-') &&
2672 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2673 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2674 /* symbol + constant case */
2678 /* The backends can't always deal with addends to symbols
2679 larger than +-1<<31. Don't construct such. */
2686 /* call low level op generator */
2687 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2688 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2696 /* generate a floating point operation with constant propagation */
2697 static void gen_opif(int op
)
2701 #if defined _MSC_VER && defined __x86_64__
2702 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2709 /* currently, we cannot do computations with forward symbols */
2710 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2711 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2713 if (v1
->type
.t
== VT_FLOAT
) {
2716 } else if (v1
->type
.t
== VT_DOUBLE
) {
2724 /* NOTE: we only do constant propagation if finite number (not
2725 NaN or infinity) (ANSI spec) */
2726 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2730 case '+': f1
+= f2
; break;
2731 case '-': f1
-= f2
; break;
2732 case '*': f1
*= f2
; break;
2735 /* If not in initializer we need to potentially generate
2736 FP exceptions at runtime, otherwise we want to fold. */
2742 /* XXX: also handles tests ? */
2746 /* XXX: overflow test ? */
2747 if (v1
->type
.t
== VT_FLOAT
) {
2749 } else if (v1
->type
.t
== VT_DOUBLE
) {
2761 /* print a type. If 'varstr' is not NULL, then the variable is also
2762 printed in the type */
2764 /* XXX: add array and function pointers */
2765 static void type_to_str(char *buf
, int buf_size
,
2766 CType
*type
, const char *varstr
)
2778 pstrcat(buf
, buf_size
, "extern ");
2780 pstrcat(buf
, buf_size
, "static ");
2782 pstrcat(buf
, buf_size
, "typedef ");
2784 pstrcat(buf
, buf_size
, "inline ");
2785 if (t
& VT_VOLATILE
)
2786 pstrcat(buf
, buf_size
, "volatile ");
2787 if (t
& VT_CONSTANT
)
2788 pstrcat(buf
, buf_size
, "const ");
2790 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2791 || ((t
& VT_UNSIGNED
)
2792 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2795 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2797 buf_size
-= strlen(buf
);
2833 tstr
= "long double";
2835 pstrcat(buf
, buf_size
, tstr
);
2842 pstrcat(buf
, buf_size
, tstr
);
2843 v
= type
->ref
->v
& ~SYM_STRUCT
;
2844 if (v
>= SYM_FIRST_ANOM
)
2845 pstrcat(buf
, buf_size
, "<anonymous>");
2847 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2852 if (varstr
&& '*' == *varstr
) {
2853 pstrcat(buf1
, sizeof(buf1
), "(");
2854 pstrcat(buf1
, sizeof(buf1
), varstr
);
2855 pstrcat(buf1
, sizeof(buf1
), ")");
2857 pstrcat(buf1
, buf_size
, "(");
2859 while (sa
!= NULL
) {
2861 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2862 pstrcat(buf1
, sizeof(buf1
), buf2
);
2865 pstrcat(buf1
, sizeof(buf1
), ", ");
2867 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2868 pstrcat(buf1
, sizeof(buf1
), ", ...");
2869 pstrcat(buf1
, sizeof(buf1
), ")");
2870 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2875 if (varstr
&& '*' == *varstr
)
2876 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2878 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2879 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2882 pstrcpy(buf1
, sizeof(buf1
), "*");
2883 if (t
& VT_CONSTANT
)
2884 pstrcat(buf1
, buf_size
, "const ");
2885 if (t
& VT_VOLATILE
)
2886 pstrcat(buf1
, buf_size
, "volatile ");
2888 pstrcat(buf1
, sizeof(buf1
), varstr
);
2889 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2893 pstrcat(buf
, buf_size
, " ");
2894 pstrcat(buf
, buf_size
, varstr
);
2899 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2901 char buf1
[256], buf2
[256];
2902 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2903 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2904 tcc_error(fmt
, buf1
, buf2
);
2907 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2909 char buf1
[256], buf2
[256];
2910 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2911 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2912 tcc_warning(fmt
, buf1
, buf2
);
2915 static int pointed_size(CType
*type
)
2918 return type_size(pointed_type(type
), &align
);
2921 static void vla_runtime_pointed_size(CType
*type
)
2924 vla_runtime_type_size(pointed_type(type
), &align
);
2927 static inline int is_null_pointer(SValue
*p
)
2929 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2931 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2932 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2933 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2934 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2935 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2936 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2940 /* compare function types. OLD functions match any new functions */
2941 static int is_compatible_func(CType
*type1
, CType
*type2
)
2947 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2949 if (s1
->f
.func_type
!= s2
->f
.func_type
2950 && s1
->f
.func_type
!= FUNC_OLD
2951 && s2
->f
.func_type
!= FUNC_OLD
)
2953 /* we should check the function return type for FUNC_OLD too
2954 but that causes problems with the internally used support
2955 functions such as TOK_memmove */
2956 if (s1
->f
.func_type
== FUNC_OLD
&& !s1
->next
)
2958 if (s2
->f
.func_type
== FUNC_OLD
&& !s2
->next
)
2961 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2972 /* return true if type1 and type2 are the same. If unqualified is
2973 true, qualifiers on the types are ignored.
2975 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2979 t1
= type1
->t
& VT_TYPE
;
2980 t2
= type2
->t
& VT_TYPE
;
2982 /* strip qualifiers before comparing */
2983 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2984 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2987 /* Default Vs explicit signedness only matters for char */
2988 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2992 /* XXX: bitfields ? */
2997 && !(type1
->ref
->c
< 0
2998 || type2
->ref
->c
< 0
2999 || type1
->ref
->c
== type2
->ref
->c
))
3002 /* test more complicated cases */
3003 bt1
= t1
& VT_BTYPE
;
3004 if (bt1
== VT_PTR
) {
3005 type1
= pointed_type(type1
);
3006 type2
= pointed_type(type2
);
3007 return is_compatible_types(type1
, type2
);
3008 } else if (bt1
== VT_STRUCT
) {
3009 return (type1
->ref
== type2
->ref
);
3010 } else if (bt1
== VT_FUNC
) {
3011 return is_compatible_func(type1
, type2
);
3012 } else if (IS_ENUM(type1
->t
) || IS_ENUM(type2
->t
)) {
3013 return type1
->ref
== type2
->ref
;
/* NOTE(review): this region of the file is a mangled extraction --
   statements are split across physical lines and some interior lines
   (braces, declarations) were elided.  The bytes below are preserved
   exactly; only comments were added.  TODO: restore from a pristine
   copy before building. */
3019 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3020 type is stored in DEST if non-null (except for pointer plus/minus) . */
3021 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
3023 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
3024 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
/* void operands: only valid for the '?:' operator */
3030 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
3031 ret
= op
== '?' ? 1 : 0;
3032 /* NOTE: as an extension, we accept void on only one side */
3034 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3035 if (op
== '+') ; /* Handled in caller */
3036 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3037 /* If one is a null ptr constant the result type is the other. */
3038 else if (is_null_pointer (op2
)) type
= *type1
;
3039 else if (is_null_pointer (op1
)) type
= *type2
;
3040 else if (bt1
!= bt2
) {
3041 /* accept comparison or cond-expr between pointer and integer
3043 if ((op
== '?' || (op
>= TOK_ULT
&& op
<= TOK_LOR
))
3044 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
3045 tcc_warning("pointer/integer mismatch in %s",
3046 op
== '?' ? "conditional expression" : "comparison");
3047 else if (op
!= '-' || !is_integer_btype(bt2
))
3049 type
= *(bt1
== VT_PTR
? type1
: type2
);
/* both operands are pointers of the same base type: check that the
   pointed-to types are compatible and merge qualifiers */
3051 CType
*pt1
= pointed_type(type1
);
3052 CType
*pt2
= pointed_type(type2
);
3053 int pbt1
= pt1
->t
& VT_BTYPE
;
3054 int pbt2
= pt2
->t
& VT_BTYPE
;
3055 int newquals
, copied
= 0;
3056 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
3057 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
3058 if (op
!= '?' && (op
< TOK_ULT
|| op
> TOK_LOR
))
3061 type_incompatibility_warning(type1
, type2
,
3063 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3064 : "pointer type mismatch in comparison('%s' and '%s')");
3067 /* pointers to void get preferred, otherwise the
3068 pointed to types minus qualifs should be compatible */
3069 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
3070 /* combine qualifs */
3071 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
3072 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3075 /* copy the pointer target symbol */
3076 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3079 pointed_type(&type
)->t
|= newquals
;
3081 /* pointers to incomplete arrays get converted to
3082 pointers to completed ones if possible */
3083 if (pt1
->t
& VT_ARRAY
3084 && pt2
->t
& VT_ARRAY
3085 && pointed_type(&type
)->ref
->c
< 0
3086 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
3089 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3091 pointed_type(&type
)->ref
=
3092 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
3093 0, pointed_type(&type
)->ref
->c
);
3094 pointed_type(&type
)->ref
->c
=
3095 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
3099 if (op
>= TOK_ULT
&& op
<= TOK_LOR
)
3101 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
3102 if (op
!= '?' || !compare_types(type1
, type2
, 1))
3105 } else if (is_float(bt1
) || is_float(bt2
)) {
/* usual arithmetic conversions for floating point operands */
3106 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
3107 type
.t
= VT_LDOUBLE
;
3108 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
3113 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
3114 /* cast to biggest op */
3115 type
.t
= VT_LLONG
| VT_LONG
;
3116 if (bt1
== VT_LLONG
)
3118 if (bt2
== VT_LLONG
)
3120 /* convert to unsigned if it does not fit in a long long */
3121 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
3122 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
3123 type
.t
|= VT_UNSIGNED
;
3125 /* integer operations */
3126 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
3127 /* convert to unsigned if it does not fit in an integer */
3128 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
3129 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
3130 type
.t
|= VT_UNSIGNED
;
/* NOTE(review): garbled extraction -- statements split across lines,
   several interior lines elided.  Bytes preserved exactly; comments
   only added.  TODO: restore from a pristine copy before building. */
3137 /* generic gen_op: handles types problems */
3138 ST_FUNC
void gen_op(int op
)
3140 int u
, t1
, t2
, bt1
, bt2
, t
;
3141 CType type1
, combtype
;
3144 t1
= vtop
[-1].type
.t
;
3145 t2
= vtop
[0].type
.t
;
3146 bt1
= t1
& VT_BTYPE
;
3147 bt2
= t2
& VT_BTYPE
;
/* function designators decay to pointers before any binary op */
3149 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
3150 if (bt2
== VT_FUNC
) {
3151 mk_pointer(&vtop
->type
);
3154 if (bt1
== VT_FUNC
) {
3156 mk_pointer(&vtop
->type
);
3161 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
3162 tcc_error_noabort("invalid operand types for binary operation");
3164 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3165 /* at least one operand is a pointer */
3166 /* relational op: must be both pointers */
3167 if (op
>= TOK_ULT
&& op
<= TOK_LOR
)
3169 /* if both pointers, then it must be the '-' op */
3170 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
3172 tcc_error("cannot use pointers here");
3173 if (vtop
[-1].type
.t
& VT_VLA
) {
3174 vla_runtime_pointed_size(&vtop
[-1].type
);
3176 vpushi(pointed_size(&vtop
[-1].type
));
3180 vtop
->type
.t
= VT_PTRDIFF_T
;
3184 /* exactly one pointer : must be '+' or '-'. */
3185 if (op
!= '-' && op
!= '+')
3186 tcc_error("cannot use pointers here");
3187 /* Put pointer as first operand */
3188 if (bt2
== VT_PTR
) {
3190 t
= t1
, t1
= t2
, t2
= t
;
3193 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
3194 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3197 type1
= vtop
[-1].type
;
3198 type1
.t
&= ~VT_ARRAY
;
3199 if (vtop
[-1].type
.t
& VT_VLA
)
3200 vla_runtime_pointed_size(&vtop
[-1].type
);
3202 u
= pointed_size(&vtop
[-1].type
);
3204 tcc_error("unknown array element size");
3208 /* XXX: cast to int ? (long long case) */
3213 #ifdef CONFIG_TCC_BCHECK
3214 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
3215 /* if bounded pointers, we generate a special code to
3222 gen_bounded_ptr_add();
3228 /* put again type if gen_opic() swaped operands */
3232 /* floats can only be used for a few operations */
3233 if (is_float(combtype
.t
)
3234 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3235 && (op
< TOK_ULT
|| op
> TOK_LOR
))
3236 tcc_error("invalid operands for binary operation");
3237 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
3238 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
3239 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
3241 t
|= (VT_LONG
& t1
);
3245 t
= t2
= combtype
.t
;
3246 /* XXX: currently, some unsigned operations are explicit, so
3247 we modify them here */
3248 if (t
& VT_UNSIGNED
) {
3255 else if (op
== TOK_LT
)
3257 else if (op
== TOK_GT
)
3259 else if (op
== TOK_LE
)
3261 else if (op
== TOK_GE
)
3267 /* special case for shifts and long long: we keep the shift as
3269 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
3276 if (op
>= TOK_ULT
&& op
<= TOK_LOR
) {
3277 /* relational op: the result is an int */
3278 vtop
->type
.t
= VT_INT
;
3283 // Make sure that we have converted to an rvalue:
3284 if (vtop
->r
& VT_LVAL
)
3285 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
/* NOTE(review): garbled extraction; the function tail (helper-call
   dispatch, #else branch, closing braces) was elided.  Bytes preserved
   exactly; comments only added. */
3288 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3289 #define gen_cvt_itof1 gen_cvt_itof
3291 /* generic itof for unsigned long long case */
3292 static void gen_cvt_itof1(int t
)
/* unsigned 64-bit to float has no native path on these targets --
   presumably dispatched to libtcc1 __floatundi* helpers below */
3294 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3295 (VT_LLONG
| VT_UNSIGNED
)) {
3298 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
3299 #if LDOUBLE_SIZE != 8
3300 else if (t
== VT_LDOUBLE
)
3301 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
3304 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
/* NOTE(review): garbled extraction; the function tail was elided.
   Bytes preserved exactly; comments only added. */
3315 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3316 #define gen_cvt_ftoi1 gen_cvt_ftoi
3318 /* generic ftoi for unsigned long long case */
3319 static void gen_cvt_ftoi1(int t
)
/* float to unsigned 64-bit -- presumably dispatched to libtcc1
   __fixuns*di helpers based on the source float type */
3322 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3323 /* not handled natively */
3324 st
= vtop
->type
.t
& VT_BTYPE
;
3326 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
3327 #if LDOUBLE_SIZE != 8
3328 else if (st
== VT_LDOUBLE
)
3329 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
3332 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
3343 /* special delayed cast for char/short */
3344 static void force_charshort_cast(void)
3346 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3347 int dbt
= vtop
->type
.t
;
3348 vtop
->r
&= ~VT_MUSTCAST
;
3350 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3354 static void gen_cast_s(int t
)
/* NOTE(review): garbled extraction -- statements split across lines and
   many interior lines elided (labels, register handling).  Bytes
   preserved exactly; comments only added.  TODO: restore from a
   pristine copy before building. */
3362 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3363 static void gen_cast(CType
*type
)
3365 int sbt
, dbt
, sf
, df
, c
;
3366 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3368 /* special delayed cast for char/short */
3369 if (vtop
->r
& VT_MUSTCAST
)
3370 force_charshort_cast();
3372 /* bitfields first get cast to ints */
3373 if (vtop
->type
.t
& VT_BITFIELD
)
3376 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3377 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3385 dbt_bt
= dbt
& VT_BTYPE
;
3386 sbt_bt
= sbt
& VT_BTYPE
;
/* c: true when the value is a plain compile-time constant */
3388 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3389 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3390 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3393 /* constant case: we can do it now */
3394 /* XXX: in ISOC, cannot do it if error in convert */
3395 if (sbt
== VT_FLOAT
)
3396 vtop
->c
.ld
= vtop
->c
.f
;
3397 else if (sbt
== VT_DOUBLE
)
3398 vtop
->c
.ld
= vtop
->c
.d
;
3401 if (sbt_bt
== VT_LLONG
) {
3402 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3403 vtop
->c
.ld
= vtop
->c
.i
;
3405 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3407 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3408 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3410 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3413 if (dbt
== VT_FLOAT
)
3414 vtop
->c
.f
= (float)vtop
->c
.ld
;
3415 else if (dbt
== VT_DOUBLE
)
3416 vtop
->c
.d
= (double)vtop
->c
.ld
;
3417 } else if (sf
&& dbt
== VT_BOOL
) {
3418 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3421 vtop
->c
.i
= vtop
->c
.ld
;
3422 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3424 else if (sbt
& VT_UNSIGNED
)
3425 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3427 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3429 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3431 else if (dbt
== VT_BOOL
)
3432 vtop
->c
.i
= (vtop
->c
.i
!= 0);
/* narrow constants to char/short width with sign-extension */
3434 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3435 dbt_bt
== VT_SHORT
? 0xffff :
3438 if (!(dbt
& VT_UNSIGNED
))
3439 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3444 } else if (dbt
== VT_BOOL
3445 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3446 == (VT_CONST
| VT_SYM
)) {
3447 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3453 /* cannot generate code for global or static initializers */
3454 if (STATIC_DATA_WANTED
)
3457 /* non constant case: generate code */
3458 if (dbt
== VT_BOOL
) {
3459 gen_test_zero(TOK_NE
);
3465 /* convert from fp to fp */
3468 /* convert int to fp */
3471 /* convert fp to int */
3473 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3476 goto again
; /* may need char/short cast */
3481 ds
= btype_size(dbt_bt
);
3482 ss
= btype_size(sbt_bt
);
3483 if (ds
== 0 || ss
== 0) {
3484 if (dbt_bt
== VT_VOID
)
3486 cast_error(&vtop
->type
, type
);
3488 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3489 tcc_error("cast to incomplete type");
3491 /* same size and no sign conversion needed */
3492 if (ds
== ss
&& ds
>= 4)
3494 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3495 tcc_warning("cast between pointer and integer of different size");
3496 if (sbt_bt
== VT_PTR
) {
3497 /* put integer type to allow logical operations below */
3498 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3502 /* processor allows { int a = 0, b = *(char*)&a; }
3503 That means that if we cast to less width, we can just
3504 change the type and read it still later. */
3505 #define ALLOW_SUBTYPE_ACCESS 1
3507 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3508 /* value still in memory */
3514 goto done
; /* no 64bit envolved */
3522 /* generate high word */
3523 if (sbt
& VT_UNSIGNED
) {
3532 } else if (ss
== 8) {
3533 /* from long long: just take low order word */
3541 /* need to convert from 32bit to 64bit */
3542 if (sbt
& VT_UNSIGNED
) {
3543 #if defined(TCC_TARGET_RISCV64)
3544 /* RISC-V keeps 32bit vals in registers sign-extended.
3545 So here we need a zero-extension. */
3554 ss
= ds
, ds
= 4, dbt
= sbt
;
3555 } else if (ss
== 8) {
3556 /* XXX some architectures (e.g. risc-v) would like it
3557 better for this merely being a 32-to-64 sign or zero-
3559 trunc
= 32; /* zero upper 32 bits */
3567 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3573 bits
= (ss
- ds
) * 8;
3574 /* for unsigned, gen_op will convert SAR to SHR */
3575 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3578 vpushi(bits
- trunc
);
3585 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3588 /* return type size as known at compile time. Put alignment at 'a' */
3589 ST_FUNC
int type_size(CType
*type
, int *a
)
3594 bt
= type
->t
& VT_BTYPE
;
3595 if (bt
== VT_STRUCT
) {
3600 } else if (bt
== VT_PTR
) {
3601 if (type
->t
& VT_ARRAY
) {
3605 ts
= type_size(&s
->type
, a
);
3607 if (ts
< 0 && s
->c
< 0)
3615 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3616 return -1; /* incomplete enum */
3617 } else if (bt
== VT_LDOUBLE
) {
3619 return LDOUBLE_SIZE
;
3620 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3621 #ifdef TCC_TARGET_I386
3622 #ifdef TCC_TARGET_PE
3627 #elif defined(TCC_TARGET_ARM)
3637 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3640 } else if (bt
== VT_SHORT
) {
3643 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3647 /* char, void, function, _Bool */
3653 /* push type size as known at runtime time on top of value stack. Put
3655 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
3657 if (type
->t
& VT_VLA
) {
3658 type_size(&type
->ref
->type
, a
);
3659 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3661 vpushi(type_size(type
, a
));
3665 /* return the pointed type of t */
3666 static inline CType
*pointed_type(CType
*type
)
3668 return &type
->ref
->type
;
3671 /* modify type so that its it is a pointer to type. */
3672 ST_FUNC
void mk_pointer(CType
*type
)
3675 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3676 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3680 /* return true if type1 and type2 are exactly the same (including
3683 static int is_compatible_types(CType
*type1
, CType
*type2
)
3685 return compare_types(type1
,type2
,0);
3688 /* return true if type1 and type2 are the same (ignoring qualifiers).
3690 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3692 return compare_types(type1
,type2
,1);
3695 static void cast_error(CType
*st
, CType
*dt
)
3697 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3700 /* verify type compatibility to store vtop in 'dt' type */
3701 static void verify_assign_cast(CType
*dt
)
3703 CType
*st
, *type1
, *type2
;
3704 int dbt
, sbt
, qualwarn
, lvl
;
3706 st
= &vtop
->type
; /* source type */
3707 dbt
= dt
->t
& VT_BTYPE
;
3708 sbt
= st
->t
& VT_BTYPE
;
3709 if (dt
->t
& VT_CONSTANT
)
3710 tcc_warning("assignment of read-only location");
3714 tcc_error("assignment to void expression");
3717 /* special cases for pointers */
3718 /* '0' can also be a pointer */
3719 if (is_null_pointer(vtop
))
3721 /* accept implicit pointer to integer cast with warning */
3722 if (is_integer_btype(sbt
)) {
3723 tcc_warning("assignment makes pointer from integer without a cast");
3726 type1
= pointed_type(dt
);
3728 type2
= pointed_type(st
);
3729 else if (sbt
== VT_FUNC
)
3730 type2
= st
; /* a function is implicitly a function pointer */
3733 if (is_compatible_types(type1
, type2
))
3735 for (qualwarn
= lvl
= 0;; ++lvl
) {
3736 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3737 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3739 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3740 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3741 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3743 type1
= pointed_type(type1
);
3744 type2
= pointed_type(type2
);
3746 if (!is_compatible_unqualified_types(type1
, type2
)) {
3747 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3748 /* void * can match anything */
3749 } else if (dbt
== sbt
3750 && is_integer_btype(sbt
& VT_BTYPE
)
3751 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3752 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3753 /* Like GCC don't warn by default for merely changes
3754 in pointer target signedness. Do warn for different
3755 base types, though, in particular for unsigned enums
3756 and signed int targets. */
3758 tcc_warning("assignment from incompatible pointer type");
3763 tcc_warning("assignment discards qualifiers from pointer target type");
3769 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3770 tcc_warning("assignment makes integer from pointer without a cast");
3771 } else if (sbt
== VT_STRUCT
) {
3772 goto case_VT_STRUCT
;
3774 /* XXX: more tests */
3778 if (!is_compatible_unqualified_types(dt
, st
)) {
3786 static void gen_assign_cast(CType
*dt
)
3788 verify_assign_cast(dt
);
/* NOTE(review): garbled extraction -- statements split across lines,
   several interior lines elided.  Bytes preserved exactly; comments
   only added.  TODO: restore from a pristine copy before building. */
3792 /* store vtop in lvalue pushed on stack */
3793 ST_FUNC
void vstore(void)
3795 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3797 ft
= vtop
[-1].type
.t
;
3798 sbt
= vtop
->type
.t
& VT_BTYPE
;
3799 dbt
= ft
& VT_BTYPE
;
3801 verify_assign_cast(&vtop
[-1].type
);
3803 if (sbt
== VT_STRUCT
) {
3804 /* if structure, only generate pointer */
3805 /* structure assignment : generate memcpy */
3806 /* XXX: optimize if small size */
3807 size
= type_size(&vtop
->type
, &align
);
3811 #ifdef CONFIG_TCC_BCHECK
3812 if (vtop
->r
& VT_MUSTBOUND
)
3813 gbound(); /* check would be wrong after gaddrof() */
3815 vtop
->type
.t
= VT_PTR
;
3818 /* address of memcpy() */
3821 vpush_global_sym(&func_old_type
, TOK_memcpy8
);
3822 else if(!(align
& 3))
3823 vpush_global_sym(&func_old_type
, TOK_memcpy4
);
3826 /* Use memmove, rather than memcpy, as dest and src may be same: */
3827 vpush_global_sym(&func_old_type
, TOK_memmove
);
3832 #ifdef CONFIG_TCC_BCHECK
3833 if (vtop
->r
& VT_MUSTBOUND
)
3836 vtop
->type
.t
= VT_PTR
;
3841 /* leave source on stack */
3843 } else if (ft
& VT_BITFIELD
) {
3844 /* bitfield store handling */
3846 /* save lvalue as expression result (example: s.b = s.a = n;) */
3847 vdup(), vtop
[-1] = vtop
[-2];
3849 bit_pos
= BIT_POS(ft
);
3850 bit_size
= BIT_SIZE(ft
);
3851 /* remove bit field info to avoid loops */
3852 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3854 if (dbt
== VT_BOOL
) {
3855 gen_cast(&vtop
[-1].type
);
3856 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3858 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3859 if (dbt
!= VT_BOOL
) {
3860 gen_cast(&vtop
[-1].type
);
3861 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3863 if (r
== VT_STRUCT
) {
3864 store_packed_bf(bit_pos
, bit_size
);
/* generic read-modify-write of the containing word */
3866 unsigned long long mask
= (1ULL << bit_size
) - 1;
3867 if (dbt
!= VT_BOOL
) {
3869 if (dbt
== VT_LLONG
)
3872 vpushi((unsigned)mask
);
3879 /* duplicate destination */
3882 /* load destination, mask and or with source */
3883 if (dbt
== VT_LLONG
)
3884 vpushll(~(mask
<< bit_pos
));
3886 vpushi(~((unsigned)mask
<< bit_pos
));
3891 /* ... and discard */
3894 } else if (dbt
== VT_VOID
) {
3897 /* optimize char/short casts */
3899 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3900 && is_integer_btype(sbt
)
3902 if ((vtop
->r
& VT_MUSTCAST
)
3903 && btype_size(dbt
) > btype_size(sbt
)
3905 force_charshort_cast();
3908 gen_cast(&vtop
[-1].type
);
3911 #ifdef CONFIG_TCC_BCHECK
3912 /* bound check case */
3913 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3919 gv(RC_TYPE(dbt
)); /* generate value */
3922 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3923 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3924 vtop
->type
.t
= ft
& VT_TYPE
;
3927 /* if lvalue was saved on stack, must read it */
3928 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3930 r
= get_reg(RC_INT
);
3931 sv
.type
.t
= VT_PTRDIFF_T
;
3932 sv
.r
= VT_LOCAL
| VT_LVAL
;
3933 sv
.c
.i
= vtop
[-1].c
.i
;
3935 vtop
[-1].r
= r
| VT_LVAL
;
3938 r
= vtop
->r
& VT_VALMASK
;
3939 /* two word case handling :
3940 store second register at word + 4 (or +8 for x86-64) */
3941 if (USING_TWO_WORDS(dbt
)) {
3942 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3943 vtop
[-1].type
.t
= load_type
;
3946 /* convert to int to increment easily */
3947 vtop
->type
.t
= VT_PTRDIFF_T
;
3953 vtop
[-1].type
.t
= load_type
;
3954 /* XXX: it works because r2 is spilled last ! */
3955 store(vtop
->r2
, vtop
- 1);
3961 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3965 /* post defines POST/PRE add. c is the token ++ or -- */
3966 ST_FUNC
void inc(int post
, int c
)
3969 vdup(); /* save lvalue */
3971 gv_dup(); /* duplicate value */
3976 vpushi(c
- TOK_MID
);
3978 vstore(); /* store value */
3980 vpop(); /* if post op, return saved value */
3983 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3985 /* read the string */
3989 while (tok
== TOK_STR
) {
3990 /* XXX: add \0 handling too ? */
3991 cstr_cat(astr
, tokc
.str
.data
, -1);
3994 cstr_ccat(astr
, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.  */
static int exact_log2p1(int i)
{
    int ret;

    if (!i)
        return 0;
    /* peel off whole bytes first, then binary-search the rest */
    for (ret = 1; i >= 1 << 8; ret += 8)
        i >>= 8;
    if (i >= 1 << 4)
        ret += 4, i >>= 4;
    if (i >= 1 << 2)
        ret += 2, i >>= 2;
    if (i >= 1 << 1)
        ret++;
    return ret;
}
/* NOTE(review): garbled extraction -- the switch skeleton (case labels,
   parenthesis handling) is partly elided.  Bytes preserved exactly;
   comments only added.  TODO: restore from a pristine copy. */
4015 /* Parse __attribute__((...)) GNUC extension. */
4016 static void parse_attribute(AttributeDef
*ad
)
4022 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
4027 while (tok
!= ')') {
4028 if (tok
< TOK_IDENT
)
4029 expect("attribute name");
/* cleanup(fn): record FN as the scope-exit cleanup function */
4041 tcc_warning("implicit declaration of function '%s'",
4042 get_tok_str(tok
, &tokc
));
4043 s
= external_global_sym(tok
, &func_old_type
);
4045 else if ((s
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
4046 ad
->cleanup_func
= s
;
4049 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
4051 ad
->cleanup_func
= s
;
4056 case TOK_CONSTRUCTOR1
:
4057 case TOK_CONSTRUCTOR2
:
4058 ad
->f
.func_ctor
= 1;
4060 case TOK_DESTRUCTOR1
:
4061 case TOK_DESTRUCTOR2
:
4062 ad
->f
.func_dtor
= 1;
4067 parse_mult_str(&astr
, "section name");
4068 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
4075 parse_mult_str(&astr
, "alias(\"target\")");
4076 ad
->alias_target
= /* save string as token, for later */
4077 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
4081 case TOK_VISIBILITY1
:
4082 case TOK_VISIBILITY2
:
4084 parse_mult_str(&astr
,
4085 "visibility(\"default|hidden|internal|protected\")");
4086 if (!strcmp (astr
.data
, "default"))
4087 ad
->a
.visibility
= STV_DEFAULT
;
4088 else if (!strcmp (astr
.data
, "hidden"))
4089 ad
->a
.visibility
= STV_HIDDEN
;
4090 else if (!strcmp (astr
.data
, "internal"))
4091 ad
->a
.visibility
= STV_INTERNAL
;
4092 else if (!strcmp (astr
.data
, "protected"))
4093 ad
->a
.visibility
= STV_PROTECTED
;
4095 expect("visibility(\"default|hidden|internal|protected\")");
/* aligned(n): n must be a power of two */
4104 if (n
<= 0 || (n
& (n
- 1)) != 0)
4105 tcc_error("alignment must be a positive power of two");
4110 ad
->a
.aligned
= exact_log2p1(n
);
4111 if (n
!= 1 << (ad
->a
.aligned
- 1))
4112 tcc_error("alignment of %d is larger than implemented", n
);
4124 /* currently, no need to handle it because tcc does not
4125 track unused objects */
4129 ad
->f
.func_noreturn
= 1;
4134 ad
->f
.func_call
= FUNC_CDECL
;
4139 ad
->f
.func_call
= FUNC_STDCALL
;
4141 #ifdef TCC_TARGET_I386
4151 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
4157 ad
->f
.func_call
= FUNC_FASTCALLW
;
4164 ad
->attr_mode
= VT_LLONG
+ 1;
4167 ad
->attr_mode
= VT_BYTE
+ 1;
4170 ad
->attr_mode
= VT_SHORT
+ 1;
4174 ad
->attr_mode
= VT_INT
+ 1;
4177 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4184 ad
->a
.dllexport
= 1;
4186 case TOK_NODECORATE
:
4187 ad
->a
.nodecorate
= 1;
4190 ad
->a
.dllimport
= 1;
4193 if (tcc_state
->warn_unsupported
)
4194 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
4195 /* skip parameters */
4197 int parenthesis
= 0;
4201 else if (tok
== ')')
4204 } while (parenthesis
&& tok
!= -1);
4217 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
4221 while ((s
= s
->next
) != NULL
) {
4222 if ((s
->v
& SYM_FIELD
) &&
4223 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
4224 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
4225 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
/* NOTE(review): garbled extraction -- statements split across lines and
   many interior lines elided.  Bytes preserved exactly; comments only
   added.  Computes field offsets / bitfield placement for a struct or
   union in either PCC(GCC)-compatible or MS-compatible mode -- TODO:
   restore from a pristine copy before building. */
4237 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4239 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4240 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4241 int pcc
= !tcc_state
->ms_bitfields
;
4242 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4249 prevbt
= VT_STRUCT
; /* make it never match */
4254 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4255 if (f
->type
.t
& VT_BITFIELD
)
4256 bit_size
= BIT_SIZE(f
->type
.t
);
4259 size
= type_size(&f
->type
, &align
);
4260 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4263 if (pcc
&& bit_size
== 0) {
4264 /* in pcc mode, packing does not affect zero-width bitfields */
4267 /* in pcc mode, attribute packed overrides if set. */
4268 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4271 /* pragma pack overrides align if lesser and packs bitfields always */
4274 if (pragma_pack
< align
)
4275 align
= pragma_pack
;
4276 /* in pcc mode pragma pack also overrides individual align */
4277 if (pcc
&& pragma_pack
< a
)
4281 /* some individual align was specified */
4285 if (type
->ref
->type
.t
== VT_UNION
) {
4286 if (pcc
&& bit_size
>= 0)
4287 size
= (bit_size
+ 7) >> 3;
4292 } else if (bit_size
< 0) {
4294 c
+= (bit_pos
+ 7) >> 3;
4295 c
= (c
+ align
- 1) & -align
;
4304 /* A bit-field. Layout is more complicated. There are two
4305 options: PCC (GCC) compatible and MS compatible */
4307 /* In PCC layout a bit-field is placed adjacent to the
4308 preceding bit-fields, except if:
4310 - an individual alignment was given
4311 - it would overflow its base type container and
4312 there is no packing */
4313 if (bit_size
== 0) {
4315 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4317 } else if (f
->a
.aligned
) {
4319 } else if (!packed
) {
4321 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4322 if (ofs
> size
/ align
)
4326 /* in pcc mode, long long bitfields have type int if they fit */
4327 if (size
== 8 && bit_size
<= 32)
4328 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4330 while (bit_pos
>= align
* 8)
4331 c
+= align
, bit_pos
-= align
* 8;
4334 /* In PCC layout named bit-fields influence the alignment
4335 of the containing struct using the base types alignment,
4336 except for packed fields (which here have correct align). */
4337 if (f
->v
& SYM_FIRST_ANOM
4338 // && bit_size // ??? gcc on ARM/rpi does that
4343 bt
= f
->type
.t
& VT_BTYPE
;
4344 if ((bit_pos
+ bit_size
> size
* 8)
4345 || (bit_size
> 0) == (bt
!= prevbt
)
4347 c
= (c
+ align
- 1) & -align
;
4350 /* In MS bitfield mode a bit-field run always uses
4351 at least as many bits as the underlying type.
4352 To start a new run it's also required that this
4353 or the last bit-field had non-zero width. */
4354 if (bit_size
|| prev_bit_size
)
4357 /* In MS layout the records alignment is normally
4358 influenced by the field, except for a zero-width
4359 field at the start of a run (but by further zero-width
4360 fields it is again). */
4361 if (bit_size
== 0 && prevbt
!= bt
)
4364 prev_bit_size
= bit_size
;
4367 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4368 | (bit_pos
<< VT_STRUCT_SHIFT
);
4369 bit_pos
+= bit_size
;
4371 if (align
> maxalign
)
4375 printf("set field %s offset %-2d size %-2d align %-2d",
4376 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4377 if (f
->type
.t
& VT_BITFIELD
) {
4378 printf(" pos %-2d bits %-2d",
4391 c
+= (bit_pos
+ 7) >> 3;
4393 /* store size and alignment */
4394 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4398 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4399 /* can happen if individual align for some member was given. In
4400 this case MSVC ignores maxalign when aligning the size */
4405 c
= (c
+ a
- 1) & -a
;
4409 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4412 /* check whether we can access bitfields by their type */
4413 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4417 if (0 == (f
->type
.t
& VT_BITFIELD
))
4421 bit_size
= BIT_SIZE(f
->type
.t
);
4424 bit_pos
= BIT_POS(f
->type
.t
);
4425 size
= type_size(&f
->type
, &align
);
4426 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
4429 /* try to access the field using a different type */
4430 c0
= -1, s
= align
= 1;
4433 px
= f
->c
* 8 + bit_pos
;
4434 cx
= (px
>> 3) & -align
;
4435 px
= px
- (cx
<< 3);
4438 s
= (px
+ bit_size
+ 7) >> 3;
4448 s
= type_size(&t
, &align
);
4452 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
4453 /* update offset and bit position */
4456 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4457 | (bit_pos
<< VT_STRUCT_SHIFT
);
4461 printf("FIX field %s offset %-2d size %-2d align %-2d "
4462 "pos %-2d bits %-2d\n",
4463 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4464 cx
, s
, align
, px
, bit_size
);
4467 /* fall back to load/store single-byte wise */
4468 f
->auxtype
= VT_STRUCT
;
4470 printf("FIX field %s : load byte-wise\n",
4471 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4477 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4478 static void struct_decl(CType
*type
, int u
)
4480 int v
, c
, size
, align
, flexible
;
4481 int bit_size
, bsize
, bt
;
4483 AttributeDef ad
, ad1
;
4486 memset(&ad
, 0, sizeof ad
);
4488 parse_attribute(&ad
);
4492 /* struct already defined ? return it */
4494 expect("struct/union/enum name");
4496 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4499 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4501 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4506 /* Record the original enum/struct/union token. */
4507 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4509 /* we put an undefined size for struct/union */
4510 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4511 s
->r
= 0; /* default alignment is zero as gcc */
4513 type
->t
= s
->type
.t
;
4519 tcc_error("struct/union/enum already defined");
4521 /* cannot be empty */
4522 /* non empty enums are not allowed */
4525 long long ll
= 0, pl
= 0, nl
= 0;
4528 /* enum symbols have static storage */
4529 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4533 expect("identifier");
4535 if (ss
&& !local_stack
)
4536 tcc_error("redefinition of enumerator '%s'",
4537 get_tok_str(v
, NULL
));
4541 ll
= expr_const64();
4543 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4545 *ps
= ss
, ps
= &ss
->next
;
4554 /* NOTE: we accept a trailing comma */
4559 /* set integral type of the enum */
4562 if (pl
!= (unsigned)pl
)
4563 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4565 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4566 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4567 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4569 /* set type for enum members */
4570 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4572 if (ll
== (int)ll
) /* default is int if it fits */
4574 if (t
.t
& VT_UNSIGNED
) {
4575 ss
->type
.t
|= VT_UNSIGNED
;
4576 if (ll
== (unsigned)ll
)
4579 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4580 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4585 while (tok
!= '}') {
4586 if (!parse_btype(&btype
, &ad1
)) {
4592 tcc_error("flexible array member '%s' not at the end of struct",
4593 get_tok_str(v
, NULL
));
4599 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4601 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4602 expect("identifier");
4604 int v
= btype
.ref
->v
;
4605 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4606 if (tcc_state
->ms_extensions
== 0)
4607 expect("identifier");
4611 if (type_size(&type1
, &align
) < 0) {
4612 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4615 tcc_error("field '%s' has incomplete type",
4616 get_tok_str(v
, NULL
));
4618 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4619 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4620 (type1
.t
& VT_STORAGE
))
4621 tcc_error("invalid type for '%s'",
4622 get_tok_str(v
, NULL
));
4626 bit_size
= expr_const();
4627 /* XXX: handle v = 0 case for messages */
4629 tcc_error("negative width in bit-field '%s'",
4630 get_tok_str(v
, NULL
));
4631 if (v
&& bit_size
== 0)
4632 tcc_error("zero width for bit-field '%s'",
4633 get_tok_str(v
, NULL
));
4634 parse_attribute(&ad1
);
4636 size
= type_size(&type1
, &align
);
4637 if (bit_size
>= 0) {
4638 bt
= type1
.t
& VT_BTYPE
;
4644 tcc_error("bitfields must have scalar type");
4646 if (bit_size
> bsize
) {
4647 tcc_error("width of '%s' exceeds its type",
4648 get_tok_str(v
, NULL
));
4649 } else if (bit_size
== bsize
4650 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4651 /* no need for bit fields */
4653 } else if (bit_size
== 64) {
4654 tcc_error("field width 64 not implemented");
4656 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4658 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4661 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4662 /* Remember we've seen a real field to check
4663 for placement of flexible array member. */
4666 /* If member is a struct or bit-field, enforce
4667 placing into the struct (as anonymous). */
4669 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4674 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4679 if (tok
== ';' || tok
== TOK_EOF
)
4686 parse_attribute(&ad
);
4687 if (ad
.cleanup_func
) {
4688 tcc_warning("attribute '__cleanup__' ignored on type");
4690 struct_layout(type
, &ad
);
4695 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4697 merge_symattr(&ad
->a
, &s
->a
);
4698 merge_funcattr(&ad
->f
, &s
->f
);
4701 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4702 are added to the element type, copied because it could be a typedef. */
4703 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4705 while (type
->t
& VT_ARRAY
) {
4706 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4707 type
= &type
->ref
->type
;
4709 type
->t
|= qualifiers
;
4712 /* return 0 if no type declaration. otherwise, return the basic type
4715 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4717 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4721 memset(ad
, 0, sizeof(AttributeDef
));
4731 /* currently, we really ignore extension */
4741 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4742 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4743 tmbt
: tcc_error("too many basic types");
4746 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4751 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4768 memset(&ad1
, 0, sizeof(AttributeDef
));
4769 if (parse_btype(&type1
, &ad1
)) {
4770 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4772 n
= 1 << (ad1
.a
.aligned
- 1);
4774 type_size(&type1
, &n
);
4777 if (n
<= 0 || (n
& (n
- 1)) != 0)
4778 tcc_error("alignment must be a positive power of two");
4781 ad
->a
.aligned
= exact_log2p1(n
);
4785 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4786 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4787 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4788 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4795 #ifdef TCC_TARGET_ARM64
4797 /* GCC's __uint128_t appears in some Linux header files. Make it a
4798 synonym for long double to get the size and alignment right. */
4809 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4810 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4818 struct_decl(&type1
, VT_ENUM
);
4821 type
->ref
= type1
.ref
;
4824 struct_decl(&type1
, VT_STRUCT
);
4827 struct_decl(&type1
, VT_UNION
);
4830 /* type modifiers */
4835 parse_btype_qualify(type
, VT_CONSTANT
);
4843 parse_btype_qualify(type
, VT_VOLATILE
);
4850 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4851 tcc_error("signed and unsigned modifier");
4864 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4865 tcc_error("signed and unsigned modifier");
4866 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4882 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4883 tcc_error("multiple storage classes");
4895 ad
->f
.func_noreturn
= 1;
4897 /* GNUC attribute */
4898 case TOK_ATTRIBUTE1
:
4899 case TOK_ATTRIBUTE2
:
4900 parse_attribute(ad
);
4901 if (ad
->attr_mode
) {
4902 u
= ad
->attr_mode
-1;
4903 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4911 parse_expr_type(&type1
);
4912 /* remove all storage modifiers except typedef */
4913 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4915 sym_to_attr(ad
, type1
.ref
);
4921 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4925 if (tok
== ':' && !in_generic
) {
4926 /* ignore if it's a label */
4931 t
&= ~(VT_BTYPE
|VT_LONG
);
4932 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4933 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4934 type
->ref
= s
->type
.ref
;
4936 parse_btype_qualify(type
, t
);
4938 /* get attributes from typedef */
4947 if (tcc_state
->char_is_unsigned
) {
4948 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4951 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4952 bt
= t
& (VT_BTYPE
|VT_LONG
);
4954 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4955 #ifdef TCC_TARGET_PE
4956 if (bt
== VT_LDOUBLE
)
4957 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
/* NOTE(review): fragmentary extraction -- the fused original line
   numbers jump (4970 -> 4972), so at least one statement is missing
   here (presumably the VT_ARRAY-to-pointer strip announced by the
   comment below it, and the body of the VT_FUNC branch). Verify
   against the complete source; code tokens are preserved verbatim. */
4963 /* convert a function parameter type (array to pointer and function to
4964 function pointer) */
4965 static inline void convert_parameter_type(CType
*pt
)
4967 /* remove const and volatile qualifiers (XXX: const could be used
4968 to indicate a const function parameter */
4969 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4970 /* array must be transformed to pointer according to ANSI C */
/* function parameters of function type become function pointers --
   branch body lost in extraction */
4972 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
/* Read an asm string operand into 'astr' via parse_mult_str().
   NOTE(review): extraction dropped the lines between 4977 and 4980
   (the function's opening brace and any leading statement); tokens
   below are preserved verbatim. */
4977 ST_FUNC
void parse_asm_str(CString
*astr
)
4980 parse_mult_str(astr
, "string constant");
4983 /* Parse an asm label and return the token */
/* NOTE(review): fragmentary -- lines 4985-4989, 4991-4992 and 4994
   are missing from this extraction (local declarations, token
   handling, cleanup). What remains: the label string is parsed,
   echoed for debugging, and interned as a token via tok_alloc()
   (astr.size - 1 drops the trailing NUL). */
4984 static int asm_label_instr(void)
4990 parse_asm_str(&astr
);
4993 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4995 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
5000 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
5002 int n
, l
, t1
, arg_size
, align
, unused_align
;
5003 Sym
**plast
, *s
, *first
;
5008 /* function type, or recursive declarator (return if so) */
5010 if (td
&& !(td
& TYPE_ABSTRACT
))
5014 else if (parse_btype(&pt
, &ad1
))
5017 merge_attr (ad
, &ad1
);
5026 /* read param name and compute offset */
5027 if (l
!= FUNC_OLD
) {
5028 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
5030 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
5031 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
5032 tcc_error("parameter declared as void");
5036 expect("identifier");
5037 pt
.t
= VT_VOID
; /* invalid type */
5041 convert_parameter_type(&pt
);
5042 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
5043 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
5049 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
5054 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
5055 tcc_error("invalid type");
5058 /* if no parameters, then old type prototype */
5061 /* NOTE: const is ignored in returned type as it has a special
5062 meaning in gcc / C++ */
5063 type
->t
&= ~VT_CONSTANT
;
5064 /* some ancient pre-K&R C allows a function to return an array
5065 and the array brackets to be put after the arguments, such
5066 that "int c()[]" means something like "int[] c()" */
5069 skip(']'); /* only handle simple "[]" */
5072 /* we push a anonymous symbol which will contain the function prototype */
5073 ad
->f
.func_args
= arg_size
;
5074 ad
->f
.func_type
= l
;
5075 s
= sym_push(SYM_FIELD
, type
, 0, 0);
5081 } else if (tok
== '[') {
5082 int saved_nocode_wanted
= nocode_wanted
;
5083 /* array definition */
5086 /* XXX The optional type-quals and static should only be accepted
5087 in parameter decls. The '*' as well, and then even only
5088 in prototypes (not function defs). */
5090 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5105 if (!local_stack
|| (storage
& VT_STATIC
))
5106 vpushi(expr_const());
5108 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5109 length must always be evaluated, even under nocode_wanted,
5110 so that its size slot is initialized (e.g. under sizeof
5115 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5118 tcc_error("invalid array size");
5120 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5121 tcc_error("size of variable length array should be an integer");
5127 /* parse next post type */
5128 post_type(type
, ad
, storage
, 0);
5130 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5131 tcc_error("declaration of an array of functions");
5132 if ((type
->t
& VT_BTYPE
) == VT_VOID
5133 || type_size(type
, &unused_align
) < 0)
5134 tcc_error("declaration of an array of incomplete type elements");
5136 t1
|= type
->t
& VT_VLA
;
5140 tcc_error("need explicit inner array size in VLAs");
5141 loc
-= type_size(&int_type
, &align
);
5145 vla_runtime_type_size(type
, &align
);
5147 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5153 nocode_wanted
= saved_nocode_wanted
;
5155 /* we push an anonymous symbol which will contain the array
5157 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5158 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
/* NOTE(review): fragmentary extraction -- fused line numbers jump
   (e.g. 5178 -> 5181, 5203 -> 5207, 5222 -> 5226), so switch
   headers, braces and several statements are missing. Code tokens
   below are preserved verbatim; comments only were added. */
5164 /* Parse a type declarator (except basic type), and return the type
5165 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5166 expected. 'type' should contain the basic type. 'ad' is the
5167 attribute definition of the basic type. It can be modified by
5168 type_decl(). If this (possibly abstract) declarator is a pointer chain
5169 it returns the innermost pointed to type (equals *type, but is a different
5170 pointer), otherwise returns type itself, that's used for recursive calls. */
5171 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5174 int qualifiers
, storage
;
5176 /* recursive type, remove storage bits first, apply them later again */
5177 storage
= type
->t
& VT_STORAGE
;
5178 type
->t
&= ~VT_STORAGE
;
/* pointer-derivation loop: each '*' may be followed by
   const/volatile qualifiers and attributes */
5181 while (tok
== '*') {
5189 qualifiers
|= VT_CONSTANT
;
5194 qualifiers
|= VT_VOLATILE
;
5200 /* XXX: clarify attribute handling */
5201 case TOK_ATTRIBUTE1
:
5202 case TOK_ATTRIBUTE2
:
5203 parse_attribute(ad
);
5207 type
->t
|= qualifiers
;
5209 /* innermost pointed to type is the one for the first derivation */
5210 ret
= pointed_type(type
);
5214 /* This is possibly a parameter type list for abstract declarators
5215 ('int ()'), use post_type for testing this. */
5216 if (!post_type(type
, ad
, 0, td
)) {
5217 /* It's not, so it's a nested declarator, and the post operations
5218 apply to the innermost pointed to type (if any). */
5219 /* XXX: this is not correct to modify 'ad' at this point, but
5220 the syntax is not clear */
5221 parse_attribute(ad
);
5222 post
= type_decl(type
, ad
, v
, td
);
5226 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5227 /* type identifier */
/* abstract declarators (TYPE_ABSTRACT) may omit the identifier;
   otherwise one is required */
5232 if (!(td
& TYPE_ABSTRACT
))
5233 expect("identifier");
/* apply array/function suffixes to the innermost type, with the
   storage bits restored */
5236 post_type(post
, ad
, storage
, 0);
5237 parse_attribute(ad
);
5242 /* indirection with full error checking and bound check */
/* NOTE(review): fragmentary extraction -- lines 5247-5249, 5251 and
   5256 are missing (the non-pointer error path, the gv/load of an
   lvalue pointer, and the lvalue-flag set). Tokens preserved
   verbatim. */
5243 ST_FUNC
void indir(void)
5245 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5246 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5250 if (vtop
->r
& VT_LVAL
)
/* replace the pointer type on the value stack with its target */
5252 vtop
->type
= *pointed_type(&vtop
->type
);
5253 /* Arrays and functions are never lvalues */
5254 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5255 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5257 /* if bound checking, the referenced pointer must be checked */
5258 #ifdef CONFIG_TCC_BCHECK
5259 if (tcc_state
->do_bounds_check
)
5260 vtop
->r
|= VT_MUSTBOUND
;
5265 /* pass a parameter to a function and do type checking and casting */
/* NOTE(review): fragmentary extraction -- lines 5267-5270, 5280,
   5283, 5286-5287 are missing (local declarations, the bit-field
   cast call, and the typed-prototype else-branch header). Tokens
   preserved verbatim. */
5266 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5271 func_type
= func
->f
.func_type
;
/* old-style prototypes and the '...' part of variadic prototypes
   get default argument promotions only */
5272 if (func_type
== FUNC_OLD
||
5273 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5274 /* default casting : only need to convert float to double */
5275 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5276 gen_cast_s(VT_DOUBLE
);
5277 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5278 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5279 type
.ref
= vtop
->type
.ref
;
5281 } else if (vtop
->r
& VT_MUSTCAST
) {
5282 force_charshort_cast();
5284 } else if (arg
== NULL
) {
5285 tcc_error("too many arguments to function");
5288 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5289 gen_assign_cast(&type
);
5293 /* parse an expression and return its type without any side effect. */
/* NOTE(review): only the signature survived extraction (fused
   numbering jumps from 5294 past the body); the implementation that
   calls 'expr_fn' under suppressed code generation is missing. */
5294 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5303 /* parse an expression of the form '(type)' or '(expr)' and return its
/* NOTE(review): truncated -- lines 5304, 5306-5310, 5313, 5315+
   missing (rest of header comment, locals, the '('/')' skips, the
   else keyword). Visible logic: try a type name first via
   parse_btype/type_decl; otherwise evaluate as an expression with
   expr_type(type, gexpr). Tokens preserved verbatim. */
5305 static void parse_expr_type(CType
*type
)
5311 if (parse_btype(type
, &ad
)) {
5312 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5314 expr_type(type
, gexpr
);
/* Parse a (possibly abstract) type name into 'type'.
   NOTE(review): truncated -- the error path taken when parse_btype()
   fails (between 5324 and 5327) is missing from this extraction.
   Tokens preserved verbatim. */
5319 static void parse_type(CType
*type
)
5324 if (!parse_btype(type
, &ad
)) {
5327 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
/* Parse the parenthesized argument list of a __builtin_* call.
   'args' is a mini format string: 'e' = expression (expr_eq),
   't' = type name (parse_type + vpush). NOTE(review): truncated --
   lines 5331-5336 and 5338-5340 (locals, '(' skip, comma handling,
   other format letters if any) are missing. Tokens preserved
   verbatim; 'nc' presumably controls nocode_wanted -- confirm
   against the complete source. */
5330 static void parse_builtin_params(int nc
, const char *args
)
5337 while ((c
= *args
++)) {
5341 case 'e': expr_eq(); continue;
5342 case 't': parse_type(&t
); vpush(&t
); continue;
5343 default: tcc_error("internal error"); break;
5351 ST_FUNC
void unary(void)
5353 int n
, t
, align
, size
, r
, sizeof_caller
;
5358 /* generate line number info */
5359 if (tcc_state
->do_debug
)
5360 tcc_debug_line(tcc_state
);
5362 sizeof_caller
= in_sizeof
;
5365 /* XXX: GCC 2.95.3 does not generate a table although it should be
5373 #ifdef TCC_TARGET_PE
5374 t
= VT_SHORT
|VT_UNSIGNED
;
5382 vsetc(&type
, VT_CONST
, &tokc
);
5386 t
= VT_INT
| VT_UNSIGNED
;
5392 t
= VT_LLONG
| VT_UNSIGNED
;
5404 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5407 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5409 case TOK___FUNCTION__
:
5411 goto tok_identifier
;
5417 /* special function name identifier */
5418 len
= strlen(funcname
) + 1;
5419 /* generate char[len] type */
5424 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
5425 if (!NODATA_WANTED
) {
5426 ptr
= section_ptr_add(data_section
, len
);
5427 memcpy(ptr
, funcname
, len
);
5433 #ifdef TCC_TARGET_PE
5434 t
= VT_SHORT
| VT_UNSIGNED
;
5440 /* string parsing */
5442 if (tcc_state
->char_is_unsigned
)
5443 t
= VT_BYTE
| VT_UNSIGNED
;
5445 if (tcc_state
->warn_write_strings
)
5450 memset(&ad
, 0, sizeof(AttributeDef
));
5451 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5456 if (parse_btype(&type
, &ad
)) {
5457 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5459 /* check ISOC99 compound literal */
5461 /* data is allocated locally by default */
5466 /* all except arrays are lvalues */
5467 if (!(type
.t
& VT_ARRAY
))
5469 memset(&ad
, 0, sizeof(AttributeDef
));
5470 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5472 if (sizeof_caller
) {
5479 } else if (tok
== '{') {
5480 int saved_nocode_wanted
= nocode_wanted
;
5481 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5482 tcc_error("expected constant");
5483 /* save all registers */
5485 /* statement expression : we do not accept break/continue
5486 inside as GCC does. We do retain the nocode_wanted state,
5487 as statement expressions can't ever be entered from the
5488 outside, so any reactivation of code emission (from labels
5489 or loop heads) can be disabled again after the end of it. */
5491 nocode_wanted
= saved_nocode_wanted
;
5506 /* functions names must be treated as function pointers,
5507 except for unary '&' and sizeof. Since we consider that
5508 functions are not lvalues, we only have to handle it
5509 there and in function calls. */
5510 /* arrays can also be used although they are not lvalues */
5511 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5512 !(vtop
->type
.t
& VT_ARRAY
))
5515 vtop
->sym
->a
.addrtaken
= 1;
5516 mk_pointer(&vtop
->type
);
5522 gen_test_zero(TOK_EQ
);
5533 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5534 tcc_error("pointer not accepted for unary plus");
5535 /* In order to force cast, we add zero, except for floating point
5536 where we really need an noop (otherwise -0.0 will be transformed
5538 if (!is_float(vtop
->type
.t
)) {
5550 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5552 if (vtop
[1].r
& VT_SYM
)
5553 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5554 size
= type_size(&type
, &align
);
5555 if (s
&& s
->a
.aligned
)
5556 align
= 1 << (s
->a
.aligned
- 1);
5557 if (t
== TOK_SIZEOF
) {
5558 if (!(type
.t
& VT_VLA
)) {
5560 tcc_error("sizeof applied to an incomplete type");
5563 vla_runtime_type_size(&type
, &align
);
5568 vtop
->type
.t
|= VT_UNSIGNED
;
5571 case TOK_builtin_expect
:
5572 /* __builtin_expect is a no-op for now */
5573 parse_builtin_params(0, "ee");
5576 case TOK_builtin_types_compatible_p
:
5577 parse_builtin_params(0, "tt");
5578 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5579 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5580 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5584 case TOK_builtin_choose_expr
:
5611 case TOK_builtin_constant_p
:
5612 parse_builtin_params(1, "e");
5613 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5617 case TOK_builtin_frame_address
:
5618 case TOK_builtin_return_address
:
5624 if (tok
!= TOK_CINT
) {
5625 tcc_error("%s only takes positive integers",
5626 tok1
== TOK_builtin_return_address
?
5627 "__builtin_return_address" :
5628 "__builtin_frame_address");
5630 level
= (uint32_t)tokc
.i
;
5635 vset(&type
, VT_LOCAL
, 0); /* local frame */
5637 mk_pointer(&vtop
->type
);
5638 indir(); /* -> parent frame */
5640 if (tok1
== TOK_builtin_return_address
) {
5641 // assume return address is just above frame pointer on stack
5644 mk_pointer(&vtop
->type
);
5649 #ifdef TCC_TARGET_RISCV64
5650 case TOK_builtin_va_start
:
5651 parse_builtin_params(0, "ee");
5652 r
= vtop
->r
& VT_VALMASK
;
5656 tcc_error("__builtin_va_start expects a local variable");
5661 #ifdef TCC_TARGET_X86_64
5662 #ifdef TCC_TARGET_PE
5663 case TOK_builtin_va_start
:
5664 parse_builtin_params(0, "ee");
5665 r
= vtop
->r
& VT_VALMASK
;
5669 tcc_error("__builtin_va_start expects a local variable");
5671 vtop
->type
= char_pointer_type
;
5676 case TOK_builtin_va_arg_types
:
5677 parse_builtin_params(0, "t");
5678 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5685 #ifdef TCC_TARGET_ARM64
5686 case TOK_builtin_va_start
: {
5687 parse_builtin_params(0, "ee");
5691 vtop
->type
.t
= VT_VOID
;
5694 case TOK_builtin_va_arg
: {
5695 parse_builtin_params(0, "et");
5703 case TOK___arm64_clear_cache
: {
5704 parse_builtin_params(0, "ee");
5707 vtop
->type
.t
= VT_VOID
;
5711 /* pre operations */
5722 t
= vtop
->type
.t
& VT_BTYPE
;
5724 /* In IEEE negate(x) isn't subtract(0,x), but rather
5728 vtop
->c
.f
= -1.0 * 0.0;
5729 else if (t
== VT_DOUBLE
)
5730 vtop
->c
.d
= -1.0 * 0.0;
5732 vtop
->c
.ld
= -1.0 * 0.0;
5740 goto tok_identifier
;
5742 /* allow to take the address of a label */
5743 if (tok
< TOK_UIDENT
)
5744 expect("label identifier");
5745 s
= label_find(tok
);
5747 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5749 if (s
->r
== LABEL_DECLARED
)
5750 s
->r
= LABEL_FORWARD
;
5753 s
->type
.t
= VT_VOID
;
5754 mk_pointer(&s
->type
);
5755 s
->type
.t
|= VT_STATIC
;
5757 vpushsym(&s
->type
, s
);
5763 CType controlling_type
;
5764 int has_default
= 0;
5767 TokenString
*str
= NULL
;
5768 int saved_const_wanted
= const_wanted
;
5773 expr_type(&controlling_type
, expr_eq
);
5774 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5775 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5776 mk_pointer(&controlling_type
);
5777 const_wanted
= saved_const_wanted
;
5781 if (tok
== TOK_DEFAULT
) {
5783 tcc_error("too many 'default'");
5789 AttributeDef ad_tmp
;
5794 parse_btype(&cur_type
, &ad_tmp
);
5797 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5798 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5800 tcc_error("type match twice");
5810 skip_or_save_block(&str
);
5812 skip_or_save_block(NULL
);
5819 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5820 tcc_error("type '%s' does not match any association", buf
);
5822 begin_macro(str
, 1);
5831 // special qnan , snan and infinity values
5836 vtop
->type
.t
= VT_FLOAT
;
5841 goto special_math_val
;
5844 goto special_math_val
;
5851 expect("identifier");
5853 if (!s
|| IS_ASM_SYM(s
)) {
5854 const char *name
= get_tok_str(t
, NULL
);
5856 tcc_error("'%s' undeclared", name
);
5857 /* for simple function calls, we tolerate undeclared
5858 external reference to int() function */
5859 if (tcc_state
->warn_implicit_function_declaration
5860 #ifdef TCC_TARGET_PE
5861 /* people must be warned about using undeclared WINAPI functions
5862 (which usually start with uppercase letter) */
5863 || (name
[0] >= 'A' && name
[0] <= 'Z')
5866 tcc_warning("implicit declaration of function '%s'", name
);
5867 s
= external_global_sym(t
, &func_old_type
);
5871 /* A symbol that has a register is a local register variable,
5872 which starts out as VT_LOCAL value. */
5873 if ((r
& VT_VALMASK
) < VT_CONST
)
5874 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5876 vset(&s
->type
, r
, s
->c
);
5877 /* Point to s as backpointer (even without r&VT_SYM).
5878 Will be used by at least the x86 inline asm parser for
5884 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5885 vtop
->c
.i
= s
->enum_val
;
5890 /* post operations */
5892 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5895 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5896 int qualifiers
, cumofs
= 0;
5898 if (tok
== TOK_ARROW
)
5900 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5903 /* expect pointer on structure */
5904 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
) {
5906 type_to_str(got
, sizeof got
, &vtop
->type
, NULL
);
5907 tcc_error("expected struct or union but not '%s'", got
);
5909 if (tok
== TOK_CDOUBLE
)
5910 expect("field name");
5912 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5913 expect("field name");
5914 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5916 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5917 /* add field offset to pointer */
5918 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5919 vpushi(cumofs
+ s
->c
);
5921 /* change type to field type, and set to lvalue */
5922 vtop
->type
= s
->type
;
5923 vtop
->type
.t
|= qualifiers
;
5924 /* an array is never an lvalue */
5925 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5927 #ifdef CONFIG_TCC_BCHECK
5928 /* if bound checking, the referenced pointer must be checked */
5929 if (tcc_state
->do_bounds_check
)
5930 vtop
->r
|= VT_MUSTBOUND
;
5934 } else if (tok
== '[') {
5940 } else if (tok
== '(') {
5943 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5946 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5947 /* pointer test (no array accepted) */
5948 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5949 vtop
->type
= *pointed_type(&vtop
->type
);
5950 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5954 expect("function pointer");
5957 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5959 /* get return type */
5962 sa
= s
->next
; /* first parameter */
5963 nb_args
= regsize
= 0;
5965 /* compute first implicit argument if a structure is returned */
5966 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5967 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5968 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
5969 &ret_align
, ®size
);
5970 if (ret_nregs
<= 0) {
5971 /* get some space for the returned structure */
5972 size
= type_size(&s
->type
, &align
);
5973 #ifdef TCC_TARGET_ARM64
5974 /* On arm64, a small struct is return in registers.
5975 It is much easier to write it to memory if we know
5976 that we are allowed to write some extra bytes, so
5977 round the allocated space up to a power of 2: */
5979 while (size
& (size
- 1))
5980 size
= (size
| (size
- 1)) + 1;
5982 loc
= (loc
- size
) & -align
;
5984 ret
.r
= VT_LOCAL
| VT_LVAL
;
5985 /* pass it as 'int' to avoid structure arg passing
5987 vseti(VT_LOCAL
, loc
);
5999 if (ret_nregs
> 0) {
6000 /* return in register */
6002 PUT_R_RET(&ret
, ret
.type
.t
);
6007 gfunc_param_typed(s
, sa
);
6017 tcc_error("too few arguments to function");
6019 #ifdef CONFIG_TCC_BCHECK
6020 if (tcc_state
->do_bounds_check
&&
6021 (nb_args
== 1 || nb_args
== 2) &&
6022 (vtop
[-nb_args
].r
& VT_SYM
) &&
6023 (vtop
[-nb_args
].sym
->v
== TOK_setjmp
||
6024 vtop
[-nb_args
].sym
->v
== TOK__setjmp
6025 #ifndef TCC_TARGET_PE
6026 || vtop
[-nb_args
].sym
->v
== TOK_sigsetjmp
6027 || vtop
[-nb_args
].sym
->v
== TOK___sigsetjmp
6030 vpush_global_sym(&func_old_type
, TOK___bound_setjmp
);
6031 vpushv(vtop
- nb_args
);
6033 vpushv(vtop
- nb_args
);
6034 gfunc_call(nb_args
);
6037 gfunc_call(nb_args
);
6039 if (ret_nregs
< 0) {
6040 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6041 #ifdef TCC_TARGET_RISCV64
6042 arch_transfer_ret_regs(1);
6046 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6047 vsetc(&ret
.type
, r
, &ret
.c
);
6048 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6051 /* handle packed struct return */
6052 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6055 size
= type_size(&s
->type
, &align
);
6056 /* We're writing whole regs often, make sure there's enough
6057 space. Assume register size is power of 2. */
6058 if (regsize
> align
)
6060 loc
= (loc
- size
) & -align
;
6064 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6068 if (--ret_nregs
== 0)
6072 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6075 /* Promote char/short return values. This is matters only
6076 for calling function that were not compiled by TCC and
6077 only on some architectures. For those where it doesn't
6078 matter we expect things to be already promoted to int,
6080 t
= s
->type
.t
& VT_BTYPE
;
6081 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6083 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6085 vtop
->type
.t
= VT_INT
;
6089 if (s
->f
.func_noreturn
)
6097 #ifndef precedence_parser /* original top-down parser */
/* Multiplicative level of the recursive-descent expression parser.
   NOTE(review): the loop body (next/unary/gen_op per the sibling
   levels' pattern) was dropped by the extraction. */
6099 static void expr_prod(void)
6104 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
/* Additive level ('+'/'-') of the expression parser.
   NOTE(review): loop body missing from this extraction. */
6111 static void expr_sum(void)
6116 while ((t
= tok
) == '+' || t
== '-') {
/* Shift level (TOK_SHL/TOK_SAR) of the expression parser.
   NOTE(review): loop body missing from this extraction. */
6123 static void expr_shift(void)
6128 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
/* Relational level (<, >, <=, >= incl. unsigned variants) of the
   expression parser. NOTE(review): loop body missing from this
   extraction. */
6135 static void expr_cmp(void)
6140 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6141 t
== TOK_ULT
|| t
== TOK_UGE
) {
/* Equality level (==, !=) of the expression parser.
   NOTE(review): loop body missing from this extraction. */
6148 static void expr_cmpeq(void)
6153 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
/* Bitwise-AND level of the expression parser.
   NOTE(review): loop body missing from this extraction. */
6160 static void expr_and(void)
6163 while (tok
== '&') {
/* Bitwise-XOR level of the expression parser.
   NOTE(review): loop body missing from this extraction. */
6170 static void expr_xor(void)
6173 while (tok
== '^') {
/* Bitwise-OR level of the expression parser.
   NOTE(review): loop body missing from this extraction. */
6180 static void expr_or(void)
6183 while (tok
== '|') {
/* forward declaration: shared short-circuit handler for && / || */
6190 static void expr_landor(int op
);
/* Logical-AND level: delegates to expr_landor when TOK_LAND is seen.
   NOTE(review): the expr_or() call and the expr_landor dispatch body
   are missing from this extraction. */
6192 static void expr_land(void)
6195 if (tok
== TOK_LAND
)
/* Logical-OR level (top of the binary-operator ladder).
   NOTE(review): body missing from this extraction. */
6199 static void expr_lor(void)
/* helper macro: recurse into the level below the given && / || op */
6206 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6207 #else /* defined precedence_parser */
/* precedence-climbing variants of the same two entry points */
6208 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6209 # define expr_lor() unary(), expr_infix(1)
/* Map a binary-operator token to its precedence (1 = lowest, || ;
   10 = highest, * / %). NOTE(review): truncated -- the switch
   header, levels 3-5 (| ^ &), the default return and the 'relat'
   goto source are missing from this extraction. Tokens preserved
   verbatim. */
6211 static int precedence(int tok
)
6214 case TOK_LOR
: return 1;
6215 case TOK_LAND
: return 2;
6219 case TOK_EQ
: case TOK_NE
: return 6;
6220 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6221 case TOK_SHL
: case TOK_SAR
: return 8;
6222 case '+': case '-': return 9;
6223 case '*': case '/': case '%': return 10;
/* signed relational tokens funnel to the 'relat' label above */
6225 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
/* byte-indexed precedence cache; consulted by the precedence(i)
   macro defined just below this function */
static unsigned char prec[256];

/* Fill prec[] once by querying the precedence() function for every
   possible single-byte token value, so hot parsing code can use a
   table lookup instead of a function call. */
static void init_prec(void)
{
    int i;
    for (i = 0; i < 256; i++)
        prec[i] = precedence(i);
}
/* from here on, precedence(i) is the cached table lookup */
6237 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6239 static void expr_landor(int op
);
/* Precedence-climbing infix parser: consume operators whose
   precedence is >= p. NOTE(review): truncated -- locals, the
   operand recursion and gen_op calls are missing from this
   extraction. Tokens preserved verbatim. */
6241 static void expr_infix(int p
)
6244 while ((p2
= precedence(t
)) >= p
) {
6245 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6250 if (precedence(tok
) > p2
)
6259 /* Assuming vtop is a value used in a conditional context
6260 (i.e. compared with zero) return 0 if it's false, 1 if
6261 true and -1 if it can't be statically determined. */
/* NOTE(review): truncated -- the function's braces, the constant
   evaluation after gen_cast_s(VT_BOOL), and the -1 fall-through are
   missing from this extraction. Visible logic: only a non-weak
   compile-time constant is folded. Tokens preserved verbatim. */
6262 static int condition_3way(void)
6265 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6266 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6268 gen_cast_s(VT_BOOL
);
/* Shared short-circuit evaluator for && (op == TOK_LAND) and ||.
   'i' is the short-circuit identity value for the operator.
   NOTE(review): heavily truncated -- the loop structure, jump
   emission and result materialization (lines 6278, 6280, 6282,
   6284-6290, 6292+) are missing. Visible: a constant condition
   either keeps folding (cc) or suppresses code for the dead arm
   (nocode_wanted++). Tokens preserved verbatim. */
6275 static void expr_landor(int op
)
6277 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6279 c
= f
? i
: condition_3way();
6281 save_regs(1), cc
= 0;
6283 nocode_wanted
++, f
= 1;
6291 expr_landor_next(op
);
/* Return nonzero when 'sv' is already a boolean-like value: an int
   constant equal to 0 or 1, or a comparison result (VT_CMP).
   NOTE(review): the return for the VT_CMP case and the final
   'return 0' were dropped by the extraction. Tokens preserved
   verbatim. */
6303 static int is_cond_bool(SValue
*sv
)
6305 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6306 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6307 return (unsigned)sv
->c
.i
< 2;
6308 if (sv
->r
== VT_CMP
)
/* Parse and generate code for the conditional operator ?: (with the
   GNU 'a ?: b' extension, see 'g' below).
   NOTE(review): heavily truncated extraction -- the fused original
   numbering jumps repeatedly (6315 -> 6323, 6357 -> 6367, 6415 ->
   6423, ...), dropping the branch/jump emission, register
   allocation and many control-flow lines. Code tokens are preserved
   verbatim; only comments were added. */
6313 static void expr_cond(void)
6315 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6323 c
= condition_3way();
6324 g
= (tok
== ':' && gnu_ext
);
6334 /* needed to avoid having different registers saved in
6341 ncw_prev
= nocode_wanted
;
6347 if (c
< 0 && vtop
->r
== VT_CMP
) {
/* functions used as values decay to function pointers */
6354 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6355 mk_pointer(&vtop
->type
);
6356 sv
= *vtop
; /* save value to handle it later */
6357 vtop
--; /* no vpop so that FP stack is not flushed */
6367 nocode_wanted
= ncw_prev
;
/* both arms boolean-like: special-cased to avoid materializing */
6373 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6374 if (sv
.r
== VT_CMP
) {
6385 nocode_wanted
= ncw_prev
;
6386 // tcc_warning("two conditions expr_cond");
6390 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6391 mk_pointer(&vtop
->type
);
6393 /* cast operands to correct type according to ISOC rules */
6394 if (!combine_types(&type
, &sv
, vtop
, '?'))
6395 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6396 "type mismatch in conditional expression (have '%s' and '%s')");
6397 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6398 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6399 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6401 /* now we convert second operand */
6405 mk_pointer(&vtop
->type
);
6407 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6411 rc
= RC_TYPE(type
.t
);
6412 /* for long longs, we use fixed registers to avoid having
6413 to handle a complicated move */
6414 if (USING_TWO_WORDS(type
.t
))
6415 rc
= RC_RET(type
.t
);
6423 nocode_wanted
= ncw_prev
;
6425 /* this is horrible, but we must also convert first
6431 mk_pointer(&vtop
->type
);
6433 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
/* unify both arms into one register (pointer when lvalue-struct) */
6439 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
/* Parse an assignment-expression: a conditional expression possibly
   followed by '=' or a compound-assignment operator.
   NOTE(review): truncated -- locals, the expr_cond() call before the
   test and the assignment body (vdup/gen_op/vstore per the usual
   pattern) are missing from this extraction. Tokens preserved
   verbatim. */
6449 static void expr_eq(void)
6454 if ((t
= tok
) == '='
6455 || (t
>= TOK_A_MOD
&& t
<= TOK_A_DIV
)
6456 || t
== TOK_A_XOR
|| t
== TOK_A_OR
6457 || t
== TOK_A_SHL
|| t
== TOK_A_SAR
) {
/* Parse a full (comma) expression; only the signature survived this
   extraction -- the body handling ',' sequencing is missing. */
6471 ST_FUNC
void gexpr(void)
6482 /* parse a constant expression and return value in vtop. */
/* NOTE(review): the statements between the nocode_wanted
   adjustments (presumably const_wanted handling and the expr_cond
   call -- confirm against the complete source) were dropped by the
   extraction. The +/- unevalmask+1 pair brackets evaluation so no
   code is emitted. Tokens preserved verbatim. */
6483 static void expr_const1(void)
6486 nocode_wanted
+= unevalmask
+ 1;
6488 nocode_wanted
-= unevalmask
+ 1;
6492 /* parse an integer constant and return its value. */
/* NOTE(review): truncated -- the expr_const1() call, the value
   extraction from vtop->c.i and the vpop/return are missing from
   this extraction. Visible: a non-constant result is rejected.
   Tokens preserved verbatim. */
6493 static inline int64_t expr_const64(void)
6497 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6498 expect("constant expression");
6504 /* parse an integer constant and return its value.
6505 Complain if it doesn't fit 32bit (signed or unsigned). */
/* NOTE(review): truncated -- the 'int c;' declaration, the 'c = wc;'
   narrowing assignment and the 'return c;' are missing from this
   extraction; the visible test compares the narrowed value against
   the 64-bit original in both signednesses. Tokens preserved
   verbatim. */
6506 ST_FUNC
int expr_const(void)
6509 int64_t wc
= expr_const64();
6511 if (c
!= wc
&& (unsigned)c
!= wc
)
6512 tcc_error("constant exceeds 32 bit");
6516 /* ------------------------------------------------------------------------- */
6517 /* return from function */
6519 #ifndef TCC_TARGET_ARM64
6520 static void gfunc_return(CType
*func_type
)
6522 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6523 CType type
, ret_type
;
6524 int ret_align
, ret_nregs
, regsize
;
6525 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6526 &ret_align
, ®size
);
6527 if (ret_nregs
< 0) {
6528 #ifdef TCC_TARGET_RISCV64
6529 arch_transfer_ret_regs(0);
6531 } else if (0 == ret_nregs
) {
6532 /* if returning structure, must copy it to implicit
6533 first pointer arg location */
6536 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6539 /* copy structure value to pointer */
6542 /* returning structure packed into registers */
6543 int size
, addr
, align
, rc
;
6544 size
= type_size(func_type
,&align
);
6545 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6546 (vtop
->c
.i
& (ret_align
-1)))
6547 && (align
& (ret_align
-1))) {
6548 loc
= (loc
- size
) & -ret_align
;
6551 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6555 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6557 vtop
->type
= ret_type
;
6558 rc
= RC_RET(ret_type
.t
);
6566 if (--ret_nregs
== 0)
6568 /* We assume that when a structure is returned in multiple
6569 registers, their classes are consecutive values of the
6572 vtop
->c
.i
+= regsize
;
6577 gv(RC_RET(func_type
->t
));
6579 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6583 static void check_func_return(void)
6585 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6587 if (!strcmp (funcname
, "main")
6588 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6589 /* main returns 0 by default */
6591 gen_assign_cast(&func_vt
);
6592 gfunc_return(&func_vt
);
6594 tcc_warning("function might return no value: '%s'", funcname
);
6598 /* ------------------------------------------------------------------------- */
6601 static int case_cmp(const void *pa
, const void *pb
)
6603 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6604 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6605 return a
< b
? -1 : a
> b
;
/* Generate a test on vtop and branch to known address 'a' when the
   condition 't' is false (inverted test, hence the 0 argument). */
static void gtst_addr(int t, int a)
{
    int jmp = gvtst(0, t);
    gsym_addr(jmp, a);
}
6613 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6617 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6634 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6636 gcase(base
, len
/2, bsym
);
6640 base
+= e
; len
-= e
;
6650 if (p
->v1
== p
->v2
) {
6652 gtst_addr(0, p
->sym
);
6662 gtst_addr(0, p
->sym
);
6666 *bsym
= gjmp(*bsym
);
6669 /* ------------------------------------------------------------------------- */
6670 /* __attribute__((cleanup(fn))) */
6672 static void try_call_scope_cleanup(Sym
*stop
)
6674 Sym
*cls
= cur_scope
->cl
.s
;
6676 for (; cls
!= stop
; cls
= cls
->ncl
) {
6677 Sym
*fs
= cls
->next
;
6678 Sym
*vs
= cls
->prev_tok
;
6680 vpushsym(&fs
->type
, fs
);
6681 vset(&vs
->type
, vs
->r
, vs
->c
);
6683 mk_pointer(&vtop
->type
);
6689 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6694 if (!cur_scope
->cl
.s
)
6697 /* search NCA of both cleanup chains given parents and initial depth */
6698 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6699 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6701 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6703 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6706 try_call_scope_cleanup(cc
);
6709 /* call 'func' for each __attribute__((cleanup(func))) */
6710 static void block_cleanup(struct scope
*o
)
6714 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6715 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6720 try_call_scope_cleanup(o
->cl
.s
);
6721 pcl
->jnext
= gjmp(0);
6723 goto remove_pending
;
6733 try_call_scope_cleanup(o
->cl
.s
);
6736 /* ------------------------------------------------------------------------- */
6739 static void vla_restore(int loc
)
6742 gen_vla_sp_restore(loc
);
6745 static void vla_leave(struct scope
*o
)
6747 if (o
->vla
.num
< cur_scope
->vla
.num
)
6748 vla_restore(o
->vla
.loc
);
6751 /* ------------------------------------------------------------------------- */
6754 void new_scope(struct scope
*o
)
6756 /* copy and link previous scope */
6758 o
->prev
= cur_scope
;
6761 /* record local declaration stack position */
6762 o
->lstk
= local_stack
;
6763 o
->llstk
= local_label_stack
;
6767 if (tcc_state
->do_debug
)
6768 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
6771 void prev_scope(struct scope
*o
, int is_expr
)
6775 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6776 block_cleanup(o
->prev
);
6778 /* pop locally defined labels */
6779 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6781 /* In the is_expr case (a statement expression is finished here),
6782 vtop might refer to symbols on the local_stack. Either via the
6783 type or via vtop->sym. We can't pop those nor any that in turn
6784 might be referred to. To make it easier we don't roll back
6785 any symbols in that case; some upper level call to block() will
6786 do that. We do have to remove such symbols from the lookup
6787 tables, though. sym_pop will do that. */
6789 /* pop locally defined symbols */
6790 pop_local_syms(&local_stack
, o
->lstk
, is_expr
, 0);
6791 cur_scope
= o
->prev
;
6794 if (tcc_state
->do_debug
)
6795 tcc_debug_stabn(N_RBRAC
, ind
- func_ind
);
6798 /* leave a scope via break/continue(/goto) */
6799 void leave_scope(struct scope
*o
)
6803 try_call_scope_cleanup(o
->cl
.s
);
6807 /* ------------------------------------------------------------------------- */
6808 /* call block from 'for do while' loops */
6810 static void lblock(int *bsym
, int *csym
)
6812 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6813 int *b
= co
->bsym
, *c
= co
->csym
;
6827 static void block(int is_expr
)
6829 int a
, b
, c
, d
, e
, t
;
6834 /* default return value is (void) */
6836 vtop
->type
.t
= VT_VOID
;
6848 if (tok
== TOK_ELSE
) {
6853 gsym(d
); /* patch else jmp */
6858 } else if (t
== TOK_WHILE
) {
6870 } else if (t
== '{') {
6873 /* handle local labels declarations */
6874 while (tok
== TOK_LABEL
) {
6877 if (tok
< TOK_UIDENT
)
6878 expect("label identifier");
6879 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6881 } while (tok
== ',');
6885 while (tok
!= '}') {
6894 prev_scope(&o
, is_expr
);
6897 else if (!nocode_wanted
)
6898 check_func_return();
6900 } else if (t
== TOK_RETURN
) {
6901 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6905 gen_assign_cast(&func_vt
);
6907 if (vtop
->type
.t
!= VT_VOID
)
6908 tcc_warning("void function returns a value");
6912 tcc_warning("'return' with no value");
6915 leave_scope(root_scope
);
6917 gfunc_return(&func_vt
);
6919 /* jump unless last stmt in top-level block */
6920 if (tok
!= '}' || local_scope
!= 1)
6924 } else if (t
== TOK_BREAK
) {
6926 if (!cur_scope
->bsym
)
6927 tcc_error("cannot break");
6928 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
6929 leave_scope(cur_switch
->scope
);
6931 leave_scope(loop_scope
);
6932 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6935 } else if (t
== TOK_CONTINUE
) {
6937 if (!cur_scope
->csym
)
6938 tcc_error("cannot continue");
6939 leave_scope(loop_scope
);
6940 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6943 } else if (t
== TOK_FOR
) {
6948 /* c99 for-loop init decl? */
6949 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6950 /* no, regular for-loop init expr */
6978 } else if (t
== TOK_DO
) {
6992 } else if (t
== TOK_SWITCH
) {
6993 struct switch_t
*sw
;
6995 sw
= tcc_mallocz(sizeof *sw
);
6997 sw
->scope
= cur_scope
;
6998 sw
->prev
= cur_switch
;
7004 sw
->sv
= *vtop
--; /* save switch value */
7007 b
= gjmp(0); /* jump to first case */
7009 a
= gjmp(a
); /* add implicit break */
7013 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmp
);
7014 for (b
= 1; b
< sw
->n
; b
++)
7015 if (sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7016 tcc_error("duplicate case value");
7018 /* Our switch table sorting is signed, so the compared
7019 value needs to be as well when it's 64bit. */
7021 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
7022 vtop
->type
.t
&= ~VT_UNSIGNED
;
7024 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7027 gsym_addr(d
, sw
->def_sym
);
7033 dynarray_reset(&sw
->p
, &sw
->n
);
7034 cur_switch
= sw
->prev
;
7037 } else if (t
== TOK_CASE
) {
7038 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7041 cr
->v1
= cr
->v2
= expr_const64();
7042 if (gnu_ext
&& tok
== TOK_DOTS
) {
7044 cr
->v2
= expr_const64();
7045 if (cr
->v2
< cr
->v1
)
7046 tcc_warning("empty case range");
7049 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7052 goto block_after_label
;
7054 } else if (t
== TOK_DEFAULT
) {
7057 if (cur_switch
->def_sym
)
7058 tcc_error("too many 'default'");
7059 cur_switch
->def_sym
= gind();
7062 goto block_after_label
;
7064 } else if (t
== TOK_GOTO
) {
7065 vla_restore(root_scope
->vla
.loc
);
7066 if (tok
== '*' && gnu_ext
) {
7070 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7074 } else if (tok
>= TOK_UIDENT
) {
7075 s
= label_find(tok
);
7076 /* put forward definition if needed */
7078 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7079 else if (s
->r
== LABEL_DECLARED
)
7080 s
->r
= LABEL_FORWARD
;
7082 if (s
->r
& LABEL_FORWARD
) {
7083 /* start new goto chain for cleanups, linked via label->next */
7084 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7085 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7086 pending_gotos
->prev_tok
= s
;
7087 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7088 pending_gotos
->next
= s
;
7090 s
->jnext
= gjmp(s
->jnext
);
7092 try_call_cleanup_goto(s
->cleanupstate
);
7093 gjmp_addr(s
->jnext
);
7098 expect("label identifier");
7102 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7106 if (tok
== ':' && t
>= TOK_UIDENT
) {
7111 if (s
->r
== LABEL_DEFINED
)
7112 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7113 s
->r
= LABEL_DEFINED
;
7115 Sym
*pcl
; /* pending cleanup goto */
7116 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7118 sym_pop(&s
->next
, NULL
, 0);
7122 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7125 s
->cleanupstate
= cur_scope
->cl
.s
;
7128 vla_restore(cur_scope
->vla
.loc
);
7129 /* we accept this, but it is a mistake */
7131 tcc_warning("deprecated use of label at end of compound statement");
7137 /* expression case */
7153 /* This skips over a stream of tokens containing balanced {} and ()
7154 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7155 with a '{'). If STR then allocates and stores the skipped tokens
7156 in *STR. This doesn't check if () and {} are nested correctly,
7157 i.e. "({)}" is accepted. */
7158 static void skip_or_save_block(TokenString
**str
)
7160 int braces
= tok
== '{';
7163 *str
= tok_str_alloc();
7165 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
7167 if (tok
== TOK_EOF
) {
7168 if (str
|| level
> 0)
7169 tcc_error("unexpected end of file");
7174 tok_str_add_tok(*str
);
7177 if (t
== '{' || t
== '(') {
7179 } else if (t
== '}' || t
== ')') {
7181 if (level
== 0 && braces
&& t
== '}')
7186 tok_str_add(*str
, -1);
7187 tok_str_add(*str
, 0);
7191 #define EXPR_CONST 1
7194 static void parse_init_elem(int expr_type
)
7196 int saved_global_expr
;
7199 /* compound literals must be allocated globally in this case */
7200 saved_global_expr
= global_expr
;
7203 global_expr
= saved_global_expr
;
7204 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7205 (compound literals). */
7206 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7207 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7208 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7209 #ifdef TCC_TARGET_PE
7210 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7213 tcc_error("initializer element is not constant");
7221 /* put zeros for variable based init */
7222 static void init_putz(Section
*sec
, unsigned long c
, int size
)
7225 /* nothing to do because globals are already set to zero */
7227 vpush_global_sym(&func_old_type
, TOK_memset
);
7229 #ifdef TCC_TARGET_ARM
7241 #define DIF_SIZE_ONLY 2
7242 #define DIF_HAVE_ELEM 4
7244 /* t is the array or struct type. c is the array or struct
7245 address. cur_field is the pointer to the current
7246 field, for arrays the 'c' member contains the current start
7247 index. 'flags' is as in decl_initializer.
7248 'al' contains the already initialized length of the
7249 current container (starting at c). This returns the new length of that. */
7250 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
7251 Sym
**cur_field
, int flags
, int al
)
7254 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7255 unsigned long corig
= c
;
7260 if (flags
& DIF_HAVE_ELEM
)
7263 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7270 /* NOTE: we only support ranges for last designator */
7271 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7273 if (!(type
->t
& VT_ARRAY
))
7274 expect("array type");
7276 index
= index_last
= expr_const();
7277 if (tok
== TOK_DOTS
&& gnu_ext
) {
7279 index_last
= expr_const();
7283 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
7285 tcc_error("invalid index");
7287 (*cur_field
)->c
= index_last
;
7288 type
= pointed_type(type
);
7289 elem_size
= type_size(type
, &align
);
7290 c
+= index
* elem_size
;
7291 nb_elems
= index_last
- index
+ 1;
7298 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7299 expect("struct/union type");
7301 f
= find_field(type
, l
, &cumofs
);
7314 } else if (!gnu_ext
) {
7319 if (type
->t
& VT_ARRAY
) {
7320 index
= (*cur_field
)->c
;
7321 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
7322 tcc_error("index too large");
7323 type
= pointed_type(type
);
7324 c
+= index
* type_size(type
, &align
);
7327 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7328 *cur_field
= f
= f
->next
;
7330 tcc_error("too many field init");
7335 /* must put zero in holes (note that doing it that way
7336 ensures that it even works with designators) */
7337 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
7338 init_putz(sec
, corig
+ al
, c
- corig
- al
);
7339 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
7341 /* XXX: make it more general */
7342 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7343 unsigned long c_end
;
7348 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7349 for (i
= 1; i
< nb_elems
; i
++) {
7350 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
7355 } else if (!NODATA_WANTED
) {
7356 c_end
= c
+ nb_elems
* elem_size
;
7357 if (c_end
> sec
->data_allocated
)
7358 section_realloc(sec
, c_end
);
7359 src
= sec
->data
+ c
;
7361 for(i
= 1; i
< nb_elems
; i
++) {
7363 memcpy(dst
, src
, elem_size
);
7367 c
+= nb_elems
* type_size(type
, &align
);
7373 /* store a value or an expression directly in global data or in local array */
7374 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
7381 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7385 /* XXX: not portable */
7386 /* XXX: generate error if incorrect relocation */
7387 gen_assign_cast(&dtype
);
7388 bt
= type
->t
& VT_BTYPE
;
7390 if ((vtop
->r
& VT_SYM
)
7393 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7394 || (type
->t
& VT_BITFIELD
))
7395 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7397 tcc_error("initializer element is not computable at load time");
7399 if (NODATA_WANTED
) {
7404 size
= type_size(type
, &align
);
7405 section_reserve(sec
, c
+ size
);
7406 ptr
= sec
->data
+ c
;
7408 /* XXX: make code faster ? */
7409 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7410 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7411 /* XXX This rejects compound literals like
7412 '(void *){ptr}'. The problem is that '&sym' is
7413 represented the same way, which would be ruled out
7414 by the SYM_FIRST_ANOM check above, but also '"string"'
7415 in 'char *p = "string"' is represented the same
7416 with the type being VT_PTR and the symbol being an
7417 anonymous one. That is, there's no difference in vtop
7418 between '(void *){x}' and '&(void *){x}'. Ignore
7419 pointer typed entities here. Hopefully no real code
7420 will every use compound literals with scalar type. */
7421 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7422 /* These come from compound literals, memcpy stuff over. */
7426 esym
= elfsym(vtop
->sym
);
7427 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7428 memmove (ptr
, ssec
->data
+ esym
->st_value
, size
);
7430 /* We need to copy over all memory contents, and that
7431 includes relocations. Use the fact that relocs are
7432 created it order, so look from the end of relocs
7433 until we hit one before the copied region. */
7434 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
7435 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
7436 while (num_relocs
--) {
7438 if (rel
->r_offset
>= esym
->st_value
+ size
)
7440 if (rel
->r_offset
< esym
->st_value
)
7442 /* Note: if the same fields are initialized multiple
7443 times (possible with designators) then we possibly
7444 add multiple relocations for the same offset here.
7445 That would lead to wrong code, the last reloc needs
7446 to win. We clean this up later after the whole
7447 initializer is parsed. */
7448 put_elf_reloca(symtab_section
, sec
,
7449 c
+ rel
->r_offset
- esym
->st_value
,
7450 ELFW(R_TYPE
)(rel
->r_info
),
7451 ELFW(R_SYM
)(rel
->r_info
),
7461 if (type
->t
& VT_BITFIELD
) {
7462 int bit_pos
, bit_size
, bits
, n
;
7463 unsigned char *p
, v
, m
;
7464 bit_pos
= BIT_POS(vtop
->type
.t
);
7465 bit_size
= BIT_SIZE(vtop
->type
.t
);
7466 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7467 bit_pos
&= 7, bits
= 0;
7472 v
= vtop
->c
.i
>> bits
<< bit_pos
;
7473 m
= ((1 << n
) - 1) << bit_pos
;
7474 *p
= (*p
& ~m
) | (v
& m
);
7475 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7479 /* XXX: when cross-compiling we assume that each type has the
7480 same representation on host and target, which is likely to
7481 be wrong in the case of long double */
7483 vtop
->c
.i
= vtop
->c
.i
!= 0;
7485 *(char *)ptr
|= vtop
->c
.i
;
7488 *(short *)ptr
|= vtop
->c
.i
;
7491 *(float*)ptr
= vtop
->c
.f
;
7494 *(double *)ptr
= vtop
->c
.d
;
7497 #if defined TCC_IS_NATIVE_387
7498 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7499 memcpy(ptr
, &vtop
->c
.ld
, 10);
7501 else if (sizeof (long double) == sizeof (double))
7502 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7504 else if (vtop
->c
.ld
== 0.0)
7508 if (sizeof(long double) == LDOUBLE_SIZE
)
7509 *(long double*)ptr
= vtop
->c
.ld
;
7510 else if (sizeof(double) == LDOUBLE_SIZE
)
7511 *(double *)ptr
= (double)vtop
->c
.ld
;
7513 tcc_error("can't cross compile long double constants");
7517 *(long long *)ptr
|= vtop
->c
.i
;
7524 addr_t val
= vtop
->c
.i
;
7526 if (vtop
->r
& VT_SYM
)
7527 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7529 *(addr_t
*)ptr
|= val
;
7531 if (vtop
->r
& VT_SYM
)
7532 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7533 *(addr_t
*)ptr
|= val
;
7539 int val
= vtop
->c
.i
;
7541 if (vtop
->r
& VT_SYM
)
7542 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7546 if (vtop
->r
& VT_SYM
)
7547 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7556 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7563 /* 't' contains the type and storage info. 'c' is the offset of the
7564 object in section 'sec'. If 'sec' is NULL, it means stack based
7565 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7566 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7567 size only evaluation is wanted (only for arrays). */
7568 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
7571 int len
, n
, no_oblock
, nb
, i
;
7577 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7578 /* In case of strings we have special handling for arrays, so
7579 don't consume them as initializer value (which would commit them
7580 to some anonymous symbol). */
7581 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7582 !(flags
& DIF_SIZE_ONLY
)) {
7583 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7584 flags
|= DIF_HAVE_ELEM
;
7587 if ((flags
& DIF_HAVE_ELEM
) &&
7588 !(type
->t
& VT_ARRAY
) &&
7589 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7590 The source type might have VT_CONSTANT set, which is
7591 of course assignable to non-const elements. */
7592 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7593 init_putv(type
, sec
, c
);
7594 } else if (type
->t
& VT_ARRAY
) {
7597 t1
= pointed_type(type
);
7598 size1
= type_size(t1
, &align1
);
7601 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7604 tcc_error("character array initializer must be a literal,"
7605 " optionally enclosed in braces");
7610 /* only parse strings here if correct type (otherwise: handle
7611 them as ((w)char *) expressions */
7612 if ((tok
== TOK_LSTR
&&
7613 #ifdef TCC_TARGET_PE
7614 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7616 (t1
->t
& VT_BTYPE
) == VT_INT
7618 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7620 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7623 /* compute maximum number of chars wanted */
7625 cstr_len
= tokc
.str
.size
;
7627 cstr_len
= tokc
.str
.size
/ sizeof(nwchar_t
);
7630 if (n
>= 0 && nb
> (n
- len
))
7632 if (!(flags
& DIF_SIZE_ONLY
)) {
7634 tcc_warning("initializer-string for array is too long");
7635 /* in order to go faster for common case (char
7636 string in global variable, we handle it
7638 if (sec
&& tok
== TOK_STR
&& size1
== 1) {
7640 memcpy(sec
->data
+ c
+ len
, tokc
.str
.data
, nb
);
7644 ch
= ((unsigned char *)tokc
.str
.data
)[i
];
7646 ch
= ((nwchar_t
*)tokc
.str
.data
)[i
];
7648 init_putv(t1
, sec
, c
+ (len
+ i
) * size1
);
7655 /* only add trailing zero if enough storage (no
7656 warning in this case since it is standard) */
7657 if (n
< 0 || len
< n
) {
7658 if (!(flags
& DIF_SIZE_ONLY
)) {
7660 init_putv(t1
, sec
, c
+ (len
* size1
));
7671 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7672 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7673 flags
&= ~DIF_HAVE_ELEM
;
7674 if (type
->t
& VT_ARRAY
) {
7676 /* special test for multi dimensional arrays (may not
7677 be strictly correct if designators are used at the
7679 if (no_oblock
&& len
>= n
*size1
)
7682 if (s
->type
.t
== VT_UNION
)
7686 if (no_oblock
&& f
== NULL
)
7695 /* put zeros at the end */
7696 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7697 init_putz(sec
, c
+ len
, n
*size1
- len
);
7700 /* patch type size if needed, which happens only for array types */
7702 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7703 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7706 if ((flags
& DIF_FIRST
) || tok
== '{') {
7714 } else if (tok
== '{') {
7715 if (flags
& DIF_HAVE_ELEM
)
7718 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7720 } else if ((flags
& DIF_SIZE_ONLY
)) {
7721 /* If we supported only ISO C we wouldn't have to accept calling
7722 this on anything than an array if DIF_SIZE_ONLY (and even then
7723 only on the outermost level, so no recursion would be needed),
7724 because initializing a flex array member isn't supported.
7725 But GNU C supports it, so we need to recurse even into
7726 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7727 /* just skip expression */
7728 skip_or_save_block(NULL
);
7730 if (!(flags
& DIF_HAVE_ELEM
)) {
7731 /* This should happen only when we haven't parsed
7732 the init element above for fear of committing a
7733 string constant to memory too early. */
7734 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7735 expect("string constant");
7736 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7738 init_putv(type
, sec
, c
);
7742 /* parse an initializer for type 't' if 'has_init' is non zero, and
7743 allocate space in local or global data space ('r' is either
7744 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7745 variable 'v' of scope 'scope' is declared before initializers
7746 are parsed. If 'v' is zero, then a reference to the new object
7747 is put in the value stack. If 'has_init' is 2, a special parsing
7748 is done to handle string constants. */
7749 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7750 int has_init
, int v
, int scope
)
7752 int size
, align
, addr
;
7753 TokenString
*init_str
= NULL
;
7756 Sym
*flexible_array
;
7758 int saved_nocode_wanted
= nocode_wanted
;
7759 #ifdef CONFIG_TCC_BCHECK
7760 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7763 /* Always allocate static or global variables */
7764 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7765 nocode_wanted
|= 0x80000000;
7767 flexible_array
= NULL
;
7768 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7769 Sym
*field
= type
->ref
->next
;
7772 field
= field
->next
;
7773 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7774 flexible_array
= field
;
7778 size
= type_size(type
, &align
);
7779 /* If unknown size, we must evaluate it before
7780 evaluating initializers because
7781 initializers can generate global data too
7782 (e.g. string pointers or ISOC99 compound
7783 literals). It also simplifies local
7784 initializers handling */
7785 if (size
< 0 || (flexible_array
&& has_init
)) {
7787 tcc_error("unknown type size");
7788 /* get all init string */
7789 if (has_init
== 2) {
7790 init_str
= tok_str_alloc();
7791 /* only get strings */
7792 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7793 tok_str_add_tok(init_str
);
7796 tok_str_add(init_str
, -1);
7797 tok_str_add(init_str
, 0);
7799 skip_or_save_block(&init_str
);
7804 begin_macro(init_str
, 1);
7806 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7807 /* prepare second initializer parsing */
7808 macro_ptr
= init_str
->str
;
7811 /* if still unknown size, error */
7812 size
= type_size(type
, &align
);
7814 tcc_error("unknown type size");
7816 /* If there's a flex member and it was used in the initializer
7818 if (flexible_array
&&
7819 flexible_array
->type
.ref
->c
> 0)
7820 size
+= flexible_array
->type
.ref
->c
7821 * pointed_size(&flexible_array
->type
);
7822 /* take into account specified alignment if bigger */
7823 if (ad
->a
.aligned
) {
7824 int speca
= 1 << (ad
->a
.aligned
- 1);
7827 } else if (ad
->a
.packed
) {
7831 if (!v
&& NODATA_WANTED
)
7832 size
= 0, align
= 1;
7834 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7836 #ifdef CONFIG_TCC_BCHECK
7838 /* add padding between stack variables for bound checking */
7842 loc
= (loc
- size
) & -align
;
7844 #ifdef CONFIG_TCC_BCHECK
7846 /* add padding between stack variables for bound checking */
7851 /* local variable */
7852 #ifdef CONFIG_TCC_ASM
7853 if (ad
->asm_label
) {
7854 int reg
= asm_parse_regvar(ad
->asm_label
);
7856 r
= (r
& ~VT_VALMASK
) | reg
;
7859 sym
= sym_push(v
, type
, r
, addr
);
7860 if (ad
->cleanup_func
) {
7861 Sym
*cls
= sym_push2(&all_cleanups
,
7862 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7863 cls
->prev_tok
= sym
;
7864 cls
->next
= ad
->cleanup_func
;
7865 cls
->ncl
= cur_scope
->cl
.s
;
7866 cur_scope
->cl
.s
= cls
;
7871 /* push local reference */
7872 vset(type
, r
, addr
);
7875 if (v
&& scope
== VT_CONST
) {
7876 /* see if the symbol was already defined */
7879 patch_storage(sym
, ad
, type
);
7880 /* we accept several definitions of the same global variable. */
7881 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7886 /* allocate symbol in corresponding section */
7891 else if (tcc_state
->nocommon
)
7896 addr
= section_add(sec
, size
, align
);
7897 #ifdef CONFIG_TCC_BCHECK
7898 /* add padding if bound check */
7900 section_add(sec
, 1, 1);
7903 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7904 sec
= common_section
;
7909 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7910 patch_storage(sym
, ad
, NULL
);
7912 /* update symbol definition */
7913 put_extern_sym(sym
, sec
, addr
, size
);
7915 /* push global reference */
7916 vpush_ref(type
, sec
, addr
, size
);
7921 #ifdef CONFIG_TCC_BCHECK
7922 /* handles bounds now because the symbol must be defined
7923 before for the relocation */
7927 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7928 /* then add global bound info */
7929 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7930 bounds_ptr
[0] = 0; /* relocated */
7931 bounds_ptr
[1] = size
;
7936 if (type
->t
& VT_VLA
) {
7942 /* save current stack pointer */
7943 if (root_scope
->vla
.loc
== 0) {
7944 struct scope
*v
= cur_scope
;
7945 gen_vla_sp_save(loc
-= PTR_SIZE
);
7946 do v
->vla
.loc
= loc
; while ((v
= v
->prev
));
7949 vla_runtime_type_size(type
, &a
);
7950 gen_vla_alloc(type
, a
);
7951 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7952 /* on _WIN64, because of the function args scratch area, the
7953 result of alloca differs from RSP and is returned in RAX. */
7954 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7956 gen_vla_sp_save(addr
);
7957 cur_scope
->vla
.loc
= addr
;
7958 cur_scope
->vla
.num
++;
7959 } else if (has_init
) {
7960 size_t oldreloc_offset
= 0;
7961 if (sec
&& sec
->reloc
)
7962 oldreloc_offset
= sec
->reloc
->data_offset
;
7963 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
7964 if (sec
&& sec
->reloc
)
7965 squeeze_multi_relocs(sec
, oldreloc_offset
);
7966 /* patch flexible array member size back to -1, */
7967 /* for possible subsequent similar declarations */
7969 flexible_array
->type
.ref
->c
= -1;
7973 /* restore parse state if needed */
7979 nocode_wanted
= saved_nocode_wanted
;
7982 /* parse a function defined by symbol 'sym' and generate its code in
7983 'cur_text_section' */
7984 static void gen_function(Sym
*sym
)
7986 /* Initialize VLA state */
7987 struct scope f
= { 0 };
7988 cur_scope
= root_scope
= &f
;
7991 ind
= cur_text_section
->data_offset
;
7992 if (sym
->a
.aligned
) {
7993 size_t newoff
= section_add(cur_text_section
, 0,
7994 1 << (sym
->a
.aligned
- 1));
7995 gen_fill_nops(newoff
- ind
);
7997 /* NOTE: we patch the symbol size later */
7998 put_extern_sym(sym
, cur_text_section
, ind
, 0);
7999 if (sym
->type
.ref
->f
.func_ctor
)
8000 add_array (tcc_state
, ".init_array", sym
->c
);
8001 if (sym
->type
.ref
->f
.func_dtor
)
8002 add_array (tcc_state
, ".fini_array", sym
->c
);
8004 funcname
= get_tok_str(sym
->v
, NULL
);
8006 func_vt
= sym
->type
.ref
->type
;
8007 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8009 /* put debug symbol */
8010 tcc_debug_funcstart(tcc_state
, sym
);
8011 /* push a dummy symbol to enable local sym storage */
8012 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8013 local_scope
= 1; /* for function parameters */
8017 clear_temp_local_var_list();
8021 /* reset local stack */
8022 pop_local_syms(&local_stack
, NULL
, 0, func_var
);
8024 cur_text_section
->data_offset
= ind
;
8026 label_pop(&global_label_stack
, NULL
, 0);
8027 sym_pop(&all_cleanups
, NULL
, 0);
8028 /* patch symbol size */
8029 elfsym(sym
)->st_size
= ind
- func_ind
;
8030 /* end of function */
8031 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8032 /* It's better to crash than to generate wrong code */
8033 cur_text_section
= NULL
;
8034 funcname
= ""; /* for safety */
8035 func_vt
.t
= VT_VOID
; /* for safety */
8036 func_var
= 0; /* for safety */
8037 ind
= 0; /* for safety */
8038 nocode_wanted
= 0x80000000;
8040 /* do this after funcend debug info */
8044 static void gen_inline_functions(TCCState
*s
)
8047 int inline_generated
, i
;
8048 struct InlineFunc
*fn
;
8050 tcc_open_bf(s
, ":inline:", 0);
8051 /* iterate while inline function are referenced */
8053 inline_generated
= 0;
8054 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8055 fn
= s
->inline_fns
[i
];
8057 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8058 /* the function was used or forced (and then not internal):
8059 generate its code and convert it to a normal function */
8061 tcc_debug_putfile(s
, fn
->filename
);
8062 begin_macro(fn
->func_str
, 1);
8064 cur_text_section
= text_section
;
8068 inline_generated
= 1;
8071 } while (inline_generated
);
8075 static void free_inline_functions(TCCState
*s
)
8078 /* free tokens of unused inline functions */
8079 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8080 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8082 tok_str_free(fn
->func_str
);
8084 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
/* NOTE(review): this whole region is extraction-damaged.  The leading
   integers on many lines (8089, 8094, ...) are stray original-file line
   numbers embedded as junk tokens, the token stream is split across
   physical lines, and gaps in that embedded numbering show that whole
   statements were dropped.  The text below is kept byte-identical; only
   comments were added.  Reconcile against upstream tccgen.c before use. */
8087 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8088 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* decl0: parse a sequence of declarations at storage level 'l'.  Drives
   base-type parsing (parse_btype), declarator parsing (type_decl), and
   dispatches between function definitions, typedefs and object
   declarations with optional initializers.  Several locals (btype, type,
   v, r, has_init, sym, buf, error_str, ...) are declared in lines lost
   from this extract. */
8089 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
8094 AttributeDef ad
, adbase
;
/* C11 _Static_assert handling — condition evaluation and string parsing
   partially dropped in this extract */
8097 if (tok
== TOK_STATIC_ASSERT
) {
8107 tcc_error("_Static_assert fail");
8109 goto static_assert_out
;
8113 parse_mult_str(&error_str
, "string constant");
8115 tcc_error("%s", (char *)error_str
.data
);
8116 cstr_free(&error_str
);
/* no base type parsed: handle for-init termination, stray ';',
   global asm blocks and old-style K&R declarations */
8122 if (!parse_btype(&btype
, &adbase
)) {
8123 if (is_for_loop_init
)
8125 /* skip redundant ';' if not in old parameter decl scope */
8126 if (tok
== ';' && l
!= VT_CMP
) {
8132 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8133 /* global asm block */
8137 if (tok
>= TOK_UIDENT
) {
8138 /* special test for old K&R protos without explicit int
8139 type. Only accepted when defining global data */
8143 expect("declaration");
/* diagnose a tag-only struct/union declaration with an anonymous tag:
   it can never be referenced, so it defines nothing */
8148 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8149 int v
= btype
.ref
->v
;
8150 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8151 tcc_warning("unnamed struct/union that defines no instances");
8155 if (IS_ENUM(btype
.t
)) {
8160 while (1) { /* iterate thru each declaration */
8162 /* If the base type itself was an array type of unspecified
8163 size (like in 'typedef int arr[]; arr x = {1};') then
8164 we will overwrite the unknown size by the real one for
8165 this decl. We need to unshare the ref symbol holding
/* ...that size (comment truncated by extraction): clone the array ref
   so the typedef's own ref keeps its unknown size */
8167 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
8168 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
8171 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed type — presumably compiled under a debug
   guard (#if/#ifdef) whose lines were dropped from this extract */
8175 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8176 printf("type = '%s'\n", buf
);
/* function declarator */
8179 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8180 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
8181 tcc_error("function without file scope cannot be static");
8182 /* if old style function prototype, we accept a
/* ...declaration list (comment truncated): recurse with VT_CMP to parse
   the old-style parameter declarations */
8185 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
8186 decl0(VT_CMP
, 0, sym
);
8187 /* always compile 'extern inline' */
8188 if (type
.t
& VT_EXTERN
)
8189 type
.t
&= ~VT_INLINE
;
/* GNU asm("label") renaming after the declarator */
8192 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8193 ad
.asm_label
= asm_label_instr();
8194 /* parse one last attribute list, after asm label */
8195 parse_attribute(&ad
);
8197 /* gcc does not allow __asm__("label") with function definition,
/* ...(comment truncated by extraction) */
8204 #ifdef TCC_TARGET_PE
/* PE targets: dllimport/dllexport attribute validation */
8205 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8206 if (type
.t
& VT_STATIC
)
8207 tcc_error("cannot have dll linkage with static");
8208 if (type
.t
& VT_TYPEDEF
) {
8209 tcc_warning("'%s' attribute ignored for typedef",
8210 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8211 (ad
.a
.dllexport
= 0, "dllexport"));
8212 } else if (ad
.a
.dllimport
) {
8213 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8216 type
.t
|= VT_EXTERN
;
/* function definition proper (nested functions rejected) */
8222 tcc_error("cannot use local functions");
8223 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8224 expect("function definition");
8226 /* reject abstract declarators in function definition
8227 make old style params without decl have int type */
8229 while ((sym
= sym
->next
) != NULL
) {
8230 if (!(sym
->v
& ~SYM_FIELD
))
8231 expect("identifier");
8232 if (sym
->type
.t
== VT_VOID
)
8233 sym
->type
= int_type
;
8236 /* apply post-declaraton attributes */
8237 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8239 /* put function symbol */
8240 type
.t
&= ~VT_EXTERN
;
8241 sym
= external_sym(v
, &type
, 0, &ad
);
8243 /* static inline functions are just recorded as a kind
8244 of macro. Their code will be emitted at the end of
8245 the compilation unit only if they are used */
8246 if (sym
->type
.t
& VT_INLINE
) {
8247 struct InlineFunc
*fn
;
/* InlineFunc ends in a flexible filename buffer; allocate struct +
   strlen(filename) (the +1 for NUL comes from the struct's own field) */
8248 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8249 strcpy(fn
->filename
, file
->filename
);
8251 skip_or_save_block(&fn
->func_str
);
8252 dynarray_add(&tcc_state
->inline_fns
,
8253 &tcc_state
->nb_inline_fns
, fn
);
8255 /* compute text section */
8256 cur_text_section
= ad
.section
;
8257 if (!cur_text_section
)
8258 cur_text_section
= text_section
;
/* old-style (K&R) parameter declaration: match the declared name
   against the function's parameter list */
8264 /* find parameter in function parameter list */
8265 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
8266 if ((sym
->v
& ~SYM_FIELD
) == v
)
8268 tcc_error("declaration for parameter '%s' but no such parameter",
8269 get_tok_str(v
, NULL
));
8271 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8272 tcc_error("storage class specified for '%s'",
8273 get_tok_str(v
, NULL
));
/* a parameter still typed VT_VOID has not been declared yet;
   anything else is a duplicate declaration */
8274 if (sym
->type
.t
!= VT_VOID
)
8275 tcc_error("redefinition of parameter '%s'",
8276 get_tok_str(v
, NULL
));
8277 convert_parameter_type(&type
);
8279 } else if (type
.t
& VT_TYPEDEF
) {
8280 /* save typedefed type */
8281 /* XXX: test storage specifiers ? */
/* same-scope redefinition must be a compatible typedef */
8283 if (sym
&& sym
->sym_scope
== local_scope
) {
8284 if (!is_compatible_types(&sym
->type
, &type
)
8285 || !(sym
->type
.t
& VT_TYPEDEF
))
8286 tcc_error("incompatible redefinition of '%s'",
8287 get_tok_str(v
, NULL
));
8290 sym
= sym_push(v
, &type
, 0, 0);
8294 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8295 && !(type
.t
& VT_EXTERN
)) {
8296 tcc_error("declaration of void object");
8299 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8300 /* external function definition */
8301 /* specific case for func_call attribute */
8303 } else if (!(type
.t
& VT_ARRAY
)) {
8304 /* not lvalue if array */
/* object declaration, possibly with initializer */
8307 has_init
= (tok
== '=');
8308 if (has_init
&& (type
.t
& VT_VLA
))
8309 tcc_error("variable length array cannot be initialized");
8310 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8311 || (type
.t
& VT_BTYPE
) == VT_FUNC
8312 /* as with GCC, uninitialized global arrays with no size
8313 are considered extern: */
8314 || ((type
.t
& VT_ARRAY
) && !has_init
8315 && l
== VT_CONST
&& type
.ref
->c
< 0)
8317 /* external variable or function */
8318 type
.t
|= VT_EXTERN
;
8319 sym
= external_sym(v
, &type
, r
, &ad
);
/* __attribute__((alias("target"))): reuse the target's ELF symbol
   location instead of allocating storage */
8320 if (ad
.alias_target
) {
8323 alias_target
= sym_find(ad
.alias_target
);
8324 esym
= elfsym(alias_target
);
8326 tcc_error("unsupported forward __alias__ attribute");
8327 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
/* storage selection for defined objects — the branches between these
   lines (8331-8335) were dropped from this extract */
8330 if (type
.t
& VT_STATIC
)
8336 else if (l
== VT_CONST
)
8337 /* uninitialized global variables may be overridden */
8338 type
.t
|= VT_EXTERN
;
8339 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
/* a for-loop init clause contains exactly one declaration */
8343 if (is_for_loop_init
)
8355 static void decl(int l
)
8360 /* ------------------------------------------------------------------------- */
8363 /* ------------------------------------------------------------------------- */