2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
49 ST_DATA
char debug_modes
;
52 static SValue _vstack
[1 + VSTACK_SIZE
];
53 #define vstack (_vstack + 1)
55 ST_DATA
int const_wanted
; /* true if constant wanted */
56 ST_DATA
int nocode_wanted
; /* no code generation wanted */
57 #define unevalmask 0xffff /* unevaluated subexpression */
58 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
59 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
61 /* Automagical code suppression ----> */
62 #define CODE_OFF() (nocode_wanted |= 0x20000000)
63 #define CODE_ON() (nocode_wanted &= ~0x20000000)
65 static void tcc_tcov_block_begin(void);
67 /* Clear 'nocode_wanted' at label if it was used */
68 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
69 static int gind(void) { int t
= ind
; CODE_ON(); if (debug_modes
) tcc_tcov_block_begin(); return t
; }
/* Set 'nocode_wanted' after unconditional jumps */
static void gjmp_addr_acs(int t)
{
    /* emit a jump to the known address 't'; everything that would
       follow is unreachable, so suppress code generation */
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit a forward jump chained onto 't', suppress the (unreachable)
   code that follows, and return the updated jump chain. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
75 /* These are #undef'd at the end of this file */
76 #define gjmp_addr gjmp_addr_acs
80 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
81 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
82 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
84 static int last_line_num
, new_file
, func_ind
; /* debug info control */
85 ST_DATA
const char *funcname
;
86 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
87 static CString initstr
;
90 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_INT
93 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T VT_LLONG
96 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
97 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
100 ST_DATA
struct switch_t
{
104 } **p
; int n
; /* list of case ranges */
105 int def_sym
; /* default symbol */
108 struct switch_t
*prev
;
110 } *cur_switch
; /* current switch */
112 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
113 /*list of temporary local variables on the stack in current function. */
114 ST_DATA
struct temp_local_variable
{
115 int location
; //offset on stack. Svalue.c.i
118 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
119 short nb_temp_local_vars
;
121 static struct scope
{
123 struct { int loc
, locorig
, num
; } vla
;
124 struct { Sym
*s
; int n
; } cl
;
127 } *cur_scope
, *loop_scope
, *root_scope
;
135 /********************************************************/
136 /* stab debug support */
138 static const struct {
141 } default_debug
[] = {
142 { VT_INT
, "int:t1=r1;-2147483648;2147483647;" },
143 { VT_BYTE
, "char:t2=r2;0;127;" },
145 { VT_LONG
| VT_INT
, "long int:t3=r3;-2147483648;2147483647;" },
147 { VT_LLONG
| VT_LONG
, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
149 { VT_INT
| VT_UNSIGNED
, "unsigned int:t4=r4;0;037777777777;" },
151 { VT_LONG
| VT_INT
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;037777777777;" },
153 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
154 { VT_LLONG
| VT_LONG
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;01777777777777777777777;" },
156 { VT_QLONG
, "__int128:t6=r6;0;-1;" },
157 { VT_QLONG
| VT_UNSIGNED
, "__int128 unsigned:t7=r7;0;-1;" },
158 { VT_LLONG
, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
159 { VT_LLONG
| VT_UNSIGNED
, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
160 { VT_SHORT
, "short int:t10=r10;-32768;32767;" },
161 { VT_SHORT
| VT_UNSIGNED
, "short unsigned int:t11=r11;0;65535;" },
162 { VT_BYTE
| VT_DEFSIGN
, "signed char:t12=r12;-128;127;" },
163 { VT_BYTE
| VT_DEFSIGN
| VT_UNSIGNED
, "unsigned char:t13=r13;0;255;" },
164 { VT_FLOAT
, "float:t14=r1;4;0;" },
165 { VT_DOUBLE
, "double:t15=r1;8;0;" },
166 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
167 { VT_DOUBLE
| VT_LONG
, "long double:t16=r1;8;0;" },
169 { VT_LDOUBLE
, "long double:t16=r1;16;0;" },
171 { -1, "_Float32:t17=r1;4;0;" },
172 { -1, "_Float64:t18=r1;8;0;" },
173 { -1, "_Float128:t19=r1;16;0;" },
174 { -1, "_Float32x:t20=r1;8;0;" },
175 { -1, "_Float64x:t21=r1;16;0;" },
176 { -1, "_Decimal32:t22=r1;4;0;" },
177 { -1, "_Decimal64:t23=r1;8;0;" },
178 { -1, "_Decimal128:t24=r1;16;0;" },
179 /* if default char is unsigned */
180 { VT_BYTE
| VT_UNSIGNED
, "unsigned char:t25=r25;0;255;" },
182 { VT_BOOL
, "bool:t26=r26;0;255;" },
183 { VT_VOID
, "void:t27=27" },
186 static int debug_next_type
;
188 static struct debug_hash
{
193 static int n_debug_hash
;
195 static struct debug_info
{
206 struct debug_info
*child
, *next
, *last
, *parent
;
207 } *debug_info
, *debug_info_root
;
210 unsigned long offset
;
211 unsigned long last_file_name
;
212 unsigned long last_func_name
;
217 /********************************************************/
219 #define precedence_parser
220 static void init_prec(void);
222 /********************************************************/
223 #ifndef CONFIG_TCC_ASM
224 ST_FUNC
void asm_instr(void)
226 tcc_error("inline asm() not supported");
228 ST_FUNC
void asm_global_instr(void)
230 tcc_error("inline asm() not supported");
234 /* ------------------------------------------------------------------------- */
235 static void gen_cast(CType
*type
);
236 static void gen_cast_s(int t
);
237 static inline CType
*pointed_type(CType
*type
);
238 static int is_compatible_types(CType
*type1
, CType
*type2
);
239 static int parse_btype(CType
*type
, AttributeDef
*ad
);
240 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
241 static void parse_expr_type(CType
*type
);
242 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
243 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
244 static void block(int is_expr
);
245 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
246 static void decl(int l
);
247 static int decl0(int l
, int is_for_loop_init
, Sym
*);
248 static void expr_eq(void);
249 static void vla_runtime_type_size(CType
*type
, int *a
);
250 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
251 static inline int64_t expr_const64(void);
252 static void vpush64(int ty
, unsigned long long v
);
253 static void vpush(CType
*type
);
254 static int gvtst(int inv
, int t
);
255 static void gen_inline_functions(TCCState
*s
);
256 static void free_inline_functions(TCCState
*s
);
257 static void skip_or_save_block(TokenString
**str
);
258 static void gv_dup(void);
259 static int get_temp_local_var(int size
,int align
);
260 static void clear_temp_local_var_list();
261 static void cast_error(CType
*st
, CType
*dt
);
263 ST_INLN
int is_float(int t
)
265 int bt
= t
& VT_BTYPE
;
266 return bt
== VT_LDOUBLE
272 static inline int is_integer_btype(int bt
)
281 static int btype_size(int bt
)
283 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
287 bt
== VT_PTR
? PTR_SIZE
: 0;
290 /* returns function return register from type */
291 static int R_RET(int t
)
295 #ifdef TCC_TARGET_X86_64
296 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
298 #elif defined TCC_TARGET_RISCV64
299 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
305 /* returns 2nd function return register, if any */
306 static int R2_RET(int t
)
312 #elif defined TCC_TARGET_X86_64
317 #elif defined TCC_TARGET_RISCV64
324 /* returns true for two-word types */
325 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
327 /* put function return registers to stack value */
328 static void PUT_R_RET(SValue
*sv
, int t
)
330 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
333 /* returns function return register class for type t */
334 static int RC_RET(int t
)
336 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
339 /* returns generic register class for type t */
340 static int RC_TYPE(int t
)
344 #ifdef TCC_TARGET_X86_64
345 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
347 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
349 #elif defined TCC_TARGET_RISCV64
350 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
356 /* returns 2nd register class corresponding to t and rc */
357 static int RC2_TYPE(int t
, int rc
)
359 if (!USING_TWO_WORDS(t
))
374 /* we use our own 'finite' function to avoid potential problems with
375 non standard math libs */
376 /* XXX: endianness dependent */
377 ST_FUNC
int ieee_finite(double d
)
380 memcpy(p
, &d
, sizeof(double));
381 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
384 /* compiling intel long double natively */
385 #if (defined __i386__ || defined __x86_64__) \
386 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
387 # define TCC_IS_NATIVE_387
390 ST_FUNC
void test_lvalue(void)
392 if (!(vtop
->r
& VT_LVAL
))
396 ST_FUNC
void check_vstack(void)
398 if (vtop
!= vstack
- 1)
399 tcc_error("internal compiler error: vstack leak (%d)",
400 (int)(vtop
- vstack
+ 1));
403 /* ------------------------------------------------------------------------- */
404 /* vstack debugging aid */
407 void pv (const char *lbl
, int a
, int b
)
410 for (i
= a
; i
< a
+ b
; ++i
) {
411 SValue
*p
= &vtop
[-i
];
412 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
413 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
418 /* ------------------------------------------------------------------------- */
419 /* start of translation unit info */
420 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
426 /* file info: full path + filename */
427 section_sym
= put_elf_sym(symtab_section
, 0, 0,
428 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
429 text_section
->sh_num
, NULL
);
430 getcwd(buf
, sizeof(buf
));
432 normalize_slashes(buf
);
434 pstrcat(buf
, sizeof(buf
), "/");
435 put_stabs_r(s1
, buf
, N_SO
, 0, 0,
436 text_section
->data_offset
, text_section
, section_sym
);
437 put_stabs_r(s1
, file
->prev
? file
->prev
->filename
: file
->filename
,
439 text_section
->data_offset
, text_section
, section_sym
);
440 for (i
= 0; i
< sizeof (default_debug
) / sizeof (default_debug
[0]); i
++)
441 put_stabs(s1
, default_debug
[i
].name
, N_LSYM
, 0, 0, 0);
443 new_file
= last_line_num
= 0;
445 debug_next_type
= sizeof(default_debug
) / sizeof(default_debug
[0]);
449 /* we're currently 'including' the <command line> */
453 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
454 symbols can be safely used */
455 put_elf_sym(symtab_section
, 0, 0,
456 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
457 SHN_ABS
, file
->filename
);
460 /* put end of translation unit info */
461 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
465 put_stabs_r(s1
, NULL
, N_SO
, 0, 0,
466 text_section
->data_offset
, text_section
, section_sym
);
467 tcc_free(debug_hash
);
470 static BufferedFile
* put_new_file(TCCState
*s1
)
472 BufferedFile
*f
= file
;
473 /* use upper file if from inline ":asm:" */
474 if (f
->filename
[0] == ':')
477 put_stabs_r(s1
, f
->filename
, N_SOL
, 0, 0, ind
, text_section
, section_sym
);
478 new_file
= last_line_num
= 0;
483 /* put alternative filename */
484 ST_FUNC
void tcc_debug_putfile(TCCState
*s1
, const char *filename
)
486 if (0 == strcmp(file
->filename
, filename
))
488 pstrcpy(file
->filename
, sizeof(file
->filename
), filename
);
492 /* begin of #include */
493 ST_FUNC
void tcc_debug_bincl(TCCState
*s1
)
497 put_stabs(s1
, file
->filename
, N_BINCL
, 0, 0, 0);
501 /* end of #include */
502 ST_FUNC
void tcc_debug_eincl(TCCState
*s1
)
506 put_stabn(s1
, N_EINCL
, 0, 0, 0);
510 /* generate line number info */
511 static void tcc_debug_line(TCCState
*s1
)
515 || cur_text_section
!= text_section
516 || !(f
= put_new_file(s1
))
517 || last_line_num
== f
->line_num
)
519 if (func_ind
!= -1) {
520 put_stabn(s1
, N_SLINE
, 0, f
->line_num
, ind
- func_ind
);
522 /* from tcc_assemble */
523 put_stabs_r(s1
, NULL
, N_SLINE
, 0, f
->line_num
, ind
, text_section
, section_sym
);
525 last_line_num
= f
->line_num
;
528 static void tcc_debug_stabs (TCCState
*s1
, const char *str
, int type
, unsigned long value
,
529 Section
*sec
, int sym_index
)
535 (struct debug_sym
*)tcc_realloc (debug_info
->sym
,
536 sizeof(struct debug_sym
) *
537 (debug_info
->n_sym
+ 1));
538 s
= debug_info
->sym
+ debug_info
->n_sym
++;
541 s
->str
= tcc_strdup(str
);
543 s
->sym_index
= sym_index
;
546 put_stabs_r (s1
, str
, type
, 0, 0, value
, sec
, sym_index
);
548 put_stabs (s1
, str
, type
, 0, 0, value
);
551 static void tcc_debug_stabn(TCCState
*s1
, int type
, int value
)
555 if (type
== N_LBRAC
) {
556 struct debug_info
*info
=
557 (struct debug_info
*) tcc_mallocz(sizeof (*info
));
560 info
->parent
= debug_info
;
562 if (debug_info
->child
) {
563 if (debug_info
->child
->last
)
564 debug_info
->child
->last
->next
= info
;
566 debug_info
->child
->next
= info
;
567 debug_info
->child
->last
= info
;
570 debug_info
->child
= info
;
573 debug_info_root
= info
;
577 debug_info
->end
= value
;
578 debug_info
= debug_info
->parent
;
582 static void tcc_get_debug_info(TCCState
*s1
, Sym
*s
, CString
*result
)
591 type
= t
->type
.t
& ~(VT_STORAGE
| VT_CONSTANT
| VT_VOLATILE
);
592 if ((type
& VT_BTYPE
) != VT_BYTE
)
594 if (type
== VT_PTR
|| type
== (VT_PTR
| VT_ARRAY
))
595 n
++, t
= t
->type
.ref
;
599 if ((type
& VT_BTYPE
) == VT_STRUCT
) {
603 for (i
= 0; i
< n_debug_hash
; i
++) {
604 if (t
== debug_hash
[i
].type
) {
605 debug_type
= debug_hash
[i
].debug_type
;
609 if (debug_type
== -1) {
610 debug_type
= ++debug_next_type
;
611 debug_hash
= (struct debug_hash
*)
612 tcc_realloc (debug_hash
,
613 (n_debug_hash
+ 1) * sizeof(*debug_hash
));
614 debug_hash
[n_debug_hash
].debug_type
= debug_type
;
615 debug_hash
[n_debug_hash
++].type
= t
;
617 cstr_printf (&str
, "%s:T%d=%c%d",
618 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
619 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
621 IS_UNION (t
->type
.t
) ? 'u' : 's',
624 int pos
, size
, align
;
627 cstr_printf (&str
, "%s:",
628 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
629 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
630 tcc_get_debug_info (s1
, t
, &str
);
631 if (t
->type
.t
& VT_BITFIELD
) {
632 pos
= t
->c
* 8 + BIT_POS(t
->type
.t
);
633 size
= BIT_SIZE(t
->type
.t
);
637 size
= type_size(&t
->type
, &align
) * 8;
639 cstr_printf (&str
, ",%d,%d;", pos
, size
);
641 cstr_printf (&str
, ";");
642 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
646 else if (IS_ENUM(type
)) {
647 Sym
*e
= t
= t
->type
.ref
;
649 debug_type
= ++debug_next_type
;
651 cstr_printf (&str
, "%s:T%d=e",
652 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
653 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
657 cstr_printf (&str
, "%s:",
658 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
659 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
660 cstr_printf (&str
, e
->type
.t
& VT_UNSIGNED
? "%u," : "%d,",
663 cstr_printf (&str
, ";");
664 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
667 else if ((type
& VT_BTYPE
) != VT_FUNC
) {
668 type
&= ~VT_STRUCT_MASK
;
670 debug_type
<= sizeof(default_debug
) / sizeof(default_debug
[0]);
672 if (default_debug
[debug_type
- 1].type
== type
)
674 if (debug_type
> sizeof(default_debug
) / sizeof(default_debug
[0]))
678 cstr_printf (result
, "%d=", ++debug_next_type
);
681 type
= t
->type
.t
& ~(VT_STORAGE
| VT_CONSTANT
| VT_VOLATILE
);
682 if ((type
& VT_BTYPE
) != VT_BYTE
)
685 cstr_printf (result
, "%d=*", ++debug_next_type
);
686 else if (type
== (VT_PTR
| VT_ARRAY
))
687 cstr_printf (result
, "%d=ar1;0;%d;",
688 ++debug_next_type
, t
->type
.ref
->c
- 1);
689 else if (type
== VT_FUNC
) {
690 cstr_printf (result
, "%d=f", ++debug_next_type
);
691 tcc_get_debug_info (s1
, t
->type
.ref
, result
);
698 cstr_printf (result
, "%d", debug_type
);
701 static void tcc_debug_finish (TCCState
*s1
, struct debug_info
*cur
)
705 struct debug_info
*next
= cur
->next
;
707 for (i
= 0; i
< cur
->n_sym
; i
++) {
708 struct debug_sym
*s
= &cur
->sym
[i
];
711 put_stabs_r(s1
, s
->str
, s
->type
, 0, 0, s
->value
,
712 s
->sec
, s
->sym_index
);
714 put_stabs(s1
, s
->str
, s
->type
, 0, 0, s
->value
);
718 put_stabn(s1
, N_LBRAC
, 0, 0, cur
->start
);
719 tcc_debug_finish (s1
, cur
->child
);
720 put_stabn(s1
, N_RBRAC
, 0, 0, cur
->end
);
726 static void tcc_add_debug_info(TCCState
*s1
, int param
, Sym
*s
, Sym
*e
)
731 cstr_new (&debug_str
);
732 for (; s
!= e
; s
= s
->prev
) {
733 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
735 cstr_reset (&debug_str
);
736 cstr_printf (&debug_str
, "%s:%s", get_tok_str(s
->v
, NULL
), param
? "p" : "");
737 tcc_get_debug_info(s1
, s
, &debug_str
);
738 tcc_debug_stabs(s1
, debug_str
.data
, param
? N_PSYM
: N_LSYM
, s
->c
, NULL
, 0);
740 cstr_free (&debug_str
);
743 /* put function symbol */
744 static void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
750 debug_info_root
= NULL
;
752 tcc_debug_stabn(s1
, N_LBRAC
, ind
- func_ind
);
753 if (!(f
= put_new_file(s1
)))
755 cstr_new (&debug_str
);
756 cstr_printf(&debug_str
, "%s:%c", funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
757 tcc_get_debug_info(s1
, sym
->type
.ref
, &debug_str
);
758 put_stabs_r(s1
, debug_str
.data
, N_FUN
, 0, f
->line_num
, 0, cur_text_section
, sym
->c
);
759 cstr_free (&debug_str
);
764 /* put function size */
765 static void tcc_debug_funcend(TCCState
*s1
, int size
)
769 tcc_debug_stabn(s1
, N_RBRAC
, size
);
770 tcc_debug_finish (s1
, debug_info_root
);
774 static void tcc_debug_extern_sym(TCCState
*s1
, Sym
*sym
, int sh_num
, int sym_bind
, int sym_type
)
781 if (sym_type
== STT_FUNC
|| sym
->v
>= SYM_FIRST_ANOM
)
783 s
= s1
->sections
[sh_num
];
786 cstr_printf (&str
, "%s:%c",
787 get_tok_str(sym
->v
, NULL
),
788 sym_bind
== STB_GLOBAL
? 'G' : local_scope
? 'V' : 'S'
790 tcc_get_debug_info(s1
, sym
, &str
);
791 if (sym_bind
== STB_GLOBAL
)
792 tcc_debug_stabs(s1
, str
.data
, N_GSYM
, 0, NULL
, 0);
794 tcc_debug_stabs(s1
, str
.data
,
795 (sym
->type
.t
& VT_STATIC
) && data_section
== s
796 ? N_STSYM
: N_LCSYM
, 0, s
, sym
->c
);
800 static void tcc_debug_typedef(TCCState
*s1
, Sym
*sym
)
807 cstr_printf (&str
, "%s:t",
808 (sym
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
809 ? "" : get_tok_str(sym
->v
& ~SYM_FIELD
, NULL
));
810 tcc_get_debug_info(s1
, sym
, &str
);
811 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
815 /* ------------------------------------------------------------------------- */
816 /* for section layout see lib/tcov.c */
818 static void tcc_tcov_block_end(int line
);
820 static void tcc_tcov_block_begin(void)
824 unsigned long last_offset
= tcov_data
.offset
;
826 tcc_tcov_block_end (0);
827 if (tcc_state
->test_coverage
== 0 || nocode_wanted
)
830 if (tcov_data
.last_file_name
== 0 ||
831 strcmp ((const char *)(tcov_section
->data
+ tcov_data
.last_file_name
),
832 file
->true_filename
) != 0) {
836 if (tcov_data
.last_func_name
)
837 section_ptr_add(tcov_section
, 1);
838 if (tcov_data
.last_file_name
)
839 section_ptr_add(tcov_section
, 1);
840 tcov_data
.last_func_name
= 0;
842 if (file
->true_filename
[0] == '/') {
843 tcov_data
.last_file_name
= tcov_section
->data_offset
;
844 cstr_printf (&cstr
, "%s", file
->true_filename
);
847 getcwd (wd
, sizeof(wd
));
848 tcov_data
.last_file_name
= tcov_section
->data_offset
+ strlen(wd
) + 1;
849 cstr_printf (&cstr
, "%s/%s", wd
, file
->true_filename
);
851 ptr
= section_ptr_add(tcov_section
, cstr
.size
+ 1);
852 strncpy((char *)ptr
, cstr
.data
, cstr
.size
);
854 normalize_slashes((char *)ptr
);
858 if (tcov_data
.last_func_name
== 0 ||
859 strcmp ((const char *)(tcov_section
->data
+ tcov_data
.last_func_name
),
863 if (tcov_data
.last_func_name
)
864 section_ptr_add(tcov_section
, 1);
865 tcov_data
.last_func_name
= tcov_section
->data_offset
;
866 len
= strlen (funcname
);
867 ptr
= section_ptr_add(tcov_section
, len
+ 1);
868 strncpy((char *)ptr
, funcname
, len
);
869 section_ptr_add(tcov_section
, -tcov_section
->data_offset
& 7);
870 ptr
= section_ptr_add(tcov_section
, 8);
871 write64le (ptr
, file
->line_num
);
873 if (ind
== tcov_data
.ind
&& tcov_data
.line
== file
->line_num
)
874 tcov_data
.offset
= last_offset
;
877 label
.type
.t
= VT_LLONG
| VT_STATIC
;
879 ptr
= section_ptr_add(tcov_section
, 16);
880 tcov_data
.line
= file
->line_num
;
881 write64le (ptr
, (tcov_data
.line
<< 8) | 0xff);
882 put_extern_sym(&label
, tcov_section
,
883 ((unsigned char *)ptr
- tcov_section
->data
) + 8, 0);
884 sv
.type
= label
.type
;
885 sv
.r
= VT_SYM
| VT_LVAL
| VT_CONST
;
889 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || \
890 defined TCC_TARGET_ARM || defined TCC_TARGET_ARM64 || \
891 defined TCC_TARGET_RISCV64
892 gen_increment_tcov (&sv
);
898 tcov_data
.offset
= (unsigned char *)ptr
- tcov_section
->data
;
903 static void tcc_tcov_block_end(int line
)
905 if (tcc_state
->test_coverage
== 0)
907 if (tcov_data
.offset
) {
908 void *ptr
= tcov_section
->data
+ tcov_data
.offset
;
909 unsigned long long nline
= line
? line
: file
->line_num
;
911 write64le (ptr
, (read64le (ptr
) & 0xfffffffffull
) | (nline
<< 36));
912 tcov_data
.offset
= 0;
916 static void tcc_tcov_check_line(int start
)
918 if (tcc_state
->test_coverage
== 0)
920 if (tcov_data
.line
!= file
->line_num
) {
921 if ((tcov_data
.line
+ 1) != file
->line_num
) {
922 tcc_tcov_block_end (tcov_data
.line
);
924 tcc_tcov_block_begin ();
927 tcov_data
.line
= file
->line_num
;
931 static void tcc_tcov_start(void)
933 if (tcc_state
->test_coverage
== 0)
935 memset (&tcov_data
, 0, sizeof (tcov_data
));
936 if (tcov_section
== NULL
) {
937 tcov_section
= new_section(tcc_state
, ".tcov", SHT_PROGBITS
,
938 SHF_ALLOC
| SHF_WRITE
);
939 section_ptr_add(tcov_section
, 4); // pointer to executable name
943 static void tcc_tcov_end(void)
945 if (tcc_state
->test_coverage
== 0)
947 if (tcov_data
.last_func_name
)
948 section_ptr_add(tcov_section
, 1);
949 if (tcov_data
.last_file_name
)
950 section_ptr_add(tcov_section
, 1);
953 /* ------------------------------------------------------------------------- */
954 /* initialize vstack and types. This must be done also for tcc -E */
955 ST_FUNC
void tccgen_init(TCCState
*s1
)
958 memset(vtop
, 0, sizeof *vtop
);
960 /* define some often used types */
963 char_type
.t
= VT_BYTE
;
964 if (s1
->char_is_unsigned
)
965 char_type
.t
|= VT_UNSIGNED
;
966 char_pointer_type
= char_type
;
967 mk_pointer(&char_pointer_type
);
969 func_old_type
.t
= VT_FUNC
;
970 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
971 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
972 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
973 #ifdef precedence_parser
979 ST_FUNC
int tccgen_compile(TCCState
*s1
)
981 cur_text_section
= NULL
;
983 anon_sym
= SYM_FIRST_ANOM
;
986 nocode_wanted
= 0x80000000;
988 debug_modes
= s1
->do_debug
| s1
->test_coverage
<< 1;
992 #ifdef TCC_TARGET_ARM
996 printf("%s: **** new file\n", file
->filename
);
998 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
1001 gen_inline_functions(s1
);
1003 /* end of translation unit info */
1009 ST_FUNC
void tccgen_finish(TCCState
*s1
)
1011 cstr_free(&initstr
);
1012 free_inline_functions(s1
);
1013 sym_pop(&global_stack
, NULL
, 0);
1014 sym_pop(&local_stack
, NULL
, 0);
1015 /* free preprocessor macros */
1017 /* free sym_pools */
1018 dynarray_reset(&sym_pools
, &nb_sym_pools
);
1019 sym_free_first
= NULL
;
1022 /* ------------------------------------------------------------------------- */
1023 ST_FUNC ElfSym
*elfsym(Sym
*s
)
1027 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
1030 /* apply storage attributes to Elf symbol */
1031 ST_FUNC
void update_storage(Sym
*sym
)
1034 int sym_bind
, old_sym_bind
;
1040 if (sym
->a
.visibility
)
1041 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
1042 | sym
->a
.visibility
;
1044 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
1045 sym_bind
= STB_LOCAL
;
1046 else if (sym
->a
.weak
)
1047 sym_bind
= STB_WEAK
;
1049 sym_bind
= STB_GLOBAL
;
1050 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
1051 if (sym_bind
!= old_sym_bind
) {
1052 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
1055 #ifdef TCC_TARGET_PE
1056 if (sym
->a
.dllimport
)
1057 esym
->st_other
|= ST_PE_IMPORT
;
1058 if (sym
->a
.dllexport
)
1059 esym
->st_other
|= ST_PE_EXPORT
;
1063 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
1064 get_tok_str(sym
->v
, NULL
),
1065 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
1073 /* ------------------------------------------------------------------------- */
1074 /* update sym->c so that it points to an external symbol in section
1075 'section' with value 'value' */
1077 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
1078 addr_t value
, unsigned long size
,
1079 int can_add_underscore
)
1081 int sym_type
, sym_bind
, info
, other
, t
;
1087 name
= get_tok_str(sym
->v
, NULL
);
1089 if ((t
& VT_BTYPE
) == VT_FUNC
) {
1090 sym_type
= STT_FUNC
;
1091 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
1092 sym_type
= STT_NOTYPE
;
1093 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
1094 sym_type
= STT_FUNC
;
1096 sym_type
= STT_OBJECT
;
1098 if (t
& (VT_STATIC
| VT_INLINE
))
1099 sym_bind
= STB_LOCAL
;
1101 sym_bind
= STB_GLOBAL
;
1104 #ifdef TCC_TARGET_PE
1105 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
1106 Sym
*ref
= sym
->type
.ref
;
1107 if (ref
->a
.nodecorate
) {
1108 can_add_underscore
= 0;
1110 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
1111 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
1113 other
|= ST_PE_STDCALL
;
1114 can_add_underscore
= 0;
1119 if (sym
->asm_label
) {
1120 name
= get_tok_str(sym
->asm_label
, NULL
);
1121 can_add_underscore
= 0;
1124 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
1126 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
1130 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
1131 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
1134 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
1138 esym
->st_value
= value
;
1139 esym
->st_size
= size
;
1140 esym
->st_shndx
= sh_num
;
1142 update_storage(sym
);
1145 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
1146 addr_t value
, unsigned long size
)
1148 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
1149 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
1152 /* add a new relocation entry to symbol 'sym' in section 's' */
1153 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
1158 if (nocode_wanted
&& s
== cur_text_section
)
1163 put_extern_sym(sym
, NULL
, 0, 0);
1167 /* now we can add ELF relocation info */
1168 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
1172 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
1174 greloca(s
, sym
, offset
, type
, 0);
1178 /* ------------------------------------------------------------------------- */
1179 /* symbol allocator */
1180 static Sym
*__sym_malloc(void)
1182 Sym
*sym_pool
, *sym
, *last_sym
;
1185 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
1186 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
1188 last_sym
= sym_free_first
;
1190 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
1191 sym
->next
= last_sym
;
1195 sym_free_first
= last_sym
;
1199 static inline Sym
*sym_malloc(void)
1203 sym
= sym_free_first
;
1205 sym
= __sym_malloc();
1206 sym_free_first
= sym
->next
;
1209 sym
= tcc_malloc(sizeof(Sym
));
1214 ST_INLN
void sym_free(Sym
*sym
)
1217 sym
->next
= sym_free_first
;
1218 sym_free_first
= sym
;
1224 /* push, without hashing */
1225 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
1230 memset(s
, 0, sizeof *s
);
1240 /* find a symbol and return its associated structure. 's' is the top
1241 of the symbol stack */
1242 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
1247 else if (s
->v
== -1)
1254 /* structure lookup */
1255 ST_INLN Sym
*struct_find(int v
)
1258 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1260 return table_ident
[v
]->sym_struct
;
1263 /* find an identifier */
1264 ST_INLN Sym
*sym_find(int v
)
1267 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1269 return table_ident
[v
]->sym_identifier
;
1272 static int sym_scope(Sym
*s
)
1274 if (IS_ENUM_VAL (s
->type
.t
))
1275 return s
->type
.ref
->sym_scope
;
1277 return s
->sym_scope
;
1280 /* push a given symbol on the symbol stack */
1281 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
1290 s
= sym_push2(ps
, v
, type
->t
, c
);
1291 s
->type
.ref
= type
->ref
;
1293 /* don't record fields or anonymous symbols */
1295 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1296 /* record symbol in token array */
1297 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1299 ps
= &ts
->sym_struct
;
1301 ps
= &ts
->sym_identifier
;
1304 s
->sym_scope
= local_scope
;
1305 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
1306 tcc_error("redeclaration of '%s'",
1307 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
1312 /* push a global identifier */
1313 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
1316 s
= sym_push2(&global_stack
, v
, t
, c
);
1317 s
->r
= VT_CONST
| VT_SYM
;
1318 /* don't record anonymous symbol */
1319 if (v
< SYM_FIRST_ANOM
) {
1320 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
1321 /* modify the top most local identifier, so that sym_identifier will
1322 point to 's' when popped; happens when called from inline asm */
1323 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
1324 ps
= &(*ps
)->prev_tok
;
1331 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1332 pop them yet from the list, but do remove them from the token array. */
1333 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
1343 /* remove symbol in token array */
1345 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1346 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1348 ps
= &ts
->sym_struct
;
1350 ps
= &ts
->sym_identifier
;
1361 /* ------------------------------------------------------------------------- */
1362 static void vcheck_cmp(void)
1364 /* cannot let cpu flags if other instruction are generated. Also
1365 avoid leaving VT_JMP anywhere except on the top of the stack
1366 because it would complicate the code generator.
1368 Don't do this when nocode_wanted. vtop might come from
1369 !nocode_wanted regions (see 88_codeopt.c) and transforming
1370 it to a register without actually generating code is wrong
1371 as their value might still be used for real. All values
1372 we push under nocode_wanted will eventually be popped
1373 again, so that the VT_CMP/VT_JMP value will be in vtop
1374 when code is unsuppressed again. */
1376 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
1380 static void vsetc(CType
*type
, int r
, CValue
*vc
)
1382 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1383 tcc_error("memory full (vstack)");
1388 vtop
->r2
= VT_CONST
;
1393 ST_FUNC
void vswap(void)
1403 /* pop stack value */
1404 ST_FUNC
void vpop(void)
1407 v
= vtop
->r
& VT_VALMASK
;
1408 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1409 /* for x86, we need to pop the FP stack */
1410 if (v
== TREG_ST0
) {
1411 o(0xd8dd); /* fstp %st(0) */
1415 /* need to put correct jump if && or || without test */
/* push constant of type "type" with useless value (a 0 is pushed;
   only the type/stack slot matters to the caller) */
static void vpush(CType *type)
{
    vset(type, VT_CONST, 0); /* VT_CONST with value 0 */
}
1428 /* push arbitrary 64bit constant */
1429 static void vpush64(int ty
, unsigned long long v
)
1436 vsetc(&ctype
, VT_CONST
, &cval
);
1439 /* push integer constant */
1440 ST_FUNC
void vpushi(int v
)
/* push a pointer sized constant */
static void vpushs(addr_t v)
{
    /* delegate to the generic 64-bit constant push, typed VT_SIZE_T */
    vpush64(VT_SIZE_T, v);
}
/* push long long constant (typed VT_LLONG) */
static inline void vpushll(long long v)
{
    vpush64(VT_LLONG, v);
}
1457 ST_FUNC
void vset(CType
*type
, int r
, int v
)
1461 vsetc(type
, r
, &cval
);
1464 static void vseti(int r
, int v
)
1472 ST_FUNC
void vpushv(SValue
*v
)
1474 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1475 tcc_error("memory full (vstack)");
1480 static void vdup(void)
1485 /* rotate n first stack elements to the bottom
1486 I1 ... In -> I2 ... In I1 [top is right]
1488 ST_FUNC
void vrotb(int n
)
1495 for(i
=-n
+1;i
!=0;i
++)
1496 vtop
[i
] = vtop
[i
+1];
1500 /* rotate the n elements before entry e towards the top
1501 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1503 ST_FUNC
void vrote(SValue
*e
, int n
)
1510 for(i
= 0;i
< n
- 1; i
++)
1515 /* rotate n first stack elements to the top
1516 I1 ... In -> In I1 ... I(n-1) [top is right]
1518 ST_FUNC
void vrott(int n
)
1523 /* ------------------------------------------------------------------------- */
1524 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1526 /* called from generators to set the result from relational ops */
1527 ST_FUNC
void vset_VT_CMP(int op
)
1535 /* called once before asking generators to load VT_CMP to a register */
1536 static void vset_VT_JMP(void)
1538 int op
= vtop
->cmp_op
;
1540 if (vtop
->jtrue
|| vtop
->jfalse
) {
1541 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1542 int inv
= op
& (op
< 2); /* small optimization */
1543 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1545 /* otherwise convert flags (rsp. 0/1) to register */
1547 if (op
< 2) /* doesn't seem to happen */
1552 /* Set CPU Flags, doesn't yet jump */
1553 static void gvtst_set(int inv
, int t
)
1557 if (vtop
->r
!= VT_CMP
) {
1560 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1561 vset_VT_CMP(vtop
->c
.i
!= 0);
1564 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1565 *p
= gjmp_append(*p
, t
);
1568 /* Generate value test
1570 * Generate a test for any value (jump, comparison and integers) */
1571 static int gvtst(int inv
, int t
)
1576 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1578 x
= u
, u
= t
, t
= x
;
1581 /* jump to the wanted target */
1583 t
= gjmp_cond(op
^ inv
, t
);
1586 /* resolve complementary jumps to here */
1593 /* generate a zero or nozero test */
1594 static void gen_test_zero(int op
)
1596 if (vtop
->r
== VT_CMP
) {
1600 vtop
->jfalse
= vtop
->jtrue
;
1610 /* ------------------------------------------------------------------------- */
1611 /* push a symbol value of TYPE */
1612 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1616 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1620 /* Return a static symbol pointing to a section */
1621 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1627 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1628 sym
->type
.t
|= VT_STATIC
;
1629 put_extern_sym(sym
, sec
, offset
, size
);
/* push a reference to a section offset by adding a dummy symbol */
static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    /* get_sym_ref creates a static symbol bound to (sec, offset, size);
       vpushsym then pushes it as a VT_CONST | VT_SYM value */
    vpushsym(type, get_sym_ref(type, sec, offset, size));
}
1639 /* define a new external reference to a symbol 'v' of type 'u' */
1640 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1646 /* push forward reference */
1647 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1648 s
->type
.ref
= type
->ref
;
1649 } else if (IS_ASM_SYM(s
)) {
1650 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1651 s
->type
.ref
= type
->ref
;
1657 /* create an external reference with no specific type similar to asm labels.
1658 This avoids type conflicts if the symbol is used from C too */
1659 ST_FUNC Sym
*external_helper_sym(int v
)
1661 CType ct
= { VT_ASM_FUNC
, NULL
};
1662 return external_global_sym(v
, &ct
);
/* push a reference to a helper function (such as memmove) */
ST_FUNC void vpush_helper_func(int v)
{
    /* the symbol is pushed with the shared 'func_old_type' type
       (presumably the generic unprototyped function type — see its
       definition elsewhere in this file) */
    vpushsym(&func_old_type, external_helper_sym(v));
}
1671 /* Merge symbol attributes. */
1672 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1674 if (sa1
->aligned
&& !sa
->aligned
)
1675 sa
->aligned
= sa1
->aligned
;
1676 sa
->packed
|= sa1
->packed
;
1677 sa
->weak
|= sa1
->weak
;
1678 if (sa1
->visibility
!= STV_DEFAULT
) {
1679 int vis
= sa
->visibility
;
1680 if (vis
== STV_DEFAULT
1681 || vis
> sa1
->visibility
)
1682 vis
= sa1
->visibility
;
1683 sa
->visibility
= vis
;
1685 sa
->dllexport
|= sa1
->dllexport
;
1686 sa
->nodecorate
|= sa1
->nodecorate
;
1687 sa
->dllimport
|= sa1
->dllimport
;
1690 /* Merge function attributes. */
1691 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1693 if (fa1
->func_call
&& !fa
->func_call
)
1694 fa
->func_call
= fa1
->func_call
;
1695 if (fa1
->func_type
&& !fa
->func_type
)
1696 fa
->func_type
= fa1
->func_type
;
1697 if (fa1
->func_args
&& !fa
->func_args
)
1698 fa
->func_args
= fa1
->func_args
;
1699 if (fa1
->func_noreturn
)
1700 fa
->func_noreturn
= 1;
1707 /* Merge attributes. */
1708 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1710 merge_symattr(&ad
->a
, &ad1
->a
);
1711 merge_funcattr(&ad
->f
, &ad1
->f
);
1714 ad
->section
= ad1
->section
;
1715 if (ad1
->alias_target
)
1716 ad
->alias_target
= ad1
->alias_target
;
1718 ad
->asm_label
= ad1
->asm_label
;
1720 ad
->attr_mode
= ad1
->attr_mode
;
1723 /* Merge some type attributes. */
1724 static void patch_type(Sym
*sym
, CType
*type
)
1726 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1727 if (!(sym
->type
.t
& VT_EXTERN
))
1728 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1729 sym
->type
.t
&= ~VT_EXTERN
;
1732 if (IS_ASM_SYM(sym
)) {
1733 /* stay static if both are static */
1734 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1735 sym
->type
.ref
= type
->ref
;
1738 if (!is_compatible_types(&sym
->type
, type
)) {
1739 tcc_error("incompatible types for redefinition of '%s'",
1740 get_tok_str(sym
->v
, NULL
));
1742 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1743 int static_proto
= sym
->type
.t
& VT_STATIC
;
1744 /* warn if static follows non-static function declaration */
1745 if ((type
->t
& VT_STATIC
) && !static_proto
1746 /* XXX this test for inline shouldn't be here. Until we
1747 implement gnu-inline mode again it silences a warning for
1748 mingw caused by our workarounds. */
1749 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1750 tcc_warning("static storage ignored for redefinition of '%s'",
1751 get_tok_str(sym
->v
, NULL
));
1753 /* set 'inline' if both agree or if one has static */
1754 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1755 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1756 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1757 static_proto
|= VT_INLINE
;
1760 if (0 == (type
->t
& VT_EXTERN
)) {
1761 struct FuncAttr f
= sym
->type
.ref
->f
;
1762 /* put complete type, use static from prototype */
1763 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1764 sym
->type
.ref
= type
->ref
;
1765 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1767 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1770 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1771 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1772 sym
->type
.ref
= type
->ref
;
1776 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1777 /* set array size if it was omitted in extern declaration */
1778 sym
->type
.ref
->c
= type
->ref
->c
;
1780 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1781 tcc_warning("storage mismatch for redefinition of '%s'",
1782 get_tok_str(sym
->v
, NULL
));
1786 /* Merge some storage attributes. */
1787 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1790 patch_type(sym
, type
);
1792 #ifdef TCC_TARGET_PE
1793 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1794 tcc_error("incompatible dll linkage for redefinition of '%s'",
1795 get_tok_str(sym
->v
, NULL
));
1797 merge_symattr(&sym
->a
, &ad
->a
);
1799 sym
->asm_label
= ad
->asm_label
;
1800 update_storage(sym
);
1803 /* copy sym to other stack */
1804 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1807 s
= sym_malloc(), *s
= *s0
;
1808 s
->prev
= *ps
, *ps
= s
;
1809 if (s
->v
< SYM_FIRST_ANOM
) {
1810 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1811 s
->prev_tok
= *ps
, *ps
= s
;
1816 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1817 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1819 int bt
= s
->type
.t
& VT_BTYPE
;
1820 if (bt
== VT_FUNC
|| bt
== VT_PTR
) {
1821 Sym
**sp
= &s
->type
.ref
;
1822 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1823 Sym
*s2
= sym_copy(s
, ps
);
1824 sp
= &(*sp
= s2
)->next
;
1825 sym_copy_ref(s2
, ps
);
1830 /* define a new external reference to a symbol 'v' */
1831 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1835 /* look for global symbol */
1837 while (s
&& s
->sym_scope
)
1841 /* push forward reference */
1842 s
= global_identifier_push(v
, type
->t
, 0);
1845 s
->asm_label
= ad
->asm_label
;
1846 s
->type
.ref
= type
->ref
;
1847 /* copy type to the global stack */
1849 sym_copy_ref(s
, &global_stack
);
1851 patch_storage(s
, ad
, type
);
1853 /* push variables on local_stack if any */
1854 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1855 s
= sym_copy(s
, &local_stack
);
1859 /* save registers up to (vtop - n) stack entry */
1860 ST_FUNC
void save_regs(int n
)
1863 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
/* save r to the memory stack, and mark it as being free */
ST_FUNC void save_reg(int r)
{
    /* same as save_reg_upstack with no stack entries excluded (n == 0) */
    save_reg_upstack(r, 0);
}
1873 /* save r to the memory stack, and mark it as being free,
1874 if seen up to (vtop - n) stack entry */
1875 ST_FUNC
void save_reg_upstack(int r
, int n
)
1877 int l
, size
, align
, bt
;
1880 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1885 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1886 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1887 /* must save value on stack if not already done */
1889 bt
= p
->type
.t
& VT_BTYPE
;
1892 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1895 size
= type_size(&sv
.type
, &align
);
1896 l
= get_temp_local_var(size
,align
);
1897 sv
.r
= VT_LOCAL
| VT_LVAL
;
1899 store(p
->r
& VT_VALMASK
, &sv
);
1900 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1901 /* x86 specific: need to pop fp register ST0 if saved */
1902 if (r
== TREG_ST0
) {
1903 o(0xd8dd); /* fstp %st(0) */
1906 /* special long long case */
1907 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1912 /* mark that stack entry as being saved on the stack */
1913 if (p
->r
& VT_LVAL
) {
1914 /* also clear the bounded flag because the
1915 relocation address of the function was stored in
1917 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1919 p
->r
= VT_LVAL
| VT_LOCAL
;
1927 #ifdef TCC_TARGET_ARM
1928 /* find a register of class 'rc2' with at most one reference on stack.
1929 * If none, call get_reg(rc) */
1930 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1935 for(r
=0;r
<NB_REGS
;r
++) {
1936 if (reg_classes
[r
] & rc2
) {
1939 for(p
= vstack
; p
<= vtop
; p
++) {
1940 if ((p
->r
& VT_VALMASK
) == r
||
1952 /* find a free register of class 'rc'. If none, save one register */
1953 ST_FUNC
int get_reg(int rc
)
1958 /* find a free register */
1959 for(r
=0;r
<NB_REGS
;r
++) {
1960 if (reg_classes
[r
] & rc
) {
1963 for(p
=vstack
;p
<=vtop
;p
++) {
1964 if ((p
->r
& VT_VALMASK
) == r
||
1973 /* no register left : free the first one on the stack (VERY
1974 IMPORTANT to start from the bottom to ensure that we don't
1975 spill registers used in gen_opi()) */
1976 for(p
=vstack
;p
<=vtop
;p
++) {
1977 /* look at second register (if long long) */
1979 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1981 r
= p
->r
& VT_VALMASK
;
1982 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1988 /* Should never comes here */
1992 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1993 static int get_temp_local_var(int size
,int align
){
1995 struct temp_local_variable
*temp_var
;
2002 for(i
=0;i
<nb_temp_local_vars
;i
++){
2003 temp_var
=&arr_temp_local_vars
[i
];
2004 if(temp_var
->size
<size
||align
!=temp_var
->align
){
2007 /*check if temp_var is free*/
2009 for(p
=vstack
;p
<=vtop
;p
++) {
2011 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
2012 if(p
->c
.i
==temp_var
->location
){
2019 found_var
=temp_var
->location
;
2025 loc
= (loc
- size
) & -align
;
2026 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
2027 temp_var
=&arr_temp_local_vars
[i
];
2028 temp_var
->location
=loc
;
2029 temp_var
->size
=size
;
2030 temp_var
->align
=align
;
2031 nb_temp_local_vars
++;
/* empty the list of recorded temporary local variables */
static void clear_temp_local_var_list(){
	nb_temp_local_vars=0;
}
2042 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
2044 static void move_reg(int r
, int s
, int t
)
2058 /* get address of vtop (vtop MUST BE an lvalue) */
2059 ST_FUNC
void gaddrof(void)
2061 vtop
->r
&= ~VT_LVAL
;
2062 /* tricky: if saved lvalue, then we can go back to lvalue */
2063 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
2064 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
2067 #ifdef CONFIG_TCC_BCHECK
2068 /* generate a bounded pointer addition */
2069 static void gen_bounded_ptr_add(void)
2071 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
2076 vpush_helper_func(TOK___bound_ptr_add
);
2081 /* returned pointer is in REG_IRET */
2082 vtop
->r
= REG_IRET
| VT_BOUNDED
;
2085 /* relocation offset of the bounding function call point */
2086 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
2089 /* patch pointer addition in vtop so that pointer dereferencing is
2091 static void gen_bounded_ptr_deref(void)
2101 size
= type_size(&vtop
->type
, &align
);
2103 case 1: func
= TOK___bound_ptr_indir1
; break;
2104 case 2: func
= TOK___bound_ptr_indir2
; break;
2105 case 4: func
= TOK___bound_ptr_indir4
; break;
2106 case 8: func
= TOK___bound_ptr_indir8
; break;
2107 case 12: func
= TOK___bound_ptr_indir12
; break;
2108 case 16: func
= TOK___bound_ptr_indir16
; break;
2110 /* may happen with struct member access */
2113 sym
= external_helper_sym(func
);
2115 put_extern_sym(sym
, NULL
, 0, 0);
2116 /* patch relocation */
2117 /* XXX: find a better solution ? */
2118 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
2119 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
2122 /* generate lvalue bound code */
2123 static void gbound(void)
2127 vtop
->r
&= ~VT_MUSTBOUND
;
2128 /* if lvalue, then use checking code before dereferencing */
2129 if (vtop
->r
& VT_LVAL
) {
2130 /* if not VT_BOUNDED value, then make one */
2131 if (!(vtop
->r
& VT_BOUNDED
)) {
2132 /* must save type because we must set it to int to get pointer */
2134 vtop
->type
.t
= VT_PTR
;
2137 gen_bounded_ptr_add();
2141 /* then check for dereferencing */
2142 gen_bounded_ptr_deref();
2146 /* we need to call __bound_ptr_add before we start to load function
2147 args into registers */
2148 ST_FUNC
void gbound_args(int nb_args
)
2153 for (i
= 1; i
<= nb_args
; ++i
)
2154 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
2160 sv
= vtop
- nb_args
;
2161 if (sv
->r
& VT_SYM
) {
2165 #ifndef TCC_TARGET_PE
2166 || v
== TOK_sigsetjmp
2167 || v
== TOK___sigsetjmp
2170 vpush_helper_func(TOK___bound_setjmp
);
2173 func_bound_add_epilog
= 1;
2175 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
2176 if (v
== TOK_alloca
)
2177 func_bound_add_epilog
= 1;
2180 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
2181 sv
->sym
->asm_label
= TOK___bound_longjmp
;
2186 /* Add bounds for local symbols from S to E (via ->prev) */
2187 static void add_local_bounds(Sym
*s
, Sym
*e
)
2189 for (; s
!= e
; s
= s
->prev
) {
2190 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
2192 /* Add arrays/structs/unions because we always take address */
2193 if ((s
->type
.t
& VT_ARRAY
)
2194 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
2195 || s
->a
.addrtaken
) {
2196 /* add local bound info */
2197 int align
, size
= type_size(&s
->type
, &align
);
2198 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
2199 2 * sizeof(addr_t
));
2200 bounds_ptr
[0] = s
->c
;
2201 bounds_ptr
[1] = size
;
2207 /* Wrapper around sym_pop, that potentially also registers local bounds. */
2208 static void pop_local_syms(Sym
*b
, int keep
)
2210 #ifdef CONFIG_TCC_BCHECK
2211 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
2212 add_local_bounds(local_stack
, b
);
2215 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
2216 sym_pop(&local_stack
, b
, keep
);
2219 static void incr_bf_adr(int o
)
2221 vtop
->type
= char_pointer_type
;
2225 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
2229 /* single-byte load mode for packed or otherwise unaligned bitfields */
2230 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
2233 save_reg_upstack(vtop
->r
, 1);
2234 vpush64(type
->t
& VT_BTYPE
, 0); // B X
2235 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2244 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
2246 vpushi((1 << n
) - 1), gen_op('&');
2249 vpushi(bits
), gen_op(TOK_SHL
);
2252 bits
+= n
, bit_size
-= n
, o
= 1;
2255 if (!(type
->t
& VT_UNSIGNED
)) {
2256 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
2257 vpushi(n
), gen_op(TOK_SHL
);
2258 vpushi(n
), gen_op(TOK_SAR
);
2262 /* single-byte store mode for packed or otherwise unaligned bitfields */
2263 static void store_packed_bf(int bit_pos
, int bit_size
)
2265 int bits
, n
, o
, m
, c
;
2266 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2268 save_reg_upstack(vtop
->r
, 1);
2269 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2271 incr_bf_adr(o
); // X B
2273 c
? vdup() : gv_dup(); // B V X
2276 vpushi(bits
), gen_op(TOK_SHR
);
2278 vpushi(bit_pos
), gen_op(TOK_SHL
);
2283 m
= ((1 << n
) - 1) << bit_pos
;
2284 vpushi(m
), gen_op('&'); // X B V1
2285 vpushv(vtop
-1); // X B V1 B
2286 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
2287 gen_op('&'); // X B V1 B1
2288 gen_op('|'); // X B V2
2290 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
2291 vstore(), vpop(); // X B
2292 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
2297 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
2300 if (0 == sv
->type
.ref
)
2302 t
= sv
->type
.ref
->auxtype
;
2303 if (t
!= -1 && t
!= VT_STRUCT
) {
2304 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
2310 /* store vtop a register belonging to class 'rc'. lvalues are
2311 converted to values. Cannot be used if cannot be converted to
2312 register value (such as structures). */
2313 ST_FUNC
int gv(int rc
)
2315 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
2316 int bit_pos
, bit_size
, size
, align
;
2318 /* NOTE: get_reg can modify vstack[] */
2319 if (vtop
->type
.t
& VT_BITFIELD
) {
2322 bit_pos
= BIT_POS(vtop
->type
.t
);
2323 bit_size
= BIT_SIZE(vtop
->type
.t
);
2324 /* remove bit field info to avoid loops */
2325 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
2328 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
2329 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
2330 type
.t
|= VT_UNSIGNED
;
2332 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
2334 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2339 if (r
== VT_STRUCT
) {
2340 load_packed_bf(&type
, bit_pos
, bit_size
);
2342 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
2343 /* cast to int to propagate signedness in following ops */
2345 /* generate shifts */
2346 vpushi(bits
- (bit_pos
+ bit_size
));
2348 vpushi(bits
- bit_size
);
2349 /* NOTE: transformed to SHR if unsigned */
2354 if (is_float(vtop
->type
.t
) &&
2355 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2356 /* CPUs usually cannot use float constants, so we store them
2357 generically in data segment */
2358 init_params p
= { data_section
};
2359 unsigned long offset
;
2360 size
= type_size(&vtop
->type
, &align
);
2362 size
= 0, align
= 1;
2363 offset
= section_add(p
.sec
, size
, align
);
2364 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
2366 init_putv(&p
, &vtop
->type
, offset
);
2369 #ifdef CONFIG_TCC_BCHECK
2370 if (vtop
->r
& VT_MUSTBOUND
)
2374 bt
= vtop
->type
.t
& VT_BTYPE
;
2376 #ifdef TCC_TARGET_RISCV64
2378 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
2381 rc2
= RC2_TYPE(bt
, rc
);
2383 /* need to reload if:
2385 - lvalue (need to dereference pointer)
2386 - already a register, but not in the right class */
2387 r
= vtop
->r
& VT_VALMASK
;
2388 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
2389 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
2391 if (!r_ok
|| !r2_ok
) {
2395 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
2396 int original_type
= vtop
->type
.t
;
2398 /* two register type load :
2399 expand to two words temporarily */
2400 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2402 unsigned long long ll
= vtop
->c
.i
;
2403 vtop
->c
.i
= ll
; /* first word */
2405 vtop
->r
= r
; /* save register value */
2406 vpushi(ll
>> 32); /* second word */
2407 } else if (vtop
->r
& VT_LVAL
) {
2408 /* We do not want to modify the long long pointer here.
2409 So we save any other instances down the stack */
2410 save_reg_upstack(vtop
->r
, 1);
2411 /* load from memory */
2412 vtop
->type
.t
= load_type
;
2415 vtop
[-1].r
= r
; /* save register value */
2416 /* increment pointer to get second word */
2417 vtop
->type
.t
= VT_PTRDIFF_T
;
2422 vtop
->type
.t
= load_type
;
2424 /* move registers */
2427 if (r2_ok
&& vtop
->r2
< VT_CONST
)
2430 vtop
[-1].r
= r
; /* save register value */
2431 vtop
->r
= vtop
[-1].r2
;
2433 /* Allocate second register. Here we rely on the fact that
2434 get_reg() tries first to free r2 of an SValue. */
2438 /* write second register */
2441 vtop
->type
.t
= original_type
;
2443 if (vtop
->r
== VT_CMP
)
2445 /* one register type load */
2450 #ifdef TCC_TARGET_C67
2451 /* uses register pairs for doubles */
2452 if (bt
== VT_DOUBLE
)
2459 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2460 ST_FUNC
void gv2(int rc1
, int rc2
)
2462 /* generate more generic register first. But VT_JMP or VT_CMP
2463 values must be generated first in all cases to avoid possible
2465 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
2470 /* test if reload is needed for first register */
2471 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
2481 /* test if reload is needed for first register */
2482 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
2489 /* expand 64bit on stack in two ints */
2490 ST_FUNC
void lexpand(void)
2493 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
2494 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
2495 if (v
== VT_CONST
) {
2498 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
2504 vtop
[0].r
= vtop
[-1].r2
;
2505 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2507 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2512 /* build a long long from two ints */
2513 static void lbuild(int t
)
2515 gv2(RC_INT
, RC_INT
);
2516 vtop
[-1].r2
= vtop
[0].r
;
2517 vtop
[-1].type
.t
= t
;
2522 /* convert stack entry to register and duplicate its value in another
2524 static void gv_dup(void)
2530 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2531 if (t
& VT_BITFIELD
) {
2541 /* stack: H L L1 H1 */
2551 /* duplicate value */
2561 /* generate CPU independent (unsigned) long long operations */
2562 static void gen_opl(int op
)
2564 int t
, a
, b
, op1
, c
, i
;
2566 unsigned short reg_iret
= REG_IRET
;
2567 unsigned short reg_lret
= REG_IRE2
;
2573 func
= TOK___divdi3
;
2576 func
= TOK___udivdi3
;
2579 func
= TOK___moddi3
;
2582 func
= TOK___umoddi3
;
2589 /* call generic long long function */
2590 vpush_helper_func(func
);
2595 vtop
->r2
= reg_lret
;
2603 //pv("gen_opl A",0,2);
2609 /* stack: L1 H1 L2 H2 */
2614 vtop
[-2] = vtop
[-3];
2617 /* stack: H1 H2 L1 L2 */
2618 //pv("gen_opl B",0,4);
2624 /* stack: H1 H2 L1 L2 ML MH */
2627 /* stack: ML MH H1 H2 L1 L2 */
2631 /* stack: ML MH H1 L2 H2 L1 */
2636 /* stack: ML MH M1 M2 */
2639 } else if (op
== '+' || op
== '-') {
2640 /* XXX: add non carry method too (for MIPS or alpha) */
2646 /* stack: H1 H2 (L1 op L2) */
2649 gen_op(op1
+ 1); /* TOK_xxxC2 */
2652 /* stack: H1 H2 (L1 op L2) */
2655 /* stack: (L1 op L2) H1 H2 */
2657 /* stack: (L1 op L2) (H1 op H2) */
2665 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2666 t
= vtop
[-1].type
.t
;
2670 /* stack: L H shift */
2672 /* constant: simpler */
2673 /* NOTE: all comments are for SHL. the other cases are
2674 done by swapping words */
2685 if (op
!= TOK_SAR
) {
2718 /* XXX: should provide a faster fallback on x86 ? */
2721 func
= TOK___ashrdi3
;
2724 func
= TOK___lshrdi3
;
2727 func
= TOK___ashldi3
;
2733 /* compare operations */
2739 /* stack: L1 H1 L2 H2 */
2741 vtop
[-1] = vtop
[-2];
2743 /* stack: L1 L2 H1 H2 */
2747 /* when values are equal, we need to compare low words. since
2748 the jump is inverted, we invert the test too. */
2751 else if (op1
== TOK_GT
)
2753 else if (op1
== TOK_ULT
)
2755 else if (op1
== TOK_UGT
)
2765 /* generate non equal test */
2767 vset_VT_CMP(TOK_NE
);
2771 /* compare low. Always unsigned */
2775 else if (op1
== TOK_LE
)
2777 else if (op1
== TOK_GT
)
2779 else if (op1
== TOK_GE
)
2782 #if 0//def TCC_TARGET_I386
2783 if (op
== TOK_NE
) { gsym(b
); break; }
2784 if (op
== TOK_EQ
) { gsym(a
); break; }
/* signed 64-bit truncating division carried out on unsigned operands:
   divide the magnitudes, then negate the quotient when the operand
   signs differ (two's complement; matches C's '/' semantics) */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = a >> 63 ? -a : a; /* |a| */
    uint64_t ub = b >> 63 ? -b : b; /* |b| */
    uint64_t q = ua / ub;

    /* quotient is negative iff exactly one operand was negative */
    return (a ^ b) >> 63 ? -q : q;
}
/* signed 64-bit '<' on unsigned operands: XORing the sign bit maps
   the signed range onto the unsigned range order-preservingly, so a
   plain unsigned compare realizes the signed comparison */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
2804 /* handle integer constant optimizations and various machine
2806 static void gen_opic(int op
)
2808 SValue
*v1
= vtop
- 1;
2810 int t1
= v1
->type
.t
& VT_BTYPE
;
2811 int t2
= v2
->type
.t
& VT_BTYPE
;
2812 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2813 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2814 uint64_t l1
= c1
? v1
->c
.i
: 0;
2815 uint64_t l2
= c2
? v2
->c
.i
: 0;
2816 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2818 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2819 l1
= ((uint32_t)l1
|
2820 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2821 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2822 l2
= ((uint32_t)l2
|
2823 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2827 case '+': l1
+= l2
; break;
2828 case '-': l1
-= l2
; break;
2829 case '&': l1
&= l2
; break;
2830 case '^': l1
^= l2
; break;
2831 case '|': l1
|= l2
; break;
2832 case '*': l1
*= l2
; break;
2839 /* if division by zero, generate explicit division */
2841 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2842 tcc_error("division by zero in constant");
2846 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2847 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2848 case TOK_UDIV
: l1
= l1
/ l2
; break;
2849 case TOK_UMOD
: l1
= l1
% l2
; break;
2852 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2853 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2855 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2858 case TOK_ULT
: l1
= l1
< l2
; break;
2859 case TOK_UGE
: l1
= l1
>= l2
; break;
2860 case TOK_EQ
: l1
= l1
== l2
; break;
2861 case TOK_NE
: l1
= l1
!= l2
; break;
2862 case TOK_ULE
: l1
= l1
<= l2
; break;
2863 case TOK_UGT
: l1
= l1
> l2
; break;
2864 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2865 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2866 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2867 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2869 case TOK_LAND
: l1
= l1
&& l2
; break;
2870 case TOK_LOR
: l1
= l1
|| l2
; break;
2874 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2875 l1
= ((uint32_t)l1
|
2876 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2880 /* if commutative ops, put c2 as constant */
2881 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2882 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2884 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2885 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2887 if (!const_wanted
&&
2889 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2890 (l1
== -1 && op
== TOK_SAR
))) {
2891 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2893 } else if (!const_wanted
&&
2894 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2896 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2897 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2898 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2903 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2906 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2907 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2910 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2911 /* filter out NOP operations like x*1, x-0, x&-1... */
2913 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2914 /* try to use shifts instead of muls or divs */
2915 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2924 else if (op
== TOK_PDIV
)
2930 } else if (c2
&& (op
== '+' || op
== '-') &&
2931 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2932 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2933 /* symbol + constant case */
2937 /* The backends can't always deal with addends to symbols
2938 larger than +-1<<31. Don't construct such. */
2945 /* call low level op generator */
2946 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2947 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2955 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2956 # define gen_negf gen_opf
2957 #elif defined TCC_TARGET_ARM
2958 void gen_negf(int op
)
2960 /* arm will detect 0-x and replace by vneg */
2961 vpushi(0), vswap(), gen_op('-');
2964 /* XXX: implement in gen_opf() for other backends too */
2965 void gen_negf(int op
)
2967 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2968 subtract(-0, x), but with them it's really a sign flip
2969 operation. We implement this with bit manipulation and have
2970 to do some type reinterpretation for this, which TCC can do
2973 int align
, size
, bt
;
2975 size
= type_size(&vtop
->type
, &align
);
2976 bt
= vtop
->type
.t
& VT_BTYPE
;
2977 save_reg(gv(RC_TYPE(bt
)));
2979 incr_bf_adr(size
- 1);
2981 vpushi(0x80); /* flip sign */
2988 /* generate a floating point operation with constant propagation */
2989 static void gen_opif(int op
)
2993 #if defined _MSC_VER && defined __x86_64__
2994 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
3004 /* currently, we cannot do computations with forward symbols */
3005 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3006 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3008 if (v1
->type
.t
== VT_FLOAT
) {
3011 } else if (v1
->type
.t
== VT_DOUBLE
) {
3018 /* NOTE: we only do constant propagation if finite number (not
3019 NaN or infinity) (ANSI spec) */
3020 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !const_wanted
)
3023 case '+': f1
+= f2
; break;
3024 case '-': f1
-= f2
; break;
3025 case '*': f1
*= f2
; break;
3027 if (f2
== (f1
-f1
) ) {
3028 union { float f
; unsigned u
; } x1
, x2
, y
;
3029 /* If not in initializer we need to potentially generate
3030 FP exceptions at runtime, otherwise we want to fold. */
3033 /* the run-time result of 0.0/0.0 on x87, also of other compilers
3034 when used to compile the f1 /= f2 below, would be -nan */
3035 x1
.f
= f1
, x2
.f
= f2
;
3037 y
.u
= 0x7fc00000; /* nan */
3039 y
.u
= 0x7f800000; /* infinity */
3040 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
3049 /* XXX: also handles tests ? */
3055 /* XXX: overflow test ? */
3056 if (v1
->type
.t
== VT_FLOAT
) {
3058 } else if (v1
->type
.t
== VT_DOUBLE
) {
3065 if (op
== TOK_NEG
) {
3073 /* print a type. If 'varstr' is not NULL, then the variable is also
3074 printed in the type */
3076 /* XXX: add array and function pointers */
3077 static void type_to_str(char *buf
, int buf_size
,
3078 CType
*type
, const char *varstr
)
3090 pstrcat(buf
, buf_size
, "extern ");
3092 pstrcat(buf
, buf_size
, "static ");
3094 pstrcat(buf
, buf_size
, "typedef ");
3096 pstrcat(buf
, buf_size
, "inline ");
3097 if (t
& VT_VOLATILE
)
3098 pstrcat(buf
, buf_size
, "volatile ");
3099 if (t
& VT_CONSTANT
)
3100 pstrcat(buf
, buf_size
, "const ");
3102 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
3103 || ((t
& VT_UNSIGNED
)
3104 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
3107 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
3109 buf_size
-= strlen(buf
);
3145 tstr
= "long double";
3147 pstrcat(buf
, buf_size
, tstr
);
3154 pstrcat(buf
, buf_size
, tstr
);
3155 v
= type
->ref
->v
& ~SYM_STRUCT
;
3156 if (v
>= SYM_FIRST_ANOM
)
3157 pstrcat(buf
, buf_size
, "<anonymous>");
3159 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3164 if (varstr
&& '*' == *varstr
) {
3165 pstrcat(buf1
, sizeof(buf1
), "(");
3166 pstrcat(buf1
, sizeof(buf1
), varstr
);
3167 pstrcat(buf1
, sizeof(buf1
), ")");
3169 pstrcat(buf1
, buf_size
, "(");
3171 while (sa
!= NULL
) {
3173 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3174 pstrcat(buf1
, sizeof(buf1
), buf2
);
3177 pstrcat(buf1
, sizeof(buf1
), ", ");
3179 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3180 pstrcat(buf1
, sizeof(buf1
), ", ...");
3181 pstrcat(buf1
, sizeof(buf1
), ")");
3182 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3187 if (varstr
&& '*' == *varstr
)
3188 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3190 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3191 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3194 pstrcpy(buf1
, sizeof(buf1
), "*");
3195 if (t
& VT_CONSTANT
)
3196 pstrcat(buf1
, buf_size
, "const ");
3197 if (t
& VT_VOLATILE
)
3198 pstrcat(buf1
, buf_size
, "volatile ");
3200 pstrcat(buf1
, sizeof(buf1
), varstr
);
3201 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3205 pstrcat(buf
, buf_size
, " ");
3206 pstrcat(buf
, buf_size
, varstr
);
3211 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
3213 char buf1
[256], buf2
[256];
3214 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3215 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3216 tcc_error(fmt
, buf1
, buf2
);
3219 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
3221 char buf1
[256], buf2
[256];
3222 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3223 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3224 tcc_warning(fmt
, buf1
, buf2
);
3227 static int pointed_size(CType
*type
)
3230 return type_size(pointed_type(type
), &align
);
3233 static void vla_runtime_pointed_size(CType
*type
)
3236 vla_runtime_type_size(pointed_type(type
), &align
);
3239 static inline int is_null_pointer(SValue
*p
)
3241 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
3243 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
3244 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
3245 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
3246 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
3247 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
3248 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3252 /* compare function types. OLD functions match any new functions */
3253 static int is_compatible_func(CType
*type1
, CType
*type2
)
3259 if (s1
->f
.func_call
!= s2
->f
.func_call
)
3261 if (s1
->f
.func_type
!= s2
->f
.func_type
3262 && s1
->f
.func_type
!= FUNC_OLD
3263 && s2
->f
.func_type
!= FUNC_OLD
)
3266 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
3268 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
3279 /* return true if type1 and type2 are the same. If unqualified is
3280 true, qualifiers on the types are ignored.
3282 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3286 t1
= type1
->t
& VT_TYPE
;
3287 t2
= type2
->t
& VT_TYPE
;
3289 /* strip qualifiers before comparing */
3290 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3291 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3294 /* Default Vs explicit signedness only matters for char */
3295 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3299 /* XXX: bitfields ? */
3304 && !(type1
->ref
->c
< 0
3305 || type2
->ref
->c
< 0
3306 || type1
->ref
->c
== type2
->ref
->c
))
3309 /* test more complicated cases */
3310 bt1
= t1
& VT_BTYPE
;
3311 if (bt1
== VT_PTR
) {
3312 type1
= pointed_type(type1
);
3313 type2
= pointed_type(type2
);
3314 return is_compatible_types(type1
, type2
);
3315 } else if (bt1
== VT_STRUCT
) {
3316 return (type1
->ref
== type2
->ref
);
3317 } else if (bt1
== VT_FUNC
) {
3318 return is_compatible_func(type1
, type2
);
3319 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
3320 /* If both are enums then they must be the same, if only one is then
3321 t1 and t2 must be equal, which was checked above already. */
3322 return type1
->ref
== type2
->ref
;
3328 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3329 type is stored in DEST if non-null (except for pointer plus/minus) . */
3330 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
3332 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
3333 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
3339 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
3340 ret
= op
== '?' ? 1 : 0;
3341 /* NOTE: as an extension, we accept void on only one side */
3343 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3344 if (op
== '+') ; /* Handled in caller */
3345 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3346 /* If one is a null ptr constant the result type is the other. */
3347 else if (is_null_pointer (op2
)) type
= *type1
;
3348 else if (is_null_pointer (op1
)) type
= *type2
;
3349 else if (bt1
!= bt2
) {
3350 /* accept comparison or cond-expr between pointer and integer
3352 if ((op
== '?' || TOK_ISCOND(op
))
3353 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
3354 tcc_warning("pointer/integer mismatch in %s",
3355 op
== '?' ? "conditional expression" : "comparison");
3356 else if (op
!= '-' || !is_integer_btype(bt2
))
3358 type
= *(bt1
== VT_PTR
? type1
: type2
);
3360 CType
*pt1
= pointed_type(type1
);
3361 CType
*pt2
= pointed_type(type2
);
3362 int pbt1
= pt1
->t
& VT_BTYPE
;
3363 int pbt2
= pt2
->t
& VT_BTYPE
;
3364 int newquals
, copied
= 0;
3365 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
3366 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
3367 if (op
!= '?' && !TOK_ISCOND(op
))
3370 type_incompatibility_warning(type1
, type2
,
3372 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3373 : "pointer type mismatch in comparison('%s' and '%s')");
3376 /* pointers to void get preferred, otherwise the
3377 pointed to types minus qualifs should be compatible */
3378 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
3379 /* combine qualifs */
3380 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
3381 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3384 /* copy the pointer target symbol */
3385 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3388 pointed_type(&type
)->t
|= newquals
;
3390 /* pointers to incomplete arrays get converted to
3391 pointers to completed ones if possible */
3392 if (pt1
->t
& VT_ARRAY
3393 && pt2
->t
& VT_ARRAY
3394 && pointed_type(&type
)->ref
->c
< 0
3395 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
3398 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3400 pointed_type(&type
)->ref
=
3401 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
3402 0, pointed_type(&type
)->ref
->c
);
3403 pointed_type(&type
)->ref
->c
=
3404 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
3410 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
3411 if (op
!= '?' || !compare_types(type1
, type2
, 1))
3414 } else if (is_float(bt1
) || is_float(bt2
)) {
3415 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
3416 type
.t
= VT_LDOUBLE
;
3417 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
3422 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
3423 /* cast to biggest op */
3424 type
.t
= VT_LLONG
| VT_LONG
;
3425 if (bt1
== VT_LLONG
)
3427 if (bt2
== VT_LLONG
)
3429 /* convert to unsigned if it does not fit in a long long */
3430 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
3431 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
3432 type
.t
|= VT_UNSIGNED
;
3434 /* integer operations */
3435 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
3436 /* convert to unsigned if it does not fit in an integer */
3437 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
3438 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
3439 type
.t
|= VT_UNSIGNED
;
3446 /* generic gen_op: handles types problems */
3447 ST_FUNC
void gen_op(int op
)
3449 int u
, t1
, t2
, bt1
, bt2
, t
;
3450 CType type1
, combtype
;
3453 t1
= vtop
[-1].type
.t
;
3454 t2
= vtop
[0].type
.t
;
3455 bt1
= t1
& VT_BTYPE
;
3456 bt2
= t2
& VT_BTYPE
;
3458 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
3459 if (bt2
== VT_FUNC
) {
3460 mk_pointer(&vtop
->type
);
3463 if (bt1
== VT_FUNC
) {
3465 mk_pointer(&vtop
->type
);
3470 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
3471 tcc_error_noabort("invalid operand types for binary operation");
3473 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3474 /* at least one operand is a pointer */
3475 /* relational op: must be both pointers */
3478 /* if both pointers, then it must be the '-' op */
3479 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
3481 tcc_error("cannot use pointers here");
3482 if (vtop
[-1].type
.t
& VT_VLA
) {
3483 vla_runtime_pointed_size(&vtop
[-1].type
);
3485 vpushi(pointed_size(&vtop
[-1].type
));
3489 vtop
->type
.t
= VT_PTRDIFF_T
;
3493 /* exactly one pointer : must be '+' or '-'. */
3494 if (op
!= '-' && op
!= '+')
3495 tcc_error("cannot use pointers here");
3496 /* Put pointer as first operand */
3497 if (bt2
== VT_PTR
) {
3499 t
= t1
, t1
= t2
, t2
= t
;
3502 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
3503 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3506 type1
= vtop
[-1].type
;
3507 if (vtop
[-1].type
.t
& VT_VLA
)
3508 vla_runtime_pointed_size(&vtop
[-1].type
);
3510 u
= pointed_size(&vtop
[-1].type
);
3512 tcc_error("unknown array element size");
3516 /* XXX: cast to int ? (long long case) */
3521 #ifdef CONFIG_TCC_BCHECK
3522 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
3523 /* if bounded pointers, we generate a special code to
3530 gen_bounded_ptr_add();
3536 type1
.t
&= ~VT_ARRAY
;
3537 /* put again type if gen_opic() swaped operands */
3541 /* floats can only be used for a few operations */
3542 if (is_float(combtype
.t
)
3543 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3545 tcc_error("invalid operands for binary operation");
3546 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
3547 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
3548 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
3550 t
|= (VT_LONG
& t1
);
3554 t
= t2
= combtype
.t
;
3555 /* XXX: currently, some unsigned operations are explicit, so
3556 we modify them here */
3557 if (t
& VT_UNSIGNED
) {
3564 else if (op
== TOK_LT
)
3566 else if (op
== TOK_GT
)
3568 else if (op
== TOK_LE
)
3570 else if (op
== TOK_GE
)
3576 /* special case for shifts and long long: we keep the shift as
3578 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
3585 if (TOK_ISCOND(op
)) {
3586 /* relational op: the result is an int */
3587 vtop
->type
.t
= VT_INT
;
3592 // Make sure that we have converted to an rvalue:
3593 if (vtop
->r
& VT_LVAL
)
3594 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3597 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3598 #define gen_cvt_itof1 gen_cvt_itof
3600 /* generic itof for unsigned long long case */
3601 static void gen_cvt_itof1(int t
)
3603 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3604 (VT_LLONG
| VT_UNSIGNED
)) {
3607 vpush_helper_func(TOK___floatundisf
);
3608 #if LDOUBLE_SIZE != 8
3609 else if (t
== VT_LDOUBLE
)
3610 vpush_helper_func(TOK___floatundixf
);
3613 vpush_helper_func(TOK___floatundidf
);
3624 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3625 #define gen_cvt_ftoi1 gen_cvt_ftoi
3627 /* generic ftoi for unsigned long long case */
3628 static void gen_cvt_ftoi1(int t
)
3631 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3632 /* not handled natively */
3633 st
= vtop
->type
.t
& VT_BTYPE
;
3635 vpush_helper_func(TOK___fixunssfdi
);
3636 #if LDOUBLE_SIZE != 8
3637 else if (st
== VT_LDOUBLE
)
3638 vpush_helper_func(TOK___fixunsxfdi
);
3641 vpush_helper_func(TOK___fixunsdfdi
);
3652 /* special delayed cast for char/short */
3653 static void force_charshort_cast(void)
3655 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3656 int dbt
= vtop
->type
.t
;
3657 vtop
->r
&= ~VT_MUSTCAST
;
3659 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3663 static void gen_cast_s(int t
)
3671 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3672 static void gen_cast(CType
*type
)
3674 int sbt
, dbt
, sf
, df
, c
;
3675 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3677 /* special delayed cast for char/short */
3678 if (vtop
->r
& VT_MUSTCAST
)
3679 force_charshort_cast();
3681 /* bitfields first get cast to ints */
3682 if (vtop
->type
.t
& VT_BITFIELD
)
3685 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3686 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3694 dbt_bt
= dbt
& VT_BTYPE
;
3695 sbt_bt
= sbt
& VT_BTYPE
;
3697 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3698 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3699 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3702 /* constant case: we can do it now */
3703 /* XXX: in ISOC, cannot do it if error in convert */
3704 if (sbt
== VT_FLOAT
)
3705 vtop
->c
.ld
= vtop
->c
.f
;
3706 else if (sbt
== VT_DOUBLE
)
3707 vtop
->c
.ld
= vtop
->c
.d
;
3710 if (sbt_bt
== VT_LLONG
) {
3711 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3712 vtop
->c
.ld
= vtop
->c
.i
;
3714 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3716 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3717 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3719 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3722 if (dbt
== VT_FLOAT
)
3723 vtop
->c
.f
= (float)vtop
->c
.ld
;
3724 else if (dbt
== VT_DOUBLE
)
3725 vtop
->c
.d
= (double)vtop
->c
.ld
;
3726 } else if (sf
&& dbt
== VT_BOOL
) {
3727 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3730 vtop
->c
.i
= vtop
->c
.ld
;
3731 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3733 else if (sbt
& VT_UNSIGNED
)
3734 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3736 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3738 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3740 else if (dbt
== VT_BOOL
)
3741 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3743 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3744 dbt_bt
== VT_SHORT
? 0xffff :
3747 if (!(dbt
& VT_UNSIGNED
))
3748 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3753 } else if (dbt
== VT_BOOL
3754 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3755 == (VT_CONST
| VT_SYM
)) {
3756 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3762 /* cannot generate code for global or static initializers */
3763 if (STATIC_DATA_WANTED
)
3766 /* non constant case: generate code */
3767 if (dbt
== VT_BOOL
) {
3768 gen_test_zero(TOK_NE
);
3774 /* convert from fp to fp */
3777 /* convert int to fp */
3780 /* convert fp to int */
3782 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3785 goto again
; /* may need char/short cast */
3790 ds
= btype_size(dbt_bt
);
3791 ss
= btype_size(sbt_bt
);
3792 if (ds
== 0 || ss
== 0) {
3793 if (dbt_bt
== VT_VOID
)
3795 cast_error(&vtop
->type
, type
);
3797 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3798 tcc_error("cast to incomplete type");
3800 /* same size and no sign conversion needed */
3801 if (ds
== ss
&& ds
>= 4)
3803 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3804 tcc_warning("cast between pointer and integer of different size");
3805 if (sbt_bt
== VT_PTR
) {
3806 /* put integer type to allow logical operations below */
3807 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3811 /* processor allows { int a = 0, b = *(char*)&a; }
3812 That means that if we cast to less width, we can just
3813 change the type and read it still later. */
3814 #define ALLOW_SUBTYPE_ACCESS 1
3816 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3817 /* value still in memory */
3821 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3823 goto done
; /* no 64bit envolved */
3831 /* generate high word */
3832 if (sbt
& VT_UNSIGNED
) {
3841 } else if (ss
== 8) {
3842 /* from long long: just take low order word */
3850 /* need to convert from 32bit to 64bit */
3851 if (sbt
& VT_UNSIGNED
) {
3852 #if defined(TCC_TARGET_RISCV64)
3853 /* RISC-V keeps 32bit vals in registers sign-extended.
3854 So here we need a zero-extension. */
3863 ss
= ds
, ds
= 4, dbt
= sbt
;
3864 } else if (ss
== 8) {
3865 /* RISC-V keeps 32bit vals in registers sign-extended.
3866 So here we need a sign-extension for signed types and
3867 zero-extension. for unsigned types. */
3868 #if !defined(TCC_TARGET_RISCV64)
3869 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3878 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3884 bits
= (ss
- ds
) * 8;
3885 /* for unsigned, gen_op will convert SAR to SHR */
3886 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3889 vpushi(bits
- trunc
);
3896 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3899 /* return type size as known at compile time. Put alignment at 'a' */
3900 ST_FUNC
int type_size(CType
*type
, int *a
)
3905 bt
= type
->t
& VT_BTYPE
;
3906 if (bt
== VT_STRUCT
) {
3911 } else if (bt
== VT_PTR
) {
3912 if (type
->t
& VT_ARRAY
) {
3916 ts
= type_size(&s
->type
, a
);
3918 if (ts
< 0 && s
->c
< 0)
3926 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3927 return -1; /* incomplete enum */
3928 } else if (bt
== VT_LDOUBLE
) {
3930 return LDOUBLE_SIZE
;
3931 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3932 #ifdef TCC_TARGET_I386
3933 #ifdef TCC_TARGET_PE
3938 #elif defined(TCC_TARGET_ARM)
3948 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3951 } else if (bt
== VT_SHORT
) {
3954 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3958 /* char, void, function, _Bool */
3964 /* push type size as known at runtime time on top of value stack. Put
3966 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
3968 if (type
->t
& VT_VLA
) {
3969 type_size(&type
->ref
->type
, a
);
3970 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3972 vpushi(type_size(type
, a
));
3976 /* return the pointed type of t */
3977 static inline CType
*pointed_type(CType
*type
)
3979 return &type
->ref
->type
;
3982 /* modify type so that its it is a pointer to type. */
3983 ST_FUNC
void mk_pointer(CType
*type
)
3986 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3987 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3991 /* return true if type1 and type2 are exactly the same (including
3994 static int is_compatible_types(CType
*type1
, CType
*type2
)
3996 return compare_types(type1
,type2
,0);
3999 /* return true if type1 and type2 are the same (ignoring qualifiers).
4001 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
4003 return compare_types(type1
,type2
,1);
4006 static void cast_error(CType
*st
, CType
*dt
)
4008 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
4011 /* verify type compatibility to store vtop in 'dt' type */
4012 static void verify_assign_cast(CType
*dt
)
4014 CType
*st
, *type1
, *type2
;
4015 int dbt
, sbt
, qualwarn
, lvl
;
4017 st
= &vtop
->type
; /* source type */
4018 dbt
= dt
->t
& VT_BTYPE
;
4019 sbt
= st
->t
& VT_BTYPE
;
4020 if (dt
->t
& VT_CONSTANT
)
4021 tcc_warning("assignment of read-only location");
4025 tcc_error("assignment to void expression");
4028 /* special cases for pointers */
4029 /* '0' can also be a pointer */
4030 if (is_null_pointer(vtop
))
4032 /* accept implicit pointer to integer cast with warning */
4033 if (is_integer_btype(sbt
)) {
4034 tcc_warning("assignment makes pointer from integer without a cast");
4037 type1
= pointed_type(dt
);
4039 type2
= pointed_type(st
);
4040 else if (sbt
== VT_FUNC
)
4041 type2
= st
; /* a function is implicitly a function pointer */
4044 if (is_compatible_types(type1
, type2
))
4046 for (qualwarn
= lvl
= 0;; ++lvl
) {
4047 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
4048 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
4050 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
4051 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
4052 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
4054 type1
= pointed_type(type1
);
4055 type2
= pointed_type(type2
);
4057 if (!is_compatible_unqualified_types(type1
, type2
)) {
4058 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
4059 /* void * can match anything */
4060 } else if (dbt
== sbt
4061 && is_integer_btype(sbt
& VT_BTYPE
)
4062 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
4063 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
4064 /* Like GCC don't warn by default for merely changes
4065 in pointer target signedness. Do warn for different
4066 base types, though, in particular for unsigned enums
4067 and signed int targets. */
4069 tcc_warning("assignment from incompatible pointer type");
4074 tcc_warning("assignment discards qualifiers from pointer target type");
4080 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
4081 tcc_warning("assignment makes integer from pointer without a cast");
4082 } else if (sbt
== VT_STRUCT
) {
4083 goto case_VT_STRUCT
;
4085 /* XXX: more tests */
4089 if (!is_compatible_unqualified_types(dt
, st
)) {
4097 static void gen_assign_cast(CType
*dt
)
4099 verify_assign_cast(dt
);
4103 /* store vtop in lvalue pushed on stack */
4104 ST_FUNC
void vstore(void)
4106 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
4108 ft
= vtop
[-1].type
.t
;
4109 sbt
= vtop
->type
.t
& VT_BTYPE
;
4110 dbt
= ft
& VT_BTYPE
;
4112 verify_assign_cast(&vtop
[-1].type
);
4114 if (sbt
== VT_STRUCT
) {
4115 /* if structure, only generate pointer */
4116 /* structure assignment : generate memcpy */
4117 /* XXX: optimize if small size */
4118 size
= type_size(&vtop
->type
, &align
);
4122 #ifdef CONFIG_TCC_BCHECK
4123 if (vtop
->r
& VT_MUSTBOUND
)
4124 gbound(); /* check would be wrong after gaddrof() */
4126 vtop
->type
.t
= VT_PTR
;
4129 /* address of memcpy() */
4132 vpush_helper_func(TOK_memmove8
);
4133 else if(!(align
& 3))
4134 vpush_helper_func(TOK_memmove4
);
4137 /* Use memmove, rather than memcpy, as dest and src may be same: */
4138 vpush_helper_func(TOK_memmove
);
4143 #ifdef CONFIG_TCC_BCHECK
4144 if (vtop
->r
& VT_MUSTBOUND
)
4147 vtop
->type
.t
= VT_PTR
;
4152 /* leave source on stack */
4154 } else if (ft
& VT_BITFIELD
) {
4155 /* bitfield store handling */
4157 /* save lvalue as expression result (example: s.b = s.a = n;) */
4158 vdup(), vtop
[-1] = vtop
[-2];
4160 bit_pos
= BIT_POS(ft
);
4161 bit_size
= BIT_SIZE(ft
);
4162 /* remove bit field info to avoid loops */
4163 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
4165 if (dbt
== VT_BOOL
) {
4166 gen_cast(&vtop
[-1].type
);
4167 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
4169 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
4170 if (dbt
!= VT_BOOL
) {
4171 gen_cast(&vtop
[-1].type
);
4172 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
4174 if (r
== VT_STRUCT
) {
4175 store_packed_bf(bit_pos
, bit_size
);
4177 unsigned long long mask
= (1ULL << bit_size
) - 1;
4178 if (dbt
!= VT_BOOL
) {
4180 if (dbt
== VT_LLONG
)
4183 vpushi((unsigned)mask
);
4190 /* duplicate destination */
4193 /* load destination, mask and or with source */
4194 if (dbt
== VT_LLONG
)
4195 vpushll(~(mask
<< bit_pos
));
4197 vpushi(~((unsigned)mask
<< bit_pos
));
4202 /* ... and discard */
4205 } else if (dbt
== VT_VOID
) {
4208 /* optimize char/short casts */
4210 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
4211 && is_integer_btype(sbt
)
4213 if ((vtop
->r
& VT_MUSTCAST
)
4214 && btype_size(dbt
) > btype_size(sbt
)
4216 force_charshort_cast();
4219 gen_cast(&vtop
[-1].type
);
4222 #ifdef CONFIG_TCC_BCHECK
4223 /* bound check case */
4224 if (vtop
[-1].r
& VT_MUSTBOUND
) {
4230 gv(RC_TYPE(dbt
)); /* generate value */
4233 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
4234 //tcc_warning("deley cast %x -> %x", sbt, dbt);
4235 vtop
->type
.t
= ft
& VT_TYPE
;
4238 /* if lvalue was saved on stack, must read it */
4239 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
4241 r
= get_reg(RC_INT
);
4242 sv
.type
.t
= VT_PTRDIFF_T
;
4243 sv
.r
= VT_LOCAL
| VT_LVAL
;
4244 sv
.c
.i
= vtop
[-1].c
.i
;
4246 vtop
[-1].r
= r
| VT_LVAL
;
4249 r
= vtop
->r
& VT_VALMASK
;
4250 /* two word case handling :
4251 store second register at word + 4 (or +8 for x86-64) */
4252 if (USING_TWO_WORDS(dbt
)) {
4253 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
4254 vtop
[-1].type
.t
= load_type
;
4257 /* convert to int to increment easily */
4258 vtop
->type
.t
= VT_PTRDIFF_T
;
4264 vtop
[-1].type
.t
= load_type
;
4265 /* XXX: it works because r2 is spilled last ! */
4266 store(vtop
->r2
, vtop
- 1);
4272 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
4276 /* post defines POST/PRE add. c is the token ++ or -- */
4277 ST_FUNC
void inc(int post
, int c
)
4280 vdup(); /* save lvalue */
4282 gv_dup(); /* duplicate value */
4287 vpushi(c
- TOK_MID
);
4289 vstore(); /* store value */
4291 vpop(); /* if post op, return saved value */
4294 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
4296 /* read the string */
4300 while (tok
== TOK_STR
) {
4301 /* XXX: add \0 handling too ? */
4302 cstr_cat(astr
, tokc
.str
.data
, -1);
4305 cstr_ccat(astr
, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.  (For other values it returns the position of
   the highest set bit, 1-based.) */
#ifndef ST_FUNC
# define ST_FUNC /* normally provided by tcc.h */
#endif
ST_FUNC int exact_log2p1(int i)
{
    int ret;
    if (!i)
        return 0;
    /* locate the highest set bit, narrowing by 8/4/2/1 bits at a time */
    for (ret = 1; i >= 1 << 8; ret += 8)
        i >>= 8;
    if (i >= 1 << 4)
        ret += 4, i >>= 4;
    if (i >= 1 << 2)
        ret += 2, i >>= 2;
    if (i >= 1 << 1)
        ret++;
    return ret;
}
4326 /* Parse __attribute__((...)) GNUC extension. */
4327 static void parse_attribute(AttributeDef
*ad
)
4333 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
4338 while (tok
!= ')') {
4339 if (tok
< TOK_IDENT
)
4340 expect("attribute name");
4352 tcc_warning("implicit declaration of function '%s'",
4353 get_tok_str(tok
, &tokc
));
4354 s
= external_global_sym(tok
, &func_old_type
);
4355 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4356 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
4357 ad
->cleanup_func
= s
;
4362 case TOK_CONSTRUCTOR1
:
4363 case TOK_CONSTRUCTOR2
:
4364 ad
->f
.func_ctor
= 1;
4366 case TOK_DESTRUCTOR1
:
4367 case TOK_DESTRUCTOR2
:
4368 ad
->f
.func_dtor
= 1;
4370 case TOK_ALWAYS_INLINE1
:
4371 case TOK_ALWAYS_INLINE2
:
4372 ad
->f
.func_alwinl
= 1;
4377 parse_mult_str(&astr
, "section name");
4378 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
4385 parse_mult_str(&astr
, "alias(\"target\")");
4386 ad
->alias_target
= /* save string as token, for later */
4387 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
4391 case TOK_VISIBILITY1
:
4392 case TOK_VISIBILITY2
:
4394 parse_mult_str(&astr
,
4395 "visibility(\"default|hidden|internal|protected\")");
4396 if (!strcmp (astr
.data
, "default"))
4397 ad
->a
.visibility
= STV_DEFAULT
;
4398 else if (!strcmp (astr
.data
, "hidden"))
4399 ad
->a
.visibility
= STV_HIDDEN
;
4400 else if (!strcmp (astr
.data
, "internal"))
4401 ad
->a
.visibility
= STV_INTERNAL
;
4402 else if (!strcmp (astr
.data
, "protected"))
4403 ad
->a
.visibility
= STV_PROTECTED
;
4405 expect("visibility(\"default|hidden|internal|protected\")");
4414 if (n
<= 0 || (n
& (n
- 1)) != 0)
4415 tcc_error("alignment must be a positive power of two");
4420 ad
->a
.aligned
= exact_log2p1(n
);
4421 if (n
!= 1 << (ad
->a
.aligned
- 1))
4422 tcc_error("alignment of %d is larger than implemented", n
);
4434 /* currently, no need to handle it because tcc does not
4435 track unused objects */
4439 ad
->f
.func_noreturn
= 1;
4444 ad
->f
.func_call
= FUNC_CDECL
;
4449 ad
->f
.func_call
= FUNC_STDCALL
;
4451 #ifdef TCC_TARGET_I386
4461 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
4467 ad
->f
.func_call
= FUNC_FASTCALLW
;
4474 ad
->attr_mode
= VT_LLONG
+ 1;
4477 ad
->attr_mode
= VT_BYTE
+ 1;
4480 ad
->attr_mode
= VT_SHORT
+ 1;
4484 ad
->attr_mode
= VT_INT
+ 1;
4487 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4494 ad
->a
.dllexport
= 1;
4496 case TOK_NODECORATE
:
4497 ad
->a
.nodecorate
= 1;
4500 ad
->a
.dllimport
= 1;
4503 if (tcc_state
->warn_unsupported
)
4504 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
4505 /* skip parameters */
4507 int parenthesis
= 0;
4511 else if (tok
== ')')
4514 } while (parenthesis
&& tok
!= -1);
4527 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
4531 while ((s
= s
->next
) != NULL
) {
4532 if ((s
->v
& SYM_FIELD
) &&
4533 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
4534 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
4535 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
4547 static void check_fields (CType
*type
, int check
)
4551 while ((s
= s
->next
) != NULL
) {
4552 int v
= s
->v
& ~SYM_FIELD
;
4553 if (v
< SYM_FIRST_ANOM
) {
4554 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
4555 if (check
&& (ts
->tok
& SYM_FIELD
))
4556 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
4557 ts
->tok
^= SYM_FIELD
;
4558 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
4559 check_fields (&s
->type
, check
);
4563 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4565 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4566 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4567 int pcc
= !tcc_state
->ms_bitfields
;
4568 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4575 prevbt
= VT_STRUCT
; /* make it never match */
4580 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4581 if (f
->type
.t
& VT_BITFIELD
)
4582 bit_size
= BIT_SIZE(f
->type
.t
);
4585 size
= type_size(&f
->type
, &align
);
4586 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4589 if (pcc
&& bit_size
== 0) {
4590 /* in pcc mode, packing does not affect zero-width bitfields */
4593 /* in pcc mode, attribute packed overrides if set. */
4594 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4597 /* pragma pack overrides align if lesser and packs bitfields always */
4600 if (pragma_pack
< align
)
4601 align
= pragma_pack
;
4602 /* in pcc mode pragma pack also overrides individual align */
4603 if (pcc
&& pragma_pack
< a
)
4607 /* some individual align was specified */
4611 if (type
->ref
->type
.t
== VT_UNION
) {
4612 if (pcc
&& bit_size
>= 0)
4613 size
= (bit_size
+ 7) >> 3;
4618 } else if (bit_size
< 0) {
4620 c
+= (bit_pos
+ 7) >> 3;
4621 c
= (c
+ align
- 1) & -align
;
4630 /* A bit-field. Layout is more complicated. There are two
4631 options: PCC (GCC) compatible and MS compatible */
4633 /* In PCC layout a bit-field is placed adjacent to the
4634 preceding bit-fields, except if:
4636 - an individual alignment was given
4637 - it would overflow its base type container and
4638 there is no packing */
4639 if (bit_size
== 0) {
4641 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4643 } else if (f
->a
.aligned
) {
4645 } else if (!packed
) {
4647 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4648 if (ofs
> size
/ align
)
4652 /* in pcc mode, long long bitfields have type int if they fit */
4653 if (size
== 8 && bit_size
<= 32)
4654 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4656 while (bit_pos
>= align
* 8)
4657 c
+= align
, bit_pos
-= align
* 8;
4660 /* In PCC layout named bit-fields influence the alignment
4661 of the containing struct using the base types alignment,
4662 except for packed fields (which here have correct align). */
4663 if (f
->v
& SYM_FIRST_ANOM
4664 // && bit_size // ??? gcc on ARM/rpi does that
4669 bt
= f
->type
.t
& VT_BTYPE
;
4670 if ((bit_pos
+ bit_size
> size
* 8)
4671 || (bit_size
> 0) == (bt
!= prevbt
)
4673 c
= (c
+ align
- 1) & -align
;
4676 /* In MS bitfield mode a bit-field run always uses
4677 at least as many bits as the underlying type.
4678 To start a new run it's also required that this
4679 or the last bit-field had non-zero width. */
4680 if (bit_size
|| prev_bit_size
)
4683 /* In MS layout the records alignment is normally
4684 influenced by the field, except for a zero-width
4685 field at the start of a run (but by further zero-width
4686 fields it is again). */
4687 if (bit_size
== 0 && prevbt
!= bt
)
4690 prev_bit_size
= bit_size
;
4693 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4694 | (bit_pos
<< VT_STRUCT_SHIFT
);
4695 bit_pos
+= bit_size
;
4697 if (align
> maxalign
)
4701 printf("set field %s offset %-2d size %-2d align %-2d",
4702 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4703 if (f
->type
.t
& VT_BITFIELD
) {
4704 printf(" pos %-2d bits %-2d",
4717 c
+= (bit_pos
+ 7) >> 3;
4719 /* store size and alignment */
4720 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4724 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4725 /* can happen if individual align for some member was given. In
4726 this case MSVC ignores maxalign when aligning the size */
4731 c
= (c
+ a
- 1) & -a
;
4735 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4738 /* check whether we can access bitfields by their type */
4739 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4743 if (0 == (f
->type
.t
& VT_BITFIELD
))
4747 bit_size
= BIT_SIZE(f
->type
.t
);
4750 bit_pos
= BIT_POS(f
->type
.t
);
4751 size
= type_size(&f
->type
, &align
);
4753 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4754 #ifdef TCC_TARGET_ARM
4755 && !(f
->c
& (align
- 1))
4760 /* try to access the field using a different type */
4761 c0
= -1, s
= align
= 1;
4764 px
= f
->c
* 8 + bit_pos
;
4765 cx
= (px
>> 3) & -align
;
4766 px
= px
- (cx
<< 3);
4769 s
= (px
+ bit_size
+ 7) >> 3;
4779 s
= type_size(&t
, &align
);
4783 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4784 #ifdef TCC_TARGET_ARM
4785 && !(cx
& (align
- 1))
4788 /* update offset and bit position */
4791 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4792 | (bit_pos
<< VT_STRUCT_SHIFT
);
4796 printf("FIX field %s offset %-2d size %-2d align %-2d "
4797 "pos %-2d bits %-2d\n",
4798 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4799 cx
, s
, align
, px
, bit_size
);
4802 /* fall back to load/store single-byte wise */
4803 f
->auxtype
= VT_STRUCT
;
4805 printf("FIX field %s : load byte-wise\n",
4806 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4812 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4813 static void struct_decl(CType
*type
, int u
)
4815 int v
, c
, size
, align
, flexible
;
4816 int bit_size
, bsize
, bt
;
4818 AttributeDef ad
, ad1
;
4821 memset(&ad
, 0, sizeof ad
);
4823 parse_attribute(&ad
);
4827 /* struct already defined ? return it */
4829 expect("struct/union/enum name");
4831 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4834 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4836 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4841 /* Record the original enum/struct/union token. */
4842 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4844 /* we put an undefined size for struct/union */
4845 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4846 s
->r
= 0; /* default alignment is zero as gcc */
4848 type
->t
= s
->type
.t
;
4854 tcc_error("struct/union/enum already defined");
4856 /* cannot be empty */
4857 /* non empty enums are not allowed */
4860 long long ll
= 0, pl
= 0, nl
= 0;
4863 /* enum symbols have static storage */
4864 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4868 expect("identifier");
4870 if (ss
&& !local_stack
)
4871 tcc_error("redefinition of enumerator '%s'",
4872 get_tok_str(v
, NULL
));
4876 ll
= expr_const64();
4878 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4880 *ps
= ss
, ps
= &ss
->next
;
4889 /* NOTE: we accept a trailing comma */
4894 /* set integral type of the enum */
4897 if (pl
!= (unsigned)pl
)
4898 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4900 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4901 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4902 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4904 /* set type for enum members */
4905 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4907 if (ll
== (int)ll
) /* default is int if it fits */
4909 if (t
.t
& VT_UNSIGNED
) {
4910 ss
->type
.t
|= VT_UNSIGNED
;
4911 if (ll
== (unsigned)ll
)
4914 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4915 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4920 while (tok
!= '}') {
4921 if (!parse_btype(&btype
, &ad1
)) {
4927 tcc_error("flexible array member '%s' not at the end of struct",
4928 get_tok_str(v
, NULL
));
4934 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4936 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4937 expect("identifier");
4939 int v
= btype
.ref
->v
;
4940 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4941 if (tcc_state
->ms_extensions
== 0)
4942 expect("identifier");
4946 if (type_size(&type1
, &align
) < 0) {
4947 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4950 tcc_error("field '%s' has incomplete type",
4951 get_tok_str(v
, NULL
));
4953 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4954 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4955 (type1
.t
& VT_STORAGE
))
4956 tcc_error("invalid type for '%s'",
4957 get_tok_str(v
, NULL
));
4961 bit_size
= expr_const();
4962 /* XXX: handle v = 0 case for messages */
4964 tcc_error("negative width in bit-field '%s'",
4965 get_tok_str(v
, NULL
));
4966 if (v
&& bit_size
== 0)
4967 tcc_error("zero width for bit-field '%s'",
4968 get_tok_str(v
, NULL
));
4969 parse_attribute(&ad1
);
4971 size
= type_size(&type1
, &align
);
4972 if (bit_size
>= 0) {
4973 bt
= type1
.t
& VT_BTYPE
;
4979 tcc_error("bitfields must have scalar type");
4981 if (bit_size
> bsize
) {
4982 tcc_error("width of '%s' exceeds its type",
4983 get_tok_str(v
, NULL
));
4984 } else if (bit_size
== bsize
4985 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4986 /* no need for bit fields */
4988 } else if (bit_size
== 64) {
4989 tcc_error("field width 64 not implemented");
4991 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4993 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4996 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4997 /* Remember we've seen a real field to check
4998 for placement of flexible array member. */
5001 /* If member is a struct or bit-field, enforce
5002 placing into the struct (as anonymous). */
5004 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
5009 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
5014 if (tok
== ';' || tok
== TOK_EOF
)
5021 parse_attribute(&ad
);
5022 if (ad
.cleanup_func
) {
5023 tcc_warning("attribute '__cleanup__' ignored on type");
5025 check_fields(type
, 1);
5026 check_fields(type
, 0);
5027 struct_layout(type
, &ad
);
5032 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
5034 merge_symattr(&ad
->a
, &s
->a
);
5035 merge_funcattr(&ad
->f
, &s
->f
);
5038 /* Add type qualifiers to a type. If the type is an array then the qualifiers
5039 are added to the element type, copied because it could be a typedef. */
5040 static void parse_btype_qualify(CType
*type
, int qualifiers
)
5042 while (type
->t
& VT_ARRAY
) {
5043 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
5044 type
= &type
->ref
->type
;
5046 type
->t
|= qualifiers
;
5049 /* return 0 if no type declaration. otherwise, return the basic type
5052 static int parse_btype(CType
*type
, AttributeDef
*ad
)
5054 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
5058 memset(ad
, 0, sizeof(AttributeDef
));
5068 /* currently, we really ignore extension */
5078 if (u
== VT_SHORT
|| u
== VT_LONG
) {
5079 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
5080 tmbt
: tcc_error("too many basic types");
5083 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
5088 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
5105 memset(&ad1
, 0, sizeof(AttributeDef
));
5106 if (parse_btype(&type1
, &ad1
)) {
5107 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
5109 n
= 1 << (ad1
.a
.aligned
- 1);
5111 type_size(&type1
, &n
);
5114 if (n
<= 0 || (n
& (n
- 1)) != 0)
5115 tcc_error("alignment must be a positive power of two");
5118 ad
->a
.aligned
= exact_log2p1(n
);
5122 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
5123 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
5124 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
5125 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
5132 #ifdef TCC_TARGET_ARM64
5134 /* GCC's __uint128_t appears in some Linux header files. Make it a
5135 synonym for long double to get the size and alignment right. */
5146 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
5147 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
5155 struct_decl(&type1
, VT_ENUM
);
5158 type
->ref
= type1
.ref
;
5161 struct_decl(&type1
, VT_STRUCT
);
5164 struct_decl(&type1
, VT_UNION
);
5167 /* type modifiers */
5171 parse_btype_qualify(type
, VT_ATOMIC
);
5174 parse_expr_type(&type1
);
5175 /* remove all storage modifiers except typedef */
5176 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
5178 sym_to_attr(ad
, type1
.ref
);
5186 parse_btype_qualify(type
, VT_CONSTANT
);
5194 parse_btype_qualify(type
, VT_VOLATILE
);
5201 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
5202 tcc_error("signed and unsigned modifier");
5215 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
5216 tcc_error("signed and unsigned modifier");
5217 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
5233 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
5234 tcc_error("multiple storage classes");
5246 ad
->f
.func_noreturn
= 1;
5248 /* GNUC attribute */
5249 case TOK_ATTRIBUTE1
:
5250 case TOK_ATTRIBUTE2
:
5251 parse_attribute(ad
);
5252 if (ad
->attr_mode
) {
5253 u
= ad
->attr_mode
-1;
5254 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
5262 parse_expr_type(&type1
);
5263 /* remove all storage modifiers except typedef */
5264 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
5266 sym_to_attr(ad
, type1
.ref
);
5272 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
5276 if (tok
== ':' && !in_generic
) {
5277 /* ignore if it's a label */
5282 t
&= ~(VT_BTYPE
|VT_LONG
);
5283 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
5284 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
5285 type
->ref
= s
->type
.ref
;
5287 parse_btype_qualify(type
, t
);
5289 /* get attributes from typedef */
5298 if (tcc_state
->char_is_unsigned
) {
5299 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
5302 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
5303 bt
= t
& (VT_BTYPE
|VT_LONG
);
5305 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
5306 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5307 if (bt
== VT_LDOUBLE
)
5308 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
5314 /* convert a function parameter type (array to pointer and function to
5315 function pointer) */
5316 static inline void convert_parameter_type(CType
*pt
)
5318 /* remove const and volatile qualifiers (XXX: const could be used
5319 to indicate a const function parameter */
5320 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5321 /* array must be transformed to pointer according to ANSI C */
5323 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
5328 ST_FUNC
void parse_asm_str(CString
*astr
)
5331 parse_mult_str(astr
, "string constant");
5334 /* Parse an asm label and return the token */
5335 static int asm_label_instr(void)
5341 parse_asm_str(&astr
);
5344 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
5346 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
5351 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
5353 int n
, l
, t1
, arg_size
, align
, unused_align
;
5354 Sym
**plast
, *s
, *first
;
5359 /* function type, or recursive declarator (return if so) */
5361 if (td
&& !(td
& TYPE_ABSTRACT
))
5365 else if (parse_btype(&pt
, &ad1
))
5368 merge_attr (ad
, &ad1
);
5377 /* read param name and compute offset */
5378 if (l
!= FUNC_OLD
) {
5379 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
5381 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
5382 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
5383 tcc_error("parameter declared as void");
5387 expect("identifier");
5388 pt
.t
= VT_VOID
; /* invalid type */
5392 convert_parameter_type(&pt
);
5393 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
5394 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
5400 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
5405 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
5406 tcc_error("invalid type");
5409 /* if no parameters, then old type prototype */
5412 /* NOTE: const is ignored in returned type as it has a special
5413 meaning in gcc / C++ */
5414 type
->t
&= ~VT_CONSTANT
;
5415 /* some ancient pre-K&R C allows a function to return an array
5416 and the array brackets to be put after the arguments, such
5417 that "int c()[]" means something like "int[] c()" */
5420 skip(']'); /* only handle simple "[]" */
5423 /* we push a anonymous symbol which will contain the function prototype */
5424 ad
->f
.func_args
= arg_size
;
5425 ad
->f
.func_type
= l
;
5426 s
= sym_push(SYM_FIELD
, type
, 0, 0);
5432 } else if (tok
== '[') {
5433 int saved_nocode_wanted
= nocode_wanted
;
5434 /* array definition */
5437 /* XXX The optional type-quals and static should only be accepted
5438 in parameter decls. The '*' as well, and then even only
5439 in prototypes (not function defs). */
5441 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5456 if (!local_stack
|| (storage
& VT_STATIC
))
5457 vpushi(expr_const());
5459 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5460 length must always be evaluated, even under nocode_wanted,
5461 so that its size slot is initialized (e.g. under sizeof
5466 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5469 tcc_error("invalid array size");
5471 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5472 tcc_error("size of variable length array should be an integer");
5478 /* parse next post type */
5479 post_type(type
, ad
, storage
, 0);
5481 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5482 tcc_error("declaration of an array of functions");
5483 if ((type
->t
& VT_BTYPE
) == VT_VOID
5484 || type_size(type
, &unused_align
) < 0)
5485 tcc_error("declaration of an array of incomplete type elements");
5487 t1
|= type
->t
& VT_VLA
;
5491 tcc_error("need explicit inner array size in VLAs");
5492 loc
-= type_size(&int_type
, &align
);
5496 vla_runtime_type_size(type
, &align
);
5498 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5504 nocode_wanted
= saved_nocode_wanted
;
5506 /* we push an anonymous symbol which will contain the array
5508 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5509 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5515 /* Parse a type declarator (except basic type), and return the type
5516 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5517 expected. 'type' should contain the basic type. 'ad' is the
5518 attribute definition of the basic type. It can be modified by
5519 type_decl(). If this (possibly abstract) declarator is a pointer chain
5520 it returns the innermost pointed to type (equals *type, but is a different
5521 pointer), otherwise returns type itself, that's used for recursive calls. */
5522 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5525 int qualifiers
, storage
;
5527 /* recursive type, remove storage bits first, apply them later again */
5528 storage
= type
->t
& VT_STORAGE
;
5529 type
->t
&= ~VT_STORAGE
;
5532 while (tok
== '*') {
5538 qualifiers
|= VT_ATOMIC
;
5543 qualifiers
|= VT_CONSTANT
;
5548 qualifiers
|= VT_VOLATILE
;
5554 /* XXX: clarify attribute handling */
5555 case TOK_ATTRIBUTE1
:
5556 case TOK_ATTRIBUTE2
:
5557 parse_attribute(ad
);
5561 type
->t
|= qualifiers
;
5563 /* innermost pointed to type is the one for the first derivation */
5564 ret
= pointed_type(type
);
5568 /* This is possibly a parameter type list for abstract declarators
5569 ('int ()'), use post_type for testing this. */
5570 if (!post_type(type
, ad
, 0, td
)) {
5571 /* It's not, so it's a nested declarator, and the post operations
5572 apply to the innermost pointed to type (if any). */
5573 /* XXX: this is not correct to modify 'ad' at this point, but
5574 the syntax is not clear */
5575 parse_attribute(ad
);
5576 post
= type_decl(type
, ad
, v
, td
);
5580 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5581 /* type identifier */
5586 if (!(td
& TYPE_ABSTRACT
))
5587 expect("identifier");
5590 post_type(post
, ad
, storage
, 0);
5591 parse_attribute(ad
);
5596 /* indirection with full error checking and bound check */
5597 ST_FUNC
void indir(void)
5599 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5600 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5604 if (vtop
->r
& VT_LVAL
)
5606 vtop
->type
= *pointed_type(&vtop
->type
);
5607 /* Arrays and functions are never lvalues */
5608 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5609 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5611 /* if bound checking, the referenced pointer must be checked */
5612 #ifdef CONFIG_TCC_BCHECK
5613 if (tcc_state
->do_bounds_check
)
5614 vtop
->r
|= VT_MUSTBOUND
;
5619 /* pass a parameter to a function and do type checking and casting */
5620 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5625 func_type
= func
->f
.func_type
;
5626 if (func_type
== FUNC_OLD
||
5627 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5628 /* default casting : only need to convert float to double */
5629 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5630 gen_cast_s(VT_DOUBLE
);
5631 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5632 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5633 type
.ref
= vtop
->type
.ref
;
5635 } else if (vtop
->r
& VT_MUSTCAST
) {
5636 force_charshort_cast();
5638 } else if (arg
== NULL
) {
5639 tcc_error("too many arguments to function");
5642 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5643 gen_assign_cast(&type
);
5647 /* parse an expression and return its type without any side effect. */
5648 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5657 /* parse an expression of the form '(type)' or '(expr)' and return its
5659 static void parse_expr_type(CType
*type
)
5665 if (parse_btype(type
, &ad
)) {
5666 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5668 expr_type(type
, gexpr
);
5673 static void parse_type(CType
*type
)
5678 if (!parse_btype(type
, &ad
)) {
5681 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5684 static void parse_builtin_params(int nc
, const char *args
)
5693 while ((c
= *args
++)) {
5708 type
.t
= VT_CONSTANT
;
5714 type
.t
= VT_CONSTANT
;
5716 type
.t
|= char_type
.t
;
5728 gen_assign_cast(&type
);
5735 static inline int is_memory_model(const SValue
*sv
)
5739 * The memory models should better be backed by an enumeration.
5741 * const int t = sv->type.t;
5743 * if (!IS_ENUM_VAL(t))
5746 * if (!(t & VT_STATIC))
5749 * Ideally we should check whether the model matches 1:1.
5750 * If it is possible, we should check by the name of the value.
5752 return (((sv
->type
.t
& VT_BTYPE
) == VT_INT
) && (sv
->c
.i
< 6));
5755 static void parse_atomic(int atok
)
5761 char const *params
= NULL
;
5764 char const *const params
;
5768 * A -- read-only atomic
5769 * p -- pointer to memory
5770 * P -- pointer to read-only memory
5774 {TOK___c11_atomic_init
, "-av"},
5775 {TOK___c11_atomic_store
, "-avm"},
5776 {TOK___c11_atomic_load
, "am"},
5777 {TOK___c11_atomic_exchange
, "avm"},
5778 {TOK___c11_atomic_compare_exchange_strong
, "apvmm"},
5779 {TOK___c11_atomic_compare_exchange_weak
, "apvmm"},
5780 {TOK___c11_atomic_fetch_add
, "avm"},
5781 {TOK___c11_atomic_fetch_sub
, "avm"},
5782 {TOK___c11_atomic_fetch_or
, "avm"},
5783 {TOK___c11_atomic_fetch_xor
, "avm"},
5784 {TOK___c11_atomic_fetch_and
, "avm"},
5789 for (op
= 0; op
< (sizeof(ops
) / sizeof(*ops
)); ++op
) {
5790 if (ops
[op
].tok
== atok
) {
5791 params
= ops
[op
].params
;
5796 tcc_error("unknown atomic operation");
5798 argc
= strlen(params
);
5799 if (params
[0] == '-') {
5805 vpushi(0); /* function address */
5808 for (arg
= 0; arg
< argc
; ++arg
) {
5811 switch (params
[arg
]) {
5815 expect_arg("exactly one pointer to atomic", arg
);
5816 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
5817 expect_arg("pointer to atomic expected", arg
);
5818 atom
= pointed_type(&vtop
->type
);
5819 if (!(atom
->t
& VT_ATOMIC
))
5820 expect_arg("qualified pointer to atomic", arg
);
5821 if ((params
[arg
] == 'a') && (atom
->t
& VT_CONSTANT
))
5822 expect_arg("pointer to writable atomic", arg
);
5823 atom
->t
&= ~VT_ATOMIC
;
5824 switch (btype_size(atom
->t
& VT_BTYPE
)) {
5825 case 1: atok
+= 1; break;
5826 case 2: atok
+= 2; break;
5827 case 4: atok
+= 3; break;
5828 case 8: atok
+= 4; break;
5829 default: tcc_error("only integer-sized types are supported");
5833 vpush_helper_func(atok
);
5838 if (((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
5839 || !is_compatible_unqualified_types(atom
, pointed_type(&vtop
->type
)))
5840 expect_arg("pointer to compatible type", arg
);
5844 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5845 expect_arg("integer type", arg
);
5849 if (!is_memory_model(vtop
))
5850 expect_arg("memory model", arg
);
5851 vtop
->type
.t
&= ~VT_MEMMODEL
;
5855 tcc_error("unknown parameter type");
5861 if (arg
< (argc
- 1))
5862 expect("more parameters");
5863 if (arg
> (argc
- 1))
5864 expect("less parameters");
5870 ST_FUNC
void unary(void)
5872 int n
, t
, align
, size
, r
, sizeof_caller
;
5877 /* generate line number info */
5879 tcc_debug_line(tcc_state
), tcc_tcov_check_line (1);
5881 sizeof_caller
= in_sizeof
;
5884 /* XXX: GCC 2.95.3 does not generate a table although it should be
5892 #ifdef TCC_TARGET_PE
5893 t
= VT_SHORT
|VT_UNSIGNED
;
5901 vsetc(&type
, VT_CONST
, &tokc
);
5905 t
= VT_INT
| VT_UNSIGNED
;
5911 t
= VT_LLONG
| VT_UNSIGNED
;
5923 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5926 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5928 case TOK___FUNCTION__
:
5930 goto tok_identifier
;
5936 /* special function name identifier */
5937 len
= strlen(funcname
) + 1;
5938 /* generate char[len] type */
5943 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
5944 if (!NODATA_WANTED
) {
5945 ptr
= section_ptr_add(data_section
, len
);
5946 memcpy(ptr
, funcname
, len
);
5952 #ifdef TCC_TARGET_PE
5953 t
= VT_SHORT
| VT_UNSIGNED
;
5959 /* string parsing */
5961 if (tcc_state
->char_is_unsigned
)
5962 t
= VT_BYTE
| VT_UNSIGNED
;
5964 if (tcc_state
->warn_write_strings
)
5969 memset(&ad
, 0, sizeof(AttributeDef
));
5970 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5975 if (parse_btype(&type
, &ad
)) {
5976 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5978 /* check ISOC99 compound literal */
5980 /* data is allocated locally by default */
5985 /* all except arrays are lvalues */
5986 if (!(type
.t
& VT_ARRAY
))
5988 memset(&ad
, 0, sizeof(AttributeDef
));
5989 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5991 if (sizeof_caller
) {
5998 } else if (tok
== '{') {
5999 int saved_nocode_wanted
= nocode_wanted
;
6000 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
6002 if (0 == local_scope
)
6003 tcc_error("statement expression outside of function");
6004 /* save all registers */
6006 /* statement expression : we do not accept break/continue
6007 inside as GCC does. We do retain the nocode_wanted state,
6008 as statement expressions can't ever be entered from the
6009 outside, so any reactivation of code emission (from labels
6010 or loop heads) can be disabled again after the end of it. */
6012 nocode_wanted
= saved_nocode_wanted
;
6027 /* functions names must be treated as function pointers,
6028 except for unary '&' and sizeof. Since we consider that
6029 functions are not lvalues, we only have to handle it
6030 there and in function calls. */
6031 /* arrays can also be used although they are not lvalues */
6032 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
6033 !(vtop
->type
.t
& VT_ARRAY
))
6036 vtop
->sym
->a
.addrtaken
= 1;
6037 mk_pointer(&vtop
->type
);
6043 gen_test_zero(TOK_EQ
);
6054 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
6055 tcc_error("pointer not accepted for unary plus");
6056 /* In order to force cast, we add zero, except for floating point
6057 where we really need an noop (otherwise -0.0 will be transformed
6059 if (!is_float(vtop
->type
.t
)) {
6071 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
6073 if (vtop
[1].r
& VT_SYM
)
6074 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
6075 size
= type_size(&type
, &align
);
6076 if (s
&& s
->a
.aligned
)
6077 align
= 1 << (s
->a
.aligned
- 1);
6078 if (t
== TOK_SIZEOF
) {
6079 if (!(type
.t
& VT_VLA
)) {
6081 tcc_error("sizeof applied to an incomplete type");
6084 vla_runtime_type_size(&type
, &align
);
6089 vtop
->type
.t
|= VT_UNSIGNED
;
6092 case TOK_builtin_expect
:
6093 /* __builtin_expect is a no-op for now */
6094 parse_builtin_params(0, "ee");
6097 case TOK_builtin_types_compatible_p
:
6098 parse_builtin_params(0, "tt");
6099 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
6100 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
6101 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
6105 case TOK_builtin_choose_expr
:
6132 case TOK_builtin_constant_p
:
6133 parse_builtin_params(1, "e");
6134 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6135 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
6139 case TOK_builtin_frame_address
:
6140 case TOK_builtin_return_address
:
6146 if (tok
!= TOK_CINT
) {
6147 tcc_error("%s only takes positive integers",
6148 tok1
== TOK_builtin_return_address
?
6149 "__builtin_return_address" :
6150 "__builtin_frame_address");
6152 level
= (uint32_t)tokc
.i
;
6157 vset(&type
, VT_LOCAL
, 0); /* local frame */
6159 #ifdef TCC_TARGET_RISCV64
6163 mk_pointer(&vtop
->type
);
6164 indir(); /* -> parent frame */
6166 if (tok1
== TOK_builtin_return_address
) {
6167 // assume return address is just above frame pointer on stack
6168 #ifdef TCC_TARGET_ARM
6171 #elif defined TCC_TARGET_RISCV64
6178 mk_pointer(&vtop
->type
);
6183 #ifdef TCC_TARGET_RISCV64
6184 case TOK_builtin_va_start
:
6185 parse_builtin_params(0, "ee");
6186 r
= vtop
->r
& VT_VALMASK
;
6190 tcc_error("__builtin_va_start expects a local variable");
6195 #ifdef TCC_TARGET_X86_64
6196 #ifdef TCC_TARGET_PE
6197 case TOK_builtin_va_start
:
6198 parse_builtin_params(0, "ee");
6199 r
= vtop
->r
& VT_VALMASK
;
6203 tcc_error("__builtin_va_start expects a local variable");
6205 vtop
->type
= char_pointer_type
;
6210 case TOK_builtin_va_arg_types
:
6211 parse_builtin_params(0, "t");
6212 vpushi(classify_x86_64_va_arg(&vtop
->type
));
6219 #ifdef TCC_TARGET_ARM64
6220 case TOK_builtin_va_start
: {
6221 parse_builtin_params(0, "ee");
6225 vtop
->type
.t
= VT_VOID
;
6228 case TOK_builtin_va_arg
: {
6229 parse_builtin_params(0, "et");
6237 case TOK___arm64_clear_cache
: {
6238 parse_builtin_params(0, "ee");
6241 vtop
->type
.t
= VT_VOID
;
6246 /* atomic operations */
6247 case TOK___c11_atomic_init
:
6248 case TOK___c11_atomic_store
:
6249 case TOK___c11_atomic_load
:
6250 case TOK___c11_atomic_exchange
:
6251 case TOK___c11_atomic_compare_exchange_strong
:
6252 case TOK___c11_atomic_compare_exchange_weak
:
6253 case TOK___c11_atomic_fetch_add
:
6254 case TOK___c11_atomic_fetch_sub
:
6255 case TOK___c11_atomic_fetch_or
:
6256 case TOK___c11_atomic_fetch_xor
:
6257 case TOK___c11_atomic_fetch_and
:
6261 /* pre operations */
6272 if (is_float(vtop
->type
.t
)) {
6282 goto tok_identifier
;
6284 /* allow to take the address of a label */
6285 if (tok
< TOK_UIDENT
)
6286 expect("label identifier");
6287 s
= label_find(tok
);
6289 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6291 if (s
->r
== LABEL_DECLARED
)
6292 s
->r
= LABEL_FORWARD
;
6295 s
->type
.t
= VT_VOID
;
6296 mk_pointer(&s
->type
);
6297 s
->type
.t
|= VT_STATIC
;
6299 vpushsym(&s
->type
, s
);
6305 CType controlling_type
;
6306 int has_default
= 0;
6309 TokenString
*str
= NULL
;
6310 int saved_const_wanted
= const_wanted
;
6315 expr_type(&controlling_type
, expr_eq
);
6316 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
6317 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
6318 mk_pointer(&controlling_type
);
6319 const_wanted
= saved_const_wanted
;
6323 if (tok
== TOK_DEFAULT
) {
6325 tcc_error("too many 'default'");
6331 AttributeDef ad_tmp
;
6336 parse_btype(&cur_type
, &ad_tmp
);
6339 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
6340 if (compare_types(&controlling_type
, &cur_type
, 0)) {
6342 tcc_error("type match twice");
6352 skip_or_save_block(&str
);
6354 skip_or_save_block(NULL
);
6361 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
6362 tcc_error("type '%s' does not match any association", buf
);
6364 begin_macro(str
, 1);
6373 // special qnan , snan and infinity values
6378 vtop
->type
.t
= VT_FLOAT
;
6383 goto special_math_val
;
6386 goto special_math_val
;
6393 expect("identifier");
6395 if (!s
|| IS_ASM_SYM(s
)) {
6396 const char *name
= get_tok_str(t
, NULL
);
6398 tcc_error("'%s' undeclared", name
);
6399 /* for simple function calls, we tolerate undeclared
6400 external reference to int() function */
6401 if (tcc_state
->warn_implicit_function_declaration
6402 #ifdef TCC_TARGET_PE
6403 /* people must be warned about using undeclared WINAPI functions
6404 (which usually start with uppercase letter) */
6405 || (name
[0] >= 'A' && name
[0] <= 'Z')
6408 tcc_warning("implicit declaration of function '%s'", name
);
6409 s
= external_global_sym(t
, &func_old_type
);
6413 /* A symbol that has a register is a local register variable,
6414 which starts out as VT_LOCAL value. */
6415 if ((r
& VT_VALMASK
) < VT_CONST
)
6416 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
6418 vset(&s
->type
, r
, s
->c
);
6419 /* Point to s as backpointer (even without r&VT_SYM).
6420 Will be used by at least the x86 inline asm parser for
6426 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
6427 vtop
->c
.i
= s
->enum_val
;
6432 /* post operations */
6434 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
6437 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
6438 int qualifiers
, cumofs
= 0;
6440 if (tok
== TOK_ARROW
)
6442 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
6445 /* expect pointer on structure */
6446 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
6447 expect("struct or union");
6448 if (tok
== TOK_CDOUBLE
)
6449 expect("field name");
6451 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
6452 expect("field name");
6453 s
= find_field(&vtop
->type
, tok
, &cumofs
);
6455 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
6456 /* add field offset to pointer */
6457 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
6458 vpushi(cumofs
+ s
->c
);
6460 /* change type to field type, and set to lvalue */
6461 vtop
->type
= s
->type
;
6462 vtop
->type
.t
|= qualifiers
;
6463 /* an array is never an lvalue */
6464 if (!(vtop
->type
.t
& VT_ARRAY
)) {
6466 #ifdef CONFIG_TCC_BCHECK
6467 /* if bound checking, the referenced pointer must be checked */
6468 if (tcc_state
->do_bounds_check
)
6469 vtop
->r
|= VT_MUSTBOUND
;
6473 } else if (tok
== '[') {
6479 } else if (tok
== '(') {
6482 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
6485 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
6486 /* pointer test (no array accepted) */
6487 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
6488 vtop
->type
= *pointed_type(&vtop
->type
);
6489 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6493 expect("function pointer");
6496 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
6498 /* get return type */
6501 sa
= s
->next
; /* first parameter */
6502 nb_args
= regsize
= 0;
6504 /* compute first implicit argument if a structure is returned */
6505 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
6506 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
6507 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6508 &ret_align
, ®size
);
6509 if (ret_nregs
<= 0) {
6510 /* get some space for the returned structure */
6511 size
= type_size(&s
->type
, &align
);
6512 #ifdef TCC_TARGET_ARM64
6513 /* On arm64, a small struct is return in registers.
6514 It is much easier to write it to memory if we know
6515 that we are allowed to write some extra bytes, so
6516 round the allocated space up to a power of 2: */
6518 while (size
& (size
- 1))
6519 size
= (size
| (size
- 1)) + 1;
6521 loc
= (loc
- size
) & -align
;
6523 ret
.r
= VT_LOCAL
| VT_LVAL
;
6524 /* pass it as 'int' to avoid structure arg passing
6526 vseti(VT_LOCAL
, loc
);
6527 #ifdef CONFIG_TCC_BCHECK
6528 if (tcc_state
->do_bounds_check
)
6542 if (ret_nregs
> 0) {
6543 /* return in register */
6545 PUT_R_RET(&ret
, ret
.type
.t
);
6550 gfunc_param_typed(s
, sa
);
6560 tcc_error("too few arguments to function");
6562 gfunc_call(nb_args
);
6564 if (ret_nregs
< 0) {
6565 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6566 #ifdef TCC_TARGET_RISCV64
6567 arch_transfer_ret_regs(1);
6571 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6572 vsetc(&ret
.type
, r
, &ret
.c
);
6573 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6576 /* handle packed struct return */
6577 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6580 size
= type_size(&s
->type
, &align
);
6581 /* We're writing whole regs often, make sure there's enough
6582 space. Assume register size is power of 2. */
6583 if (regsize
> align
)
6585 loc
= (loc
- size
) & -align
;
6589 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6593 if (--ret_nregs
== 0)
6597 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6600 /* Promote char/short return values. This is matters only
6601 for calling function that were not compiled by TCC and
6602 only on some architectures. For those where it doesn't
6603 matter we expect things to be already promoted to int,
6605 t
= s
->type
.t
& VT_BTYPE
;
6606 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6608 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6610 vtop
->type
.t
= VT_INT
;
6614 if (s
->f
.func_noreturn
) {
6616 tcc_tcov_block_end (tcov_data
.line
);
6625 #ifndef precedence_parser /* original top-down parser */
6627 static void expr_prod(void)
6632 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6639 static void expr_sum(void)
6644 while ((t
= tok
) == '+' || t
== '-') {
6651 static void expr_shift(void)
6656 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6663 static void expr_cmp(void)
6668 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6669 t
== TOK_ULT
|| t
== TOK_UGE
) {
6676 static void expr_cmpeq(void)
6681 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6688 static void expr_and(void)
6691 while (tok
== '&') {
6698 static void expr_xor(void)
6701 while (tok
== '^') {
6708 static void expr_or(void)
6711 while (tok
== '|') {
/* Forward declaration: parses a chain of short-circuit operators;
   'op' is presumably TOK_LAND or TOK_LOR selecting the level
   (the definition below tests op == TOK_LAND). */
static void expr_landor(int op);
6720 static void expr_land(void)
6723 if (tok
== TOK_LAND
)
6727 static void expr_lor(void)
6734 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6735 #else /* defined precedence_parser */
6736 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6737 # define expr_lor() unary(), expr_infix(1)
6739 static int precedence(int tok
)
6742 case TOK_LOR
: return 1;
6743 case TOK_LAND
: return 2;
6747 case TOK_EQ
: case TOK_NE
: return 6;
6748 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6749 case TOK_SHL
: case TOK_SAR
: return 8;
6750 case '+': case '-': return 9;
6751 case '*': case '/': case '%': return 10;
6753 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6758 static unsigned char prec
[256];
6759 static void init_prec(void)
6762 for (i
= 0; i
< 256; i
++)
6763 prec
[i
] = precedence(i
);
6765 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
/* Forward declaration needed by the precedence-climbing parser
   (expr_infix) before expr_landor's definition. */
static void expr_landor(int op);
6769 static void expr_infix(int p
)
6772 while ((p2
= precedence(t
)) >= p
) {
6773 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6778 if (precedence(tok
) > p2
)
6787 /* Assuming vtop is a value used in a conditional context
6788 (i.e. compared with zero) return 0 if it's false, 1 if
6789 true and -1 if it can't be statically determined. */
6790 static int condition_3way(void)
6793 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6794 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6796 gen_cast_s(VT_BOOL
);
6803 static void expr_landor(int op
)
6805 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6807 c
= f
? i
: condition_3way();
6809 save_regs(1), cc
= 0;
6811 nocode_wanted
++, f
= 1;
6819 expr_landor_next(op
);
6831 static int is_cond_bool(SValue
*sv
)
6833 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6834 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6835 return (unsigned)sv
->c
.i
< 2;
6836 if (sv
->r
== VT_CMP
)
6841 static void expr_cond(void)
6843 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6851 c
= condition_3way();
6852 g
= (tok
== ':' && gnu_ext
);
6862 /* needed to avoid having different registers saved in
6869 ncw_prev
= nocode_wanted
;
6875 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6876 mk_pointer(&vtop
->type
);
6877 sv
= *vtop
; /* save value to handle it later */
6878 vtop
--; /* no vpop so that FP stack is not flushed */
6888 nocode_wanted
= ncw_prev
;
6894 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6895 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6896 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6897 this code jumps directly to the if's then/else branches. */
6902 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6905 nocode_wanted
= ncw_prev
;
6906 // tcc_warning("two conditions expr_cond");
6910 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6911 mk_pointer(&vtop
->type
);
6913 /* cast operands to correct type according to ISOC rules */
6914 if (!combine_types(&type
, &sv
, vtop
, '?'))
6915 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6916 "type mismatch in conditional expression (have '%s' and '%s')");
6917 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6918 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6919 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6921 /* now we convert second operand */
6925 mk_pointer(&vtop
->type
);
6927 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6931 rc
= RC_TYPE(type
.t
);
6932 /* for long longs, we use fixed registers to avoid having
6933 to handle a complicated move */
6934 if (USING_TWO_WORDS(type
.t
))
6935 rc
= RC_RET(type
.t
);
6943 nocode_wanted
= ncw_prev
;
6945 /* this is horrible, but we must also convert first
6951 mk_pointer(&vtop
->type
);
6953 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6959 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6969 static void expr_eq(void)
6974 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6982 gen_op(TOK_ASSIGN_OP(t
));
6988 ST_FUNC
void gexpr(void)
6999 /* parse a constant expression and return value in vtop. */
7000 static void expr_const1(void)
7003 nocode_wanted
+= unevalmask
+ 1;
7005 nocode_wanted
-= unevalmask
+ 1;
7009 /* parse an integer constant and return its value. */
7010 static inline int64_t expr_const64(void)
7014 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
7015 expect("constant expression");
7021 /* parse an integer constant and return its value.
7022 Complain if it doesn't fit 32bit (signed or unsigned). */
7023 ST_FUNC
int expr_const(void)
7026 int64_t wc
= expr_const64();
7028 if (c
!= wc
&& (unsigned)c
!= wc
)
7029 tcc_error("constant exceeds 32 bit");
7033 /* ------------------------------------------------------------------------- */
7034 /* return from function */
7036 #ifndef TCC_TARGET_ARM64
7037 static void gfunc_return(CType
*func_type
)
7039 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7040 CType type
, ret_type
;
7041 int ret_align
, ret_nregs
, regsize
;
7042 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
7043 &ret_align
, ®size
);
7044 if (ret_nregs
< 0) {
7045 #ifdef TCC_TARGET_RISCV64
7046 arch_transfer_ret_regs(0);
7048 } else if (0 == ret_nregs
) {
7049 /* if returning structure, must copy it to implicit
7050 first pointer arg location */
7053 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
7056 /* copy structure value to pointer */
7059 /* returning structure packed into registers */
7060 int size
, addr
, align
, rc
;
7061 size
= type_size(func_type
,&align
);
7062 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
7063 (vtop
->c
.i
& (ret_align
-1)))
7064 && (align
& (ret_align
-1))) {
7065 loc
= (loc
- size
) & -ret_align
;
7068 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
7072 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
7074 vtop
->type
= ret_type
;
7075 rc
= RC_RET(ret_type
.t
);
7083 if (--ret_nregs
== 0)
7085 /* We assume that when a structure is returned in multiple
7086 registers, their classes are consecutive values of the
7089 vtop
->c
.i
+= regsize
;
7094 gv(RC_RET(func_type
->t
));
7096 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
7100 static void check_func_return(void)
7102 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
7104 if (!strcmp (funcname
, "main")
7105 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
7106 /* main returns 0 by default */
7108 gen_assign_cast(&func_vt
);
7109 gfunc_return(&func_vt
);
7111 tcc_warning("function might return no value: '%s'", funcname
);
7115 /* ------------------------------------------------------------------------- */
7118 static int case_cmpi(const void *pa
, const void *pb
)
7120 int64_t a
= (*(struct case_t
**) pa
)->v1
;
7121 int64_t b
= (*(struct case_t
**) pb
)->v1
;
7122 return a
< b
? -1 : a
> b
;
7125 static int case_cmpu(const void *pa
, const void *pb
)
7127 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
7128 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
7129 return a
< b
? -1 : a
> b
;
/* Emit a test of the value on vtop and resolve the resulting jump to
   address 'a'.  NOTE(review): semantics of the helpers are defined by
   the target code generator — presumably gvtst(0, t) generates the
   conditional jump chained onto chain 't', and gsym_addr patches that
   chain to point at code address 'a'; confirm against gvtst's definition. */
static void gtst_addr(int t, int a)
{
    gsym_addr(gvtst(0, t), a);
}
7137 static void gcase(struct case_t
**base
, int len
, int *bsym
)
7141 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
7158 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
7160 gcase(base
, len
/2, bsym
);
7164 base
+= e
; len
-= e
;
7174 if (p
->v1
== p
->v2
) {
7176 gtst_addr(0, p
->sym
);
7186 gtst_addr(0, p
->sym
);
7190 *bsym
= gjmp(*bsym
);
7193 /* ------------------------------------------------------------------------- */
7194 /* __attribute__((cleanup(fn))) */
7196 static void try_call_scope_cleanup(Sym
*stop
)
7198 Sym
*cls
= cur_scope
->cl
.s
;
7200 for (; cls
!= stop
; cls
= cls
->ncl
) {
7201 Sym
*fs
= cls
->next
;
7202 Sym
*vs
= cls
->prev_tok
;
7204 vpushsym(&fs
->type
, fs
);
7205 vset(&vs
->type
, vs
->r
, vs
->c
);
7207 mk_pointer(&vtop
->type
);
7213 static void try_call_cleanup_goto(Sym
*cleanupstate
)
7218 if (!cur_scope
->cl
.s
)
7221 /* search NCA of both cleanup chains given parents and initial depth */
7222 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
7223 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
7225 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
7227 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
7230 try_call_scope_cleanup(cc
);
7233 /* call 'func' for each __attribute__((cleanup(func))) */
7234 static void block_cleanup(struct scope
*o
)
7238 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
7239 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
7244 try_call_scope_cleanup(o
->cl
.s
);
7245 pcl
->jnext
= gjmp(0);
7247 goto remove_pending
;
7257 try_call_scope_cleanup(o
->cl
.s
);
7260 /* ------------------------------------------------------------------------- */
7263 static void vla_restore(int loc
)
7266 gen_vla_sp_restore(loc
);
7269 static void vla_leave(struct scope
*o
)
7271 struct scope
*c
= cur_scope
, *v
= NULL
;
7272 for (; c
!= o
&& c
; c
= c
->prev
)
7276 vla_restore(v
->vla
.locorig
);
7279 /* ------------------------------------------------------------------------- */
7282 void new_scope(struct scope
*o
)
7284 /* copy and link previous scope */
7286 o
->prev
= cur_scope
;
7288 cur_scope
->vla
.num
= 0;
7290 /* record local declaration stack position */
7291 o
->lstk
= local_stack
;
7292 o
->llstk
= local_label_stack
;
7296 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
7299 void prev_scope(struct scope
*o
, int is_expr
)
7303 if (o
->cl
.s
!= o
->prev
->cl
.s
)
7304 block_cleanup(o
->prev
);
7306 /* pop locally defined labels */
7307 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
7309 /* In the is_expr case (a statement expression is finished here),
7310 vtop might refer to symbols on the local_stack. Either via the
7311 type or via vtop->sym. We can't pop those nor any that in turn
7312 might be referred to. To make it easier we don't roll back
7313 any symbols in that case; some upper level call to block() will
7314 do that. We do have to remove such symbols from the lookup
7315 tables, though. sym_pop will do that. */
7317 /* pop locally defined symbols */
7318 pop_local_syms(o
->lstk
, is_expr
);
7319 cur_scope
= o
->prev
;
7323 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
7326 /* leave a scope via break/continue(/goto) */
7327 void leave_scope(struct scope
*o
)
7331 try_call_scope_cleanup(o
->cl
.s
);
7335 /* ------------------------------------------------------------------------- */
7336 /* call block from 'for do while' loops */
7338 static void lblock(int *bsym
, int *csym
)
7340 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
7341 int *b
= co
->bsym
, *c
= co
->csym
;
7355 static void block(int is_expr
)
7357 int a
, b
, c
, d
, e
, t
;
7362 /* default return value is (void) */
7364 vtop
->type
.t
= VT_VOID
;
7369 /* If the token carries a value, next() might destroy it. Only with
7370 invalid code such as f(){"123"4;} */
7371 if (TOK_HAS_VALUE(t
))
7376 tcc_tcov_check_line (0), tcc_tcov_block_begin ();
7384 if (tok
== TOK_ELSE
) {
7389 gsym(d
); /* patch else jmp */
7394 } else if (t
== TOK_WHILE
) {
7406 } else if (t
== '{') {
7409 /* handle local labels declarations */
7410 while (tok
== TOK_LABEL
) {
7413 if (tok
< TOK_UIDENT
)
7414 expect("label identifier");
7415 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
7417 } while (tok
== ',');
7421 while (tok
!= '}') {
7430 prev_scope(&o
, is_expr
);
7433 else if (!nocode_wanted
)
7434 check_func_return();
7436 } else if (t
== TOK_RETURN
) {
7437 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
7441 gen_assign_cast(&func_vt
);
7443 if (vtop
->type
.t
!= VT_VOID
)
7444 tcc_warning("void function returns a value");
7448 tcc_warning("'return' with no value");
7451 leave_scope(root_scope
);
7453 gfunc_return(&func_vt
);
7455 /* jump unless last stmt in top-level block */
7456 if (tok
!= '}' || local_scope
!= 1)
7459 tcc_tcov_block_end (tcov_data
.line
);
7462 } else if (t
== TOK_BREAK
) {
7464 if (!cur_scope
->bsym
)
7465 tcc_error("cannot break");
7466 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
7467 leave_scope(cur_switch
->scope
);
7469 leave_scope(loop_scope
);
7470 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
7473 } else if (t
== TOK_CONTINUE
) {
7475 if (!cur_scope
->csym
)
7476 tcc_error("cannot continue");
7477 leave_scope(loop_scope
);
7478 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
7481 } else if (t
== TOK_FOR
) {
7486 /* c99 for-loop init decl? */
7487 if (!decl0(VT_LOCAL
, 1, NULL
)) {
7488 /* no, regular for-loop init expr */
7516 } else if (t
== TOK_DO
) {
7530 } else if (t
== TOK_SWITCH
) {
7531 struct switch_t
*sw
;
7533 sw
= tcc_mallocz(sizeof *sw
);
7535 sw
->scope
= cur_scope
;
7536 sw
->prev
= cur_switch
;
7542 sw
->sv
= *vtop
--; /* save switch value */
7545 b
= gjmp(0); /* jump to first case */
7547 a
= gjmp(a
); /* add implicit break */
7551 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7552 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7554 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7556 for (b
= 1; b
< sw
->n
; b
++)
7557 if (sw
->sv
.type
.t
& VT_UNSIGNED
7558 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7559 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7560 tcc_error("duplicate case value");
7564 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7567 gsym_addr(d
, sw
->def_sym
);
7573 dynarray_reset(&sw
->p
, &sw
->n
);
7574 cur_switch
= sw
->prev
;
7577 } else if (t
== TOK_CASE
) {
7578 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7581 cr
->v1
= cr
->v2
= expr_const64();
7582 if (gnu_ext
&& tok
== TOK_DOTS
) {
7584 cr
->v2
= expr_const64();
7585 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7586 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7587 tcc_warning("empty case range");
7591 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7594 goto block_after_label
;
7596 } else if (t
== TOK_DEFAULT
) {
7599 if (cur_switch
->def_sym
)
7600 tcc_error("too many 'default'");
7602 cur_switch
->def_sym
= gind();
7605 goto block_after_label
;
7607 } else if (t
== TOK_GOTO
) {
7608 if (cur_scope
->vla
.num
)
7609 vla_restore(cur_scope
->vla
.locorig
);
7610 if (tok
== '*' && gnu_ext
) {
7614 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7618 } else if (tok
>= TOK_UIDENT
) {
7619 s
= label_find(tok
);
7620 /* put forward definition if needed */
7622 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7623 else if (s
->r
== LABEL_DECLARED
)
7624 s
->r
= LABEL_FORWARD
;
7626 if (s
->r
& LABEL_FORWARD
) {
7627 /* start new goto chain for cleanups, linked via label->next */
7628 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7629 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7630 pending_gotos
->prev_tok
= s
;
7631 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7632 pending_gotos
->next
= s
;
7634 s
->jnext
= gjmp(s
->jnext
);
7636 try_call_cleanup_goto(s
->cleanupstate
);
7637 gjmp_addr(s
->jnext
);
7642 expect("label identifier");
7646 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7650 if (tok
== ':' && t
>= TOK_UIDENT
) {
7655 if (s
->r
== LABEL_DEFINED
)
7656 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7657 s
->r
= LABEL_DEFINED
;
7659 Sym
*pcl
; /* pending cleanup goto */
7660 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7662 sym_pop(&s
->next
, NULL
, 0);
7666 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7669 s
->cleanupstate
= cur_scope
->cl
.s
;
7672 vla_restore(cur_scope
->vla
.loc
);
7673 /* we accept this, but it is a mistake */
7675 tcc_warning("deprecated use of label at end of compound statement");
7681 /* expression case */
7698 tcc_tcov_check_line (0), tcc_tcov_block_end (0);
7701 /* This skips over a stream of tokens containing balanced {} and ()
7702 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7703 with a '{'). If STR then allocates and stores the skipped tokens
7704 in *STR. This doesn't check if () and {} are nested correctly,
7705 i.e. "({)}" is accepted. */
7706 static void skip_or_save_block(TokenString
**str
)
7708 int braces
= tok
== '{';
7711 *str
= tok_str_alloc();
7713 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
7715 if (tok
== TOK_EOF
) {
7716 if (str
|| level
> 0)
7717 tcc_error("unexpected end of file");
7722 tok_str_add_tok(*str
);
7725 if (t
== '{' || t
== '(') {
7727 } else if (t
== '}' || t
== ')') {
7729 if (level
== 0 && braces
&& t
== '}')
7734 tok_str_add(*str
, -1);
7735 tok_str_add(*str
, 0);
7739 #define EXPR_CONST 1
7742 static void parse_init_elem(int expr_type
)
7744 int saved_global_expr
;
7747 /* compound literals must be allocated globally in this case */
7748 saved_global_expr
= global_expr
;
7751 global_expr
= saved_global_expr
;
7752 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7753 (compound literals). */
7754 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7755 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7756 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7757 #ifdef TCC_TARGET_PE
7758 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7761 tcc_error("initializer element is not constant");
/* Internal sanity check for initializer emission: verify that a write
   ending at 'offset' does not exceed the space reserved so far.
   For static data (p->sec non-NULL) the bound is the section's current
   data_offset; for automatic storage it is p->local_offset.  The check
   is suppressed when no data (NODATA_WANTED) resp. no code
   (nocode_wanted) is being generated. */
static void init_assert(init_params *p, int offset)
{
    if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
        : !nocode_wanted && offset > p->local_offset)
        tcc_internal_error("initializer overflow");
}
7777 #define init_assert(sec, offset)
7780 /* put zeros for variable based init */
7781 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7783 init_assert(p
, c
+ size
);
7785 /* nothing to do because globals are already set to zero */
7787 vpush_helper_func(TOK_memset
);
7789 #ifdef TCC_TARGET_ARM
7801 #define DIF_SIZE_ONLY 2
7802 #define DIF_HAVE_ELEM 4
7805 /* delete relocations for specified range c ... c + size. Unfortunatly
7806 in very special cases, relocations may occur unordered */
7807 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7809 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7810 if (!sec
|| !sec
->reloc
)
7812 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7813 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7814 while (rel
< rel_end
) {
7815 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7816 sec
->reloc
->data_offset
-= sizeof *rel
;
7819 memcpy(rel2
, rel
, sizeof *rel
);
7826 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7828 if (ref
== p
->flex_array_ref
) {
7829 if (index
>= ref
->c
)
7831 } else if (ref
->c
< 0)
7832 tcc_error("flexible array has zero size in this context");
7835 /* t is the array or struct type. c is the array or struct
7836 address. cur_field is the pointer to the current
7837 field, for arrays the 'c' member contains the current start
7838 index. 'flags' is as in decl_initializer.
7839 'al' contains the already initialized length of the
7840 current container (starting at c). This returns the new length of that. */
7841 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7842 Sym
**cur_field
, int flags
, int al
)
7845 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7846 unsigned long corig
= c
;
7851 if (flags
& DIF_HAVE_ELEM
)
7854 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7861 /* NOTE: we only support ranges for last designator */
7862 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7864 if (!(type
->t
& VT_ARRAY
))
7865 expect("array type");
7867 index
= index_last
= expr_const();
7868 if (tok
== TOK_DOTS
&& gnu_ext
) {
7870 index_last
= expr_const();
7874 decl_design_flex(p
, s
, index_last
);
7875 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7876 tcc_error("index exceeds array bounds or range is empty");
7878 (*cur_field
)->c
= index_last
;
7879 type
= pointed_type(type
);
7880 elem_size
= type_size(type
, &align
);
7881 c
+= index
* elem_size
;
7882 nb_elems
= index_last
- index
+ 1;
7889 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7890 expect("struct/union type");
7892 f
= find_field(type
, l
, &cumofs
);
7905 } else if (!gnu_ext
) {
7910 if (type
->t
& VT_ARRAY
) {
7911 index
= (*cur_field
)->c
;
7913 decl_design_flex(p
, s
, index
);
7915 tcc_error("too many initializers");
7916 type
= pointed_type(type
);
7917 elem_size
= type_size(type
, &align
);
7918 c
+= index
* elem_size
;
7921 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7922 *cur_field
= f
= f
->next
;
7924 tcc_error("too many initializers");
7930 if (!elem_size
) /* for structs */
7931 elem_size
= type_size(type
, &align
);
7933 /* Using designators the same element can be initialized more
7934 than once. In that case we need to delete possibly already
7935 existing relocations. */
7936 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7937 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7938 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7941 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7943 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7947 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7948 /* make init_putv/vstore believe it were a struct */
7950 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7954 vpush_ref(type
, p
->sec
, c
, elem_size
);
7956 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7957 for (i
= 1; i
< nb_elems
; i
++) {
7959 init_putv(p
, type
, c
+ elem_size
* i
);
7964 c
+= nb_elems
* elem_size
;
7970 /* store a value or an expression directly in global data or in local array */
7971 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7977 Section
*sec
= p
->sec
;
7981 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7983 size
= type_size(type
, &align
);
7984 if (type
->t
& VT_BITFIELD
)
7985 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7986 init_assert(p
, c
+ size
);
7989 /* XXX: not portable */
7990 /* XXX: generate error if incorrect relocation */
7991 gen_assign_cast(&dtype
);
7992 bt
= type
->t
& VT_BTYPE
;
7994 if ((vtop
->r
& VT_SYM
)
7996 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7997 || (type
->t
& VT_BITFIELD
))
7998 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
8000 tcc_error("initializer element is not computable at load time");
8002 if (NODATA_WANTED
) {
8007 ptr
= sec
->data
+ c
;
8010 /* XXX: make code faster ? */
8011 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
8012 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
8013 /* XXX This rejects compound literals like
8014 '(void *){ptr}'. The problem is that '&sym' is
8015 represented the same way, which would be ruled out
8016 by the SYM_FIRST_ANOM check above, but also '"string"'
8017 in 'char *p = "string"' is represented the same
8018 with the type being VT_PTR and the symbol being an
8019 anonymous one. That is, there's no difference in vtop
8020 between '(void *){x}' and '&(void *){x}'. Ignore
8021 pointer typed entities here. Hopefully no real code
8022 will ever use compound literals with scalar type. */
8023 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
8024 /* These come from compound literals, memcpy stuff over. */
8028 esym
= elfsym(vtop
->sym
);
8029 ssec
= tcc_state
->sections
[esym
->st_shndx
];
8030 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
8032 /* We need to copy over all memory contents, and that
8033 includes relocations. Use the fact that relocs are
8034 created it order, so look from the end of relocs
8035 until we hit one before the copied region. */
8036 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
8037 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
8038 while (num_relocs
--) {
8040 if (rel
->r_offset
>= esym
->st_value
+ size
)
8042 if (rel
->r_offset
< esym
->st_value
)
8044 put_elf_reloca(symtab_section
, sec
,
8045 c
+ rel
->r_offset
- esym
->st_value
,
8046 ELFW(R_TYPE
)(rel
->r_info
),
8047 ELFW(R_SYM
)(rel
->r_info
),
8057 if (type
->t
& VT_BITFIELD
) {
8058 int bit_pos
, bit_size
, bits
, n
;
8059 unsigned char *p
, v
, m
;
8060 bit_pos
= BIT_POS(vtop
->type
.t
);
8061 bit_size
= BIT_SIZE(vtop
->type
.t
);
8062 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
8063 bit_pos
&= 7, bits
= 0;
8068 v
= val
>> bits
<< bit_pos
;
8069 m
= ((1 << n
) - 1) << bit_pos
;
8070 *p
= (*p
& ~m
) | (v
& m
);
8071 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
8076 *(char *)ptr
= val
!= 0;
8082 write16le(ptr
, val
);
8085 write32le(ptr
, val
);
8088 write64le(ptr
, val
);
8091 #if defined TCC_IS_NATIVE_387
8092 /* Host and target platform may be different but both have x87.
8093 On windows, tcc does not use VT_LDOUBLE, except when it is a
8094 cross compiler. In this case a mingw gcc as host compiler
8095 comes here with 10-byte long doubles, while msvc or tcc won't.
8096 tcc itself can still translate by asm.
8097 In any case we avoid possibly random bytes 11 and 12.
8099 if (sizeof (long double) >= 10)
8100 memcpy(ptr
, &vtop
->c
.ld
, 10);
8102 else if (sizeof (long double) == sizeof (double))
8103 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
8105 else if (vtop
->c
.ld
== (f1
-f1
) )
8109 /* For other platforms it should work natively, but may not work
8110 for cross compilers */
8111 if (sizeof(long double) == LDOUBLE_SIZE
)
8112 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
8113 else if (sizeof(double) == LDOUBLE_SIZE
)
8114 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
8115 #ifndef TCC_CROSS_TEST
8117 tcc_error("can't cross compile long double constants");
8122 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
8125 if (vtop
->r
& VT_SYM
)
8126 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
8128 write64le(ptr
, val
);
8131 write32le(ptr
, val
);
8135 write64le(ptr
, val
);
8139 if (vtop
->r
& VT_SYM
)
8140 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
8141 write32le(ptr
, val
);
8145 //tcc_internal_error("unexpected type");
8151 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
8158 /* 't' contains the type and storage info. 'c' is the offset of the
8159 object in section 'sec'. If 'sec' is NULL, it means stack based
8160 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
8161 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
8162 size only evaluation is wanted (only for arrays). */
8163 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
8165 int len
, n
, no_oblock
, i
;
8171 /* generate line number info */
8172 if (debug_modes
&& !p
->sec
)
8173 tcc_debug_line(tcc_state
), tcc_tcov_check_line (1);
8175 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
8176 /* In case of strings we have special handling for arrays, so
8177 don't consume them as initializer value (which would commit them
8178 to some anonymous symbol). */
8179 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
8180 !(flags
& DIF_SIZE_ONLY
)) {
8181 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
8182 flags
|= DIF_HAVE_ELEM
;
8185 if ((flags
& DIF_HAVE_ELEM
) &&
8186 !(type
->t
& VT_ARRAY
) &&
8187 /* Use i_c_parameter_t, to strip toplevel qualifiers.
8188 The source type might have VT_CONSTANT set, which is
8189 of course assignable to non-const elements. */
8190 is_compatible_unqualified_types(type
, &vtop
->type
)) {
8193 } else if (type
->t
& VT_ARRAY
) {
8195 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
8203 t1
= pointed_type(type
);
8204 size1
= type_size(t1
, &align1
);
8206 /* only parse strings here if correct type (otherwise: handle
8207 them as ((w)char *) expressions */
8208 if ((tok
== TOK_LSTR
&&
8209 #ifdef TCC_TARGET_PE
8210 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
8212 (t1
->t
& VT_BTYPE
) == VT_INT
8214 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
8216 cstr_reset(&initstr
);
8217 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
8218 tcc_error("unhandled string literal merging");
8219 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8221 initstr
.size
-= size1
;
8223 len
+= tokc
.str
.size
;
8225 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
8227 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
8230 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
8231 && tok
!= TOK_EOF
) {
8232 /* Not a lone literal but part of a bigger expression. */
8233 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
8234 tokc
.str
.size
= initstr
.size
;
8235 tokc
.str
.data
= initstr
.data
;
8239 if (!(flags
& DIF_SIZE_ONLY
)) {
8244 tcc_warning("initializer-string for array is too long");
8245 /* in order to go faster for common case (char
8246 string in global variable, we handle it
8248 if (p
->sec
&& size1
== 1) {
8249 init_assert(p
, c
+ nb
);
8251 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
8255 /* only add trailing zero if enough storage (no
8256 warning in this case since it is standard) */
8257 if (flags
& DIF_CLEAR
)
8260 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
8264 } else if (size1
== 1)
8265 ch
= ((unsigned char *)initstr
.data
)[i
];
8267 ch
= ((nwchar_t
*)initstr
.data
)[i
];
8269 init_putv(p
, t1
, c
+ i
* size1
);
8273 decl_design_flex(p
, s
, len
);
8282 /* zero memory once in advance */
8283 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
8284 init_putz(p
, c
, n
*size1
);
8289 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
8290 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
8291 flags
&= ~DIF_HAVE_ELEM
;
8292 if (type
->t
& VT_ARRAY
) {
8294 /* special test for multi dimensional arrays (may not
8295 be strictly correct if designators are used at the
8297 if (no_oblock
&& len
>= n
*size1
)
8300 if (s
->type
.t
== VT_UNION
)
8304 if (no_oblock
&& f
== NULL
)
8315 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
8317 if ((flags
& DIF_FIRST
) || tok
== '{') {
8326 } else if (tok
== '{') {
8327 if (flags
& DIF_HAVE_ELEM
)
8330 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
8332 } else if ((flags
& DIF_SIZE_ONLY
)) {
8333 /* If we supported only ISO C we wouldn't have to accept calling
8334 this on anything than an array if DIF_SIZE_ONLY (and even then
8335 only on the outermost level, so no recursion would be needed),
8336 because initializing a flex array member isn't supported.
8337 But GNU C supports it, so we need to recurse even into
8338 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
8339 /* just skip expression */
8340 skip_or_save_block(NULL
);
8342 if (!(flags
& DIF_HAVE_ELEM
)) {
8343 /* This should happen only when we haven't parsed
8344 the init element above for fear of committing a
8345 string constant to memory too early. */
8346 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
8347 expect("string constant");
8348 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
8351 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
8352 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
8354 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
8358 init_putv(p
, type
, c
);
8362 /* parse an initializer for type 't' if 'has_init' is non zero, and
8363 allocate space in local or global data space ('r' is either
8364 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
8365 variable 'v' of scope 'scope' is declared before initializers
8366 are parsed. If 'v' is zero, then a reference to the new object
8367 is put in the value stack. If 'has_init' is 2, a special parsing
8368 is done to handle string constants. */
8369 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
8370 int has_init
, int v
, int scope
)
8372 int size
, align
, addr
;
8373 TokenString
*init_str
= NULL
;
8376 Sym
*flexible_array
;
8378 int saved_nocode_wanted
= nocode_wanted
;
8379 #ifdef CONFIG_TCC_BCHECK
8380 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
8382 init_params p
= {0};
8384 /* Always allocate static or global variables */
8385 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
8386 nocode_wanted
|= 0x80000000;
8388 flexible_array
= NULL
;
8389 size
= type_size(type
, &align
);
8391 /* exactly one flexible array may be initialized, either the
8392 toplevel array or the last member of the toplevel struct */
8395 /* If the base type itself was an array type of unspecified size
8396 (like in 'typedef int arr[]; arr x = {1};') then we will
8397 overwrite the unknown size by the real one for this decl.
8398 We need to unshare the ref symbol holding that size. */
8399 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
8400 p
.flex_array_ref
= type
->ref
;
8402 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
8403 Sym
*field
= type
->ref
->next
;
8406 field
= field
->next
;
8407 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
8408 flexible_array
= field
;
8409 p
.flex_array_ref
= field
->type
.ref
;
8416 /* If unknown size, do a dry-run 1st pass */
8418 tcc_error("unknown type size");
8419 if (has_init
== 2) {
8420 /* only get strings */
8421 init_str
= tok_str_alloc();
8422 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8423 tok_str_add_tok(init_str
);
8426 tok_str_add(init_str
, -1);
8427 tok_str_add(init_str
, 0);
8429 skip_or_save_block(&init_str
);
8433 begin_macro(init_str
, 1);
8435 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
8436 /* prepare second initializer parsing */
8437 macro_ptr
= init_str
->str
;
8440 /* if still unknown size, error */
8441 size
= type_size(type
, &align
);
8443 tcc_error("unknown type size");
8445 /* If there's a flex member and it was used in the initializer
8447 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
8448 size
+= flexible_array
->type
.ref
->c
8449 * pointed_size(&flexible_array
->type
);
8452 /* take into account specified alignment if bigger */
8453 if (ad
->a
.aligned
) {
8454 int speca
= 1 << (ad
->a
.aligned
- 1);
8457 } else if (ad
->a
.packed
) {
8461 if (!v
&& NODATA_WANTED
)
8462 size
= 0, align
= 1;
8464 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
8466 #ifdef CONFIG_TCC_BCHECK
8468 /* add padding between stack variables for bound checking */
8472 loc
= (loc
- size
) & -align
;
8474 p
.local_offset
= addr
+ size
;
8475 #ifdef CONFIG_TCC_BCHECK
8477 /* add padding between stack variables for bound checking */
8482 /* local variable */
8483 #ifdef CONFIG_TCC_ASM
8484 if (ad
->asm_label
) {
8485 int reg
= asm_parse_regvar(ad
->asm_label
);
8487 r
= (r
& ~VT_VALMASK
) | reg
;
8490 sym
= sym_push(v
, type
, r
, addr
);
8491 if (ad
->cleanup_func
) {
8492 Sym
*cls
= sym_push2(&all_cleanups
,
8493 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
8494 cls
->prev_tok
= sym
;
8495 cls
->next
= ad
->cleanup_func
;
8496 cls
->ncl
= cur_scope
->cl
.s
;
8497 cur_scope
->cl
.s
= cls
;
8502 /* push local reference */
8503 vset(type
, r
, addr
);
8506 if (v
&& scope
== VT_CONST
) {
8507 /* see if the symbol was already defined */
8510 patch_storage(sym
, ad
, type
);
8511 /* we accept several definitions of the same global variable. */
8512 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8517 /* allocate symbol in corresponding section */
8520 if (type
->t
& VT_CONSTANT
)
8521 sec
= data_ro_section
;
8524 else if (tcc_state
->nocommon
)
8529 addr
= section_add(sec
, size
, align
);
8530 #ifdef CONFIG_TCC_BCHECK
8531 /* add padding if bound check */
8533 section_add(sec
, 1, 1);
8536 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8537 sec
= common_section
;
8542 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8543 patch_storage(sym
, ad
, NULL
);
8545 /* update symbol definition */
8546 put_extern_sym(sym
, sec
, addr
, size
);
8548 /* push global reference */
8549 vpush_ref(type
, sec
, addr
, size
);
8554 #ifdef CONFIG_TCC_BCHECK
8555 /* handles bounds now because the symbol must be defined
8556 before for the relocation */
8560 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8561 /* then add global bound info */
8562 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8563 bounds_ptr
[0] = 0; /* relocated */
8564 bounds_ptr
[1] = size
;
8569 if (type
->t
& VT_VLA
) {
8575 /* save before-VLA stack pointer if needed */
8576 if (cur_scope
->vla
.num
== 0) {
8577 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
8578 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
8580 gen_vla_sp_save(loc
-= PTR_SIZE
);
8581 cur_scope
->vla
.locorig
= loc
;
8585 vla_runtime_type_size(type
, &a
);
8586 gen_vla_alloc(type
, a
);
8587 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8588 /* on _WIN64, because of the function args scratch area, the
8589 result of alloca differs from RSP and is returned in RAX. */
8590 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8592 gen_vla_sp_save(addr
);
8593 cur_scope
->vla
.loc
= addr
;
8594 cur_scope
->vla
.num
++;
8595 } else if (has_init
) {
8597 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8598 /* patch flexible array member size back to -1, */
8599 /* for possible subsequent similar declarations */
8601 flexible_array
->type
.ref
->c
= -1;
8605 /* restore parse state if needed */
8611 nocode_wanted
= saved_nocode_wanted
;
8614 /* parse a function defined by symbol 'sym' and generate its code in
8615 'cur_text_section' */
8616 static void gen_function(Sym
*sym
)
8618 struct scope f
= { 0 };
8619 cur_scope
= root_scope
= &f
;
8621 ind
= cur_text_section
->data_offset
;
8622 if (sym
->a
.aligned
) {
8623 size_t newoff
= section_add(cur_text_section
, 0,
8624 1 << (sym
->a
.aligned
- 1));
8625 gen_fill_nops(newoff
- ind
);
8627 /* NOTE: we patch the symbol size later */
8628 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8629 if (sym
->type
.ref
->f
.func_ctor
)
8630 add_array (tcc_state
, ".init_array", sym
->c
);
8631 if (sym
->type
.ref
->f
.func_dtor
)
8632 add_array (tcc_state
, ".fini_array", sym
->c
);
8634 funcname
= get_tok_str(sym
->v
, NULL
);
8636 func_vt
= sym
->type
.ref
->type
;
8637 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8639 /* put debug symbol */
8640 tcc_debug_funcstart(tcc_state
, sym
);
8641 /* push a dummy symbol to enable local sym storage */
8642 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8643 local_scope
= 1; /* for function parameters */
8647 clear_temp_local_var_list();
8651 /* reset local stack */
8652 pop_local_syms(NULL
, 0);
8654 cur_text_section
->data_offset
= ind
;
8656 label_pop(&global_label_stack
, NULL
, 0);
8657 sym_pop(&all_cleanups
, NULL
, 0);
8658 /* patch symbol size */
8659 elfsym(sym
)->st_size
= ind
- func_ind
;
8660 /* end of function */
8661 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8662 /* It's better to crash than to generate wrong code */
8663 cur_text_section
= NULL
;
8664 funcname
= ""; /* for safety */
8665 func_vt
.t
= VT_VOID
; /* for safety */
8666 func_var
= 0; /* for safety */
8667 ind
= 0; /* for safety */
8668 nocode_wanted
= 0x80000000;
8670 /* do this after funcend debug info */
8674 static void gen_inline_functions(TCCState
*s
)
8677 int inline_generated
, i
;
8678 struct InlineFunc
*fn
;
8680 tcc_open_bf(s
, ":inline:", 0);
8681 /* iterate while inline function are referenced */
8683 inline_generated
= 0;
8684 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8685 fn
= s
->inline_fns
[i
];
8687 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8688 /* the function was used or forced (and then not internal):
8689 generate its code and convert it to a normal function */
8691 tcc_debug_putfile(s
, fn
->filename
);
8692 begin_macro(fn
->func_str
, 1);
8694 cur_text_section
= text_section
;
8698 inline_generated
= 1;
8701 } while (inline_generated
);
8705 static void free_inline_functions(TCCState
*s
)
8708 /* free tokens of unused inline functions */
8709 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8710 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8712 tok_str_free(fn
->func_str
);
8714 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
8717 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8718 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8719 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
8721 int v
, has_init
, r
, oldint
;
8724 AttributeDef ad
, adbase
;
8727 if (tok
== TOK_STATIC_ASSERT
) {
8737 tcc_error("_Static_assert fail");
8739 goto static_assert_out
;
8743 parse_mult_str(&error_str
, "string constant");
8745 tcc_error("%s", (char *)error_str
.data
);
8746 cstr_free(&error_str
);
8754 if (!parse_btype(&btype
, &adbase
)) {
8755 if (is_for_loop_init
)
8757 /* skip redundant ';' if not in old parameter decl scope */
8758 if (tok
== ';' && l
!= VT_CMP
) {
8764 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8765 /* global asm block */
8769 if (tok
>= TOK_UIDENT
) {
8770 /* special test for old K&R protos without explicit int
8771 type. Only accepted when defining global data */
8776 expect("declaration");
8782 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8784 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8785 tcc_warning("unnamed struct/union that defines no instances");
8789 if (IS_ENUM(btype
.t
)) {
8795 while (1) { /* iterate thru each declaration */
8798 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8802 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8803 printf("type = '%s'\n", buf
);
8806 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8807 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
8808 tcc_error("function without file scope cannot be static");
8809 /* if old style function prototype, we accept a
8812 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
8813 decl0(VT_CMP
, 0, sym
);
8814 #ifdef TCC_TARGET_MACHO
8815 if (sym
->f
.func_alwinl
8816 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8817 == (VT_EXTERN
| VT_INLINE
))) {
8818 /* always_inline functions must be handled as if they
8819 don't generate multiple global defs, even if extern
8820 inline, i.e. GNU inline semantics for those. Rewrite
8821 them into static inline. */
8822 type
.t
&= ~VT_EXTERN
;
8823 type
.t
|= VT_STATIC
;
8826 /* always compile 'extern inline' */
8827 if (type
.t
& VT_EXTERN
)
8828 type
.t
&= ~VT_INLINE
;
8830 } else if (oldint
) {
8831 tcc_warning("type defaults to int");
8834 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8835 ad
.asm_label
= asm_label_instr();
8836 /* parse one last attribute list, after asm label */
8837 parse_attribute(&ad
);
8839 /* gcc does not allow __asm__("label") with function definition,
8846 #ifdef TCC_TARGET_PE
8847 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8848 if (type
.t
& VT_STATIC
)
8849 tcc_error("cannot have dll linkage with static");
8850 if (type
.t
& VT_TYPEDEF
) {
8851 tcc_warning("'%s' attribute ignored for typedef",
8852 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8853 (ad
.a
.dllexport
= 0, "dllexport"));
8854 } else if (ad
.a
.dllimport
) {
8855 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8858 type
.t
|= VT_EXTERN
;
8864 tcc_error("cannot use local functions");
8865 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8866 expect("function definition");
8868 /* reject abstract declarators in function definition
8869 make old style params without decl have int type */
8871 while ((sym
= sym
->next
) != NULL
) {
8872 if (!(sym
->v
& ~SYM_FIELD
))
8873 expect("identifier");
8874 if (sym
->type
.t
== VT_VOID
)
8875 sym
->type
= int_type
;
8878 /* apply post-declaraton attributes */
8879 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8881 /* put function symbol */
8882 type
.t
&= ~VT_EXTERN
;
8883 sym
= external_sym(v
, &type
, 0, &ad
);
8885 /* static inline functions are just recorded as a kind
8886 of macro. Their code will be emitted at the end of
8887 the compilation unit only if they are used */
8888 if (sym
->type
.t
& VT_INLINE
) {
8889 struct InlineFunc
*fn
;
8890 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8891 strcpy(fn
->filename
, file
->filename
);
8893 skip_or_save_block(&fn
->func_str
);
8894 dynarray_add(&tcc_state
->inline_fns
,
8895 &tcc_state
->nb_inline_fns
, fn
);
8897 /* compute text section */
8898 cur_text_section
= ad
.section
;
8899 if (!cur_text_section
)
8900 cur_text_section
= text_section
;
8906 /* find parameter in function parameter list */
8907 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
8908 if ((sym
->v
& ~SYM_FIELD
) == v
)
8910 tcc_error("declaration for parameter '%s' but no such parameter",
8911 get_tok_str(v
, NULL
));
8913 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8914 tcc_error("storage class specified for '%s'",
8915 get_tok_str(v
, NULL
));
8916 if (sym
->type
.t
!= VT_VOID
)
8917 tcc_error("redefinition of parameter '%s'",
8918 get_tok_str(v
, NULL
));
8919 convert_parameter_type(&type
);
8921 } else if (type
.t
& VT_TYPEDEF
) {
8922 /* save typedefed type */
8923 /* XXX: test storage specifiers ? */
8925 if (sym
&& sym
->sym_scope
== local_scope
) {
8926 if (!is_compatible_types(&sym
->type
, &type
)
8927 || !(sym
->type
.t
& VT_TYPEDEF
))
8928 tcc_error("incompatible redefinition of '%s'",
8929 get_tok_str(v
, NULL
));
8932 sym
= sym_push(v
, &type
, 0, 0);
8937 tcc_debug_typedef (tcc_state
, sym
);
8938 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8939 && !(type
.t
& VT_EXTERN
)) {
8940 tcc_error("declaration of void object");
8943 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8944 /* external function definition */
8945 /* specific case for func_call attribute */
8947 } else if (!(type
.t
& VT_ARRAY
)) {
8948 /* not lvalue if array */
8951 has_init
= (tok
== '=');
8952 if (has_init
&& (type
.t
& VT_VLA
))
8953 tcc_error("variable length array cannot be initialized");
8954 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8955 || (type
.t
& VT_BTYPE
) == VT_FUNC
8956 /* as with GCC, uninitialized global arrays with no size
8957 are considered extern: */
8958 || ((type
.t
& VT_ARRAY
) && !has_init
8959 && l
== VT_CONST
&& type
.ref
->c
< 0)
8961 /* external variable or function */
8962 type
.t
|= VT_EXTERN
;
8963 sym
= external_sym(v
, &type
, r
, &ad
);
8964 if (ad
.alias_target
) {
8965 /* Aliases need to be emitted when their target
8966 symbol is emitted, even if perhaps unreferenced.
8967 We only support the case where the base is
8968 already defined, otherwise we would need
8969 deferring to emit the aliases until the end of
8970 the compile unit. */
8971 Sym
*alias_target
= sym_find(ad
.alias_target
);
8972 ElfSym
*esym
= elfsym(alias_target
);
8974 tcc_error("unsupported forward __alias__ attribute");
8975 put_extern_sym2(sym
, esym
->st_shndx
,
8976 esym
->st_value
, esym
->st_size
, 1);
8979 if (type
.t
& VT_STATIC
)
8985 else if (l
== VT_CONST
)
8986 /* uninitialized global variables may be overridden */
8987 type
.t
|= VT_EXTERN
;
8988 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
8992 if (is_for_loop_init
)
9004 static void decl(int l
)
9009 /* ------------------------------------------------------------------------- */
9012 /* ------------------------------------------------------------------------- */