2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
49 ST_DATA
char debug_modes
;
52 static SValue _vstack
[1 + VSTACK_SIZE
];
53 #define vstack (_vstack + 1)
55 ST_DATA
int const_wanted
; /* true if constant wanted */
56 ST_DATA
int nocode_wanted
; /* no code generation wanted */
57 #define unevalmask 0xffff /* unevaluated subexpression */
58 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
59 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
61 /* Automagical code suppression ----> */
62 #define CODE_OFF() (nocode_wanted |= 0x20000000)
63 #define CODE_ON() (nocode_wanted &= ~0x20000000)
65 static void tcc_tcov_block_begin(void);
67 /* Clear 'nocode_wanted' at label if it was used */
68 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
69 static int gind(void) { int t
= ind
; CODE_ON(); if (debug_modes
) tcc_tcov_block_begin(); return t
; }
/* Emit an unconditional jump to the known address 't', then suppress
   code generation: everything after an unconditional jump is dead
   until the next label (gsym/gind turns it back on). */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit a forward jump chained onto 't' and suppress code generation
   (code after an unconditional jump is unreachable until a label).
   Returns the updated jump-chain head from gjmp(). */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
75 /* These are #undef'd at the end of this file */
76 #define gjmp_addr gjmp_addr_acs
80 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
81 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
82 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
84 static int last_line_num
, new_file
, func_ind
; /* debug info control */
85 ST_DATA
const char *funcname
;
86 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
87 static CString initstr
;
90 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_INT
93 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T VT_LLONG
96 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
97 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
100 ST_DATA
struct switch_t
{
104 } **p
; int n
; /* list of case ranges */
105 int def_sym
; /* default symbol */
108 struct switch_t
*prev
;
110 } *cur_switch
; /* current switch */
112 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
113 /*list of temporary local variables on the stack in current function. */
114 ST_DATA
struct temp_local_variable
{
115 int location
; //offset on stack. Svalue.c.i
118 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
119 short nb_temp_local_vars
;
121 static struct scope
{
123 struct { int loc
, locorig
, num
; } vla
;
124 struct { Sym
*s
; int n
; } cl
;
127 } *cur_scope
, *loop_scope
, *root_scope
;
135 /********************************************************/
136 /* stab debug support */
138 static const struct {
141 } default_debug
[] = {
142 { VT_INT
, "int:t1=r1;-2147483648;2147483647;" },
143 { VT_BYTE
, "char:t2=r2;0;127;" },
145 { VT_LONG
| VT_INT
, "long int:t3=r3;-2147483648;2147483647;" },
147 { VT_LLONG
| VT_LONG
, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
149 { VT_INT
| VT_UNSIGNED
, "unsigned int:t4=r4;0;037777777777;" },
151 { VT_LONG
| VT_INT
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;037777777777;" },
153 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
154 { VT_LLONG
| VT_LONG
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;01777777777777777777777;" },
156 { VT_QLONG
, "__int128:t6=r6;0;-1;" },
157 { VT_QLONG
| VT_UNSIGNED
, "__int128 unsigned:t7=r7;0;-1;" },
158 { VT_LLONG
, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
159 { VT_LLONG
| VT_UNSIGNED
, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
160 { VT_SHORT
, "short int:t10=r10;-32768;32767;" },
161 { VT_SHORT
| VT_UNSIGNED
, "short unsigned int:t11=r11;0;65535;" },
162 { VT_BYTE
| VT_DEFSIGN
, "signed char:t12=r12;-128;127;" },
163 { VT_BYTE
| VT_DEFSIGN
| VT_UNSIGNED
, "unsigned char:t13=r13;0;255;" },
164 { VT_FLOAT
, "float:t14=r1;4;0;" },
165 { VT_DOUBLE
, "double:t15=r1;8;0;" },
166 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
167 { VT_DOUBLE
| VT_LONG
, "long double:t16=r1;8;0;" },
169 { VT_LDOUBLE
, "long double:t16=r1;16;0;" },
171 { -1, "_Float32:t17=r1;4;0;" },
172 { -1, "_Float64:t18=r1;8;0;" },
173 { -1, "_Float128:t19=r1;16;0;" },
174 { -1, "_Float32x:t20=r1;8;0;" },
175 { -1, "_Float64x:t21=r1;16;0;" },
176 { -1, "_Decimal32:t22=r1;4;0;" },
177 { -1, "_Decimal64:t23=r1;8;0;" },
178 { -1, "_Decimal128:t24=r1;16;0;" },
179 /* if default char is unsigned */
180 { VT_BYTE
| VT_UNSIGNED
, "unsigned char:t25=r25;0;255;" },
182 { VT_BOOL
, "bool:t26=r26;0;255;" },
183 { VT_VOID
, "void:t27=27" },
186 static int debug_next_type
;
188 static struct debug_hash
{
193 static int n_debug_hash
;
195 static struct debug_info
{
206 struct debug_info
*child
, *next
, *last
, *parent
;
207 } *debug_info
, *debug_info_root
;
210 unsigned long offset
;
211 unsigned long last_file_name
;
212 unsigned long last_func_name
;
217 /********************************************************/
219 #define precedence_parser
220 static void init_prec(void);
222 /********************************************************/
223 #ifndef CONFIG_TCC_ASM
224 ST_FUNC
void asm_instr(void)
226 tcc_error("inline asm() not supported");
228 ST_FUNC
void asm_global_instr(void)
230 tcc_error("inline asm() not supported");
234 /* ------------------------------------------------------------------------- */
235 static void gen_cast(CType
*type
);
236 static void gen_cast_s(int t
);
237 static inline CType
*pointed_type(CType
*type
);
238 static int is_compatible_types(CType
*type1
, CType
*type2
);
239 static int parse_btype(CType
*type
, AttributeDef
*ad
);
240 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
241 static void parse_expr_type(CType
*type
);
242 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
243 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
244 static void block(int is_expr
);
245 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
246 static void decl(int l
);
247 static int decl0(int l
, int is_for_loop_init
, Sym
*);
248 static void expr_eq(void);
249 static void vla_runtime_type_size(CType
*type
, int *a
);
250 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
251 static inline int64_t expr_const64(void);
252 static void vpush64(int ty
, unsigned long long v
);
253 static void vpush(CType
*type
);
254 static int gvtst(int inv
, int t
);
255 static void gen_inline_functions(TCCState
*s
);
256 static void free_inline_functions(TCCState
*s
);
257 static void skip_or_save_block(TokenString
**str
);
258 static void gv_dup(void);
259 static int get_temp_local_var(int size
,int align
);
260 static void clear_temp_local_var_list();
261 static void cast_error(CType
*st
, CType
*dt
);
263 ST_INLN
int is_float(int t
)
265 int bt
= t
& VT_BTYPE
;
266 return bt
== VT_LDOUBLE
272 static inline int is_integer_btype(int bt
)
281 static int btype_size(int bt
)
283 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
287 bt
== VT_PTR
? PTR_SIZE
: 0;
290 /* returns function return register from type */
291 static int R_RET(int t
)
295 #ifdef TCC_TARGET_X86_64
296 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
298 #elif defined TCC_TARGET_RISCV64
299 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
305 /* returns 2nd function return register, if any */
306 static int R2_RET(int t
)
312 #elif defined TCC_TARGET_X86_64
317 #elif defined TCC_TARGET_RISCV64
324 /* returns true for two-word types */
325 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
327 /* put function return registers to stack value */
328 static void PUT_R_RET(SValue
*sv
, int t
)
330 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
333 /* returns function return register class for type t */
334 static int RC_RET(int t
)
336 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
339 /* returns generic register class for type t */
340 static int RC_TYPE(int t
)
344 #ifdef TCC_TARGET_X86_64
345 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
347 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
349 #elif defined TCC_TARGET_RISCV64
350 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
356 /* returns 2nd register class corresponding to t and rc */
357 static int RC2_TYPE(int t
, int rc
)
359 if (!USING_TWO_WORDS(t
))
374 /* we use our own 'finite' function to avoid potential problems with
375 non standard math libs */
376 /* XXX: endianness dependent */
377 ST_FUNC
int ieee_finite(double d
)
380 memcpy(p
, &d
, sizeof(double));
381 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
384 /* compiling intel long double natively */
385 #if (defined __i386__ || defined __x86_64__) \
386 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
387 # define TCC_IS_NATIVE_387
390 ST_FUNC
void test_lvalue(void)
392 if (!(vtop
->r
& VT_LVAL
))
396 ST_FUNC
void check_vstack(void)
398 if (vtop
!= vstack
- 1)
399 tcc_error("internal compiler error: vstack leak (%d)",
400 (int)(vtop
- vstack
+ 1));
403 /* ------------------------------------------------------------------------- */
404 /* vstack debugging aid */
407 void pv (const char *lbl
, int a
, int b
)
410 for (i
= a
; i
< a
+ b
; ++i
) {
411 SValue
*p
= &vtop
[-i
];
412 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
413 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
418 /* ------------------------------------------------------------------------- */
419 /* start of translation unit info */
420 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
426 /* file info: full path + filename */
427 section_sym
= put_elf_sym(symtab_section
, 0, 0,
428 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
429 text_section
->sh_num
, NULL
);
430 getcwd(buf
, sizeof(buf
));
432 normalize_slashes(buf
);
434 pstrcat(buf
, sizeof(buf
), "/");
435 put_stabs_r(s1
, buf
, N_SO
, 0, 0,
436 text_section
->data_offset
, text_section
, section_sym
);
437 put_stabs_r(s1
, file
->prev
? file
->prev
->filename
: file
->filename
,
439 text_section
->data_offset
, text_section
, section_sym
);
440 for (i
= 0; i
< sizeof (default_debug
) / sizeof (default_debug
[0]); i
++)
441 put_stabs(s1
, default_debug
[i
].name
, N_LSYM
, 0, 0, 0);
443 new_file
= last_line_num
= 0;
445 debug_next_type
= sizeof(default_debug
) / sizeof(default_debug
[0]);
449 /* we're currently 'including' the <command line> */
453 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
454 symbols can be safely used */
455 put_elf_sym(symtab_section
, 0, 0,
456 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
457 SHN_ABS
, file
->filename
);
460 /* put end of translation unit info */
461 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
465 put_stabs_r(s1
, NULL
, N_SO
, 0, 0,
466 text_section
->data_offset
, text_section
, section_sym
);
467 tcc_free(debug_hash
);
470 static BufferedFile
* put_new_file(TCCState
*s1
)
472 BufferedFile
*f
= file
;
473 /* use upper file if from inline ":asm:" */
474 if (f
->filename
[0] == ':')
477 put_stabs_r(s1
, f
->filename
, N_SOL
, 0, 0, ind
, text_section
, section_sym
);
478 new_file
= last_line_num
= 0;
483 /* put alternative filename */
484 ST_FUNC
void tcc_debug_putfile(TCCState
*s1
, const char *filename
)
486 if (0 == strcmp(file
->filename
, filename
))
488 pstrcpy(file
->filename
, sizeof(file
->filename
), filename
);
492 /* begin of #include */
493 ST_FUNC
void tcc_debug_bincl(TCCState
*s1
)
497 put_stabs(s1
, file
->filename
, N_BINCL
, 0, 0, 0);
501 /* end of #include */
502 ST_FUNC
void tcc_debug_eincl(TCCState
*s1
)
506 put_stabn(s1
, N_EINCL
, 0, 0, 0);
510 /* generate line number info */
511 static void tcc_debug_line(TCCState
*s1
)
515 || cur_text_section
!= text_section
516 || !(f
= put_new_file(s1
))
517 || last_line_num
== f
->line_num
)
519 if (func_ind
!= -1) {
520 put_stabn(s1
, N_SLINE
, 0, f
->line_num
, ind
- func_ind
);
522 /* from tcc_assemble */
523 put_stabs_r(s1
, NULL
, N_SLINE
, 0, f
->line_num
, ind
, text_section
, section_sym
);
525 last_line_num
= f
->line_num
;
528 static void tcc_debug_stabs (TCCState
*s1
, const char *str
, int type
, unsigned long value
,
529 Section
*sec
, int sym_index
)
535 (struct debug_sym
*)tcc_realloc (debug_info
->sym
,
536 sizeof(struct debug_sym
) *
537 (debug_info
->n_sym
+ 1));
538 s
= debug_info
->sym
+ debug_info
->n_sym
++;
541 s
->str
= tcc_strdup(str
);
543 s
->sym_index
= sym_index
;
546 put_stabs_r (s1
, str
, type
, 0, 0, value
, sec
, sym_index
);
548 put_stabs (s1
, str
, type
, 0, 0, value
);
551 static void tcc_debug_stabn(TCCState
*s1
, int type
, int value
)
555 if (type
== N_LBRAC
) {
556 struct debug_info
*info
=
557 (struct debug_info
*) tcc_mallocz(sizeof (*info
));
560 info
->parent
= debug_info
;
562 if (debug_info
->child
) {
563 if (debug_info
->child
->last
)
564 debug_info
->child
->last
->next
= info
;
566 debug_info
->child
->next
= info
;
567 debug_info
->child
->last
= info
;
570 debug_info
->child
= info
;
573 debug_info_root
= info
;
577 debug_info
->end
= value
;
578 debug_info
= debug_info
->parent
;
582 static void tcc_get_debug_info(TCCState
*s1
, Sym
*s
, CString
*result
)
591 type
= t
->type
.t
& ~(VT_STORAGE
| VT_CONSTANT
| VT_VOLATILE
);
592 if ((type
& VT_BTYPE
) != VT_BYTE
)
594 if (type
== VT_PTR
|| type
== (VT_PTR
| VT_ARRAY
))
595 n
++, t
= t
->type
.ref
;
599 if ((type
& VT_BTYPE
) == VT_STRUCT
) {
603 for (i
= 0; i
< n_debug_hash
; i
++) {
604 if (t
== debug_hash
[i
].type
) {
605 debug_type
= debug_hash
[i
].debug_type
;
609 if (debug_type
== -1) {
610 debug_type
= ++debug_next_type
;
611 debug_hash
= (struct debug_hash
*)
612 tcc_realloc (debug_hash
,
613 (n_debug_hash
+ 1) * sizeof(*debug_hash
));
614 debug_hash
[n_debug_hash
].debug_type
= debug_type
;
615 debug_hash
[n_debug_hash
++].type
= t
;
617 cstr_printf (&str
, "%s:T%d=%c%d",
618 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
619 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
621 IS_UNION (t
->type
.t
) ? 'u' : 's',
624 int pos
, size
, align
;
627 cstr_printf (&str
, "%s:",
628 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
629 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
630 tcc_get_debug_info (s1
, t
, &str
);
631 if (t
->type
.t
& VT_BITFIELD
) {
632 pos
= t
->c
* 8 + BIT_POS(t
->type
.t
);
633 size
= BIT_SIZE(t
->type
.t
);
637 size
= type_size(&t
->type
, &align
) * 8;
639 cstr_printf (&str
, ",%d,%d;", pos
, size
);
641 cstr_printf (&str
, ";");
642 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
646 else if (IS_ENUM(type
)) {
647 Sym
*e
= t
= t
->type
.ref
;
649 debug_type
= ++debug_next_type
;
651 cstr_printf (&str
, "%s:T%d=e",
652 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
653 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
657 cstr_printf (&str
, "%s:",
658 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
659 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
660 cstr_printf (&str
, e
->type
.t
& VT_UNSIGNED
? "%u," : "%d,",
663 cstr_printf (&str
, ";");
664 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
667 else if ((type
& VT_BTYPE
) != VT_FUNC
) {
668 type
&= ~VT_STRUCT_MASK
;
670 debug_type
<= sizeof(default_debug
) / sizeof(default_debug
[0]);
672 if (default_debug
[debug_type
- 1].type
== type
)
674 if (debug_type
> sizeof(default_debug
) / sizeof(default_debug
[0]))
678 cstr_printf (result
, "%d=", ++debug_next_type
);
681 type
= t
->type
.t
& ~(VT_STORAGE
| VT_CONSTANT
| VT_VOLATILE
);
682 if ((type
& VT_BTYPE
) != VT_BYTE
)
685 cstr_printf (result
, "%d=*", ++debug_next_type
);
686 else if (type
== (VT_PTR
| VT_ARRAY
))
687 cstr_printf (result
, "%d=ar1;0;%d;",
688 ++debug_next_type
, t
->type
.ref
->c
- 1);
689 else if (type
== VT_FUNC
) {
690 cstr_printf (result
, "%d=f", ++debug_next_type
);
691 tcc_get_debug_info (s1
, t
->type
.ref
, result
);
698 cstr_printf (result
, "%d", debug_type
);
701 static void tcc_debug_finish (TCCState
*s1
, struct debug_info
*cur
)
705 struct debug_info
*next
= cur
->next
;
707 for (i
= 0; i
< cur
->n_sym
; i
++) {
708 struct debug_sym
*s
= &cur
->sym
[i
];
711 put_stabs_r(s1
, s
->str
, s
->type
, 0, 0, s
->value
,
712 s
->sec
, s
->sym_index
);
714 put_stabs(s1
, s
->str
, s
->type
, 0, 0, s
->value
);
718 put_stabn(s1
, N_LBRAC
, 0, 0, cur
->start
);
719 tcc_debug_finish (s1
, cur
->child
);
720 put_stabn(s1
, N_RBRAC
, 0, 0, cur
->end
);
726 static void tcc_add_debug_info(TCCState
*s1
, int param
, Sym
*s
, Sym
*e
)
731 cstr_new (&debug_str
);
732 for (; s
!= e
; s
= s
->prev
) {
733 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
735 cstr_reset (&debug_str
);
736 cstr_printf (&debug_str
, "%s:%s", get_tok_str(s
->v
, NULL
), param
? "p" : "");
737 tcc_get_debug_info(s1
, s
, &debug_str
);
738 tcc_debug_stabs(s1
, debug_str
.data
, param
? N_PSYM
: N_LSYM
, s
->c
, NULL
, 0);
740 cstr_free (&debug_str
);
743 /* put function symbol */
744 static void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
750 debug_info_root
= NULL
;
752 tcc_debug_stabn(s1
, N_LBRAC
, ind
- func_ind
);
753 if (!(f
= put_new_file(s1
)))
755 cstr_new (&debug_str
);
756 cstr_printf(&debug_str
, "%s:%c", funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
757 tcc_get_debug_info(s1
, sym
->type
.ref
, &debug_str
);
758 put_stabs_r(s1
, debug_str
.data
, N_FUN
, 0, f
->line_num
, 0, cur_text_section
, sym
->c
);
759 cstr_free (&debug_str
);
764 /* put function size */
765 static void tcc_debug_funcend(TCCState
*s1
, int size
)
769 tcc_debug_stabn(s1
, N_RBRAC
, size
);
770 tcc_debug_finish (s1
, debug_info_root
);
774 static void tcc_debug_extern_sym(TCCState
*s1
, Sym
*sym
, int sh_num
, int sym_bind
, int sym_type
)
781 if (sym_type
== STT_FUNC
|| sym
->v
>= SYM_FIRST_ANOM
)
783 s
= s1
->sections
[sh_num
];
786 cstr_printf (&str
, "%s:%c",
787 get_tok_str(sym
->v
, NULL
),
788 sym_bind
== STB_GLOBAL
? 'G' : local_scope
? 'V' : 'S'
790 tcc_get_debug_info(s1
, sym
, &str
);
791 if (sym_bind
== STB_GLOBAL
)
792 tcc_debug_stabs(s1
, str
.data
, N_GSYM
, 0, NULL
, 0);
794 tcc_debug_stabs(s1
, str
.data
,
795 (sym
->type
.t
& VT_STATIC
) && data_section
== s
796 ? N_STSYM
: N_LCSYM
, 0, s
, sym
->c
);
800 static void tcc_debug_typedef(TCCState
*s1
, Sym
*sym
)
807 cstr_printf (&str
, "%s:t",
808 (sym
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
809 ? "" : get_tok_str(sym
->v
& ~SYM_FIELD
, NULL
));
810 tcc_get_debug_info(s1
, sym
, &str
);
811 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
815 /* ------------------------------------------------------------------------- */
816 /* for section layout see lib/tcov.c */
818 static void tcc_tcov_block_end(int line
);
820 static void tcc_tcov_block_begin(void)
824 unsigned long last_offset
= tcov_data
.offset
;
826 tcc_tcov_block_end (0);
827 if (tcc_state
->test_coverage
== 0 || nocode_wanted
)
830 if (tcov_data
.last_file_name
== 0 ||
831 strcmp ((const char *)(tcov_section
->data
+ tcov_data
.last_file_name
),
832 file
->true_filename
) != 0) {
836 if (tcov_data
.last_func_name
)
837 section_ptr_add(tcov_section
, 1);
838 if (tcov_data
.last_file_name
)
839 section_ptr_add(tcov_section
, 1);
840 tcov_data
.last_func_name
= 0;
842 if (file
->true_filename
[0] == '/') {
843 tcov_data
.last_file_name
= tcov_section
->data_offset
;
844 cstr_printf (&cstr
, "%s", file
->true_filename
);
847 getcwd (wd
, sizeof(wd
));
848 tcov_data
.last_file_name
= tcov_section
->data_offset
+ strlen(wd
) + 1;
849 cstr_printf (&cstr
, "%s/%s", wd
, file
->true_filename
);
851 ptr
= section_ptr_add(tcov_section
, cstr
.size
+ 1);
852 strncpy((char *)ptr
, cstr
.data
, cstr
.size
);
854 normalize_slashes((char *)ptr
);
858 if (tcov_data
.last_func_name
== 0 ||
859 strcmp ((const char *)(tcov_section
->data
+ tcov_data
.last_func_name
),
863 if (tcov_data
.last_func_name
)
864 section_ptr_add(tcov_section
, 1);
865 tcov_data
.last_func_name
= tcov_section
->data_offset
;
866 len
= strlen (funcname
);
867 ptr
= section_ptr_add(tcov_section
, len
+ 1);
868 strncpy((char *)ptr
, funcname
, len
);
869 section_ptr_add(tcov_section
, -tcov_section
->data_offset
& 7);
870 ptr
= section_ptr_add(tcov_section
, 8);
871 write64le (ptr
, file
->line_num
);
873 if (ind
== tcov_data
.ind
&& tcov_data
.line
== file
->line_num
)
874 tcov_data
.offset
= last_offset
;
877 label
.type
.t
= VT_LLONG
| VT_STATIC
;
879 ptr
= section_ptr_add(tcov_section
, 16);
880 tcov_data
.line
= file
->line_num
;
881 write64le (ptr
, (tcov_data
.line
<< 8) | 0xff);
882 put_extern_sym(&label
, tcov_section
,
883 ((unsigned char *)ptr
- tcov_section
->data
) + 8, 0);
884 sv
.type
= label
.type
;
885 sv
.r
= VT_SYM
| VT_LVAL
| VT_CONST
;
889 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || \
890 defined TCC_TARGET_ARM || defined TCC_TARGET_ARM64 || \
891 defined TCC_TARGET_RISCV64
892 gen_increment_tcov (&sv
);
898 tcov_data
.offset
= (unsigned char *)ptr
- tcov_section
->data
;
903 static void tcc_tcov_block_end(int line
)
905 if (tcc_state
->test_coverage
== 0)
907 if (tcov_data
.offset
) {
908 void *ptr
= tcov_section
->data
+ tcov_data
.offset
;
909 unsigned long long nline
= line
? line
: file
->line_num
;
911 write64le (ptr
, (read64le (ptr
) & 0xfffffffffull
) | (nline
<< 36));
912 tcov_data
.offset
= 0;
916 static void tcc_tcov_check_line(int start
)
918 if (tcc_state
->test_coverage
== 0)
920 if (tcov_data
.line
!= file
->line_num
) {
921 if ((tcov_data
.line
+ 1) != file
->line_num
) {
922 tcc_tcov_block_end (tcov_data
.line
);
924 tcc_tcov_block_begin ();
927 tcov_data
.line
= file
->line_num
;
931 static void tcc_tcov_start(void)
933 if (tcc_state
->test_coverage
== 0)
935 memset (&tcov_data
, 0, sizeof (tcov_data
));
936 if (tcov_section
== NULL
) {
937 tcov_section
= new_section(tcc_state
, ".tcov", SHT_PROGBITS
,
938 SHF_ALLOC
| SHF_WRITE
);
939 section_ptr_add(tcov_section
, 4); // pointer to executable name
943 static void tcc_tcov_end(void)
945 if (tcc_state
->test_coverage
== 0)
947 if (tcov_data
.last_func_name
)
948 section_ptr_add(tcov_section
, 1);
949 if (tcov_data
.last_file_name
)
950 section_ptr_add(tcov_section
, 1);
953 /* ------------------------------------------------------------------------- */
954 /* initialize vstack and types. This must be done also for tcc -E */
955 ST_FUNC
void tccgen_init(TCCState
*s1
)
958 memset(vtop
, 0, sizeof *vtop
);
960 /* define some often used types */
963 char_type
.t
= VT_BYTE
;
964 if (s1
->char_is_unsigned
)
965 char_type
.t
|= VT_UNSIGNED
;
966 char_pointer_type
= char_type
;
967 mk_pointer(&char_pointer_type
);
969 func_old_type
.t
= VT_FUNC
;
970 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
971 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
972 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
973 #ifdef precedence_parser
979 ST_FUNC
int tccgen_compile(TCCState
*s1
)
981 cur_text_section
= NULL
;
983 anon_sym
= SYM_FIRST_ANOM
;
986 nocode_wanted
= 0x80000000;
988 debug_modes
= s1
->do_debug
| s1
->test_coverage
<< 1;
992 #ifdef TCC_TARGET_ARM
996 printf("%s: **** new file\n", file
->filename
);
998 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
1001 gen_inline_functions(s1
);
1003 /* end of translation unit info */
1009 ST_FUNC
void tccgen_finish(TCCState
*s1
)
1011 cstr_free(&initstr
);
1012 free_inline_functions(s1
);
1013 sym_pop(&global_stack
, NULL
, 0);
1014 sym_pop(&local_stack
, NULL
, 0);
1015 /* free preprocessor macros */
1017 /* free sym_pools */
1018 dynarray_reset(&sym_pools
, &nb_sym_pools
);
1019 sym_free_first
= NULL
;
1022 /* ------------------------------------------------------------------------- */
1023 ST_FUNC ElfSym
*elfsym(Sym
*s
)
1027 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
1030 /* apply storage attributes to Elf symbol */
1031 ST_FUNC
void update_storage(Sym
*sym
)
1034 int sym_bind
, old_sym_bind
;
1040 if (sym
->a
.visibility
)
1041 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
1042 | sym
->a
.visibility
;
1044 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
1045 sym_bind
= STB_LOCAL
;
1046 else if (sym
->a
.weak
)
1047 sym_bind
= STB_WEAK
;
1049 sym_bind
= STB_GLOBAL
;
1050 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
1051 if (sym_bind
!= old_sym_bind
) {
1052 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
1055 #ifdef TCC_TARGET_PE
1056 if (sym
->a
.dllimport
)
1057 esym
->st_other
|= ST_PE_IMPORT
;
1058 if (sym
->a
.dllexport
)
1059 esym
->st_other
|= ST_PE_EXPORT
;
1063 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
1064 get_tok_str(sym
->v
, NULL
),
1065 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
1073 /* ------------------------------------------------------------------------- */
1074 /* update sym->c so that it points to an external symbol in section
1075 'section' with value 'value' */
1077 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
1078 addr_t value
, unsigned long size
,
1079 int can_add_underscore
)
1081 int sym_type
, sym_bind
, info
, other
, t
;
1087 name
= get_tok_str(sym
->v
, NULL
);
1089 if ((t
& VT_BTYPE
) == VT_FUNC
) {
1090 sym_type
= STT_FUNC
;
1091 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
1092 sym_type
= STT_NOTYPE
;
1093 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
1094 sym_type
= STT_FUNC
;
1096 sym_type
= STT_OBJECT
;
1098 if (t
& (VT_STATIC
| VT_INLINE
))
1099 sym_bind
= STB_LOCAL
;
1101 sym_bind
= STB_GLOBAL
;
1104 #ifdef TCC_TARGET_PE
1105 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
1106 Sym
*ref
= sym
->type
.ref
;
1107 if (ref
->a
.nodecorate
) {
1108 can_add_underscore
= 0;
1110 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
1111 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
1113 other
|= ST_PE_STDCALL
;
1114 can_add_underscore
= 0;
1119 if (sym
->asm_label
) {
1120 name
= get_tok_str(sym
->asm_label
, NULL
);
1121 can_add_underscore
= 0;
1124 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
1126 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
1130 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
1131 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
1134 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
1138 esym
->st_value
= value
;
1139 esym
->st_size
= size
;
1140 esym
->st_shndx
= sh_num
;
1142 update_storage(sym
);
1145 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
1146 addr_t value
, unsigned long size
)
1148 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
1149 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
1152 /* add a new relocation entry to symbol 'sym' in section 's' */
1153 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
1158 if (nocode_wanted
&& s
== cur_text_section
)
1163 put_extern_sym(sym
, NULL
, 0, 0);
1167 /* now we can add ELF relocation info */
1168 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
1172 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
1174 greloca(s
, sym
, offset
, type
, 0);
1178 /* ------------------------------------------------------------------------- */
1179 /* symbol allocator */
1180 static Sym
*__sym_malloc(void)
1182 Sym
*sym_pool
, *sym
, *last_sym
;
1185 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
1186 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
1188 last_sym
= sym_free_first
;
1190 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
1191 sym
->next
= last_sym
;
1195 sym_free_first
= last_sym
;
1199 static inline Sym
*sym_malloc(void)
1203 sym
= sym_free_first
;
1205 sym
= __sym_malloc();
1206 sym_free_first
= sym
->next
;
1209 sym
= tcc_malloc(sizeof(Sym
));
1214 ST_INLN
void sym_free(Sym
*sym
)
1217 sym
->next
= sym_free_first
;
1218 sym_free_first
= sym
;
1224 /* push, without hashing */
1225 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
1230 memset(s
, 0, sizeof *s
);
1240 /* find a symbol and return its associated structure. 's' is the top
1241 of the symbol stack */
1242 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
1247 else if (s
->v
== -1)
1254 /* structure lookup */
1255 ST_INLN Sym
*struct_find(int v
)
1258 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1260 return table_ident
[v
]->sym_struct
;
1263 /* find an identifier */
1264 ST_INLN Sym
*sym_find(int v
)
1267 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1269 return table_ident
[v
]->sym_identifier
;
1272 static int sym_scope(Sym
*s
)
1274 if (IS_ENUM_VAL (s
->type
.t
))
1275 return s
->type
.ref
->sym_scope
;
1277 return s
->sym_scope
;
1280 /* push a given symbol on the symbol stack */
1281 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
1290 s
= sym_push2(ps
, v
, type
->t
, c
);
1291 s
->type
.ref
= type
->ref
;
1293 /* don't record fields or anonymous symbols */
1295 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1296 /* record symbol in token array */
1297 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1299 ps
= &ts
->sym_struct
;
1301 ps
= &ts
->sym_identifier
;
1304 s
->sym_scope
= local_scope
;
1305 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
1306 tcc_error("redeclaration of '%s'",
1307 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
1312 /* push a global identifier */
1313 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
1316 s
= sym_push2(&global_stack
, v
, t
, c
);
1317 s
->r
= VT_CONST
| VT_SYM
;
1318 /* don't record anonymous symbol */
1319 if (v
< SYM_FIRST_ANOM
) {
1320 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
1321 /* modify the top most local identifier, so that sym_identifier will
1322 point to 's' when popped; happens when called from inline asm */
1323 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
1324 ps
= &(*ps
)->prev_tok
;
1331 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1332 pop them yet from the list, but do remove them from the token array. */
1333 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
1343 /* remove symbol in token array */
1345 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1346 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1348 ps
= &ts
->sym_struct
;
1350 ps
= &ts
->sym_identifier
;
1361 /* ------------------------------------------------------------------------- */
1362 static void vcheck_cmp(void)
1364 /* cannot let cpu flags if other instruction are generated. Also
1365 avoid leaving VT_JMP anywhere except on the top of the stack
1366 because it would complicate the code generator.
1368 Don't do this when nocode_wanted. vtop might come from
1369 !nocode_wanted regions (see 88_codeopt.c) and transforming
1370 it to a register without actually generating code is wrong
1371 as their value might still be used for real. All values
1372 we push under nocode_wanted will eventually be popped
1373 again, so that the VT_CMP/VT_JMP value will be in vtop
1374 when code is unsuppressed again. */
1376 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
1380 static void vsetc(CType
*type
, int r
, CValue
*vc
)
1382 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1383 tcc_error("memory full (vstack)");
1388 vtop
->r2
= VT_CONST
;
1393 ST_FUNC
void vswap(void)
1403 /* pop stack value */
1404 ST_FUNC
void vpop(void)
1407 v
= vtop
->r
& VT_VALMASK
;
1408 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1409 /* for x86, we need to pop the FP stack */
1410 if (v
== TREG_ST0
) {
1411 o(0xd8dd); /* fstp %st(0) */
1415 /* need to put correct jump if && or || without test */
1422 /* push constant of type "type" with useless value */
1423 static void vpush(CType
*type
)
1425 vset(type
, VT_CONST
, 0);
1428 /* push arbitrary 64bit constant */
1429 static void vpush64(int ty
, unsigned long long v
)
1436 vsetc(&ctype
, VT_CONST
, &cval
);
1439 /* push integer constant */
1440 ST_FUNC
void vpushi(int v
)
1445 /* push a pointer sized constant */
1446 static void vpushs(addr_t v
)
1448 vpush64(VT_SIZE_T
, v
);
1451 /* push long long constant */
1452 static inline void vpushll(long long v
)
1454 vpush64(VT_LLONG
, v
);
1457 ST_FUNC
void vset(CType
*type
, int r
, int v
)
1461 vsetc(type
, r
, &cval
);
1464 static void vseti(int r
, int v
)
1472 ST_FUNC
void vpushv(SValue
*v
)
1474 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1475 tcc_error("memory full (vstack)");
1480 static void vdup(void)
1485 /* rotate n first stack elements to the bottom
1486 I1 ... In -> I2 ... In I1 [top is right]
1488 ST_FUNC
void vrotb(int n
)
1495 for(i
=-n
+1;i
!=0;i
++)
1496 vtop
[i
] = vtop
[i
+1];
1500 /* rotate the n elements before entry e towards the top
1501 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1503 ST_FUNC
void vrote(SValue
*e
, int n
)
1510 for(i
= 0;i
< n
- 1; i
++)
1515 /* rotate n first stack elements to the top
1516 I1 ... In -> In I1 ... I(n-1) [top is right]
1518 ST_FUNC
void vrott(int n
)
1523 /* ------------------------------------------------------------------------- */
1524 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1526 /* called from generators to set the result from relational ops */
1527 ST_FUNC
void vset_VT_CMP(int op
)
1535 /* called once before asking generators to load VT_CMP to a register */
1536 static void vset_VT_JMP(void)
1538 int op
= vtop
->cmp_op
;
1540 if (vtop
->jtrue
|| vtop
->jfalse
) {
1541 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1542 int inv
= op
& (op
< 2); /* small optimization */
1543 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1545 /* otherwise convert flags (rsp. 0/1) to register */
1547 if (op
< 2) /* doesn't seem to happen */
1552 /* Set CPU Flags, doesn't yet jump */
1553 static void gvtst_set(int inv
, int t
)
1557 if (vtop
->r
!= VT_CMP
) {
1560 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1561 vset_VT_CMP(vtop
->c
.i
!= 0);
1564 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1565 *p
= gjmp_append(*p
, t
);
1568 /* Generate value test
1570 * Generate a test for any value (jump, comparison and integers) */
1571 static int gvtst(int inv
, int t
)
1576 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1578 x
= u
, u
= t
, t
= x
;
1581 /* jump to the wanted target */
1583 t
= gjmp_cond(op
^ inv
, t
);
1586 /* resolve complementary jumps to here */
1593 /* generate a zero or nozero test */
1594 static void gen_test_zero(int op
)
1596 if (vtop
->r
== VT_CMP
) {
1600 vtop
->jfalse
= vtop
->jtrue
;
1610 /* ------------------------------------------------------------------------- */
1611 /* push a symbol value of TYPE */
1612 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1616 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1620 /* Return a static symbol pointing to a section */
1621 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1627 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1628 sym
->type
.t
|= VT_STATIC
;
1629 put_extern_sym(sym
, sec
, offset
, size
);
1633 /* push a reference to a section offset by adding a dummy symbol */
1634 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1636 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1639 /* define a new external reference to a symbol 'v' of type 'u' */
1640 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1646 /* push forward reference */
1647 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1648 s
->type
.ref
= type
->ref
;
1649 } else if (IS_ASM_SYM(s
)) {
1650 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1651 s
->type
.ref
= type
->ref
;
1657 /* create an external reference with no specific type similar to asm labels.
1658 This avoids type conflicts if the symbol is used from C too */
1659 ST_FUNC Sym
*external_helper_sym(int v
)
1661 CType ct
= { VT_ASM_FUNC
, NULL
};
1662 return external_global_sym(v
, &ct
);
1665 /* push a reference to an helper function (such as memmove) */
1666 ST_FUNC
void vpush_helper_func(int v
)
1668 vpushsym(&func_old_type
, external_helper_sym(v
));
1671 /* Merge symbol attributes. */
1672 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1674 if (sa1
->aligned
&& !sa
->aligned
)
1675 sa
->aligned
= sa1
->aligned
;
1676 sa
->packed
|= sa1
->packed
;
1677 sa
->weak
|= sa1
->weak
;
1678 if (sa1
->visibility
!= STV_DEFAULT
) {
1679 int vis
= sa
->visibility
;
1680 if (vis
== STV_DEFAULT
1681 || vis
> sa1
->visibility
)
1682 vis
= sa1
->visibility
;
1683 sa
->visibility
= vis
;
1685 sa
->dllexport
|= sa1
->dllexport
;
1686 sa
->nodecorate
|= sa1
->nodecorate
;
1687 sa
->dllimport
|= sa1
->dllimport
;
1690 /* Merge function attributes. */
1691 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1693 if (fa1
->func_call
&& !fa
->func_call
)
1694 fa
->func_call
= fa1
->func_call
;
1695 if (fa1
->func_type
&& !fa
->func_type
)
1696 fa
->func_type
= fa1
->func_type
;
1697 if (fa1
->func_args
&& !fa
->func_args
)
1698 fa
->func_args
= fa1
->func_args
;
1699 if (fa1
->func_noreturn
)
1700 fa
->func_noreturn
= 1;
1707 /* Merge attributes. */
1708 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1710 merge_symattr(&ad
->a
, &ad1
->a
);
1711 merge_funcattr(&ad
->f
, &ad1
->f
);
1714 ad
->section
= ad1
->section
;
1715 if (ad1
->alias_target
)
1716 ad
->alias_target
= ad1
->alias_target
;
1718 ad
->asm_label
= ad1
->asm_label
;
1720 ad
->attr_mode
= ad1
->attr_mode
;
1723 /* Merge some type attributes. */
1724 static void patch_type(Sym
*sym
, CType
*type
)
1726 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1727 if (!(sym
->type
.t
& VT_EXTERN
))
1728 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1729 sym
->type
.t
&= ~VT_EXTERN
;
1732 if (IS_ASM_SYM(sym
)) {
1733 /* stay static if both are static */
1734 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1735 sym
->type
.ref
= type
->ref
;
1738 if (!is_compatible_types(&sym
->type
, type
)) {
1739 tcc_error("incompatible types for redefinition of '%s'",
1740 get_tok_str(sym
->v
, NULL
));
1742 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1743 int static_proto
= sym
->type
.t
& VT_STATIC
;
1744 /* warn if static follows non-static function declaration */
1745 if ((type
->t
& VT_STATIC
) && !static_proto
1746 /* XXX this test for inline shouldn't be here. Until we
1747 implement gnu-inline mode again it silences a warning for
1748 mingw caused by our workarounds. */
1749 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1750 tcc_warning("static storage ignored for redefinition of '%s'",
1751 get_tok_str(sym
->v
, NULL
));
1753 /* set 'inline' if both agree or if one has static */
1754 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1755 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1756 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1757 static_proto
|= VT_INLINE
;
1760 if (0 == (type
->t
& VT_EXTERN
)) {
1761 struct FuncAttr f
= sym
->type
.ref
->f
;
1762 /* put complete type, use static from prototype */
1763 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1764 sym
->type
.ref
= type
->ref
;
1765 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1767 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1770 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1771 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1772 sym
->type
.ref
= type
->ref
;
1776 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1777 /* set array size if it was omitted in extern declaration */
1778 sym
->type
.ref
->c
= type
->ref
->c
;
1780 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1781 tcc_warning("storage mismatch for redefinition of '%s'",
1782 get_tok_str(sym
->v
, NULL
));
1786 /* Merge some storage attributes. */
1787 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1790 patch_type(sym
, type
);
1792 #ifdef TCC_TARGET_PE
1793 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1794 tcc_error("incompatible dll linkage for redefinition of '%s'",
1795 get_tok_str(sym
->v
, NULL
));
1797 merge_symattr(&sym
->a
, &ad
->a
);
1799 sym
->asm_label
= ad
->asm_label
;
1800 update_storage(sym
);
1803 /* copy sym to other stack */
1804 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1807 s
= sym_malloc(), *s
= *s0
;
1808 s
->prev
= *ps
, *ps
= s
;
1809 if (s
->v
< SYM_FIRST_ANOM
) {
1810 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1811 s
->prev_tok
= *ps
, *ps
= s
;
1816 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1817 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1819 int bt
= s
->type
.t
& VT_BTYPE
;
1820 if (bt
== VT_FUNC
|| bt
== VT_PTR
) {
1821 Sym
**sp
= &s
->type
.ref
;
1822 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1823 Sym
*s2
= sym_copy(s
, ps
);
1824 sp
= &(*sp
= s2
)->next
;
1825 sym_copy_ref(s2
, ps
);
1830 /* define a new external reference to a symbol 'v' */
1831 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1835 /* look for global symbol */
1837 while (s
&& s
->sym_scope
)
1841 /* push forward reference */
1842 s
= global_identifier_push(v
, type
->t
, 0);
1845 s
->asm_label
= ad
->asm_label
;
1846 s
->type
.ref
= type
->ref
;
1847 /* copy type to the global stack */
1849 sym_copy_ref(s
, &global_stack
);
1851 patch_storage(s
, ad
, type
);
1853 /* push variables on local_stack if any */
1854 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1855 s
= sym_copy(s
, &local_stack
);
1859 /* save registers up to (vtop - n) stack entry */
1860 ST_FUNC
void save_regs(int n
)
1863 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1867 /* save r to the memory stack, and mark it as being free */
1868 ST_FUNC
void save_reg(int r
)
1870 save_reg_upstack(r
, 0);
1873 /* save r to the memory stack, and mark it as being free,
1874 if seen up to (vtop - n) stack entry */
1875 ST_FUNC
void save_reg_upstack(int r
, int n
)
1877 int l
, size
, align
, bt
;
1880 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1885 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1886 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1887 /* must save value on stack if not already done */
1889 bt
= p
->type
.t
& VT_BTYPE
;
1892 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1895 size
= type_size(&sv
.type
, &align
);
1896 l
= get_temp_local_var(size
,align
);
1897 sv
.r
= VT_LOCAL
| VT_LVAL
;
1899 store(p
->r
& VT_VALMASK
, &sv
);
1900 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1901 /* x86 specific: need to pop fp register ST0 if saved */
1902 if (r
== TREG_ST0
) {
1903 o(0xd8dd); /* fstp %st(0) */
1906 /* special long long case */
1907 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1912 /* mark that stack entry as being saved on the stack */
1913 if (p
->r
& VT_LVAL
) {
1914 /* also clear the bounded flag because the
1915 relocation address of the function was stored in
1917 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1919 p
->r
= VT_LVAL
| VT_LOCAL
;
1928 #ifdef TCC_TARGET_ARM
1929 /* find a register of class 'rc2' with at most one reference on stack.
1930 * If none, call get_reg(rc) */
1931 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1936 for(r
=0;r
<NB_REGS
;r
++) {
1937 if (reg_classes
[r
] & rc2
) {
1940 for(p
= vstack
; p
<= vtop
; p
++) {
1941 if ((p
->r
& VT_VALMASK
) == r
||
1953 /* find a free register of class 'rc'. If none, save one register */
1954 ST_FUNC
int get_reg(int rc
)
1959 /* find a free register */
1960 for(r
=0;r
<NB_REGS
;r
++) {
1961 if (reg_classes
[r
] & rc
) {
1964 for(p
=vstack
;p
<=vtop
;p
++) {
1965 if ((p
->r
& VT_VALMASK
) == r
||
1974 /* no register left : free the first one on the stack (VERY
1975 IMPORTANT to start from the bottom to ensure that we don't
1976 spill registers used in gen_opi()) */
1977 for(p
=vstack
;p
<=vtop
;p
++) {
1978 /* look at second register (if long long) */
1980 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1982 r
= p
->r
& VT_VALMASK
;
1983 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1989 /* Should never comes here */
1993 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1994 static int get_temp_local_var(int size
,int align
){
1996 struct temp_local_variable
*temp_var
;
2003 for(i
=0;i
<nb_temp_local_vars
;i
++){
2004 temp_var
=&arr_temp_local_vars
[i
];
2005 if(temp_var
->size
<size
||align
!=temp_var
->align
){
2008 /*check if temp_var is free*/
2010 for(p
=vstack
;p
<=vtop
;p
++) {
2012 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
2013 if(p
->c
.i
==temp_var
->location
){
2020 found_var
=temp_var
->location
;
2026 loc
= (loc
- size
) & -align
;
2027 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
2028 temp_var
=&arr_temp_local_vars
[i
];
2029 temp_var
->location
=loc
;
2030 temp_var
->size
=size
;
2031 temp_var
->align
=align
;
2032 nb_temp_local_vars
++;
2039 static void clear_temp_local_var_list(){
2040 nb_temp_local_vars
=0;
2043 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
2045 static void move_reg(int r
, int s
, int t
)
2059 /* get address of vtop (vtop MUST BE an lvalue) */
2060 ST_FUNC
void gaddrof(void)
2062 vtop
->r
&= ~VT_LVAL
;
2063 /* tricky: if saved lvalue, then we can go back to lvalue */
2064 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
2065 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
2068 #ifdef CONFIG_TCC_BCHECK
2069 /* generate a bounded pointer addition */
2070 static void gen_bounded_ptr_add(void)
2072 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
2077 vpush_helper_func(TOK___bound_ptr_add
);
2082 /* returned pointer is in REG_IRET */
2083 vtop
->r
= REG_IRET
| VT_BOUNDED
;
2086 /* relocation offset of the bounding function call point */
2087 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
2090 /* patch pointer addition in vtop so that pointer dereferencing is
2092 static void gen_bounded_ptr_deref(void)
2102 size
= type_size(&vtop
->type
, &align
);
2104 case 1: func
= TOK___bound_ptr_indir1
; break;
2105 case 2: func
= TOK___bound_ptr_indir2
; break;
2106 case 4: func
= TOK___bound_ptr_indir4
; break;
2107 case 8: func
= TOK___bound_ptr_indir8
; break;
2108 case 12: func
= TOK___bound_ptr_indir12
; break;
2109 case 16: func
= TOK___bound_ptr_indir16
; break;
2111 /* may happen with struct member access */
2114 sym
= external_helper_sym(func
);
2116 put_extern_sym(sym
, NULL
, 0, 0);
2117 /* patch relocation */
2118 /* XXX: find a better solution ? */
2119 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
2120 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
2123 /* generate lvalue bound code */
2124 static void gbound(void)
2128 vtop
->r
&= ~VT_MUSTBOUND
;
2129 /* if lvalue, then use checking code before dereferencing */
2130 if (vtop
->r
& VT_LVAL
) {
2131 /* if not VT_BOUNDED value, then make one */
2132 if (!(vtop
->r
& VT_BOUNDED
)) {
2133 /* must save type because we must set it to int to get pointer */
2135 vtop
->type
.t
= VT_PTR
;
2138 gen_bounded_ptr_add();
2142 /* then check for dereferencing */
2143 gen_bounded_ptr_deref();
2147 /* we need to call __bound_ptr_add before we start to load function
2148 args into registers */
2149 ST_FUNC
void gbound_args(int nb_args
)
2154 for (i
= 1; i
<= nb_args
; ++i
)
2155 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
2161 sv
= vtop
- nb_args
;
2162 if (sv
->r
& VT_SYM
) {
2166 #ifndef TCC_TARGET_PE
2167 || v
== TOK_sigsetjmp
2168 || v
== TOK___sigsetjmp
2171 vpush_helper_func(TOK___bound_setjmp
);
2174 func_bound_add_epilog
= 1;
2176 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
2177 if (v
== TOK_alloca
)
2178 func_bound_add_epilog
= 1;
2181 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
2182 sv
->sym
->asm_label
= TOK___bound_longjmp
;
2187 /* Add bounds for local symbols from S to E (via ->prev) */
2188 static void add_local_bounds(Sym
*s
, Sym
*e
)
2190 for (; s
!= e
; s
= s
->prev
) {
2191 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
2193 /* Add arrays/structs/unions because we always take address */
2194 if ((s
->type
.t
& VT_ARRAY
)
2195 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
2196 || s
->a
.addrtaken
) {
2197 /* add local bound info */
2198 int align
, size
= type_size(&s
->type
, &align
);
2199 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
2200 2 * sizeof(addr_t
));
2201 bounds_ptr
[0] = s
->c
;
2202 bounds_ptr
[1] = size
;
2208 /* Wrapper around sym_pop, that potentially also registers local bounds. */
2209 static void pop_local_syms(Sym
*b
, int keep
)
2211 #ifdef CONFIG_TCC_BCHECK
2212 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
2213 add_local_bounds(local_stack
, b
);
2216 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
2217 sym_pop(&local_stack
, b
, keep
);
2220 static void incr_bf_adr(int o
)
2222 vtop
->type
= char_pointer_type
;
2226 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
2230 /* single-byte load mode for packed or otherwise unaligned bitfields */
2231 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
2234 save_reg_upstack(vtop
->r
, 1);
2235 vpush64(type
->t
& VT_BTYPE
, 0); // B X
2236 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2245 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
2247 vpushi((1 << n
) - 1), gen_op('&');
2250 vpushi(bits
), gen_op(TOK_SHL
);
2253 bits
+= n
, bit_size
-= n
, o
= 1;
2256 if (!(type
->t
& VT_UNSIGNED
)) {
2257 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
2258 vpushi(n
), gen_op(TOK_SHL
);
2259 vpushi(n
), gen_op(TOK_SAR
);
2263 /* single-byte store mode for packed or otherwise unaligned bitfields */
2264 static void store_packed_bf(int bit_pos
, int bit_size
)
2266 int bits
, n
, o
, m
, c
;
2267 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2269 save_reg_upstack(vtop
->r
, 1);
2270 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2272 incr_bf_adr(o
); // X B
2274 c
? vdup() : gv_dup(); // B V X
2277 vpushi(bits
), gen_op(TOK_SHR
);
2279 vpushi(bit_pos
), gen_op(TOK_SHL
);
2284 m
= ((1 << n
) - 1) << bit_pos
;
2285 vpushi(m
), gen_op('&'); // X B V1
2286 vpushv(vtop
-1); // X B V1 B
2287 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
2288 gen_op('&'); // X B V1 B1
2289 gen_op('|'); // X B V2
2291 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
2292 vstore(), vpop(); // X B
2293 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
2298 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
2301 if (0 == sv
->type
.ref
)
2303 t
= sv
->type
.ref
->auxtype
;
2304 if (t
!= -1 && t
!= VT_STRUCT
) {
2305 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
2311 /* store vtop a register belonging to class 'rc'. lvalues are
2312 converted to values. Cannot be used if cannot be converted to
2313 register value (such as structures). */
2314 ST_FUNC
int gv(int rc
)
2316 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
2317 int bit_pos
, bit_size
, size
, align
;
2319 /* NOTE: get_reg can modify vstack[] */
2320 if (vtop
->type
.t
& VT_BITFIELD
) {
2323 bit_pos
= BIT_POS(vtop
->type
.t
);
2324 bit_size
= BIT_SIZE(vtop
->type
.t
);
2325 /* remove bit field info to avoid loops */
2326 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
2329 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
2330 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
2331 type
.t
|= VT_UNSIGNED
;
2333 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
2335 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2340 if (r
== VT_STRUCT
) {
2341 load_packed_bf(&type
, bit_pos
, bit_size
);
2343 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
2344 /* cast to int to propagate signedness in following ops */
2346 /* generate shifts */
2347 vpushi(bits
- (bit_pos
+ bit_size
));
2349 vpushi(bits
- bit_size
);
2350 /* NOTE: transformed to SHR if unsigned */
2355 if (is_float(vtop
->type
.t
) &&
2356 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2357 /* CPUs usually cannot use float constants, so we store them
2358 generically in data segment */
2359 init_params p
= { data_section
};
2360 unsigned long offset
;
2361 size
= type_size(&vtop
->type
, &align
);
2363 size
= 0, align
= 1;
2364 offset
= section_add(p
.sec
, size
, align
);
2365 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
2367 init_putv(&p
, &vtop
->type
, offset
);
2370 #ifdef CONFIG_TCC_BCHECK
2371 if (vtop
->r
& VT_MUSTBOUND
)
2375 bt
= vtop
->type
.t
& VT_BTYPE
;
2377 #ifdef TCC_TARGET_RISCV64
2379 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
2382 rc2
= RC2_TYPE(bt
, rc
);
2384 /* need to reload if:
2386 - lvalue (need to dereference pointer)
2387 - already a register, but not in the right class */
2388 r
= vtop
->r
& VT_VALMASK
;
2389 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
2390 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
2392 if (!r_ok
|| !r2_ok
) {
2396 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
2397 int original_type
= vtop
->type
.t
;
2399 /* two register type load :
2400 expand to two words temporarily */
2401 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2403 unsigned long long ll
= vtop
->c
.i
;
2404 vtop
->c
.i
= ll
; /* first word */
2406 vtop
->r
= r
; /* save register value */
2407 vpushi(ll
>> 32); /* second word */
2408 } else if (vtop
->r
& VT_LVAL
) {
2409 /* We do not want to modifier the long long pointer here.
2410 So we save any other instances down the stack */
2411 save_reg_upstack(vtop
->r
, 1);
2412 /* load from memory */
2413 vtop
->type
.t
= load_type
;
2416 vtop
[-1].r
= r
; /* save register value */
2417 /* increment pointer to get second word */
2418 vtop
->type
.t
= VT_PTRDIFF_T
;
2423 vtop
->type
.t
= load_type
;
2425 /* move registers */
2428 if (r2_ok
&& vtop
->r2
< VT_CONST
)
2431 vtop
[-1].r
= r
; /* save register value */
2432 vtop
->r
= vtop
[-1].r2
;
2434 /* Allocate second register. Here we rely on the fact that
2435 get_reg() tries first to free r2 of an SValue. */
2439 /* write second register */
2442 vtop
->type
.t
= original_type
;
2444 if (vtop
->r
== VT_CMP
)
2446 /* one register type load */
2451 #ifdef TCC_TARGET_C67
2452 /* uses register pairs for doubles */
2453 if (bt
== VT_DOUBLE
)
2460 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2461 ST_FUNC
void gv2(int rc1
, int rc2
)
2463 /* generate more generic register first. But VT_JMP or VT_CMP
2464 values must be generated first in all cases to avoid possible
2466 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
2471 /* test if reload is needed for first register */
2472 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
2482 /* test if reload is needed for first register */
2483 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
2490 /* expand 64bit on stack in two ints */
2491 ST_FUNC
void lexpand(void)
2494 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
2495 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
2496 if (v
== VT_CONST
) {
2499 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
2505 vtop
[0].r
= vtop
[-1].r2
;
2506 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2508 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2513 /* build a long long from two ints */
2514 static void lbuild(int t
)
2516 gv2(RC_INT
, RC_INT
);
2517 vtop
[-1].r2
= vtop
[0].r
;
2518 vtop
[-1].type
.t
= t
;
2523 /* convert stack entry to register and duplicate its value in another
2525 static void gv_dup(void)
2531 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2532 if (t
& VT_BITFIELD
) {
2542 /* stack: H L L1 H1 */
2552 /* duplicate value */
2562 /* generate CPU independent (unsigned) long long operations */
2563 static void gen_opl(int op
)
2565 int t
, a
, b
, op1
, c
, i
;
2567 unsigned short reg_iret
= REG_IRET
;
2568 unsigned short reg_lret
= REG_IRE2
;
2574 func
= TOK___divdi3
;
2577 func
= TOK___udivdi3
;
2580 func
= TOK___moddi3
;
2583 func
= TOK___umoddi3
;
2590 /* call generic long long function */
2591 vpush_helper_func(func
);
2596 vtop
->r2
= reg_lret
;
2604 //pv("gen_opl A",0,2);
2610 /* stack: L1 H1 L2 H2 */
2615 vtop
[-2] = vtop
[-3];
2618 /* stack: H1 H2 L1 L2 */
2619 //pv("gen_opl B",0,4);
2625 /* stack: H1 H2 L1 L2 ML MH */
2628 /* stack: ML MH H1 H2 L1 L2 */
2632 /* stack: ML MH H1 L2 H2 L1 */
2637 /* stack: ML MH M1 M2 */
2640 } else if (op
== '+' || op
== '-') {
2641 /* XXX: add non carry method too (for MIPS or alpha) */
2647 /* stack: H1 H2 (L1 op L2) */
2650 gen_op(op1
+ 1); /* TOK_xxxC2 */
2653 /* stack: H1 H2 (L1 op L2) */
2656 /* stack: (L1 op L2) H1 H2 */
2658 /* stack: (L1 op L2) (H1 op H2) */
2666 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2667 t
= vtop
[-1].type
.t
;
2671 /* stack: L H shift */
2673 /* constant: simpler */
2674 /* NOTE: all comments are for SHL. the other cases are
2675 done by swapping words */
2686 if (op
!= TOK_SAR
) {
2719 /* XXX: should provide a faster fallback on x86 ? */
2722 func
= TOK___ashrdi3
;
2725 func
= TOK___lshrdi3
;
2728 func
= TOK___ashldi3
;
2734 /* compare operations */
2740 /* stack: L1 H1 L2 H2 */
2742 vtop
[-1] = vtop
[-2];
2744 /* stack: L1 L2 H1 H2 */
2748 /* when values are equal, we need to compare low words. since
2749 the jump is inverted, we invert the test too. */
2752 else if (op1
== TOK_GT
)
2754 else if (op1
== TOK_ULT
)
2756 else if (op1
== TOK_UGT
)
2766 /* generate non equal test */
2768 vset_VT_CMP(TOK_NE
);
2772 /* compare low. Always unsigned */
2776 else if (op1
== TOK_LE
)
2778 else if (op1
== TOK_GT
)
2780 else if (op1
== TOK_GE
)
2783 #if 0//def TCC_TARGET_I386
2784 if (op
== TOK_NE
) { gsym(b
); break; }
2785 if (op
== TOK_EQ
) { gsym(a
); break; }
/* Signed 64-bit division emulated on unsigned operands: divide the
   magnitudes, then restore the sign of the quotient (negative exactly
   when the operand signs differ). Unsigned negation is well defined,
   so this avoids signed-overflow UB for INT64_MIN cases. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? 0 - a : a;   /* |a| */
    uint64_t ub = (b >> 63) ? 0 - b : b;   /* |b| */
    uint64_t q = ua / ub;

    if ((a ^ b) >> 63)
        q = 0 - q;
    return q;
}
/* Signed "less than" on values stored in uint64_t: flipping the sign
   bit maps the signed ordering onto the unsigned one, so a plain
   unsigned compare gives the signed result. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
2805 /* handle integer constant optimizations and various machine
2807 static void gen_opic(int op
)
2809 SValue
*v1
= vtop
- 1;
2811 int t1
= v1
->type
.t
& VT_BTYPE
;
2812 int t2
= v2
->type
.t
& VT_BTYPE
;
2813 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2814 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2815 uint64_t l1
= c1
? v1
->c
.i
: 0;
2816 uint64_t l2
= c2
? v2
->c
.i
: 0;
2817 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2819 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2820 l1
= ((uint32_t)l1
|
2821 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2822 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2823 l2
= ((uint32_t)l2
|
2824 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2828 case '+': l1
+= l2
; break;
2829 case '-': l1
-= l2
; break;
2830 case '&': l1
&= l2
; break;
2831 case '^': l1
^= l2
; break;
2832 case '|': l1
|= l2
; break;
2833 case '*': l1
*= l2
; break;
2840 /* if division by zero, generate explicit division */
2842 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2843 tcc_error("division by zero in constant");
2847 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2848 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2849 case TOK_UDIV
: l1
= l1
/ l2
; break;
2850 case TOK_UMOD
: l1
= l1
% l2
; break;
2853 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2854 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2856 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2859 case TOK_ULT
: l1
= l1
< l2
; break;
2860 case TOK_UGE
: l1
= l1
>= l2
; break;
2861 case TOK_EQ
: l1
= l1
== l2
; break;
2862 case TOK_NE
: l1
= l1
!= l2
; break;
2863 case TOK_ULE
: l1
= l1
<= l2
; break;
2864 case TOK_UGT
: l1
= l1
> l2
; break;
2865 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2866 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2867 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2868 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2870 case TOK_LAND
: l1
= l1
&& l2
; break;
2871 case TOK_LOR
: l1
= l1
|| l2
; break;
2875 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2876 l1
= ((uint32_t)l1
|
2877 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2881 /* if commutative ops, put c2 as constant */
2882 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2883 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2885 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2886 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2888 if (!const_wanted
&&
2890 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2891 (l1
== -1 && op
== TOK_SAR
))) {
2892 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2894 } else if (!const_wanted
&&
2895 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2897 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2898 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2899 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2904 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2907 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2908 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2911 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2912 /* filter out NOP operations like x*1, x-0, x&-1... */
2914 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2915 /* try to use shifts instead of muls or divs */
2916 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2925 else if (op
== TOK_PDIV
)
2931 } else if (c2
&& (op
== '+' || op
== '-') &&
2932 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2933 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2934 /* symbol + constant case */
2938 /* The backends can't always deal with addends to symbols
2939 larger than +-1<<31. Don't construct such. */
2946 /* call low level op generator */
2947 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2948 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2956 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2957 # define gen_negf gen_opf
2958 #elif defined TCC_TARGET_ARM
2959 void gen_negf(int op
)
2961 /* arm will detect 0-x and replace by vneg */
2962 vpushi(0), vswap(), gen_op('-');
2965 /* XXX: implement in gen_opf() for other backends too */
2966 void gen_negf(int op
)
2968 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2969 subtract(-0, x), but with them it's really a sign flip
2970 operation. We implement this with bit manipulation and have
2971 to do some type reinterpretation for this, which TCC can do
2974 int align
, size
, bt
;
2976 size
= type_size(&vtop
->type
, &align
);
2977 bt
= vtop
->type
.t
& VT_BTYPE
;
2978 save_reg(gv(RC_TYPE(bt
)));
2980 incr_bf_adr(size
- 1);
2982 vpushi(0x80); /* flip sign */
2989 /* generate a floating point operation with constant propagation */
2990 static void gen_opif(int op
)
2994 #if defined _MSC_VER && defined __x86_64__
2995 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
3005 /* currently, we cannot do computations with forward symbols */
3006 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3007 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3009 if (v1
->type
.t
== VT_FLOAT
) {
3012 } else if (v1
->type
.t
== VT_DOUBLE
) {
3019 /* NOTE: we only do constant propagation if finite number (not
3020 NaN or infinity) (ANSI spec) */
3021 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !const_wanted
)
3024 case '+': f1
+= f2
; break;
3025 case '-': f1
-= f2
; break;
3026 case '*': f1
*= f2
; break;
3029 union { float f
; unsigned u
; } x1
, x2
, y
;
3030 /* If not in initializer we need to potentially generate
3031 FP exceptions at runtime, otherwise we want to fold. */
3034 /* the run-time result of 0.0/0.0 on x87, also of other compilers
3035 when used to compile the f1 /= f2 below, would be -nan */
3036 x1
.f
= f1
, x2
.f
= f2
;
3038 y
.u
= 0x7fc00000; /* nan */
3040 y
.u
= 0x7f800000; /* infinity */
3041 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
3050 /* XXX: also handles tests ? */
3056 /* XXX: overflow test ? */
3057 if (v1
->type
.t
== VT_FLOAT
) {
3059 } else if (v1
->type
.t
== VT_DOUBLE
) {
3066 if (op
== TOK_NEG
) {
3074 /* print a type. If 'varstr' is not NULL, then the variable is also
3075 printed in the type */
3077 /* XXX: add array and function pointers */
3078 static void type_to_str(char *buf
, int buf_size
,
3079 CType
*type
, const char *varstr
)
3091 pstrcat(buf
, buf_size
, "extern ");
3093 pstrcat(buf
, buf_size
, "static ");
3095 pstrcat(buf
, buf_size
, "typedef ");
3097 pstrcat(buf
, buf_size
, "inline ");
3098 if (t
& VT_VOLATILE
)
3099 pstrcat(buf
, buf_size
, "volatile ");
3100 if (t
& VT_CONSTANT
)
3101 pstrcat(buf
, buf_size
, "const ");
3103 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
3104 || ((t
& VT_UNSIGNED
)
3105 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
3108 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
3110 buf_size
-= strlen(buf
);
3146 tstr
= "long double";
3148 pstrcat(buf
, buf_size
, tstr
);
3155 pstrcat(buf
, buf_size
, tstr
);
3156 v
= type
->ref
->v
& ~SYM_STRUCT
;
3157 if (v
>= SYM_FIRST_ANOM
)
3158 pstrcat(buf
, buf_size
, "<anonymous>");
3160 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3165 if (varstr
&& '*' == *varstr
) {
3166 pstrcat(buf1
, sizeof(buf1
), "(");
3167 pstrcat(buf1
, sizeof(buf1
), varstr
);
3168 pstrcat(buf1
, sizeof(buf1
), ")");
3170 pstrcat(buf1
, buf_size
, "(");
3172 while (sa
!= NULL
) {
3174 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3175 pstrcat(buf1
, sizeof(buf1
), buf2
);
3178 pstrcat(buf1
, sizeof(buf1
), ", ");
3180 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3181 pstrcat(buf1
, sizeof(buf1
), ", ...");
3182 pstrcat(buf1
, sizeof(buf1
), ")");
3183 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3188 if (varstr
&& '*' == *varstr
)
3189 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3191 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3192 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3195 pstrcpy(buf1
, sizeof(buf1
), "*");
3196 if (t
& VT_CONSTANT
)
3197 pstrcat(buf1
, buf_size
, "const ");
3198 if (t
& VT_VOLATILE
)
3199 pstrcat(buf1
, buf_size
, "volatile ");
3201 pstrcat(buf1
, sizeof(buf1
), varstr
);
3202 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3206 pstrcat(buf
, buf_size
, " ");
3207 pstrcat(buf
, buf_size
, varstr
);
3212 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
3214 char buf1
[256], buf2
[256];
3215 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3216 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3217 tcc_error(fmt
, buf1
, buf2
);
3220 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
3222 char buf1
[256], buf2
[256];
3223 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3224 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3225 tcc_warning(fmt
, buf1
, buf2
);
3228 static int pointed_size(CType
*type
)
3231 return type_size(pointed_type(type
), &align
);
3234 static void vla_runtime_pointed_size(CType
*type
)
3237 vla_runtime_type_size(pointed_type(type
), &align
);
3240 static inline int is_null_pointer(SValue
*p
)
3242 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
3244 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
3245 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
3246 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
3247 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
3248 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
3249 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3253 /* compare function types. OLD functions match any new functions */
3254 static int is_compatible_func(CType
*type1
, CType
*type2
)
3260 if (s1
->f
.func_call
!= s2
->f
.func_call
)
3262 if (s1
->f
.func_type
!= s2
->f
.func_type
3263 && s1
->f
.func_type
!= FUNC_OLD
3264 && s2
->f
.func_type
!= FUNC_OLD
)
3267 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
3269 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
3280 /* return true if type1 and type2 are the same. If unqualified is
3281 true, qualifiers on the types are ignored.
3283 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3287 t1
= type1
->t
& VT_TYPE
;
3288 t2
= type2
->t
& VT_TYPE
;
3290 /* strip qualifiers before comparing */
3291 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3292 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3295 /* Default Vs explicit signedness only matters for char */
3296 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3300 /* XXX: bitfields ? */
3305 && !(type1
->ref
->c
< 0
3306 || type2
->ref
->c
< 0
3307 || type1
->ref
->c
== type2
->ref
->c
))
3310 /* test more complicated cases */
3311 bt1
= t1
& VT_BTYPE
;
3312 if (bt1
== VT_PTR
) {
3313 type1
= pointed_type(type1
);
3314 type2
= pointed_type(type2
);
3315 return is_compatible_types(type1
, type2
);
3316 } else if (bt1
== VT_STRUCT
) {
3317 return (type1
->ref
== type2
->ref
);
3318 } else if (bt1
== VT_FUNC
) {
3319 return is_compatible_func(type1
, type2
);
3320 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
3321 /* If both are enums then they must be the same, if only one is then
3322 t1 and t2 must be equal, which was checked above already. */
3323 return type1
->ref
== type2
->ref
;
3329 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3330 type is stored in DEST if non-null (except for pointer plus/minus) . */
3331 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
3333 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
3334 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
3340 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
3341 ret
= op
== '?' ? 1 : 0;
3342 /* NOTE: as an extension, we accept void on only one side */
3344 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3345 if (op
== '+') ; /* Handled in caller */
3346 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3347 /* If one is a null ptr constant the result type is the other. */
3348 else if (is_null_pointer (op2
)) type
= *type1
;
3349 else if (is_null_pointer (op1
)) type
= *type2
;
3350 else if (bt1
!= bt2
) {
3351 /* accept comparison or cond-expr between pointer and integer
3353 if ((op
== '?' || TOK_ISCOND(op
))
3354 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
3355 tcc_warning("pointer/integer mismatch in %s",
3356 op
== '?' ? "conditional expression" : "comparison");
3357 else if (op
!= '-' || !is_integer_btype(bt2
))
3359 type
= *(bt1
== VT_PTR
? type1
: type2
);
3361 CType
*pt1
= pointed_type(type1
);
3362 CType
*pt2
= pointed_type(type2
);
3363 int pbt1
= pt1
->t
& VT_BTYPE
;
3364 int pbt2
= pt2
->t
& VT_BTYPE
;
3365 int newquals
, copied
= 0;
3366 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
3367 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
3368 if (op
!= '?' && !TOK_ISCOND(op
))
3371 type_incompatibility_warning(type1
, type2
,
3373 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3374 : "pointer type mismatch in comparison('%s' and '%s')");
3377 /* pointers to void get preferred, otherwise the
3378 pointed to types minus qualifs should be compatible */
3379 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
3380 /* combine qualifs */
3381 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
3382 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3385 /* copy the pointer target symbol */
3386 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3389 pointed_type(&type
)->t
|= newquals
;
3391 /* pointers to incomplete arrays get converted to
3392 pointers to completed ones if possible */
3393 if (pt1
->t
& VT_ARRAY
3394 && pt2
->t
& VT_ARRAY
3395 && pointed_type(&type
)->ref
->c
< 0
3396 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
3399 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3401 pointed_type(&type
)->ref
=
3402 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
3403 0, pointed_type(&type
)->ref
->c
);
3404 pointed_type(&type
)->ref
->c
=
3405 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
3411 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
3412 if (op
!= '?' || !compare_types(type1
, type2
, 1))
3415 } else if (is_float(bt1
) || is_float(bt2
)) {
3416 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
3417 type
.t
= VT_LDOUBLE
;
3418 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
3423 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
3424 /* cast to biggest op */
3425 type
.t
= VT_LLONG
| VT_LONG
;
3426 if (bt1
== VT_LLONG
)
3428 if (bt2
== VT_LLONG
)
3430 /* convert to unsigned if it does not fit in a long long */
3431 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
3432 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
3433 type
.t
|= VT_UNSIGNED
;
3435 /* integer operations */
3436 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
3437 /* convert to unsigned if it does not fit in an integer */
3438 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
3439 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
3440 type
.t
|= VT_UNSIGNED
;
3447 /* generic gen_op: handles types problems */
3448 ST_FUNC
void gen_op(int op
)
3450 int u
, t1
, t2
, bt1
, bt2
, t
;
3451 CType type1
, combtype
;
3454 t1
= vtop
[-1].type
.t
;
3455 t2
= vtop
[0].type
.t
;
3456 bt1
= t1
& VT_BTYPE
;
3457 bt2
= t2
& VT_BTYPE
;
3459 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
3460 if (bt2
== VT_FUNC
) {
3461 mk_pointer(&vtop
->type
);
3464 if (bt1
== VT_FUNC
) {
3466 mk_pointer(&vtop
->type
);
3471 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
3472 tcc_error_noabort("invalid operand types for binary operation");
3474 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3475 /* at least one operand is a pointer */
3476 /* relational op: must be both pointers */
3479 /* if both pointers, then it must be the '-' op */
3480 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
3482 tcc_error("cannot use pointers here");
3483 if (vtop
[-1].type
.t
& VT_VLA
) {
3484 vla_runtime_pointed_size(&vtop
[-1].type
);
3486 vpushi(pointed_size(&vtop
[-1].type
));
3490 vtop
->type
.t
= VT_PTRDIFF_T
;
3494 /* exactly one pointer : must be '+' or '-'. */
3495 if (op
!= '-' && op
!= '+')
3496 tcc_error("cannot use pointers here");
3497 /* Put pointer as first operand */
3498 if (bt2
== VT_PTR
) {
3500 t
= t1
, t1
= t2
, t2
= t
;
3503 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
3504 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3507 type1
= vtop
[-1].type
;
3508 if (vtop
[-1].type
.t
& VT_VLA
)
3509 vla_runtime_pointed_size(&vtop
[-1].type
);
3511 u
= pointed_size(&vtop
[-1].type
);
3513 tcc_error("unknown array element size");
3517 /* XXX: cast to int ? (long long case) */
3522 #ifdef CONFIG_TCC_BCHECK
3523 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
3524 /* if bounded pointers, we generate a special code to
3531 gen_bounded_ptr_add();
3537 type1
.t
&= ~VT_ARRAY
;
3538 /* put again type if gen_opic() swaped operands */
3542 /* floats can only be used for a few operations */
3543 if (is_float(combtype
.t
)
3544 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3546 tcc_error("invalid operands for binary operation");
3547 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
3548 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
3549 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
3551 t
|= (VT_LONG
& t1
);
3555 t
= t2
= combtype
.t
;
3556 /* XXX: currently, some unsigned operations are explicit, so
3557 we modify them here */
3558 if (t
& VT_UNSIGNED
) {
3565 else if (op
== TOK_LT
)
3567 else if (op
== TOK_GT
)
3569 else if (op
== TOK_LE
)
3571 else if (op
== TOK_GE
)
3577 /* special case for shifts and long long: we keep the shift as
3579 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
3586 if (TOK_ISCOND(op
)) {
3587 /* relational op: the result is an int */
3588 vtop
->type
.t
= VT_INT
;
3593 // Make sure that we have converted to an rvalue:
3594 if (vtop
->r
& VT_LVAL
)
3595 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3598 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3599 #define gen_cvt_itof1 gen_cvt_itof
3601 /* generic itof for unsigned long long case */
3602 static void gen_cvt_itof1(int t
)
3604 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3605 (VT_LLONG
| VT_UNSIGNED
)) {
3608 vpush_helper_func(TOK___floatundisf
);
3609 #if LDOUBLE_SIZE != 8
3610 else if (t
== VT_LDOUBLE
)
3611 vpush_helper_func(TOK___floatundixf
);
3614 vpush_helper_func(TOK___floatundidf
);
3625 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3626 #define gen_cvt_ftoi1 gen_cvt_ftoi
3628 /* generic ftoi for unsigned long long case */
3629 static void gen_cvt_ftoi1(int t
)
3632 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3633 /* not handled natively */
3634 st
= vtop
->type
.t
& VT_BTYPE
;
3636 vpush_helper_func(TOK___fixunssfdi
);
3637 #if LDOUBLE_SIZE != 8
3638 else if (st
== VT_LDOUBLE
)
3639 vpush_helper_func(TOK___fixunsxfdi
);
3642 vpush_helper_func(TOK___fixunsdfdi
);
3653 /* special delayed cast for char/short */
3654 static void force_charshort_cast(void)
3656 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3657 int dbt
= vtop
->type
.t
;
3658 vtop
->r
&= ~VT_MUSTCAST
;
3660 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3664 static void gen_cast_s(int t
)
3672 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3673 static void gen_cast(CType
*type
)
3675 int sbt
, dbt
, sf
, df
, c
;
3676 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3678 /* special delayed cast for char/short */
3679 if (vtop
->r
& VT_MUSTCAST
)
3680 force_charshort_cast();
3682 /* bitfields first get cast to ints */
3683 if (vtop
->type
.t
& VT_BITFIELD
)
3686 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3687 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3695 dbt_bt
= dbt
& VT_BTYPE
;
3696 sbt_bt
= sbt
& VT_BTYPE
;
3698 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3699 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3700 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3703 /* constant case: we can do it now */
3704 /* XXX: in ISOC, cannot do it if error in convert */
3705 if (sbt
== VT_FLOAT
)
3706 vtop
->c
.ld
= vtop
->c
.f
;
3707 else if (sbt
== VT_DOUBLE
)
3708 vtop
->c
.ld
= vtop
->c
.d
;
3711 if (sbt_bt
== VT_LLONG
) {
3712 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3713 vtop
->c
.ld
= vtop
->c
.i
;
3715 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3717 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3718 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3720 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3723 if (dbt
== VT_FLOAT
)
3724 vtop
->c
.f
= (float)vtop
->c
.ld
;
3725 else if (dbt
== VT_DOUBLE
)
3726 vtop
->c
.d
= (double)vtop
->c
.ld
;
3727 } else if (sf
&& dbt
== VT_BOOL
) {
3728 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3731 vtop
->c
.i
= vtop
->c
.ld
;
3732 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3734 else if (sbt
& VT_UNSIGNED
)
3735 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3737 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3739 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3741 else if (dbt
== VT_BOOL
)
3742 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3744 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3745 dbt_bt
== VT_SHORT
? 0xffff :
3748 if (!(dbt
& VT_UNSIGNED
))
3749 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3754 } else if (dbt
== VT_BOOL
3755 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3756 == (VT_CONST
| VT_SYM
)) {
3757 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3763 /* cannot generate code for global or static initializers */
3764 if (STATIC_DATA_WANTED
)
3767 /* non constant case: generate code */
3768 if (dbt
== VT_BOOL
) {
3769 gen_test_zero(TOK_NE
);
3775 /* convert from fp to fp */
3778 /* convert int to fp */
3781 /* convert fp to int */
3783 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3786 goto again
; /* may need char/short cast */
3791 ds
= btype_size(dbt_bt
);
3792 ss
= btype_size(sbt_bt
);
3793 if (ds
== 0 || ss
== 0) {
3794 if (dbt_bt
== VT_VOID
)
3796 cast_error(&vtop
->type
, type
);
3798 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3799 tcc_error("cast to incomplete type");
3801 /* same size and no sign conversion needed */
3802 if (ds
== ss
&& ds
>= 4)
3804 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3805 tcc_warning("cast between pointer and integer of different size");
3806 if (sbt_bt
== VT_PTR
) {
3807 /* put integer type to allow logical operations below */
3808 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3812 /* processor allows { int a = 0, b = *(char*)&a; }
3813 That means that if we cast to less width, we can just
3814 change the type and read it still later. */
3815 #define ALLOW_SUBTYPE_ACCESS 1
3817 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3818 /* value still in memory */
3822 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3824 goto done
; /* no 64bit envolved */
3832 /* generate high word */
3833 if (sbt
& VT_UNSIGNED
) {
3842 } else if (ss
== 8) {
3843 /* from long long: just take low order word */
3851 /* need to convert from 32bit to 64bit */
3852 if (sbt
& VT_UNSIGNED
) {
3853 #if defined(TCC_TARGET_RISCV64)
3854 /* RISC-V keeps 32bit vals in registers sign-extended.
3855 So here we need a zero-extension. */
3864 ss
= ds
, ds
= 4, dbt
= sbt
;
3865 } else if (ss
== 8) {
3866 /* RISC-V keeps 32bit vals in registers sign-extended.
3867 So here we need a sign-extension for signed types and
3868 zero-extension. for unsigned types. */
3869 #if !defined(TCC_TARGET_RISCV64)
3870 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3879 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3885 bits
= (ss
- ds
) * 8;
3886 /* for unsigned, gen_op will convert SAR to SHR */
3887 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3890 vpushi(bits
- trunc
);
3897 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3900 /* return type size as known at compile time. Put alignment at 'a' */
3901 ST_FUNC
int type_size(CType
*type
, int *a
)
3906 bt
= type
->t
& VT_BTYPE
;
3907 if (bt
== VT_STRUCT
) {
3912 } else if (bt
== VT_PTR
) {
3913 if (type
->t
& VT_ARRAY
) {
3917 ts
= type_size(&s
->type
, a
);
3919 if (ts
< 0 && s
->c
< 0)
3927 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3928 return -1; /* incomplete enum */
3929 } else if (bt
== VT_LDOUBLE
) {
3931 return LDOUBLE_SIZE
;
3932 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3933 #ifdef TCC_TARGET_I386
3934 #ifdef TCC_TARGET_PE
3939 #elif defined(TCC_TARGET_ARM)
3949 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3952 } else if (bt
== VT_SHORT
) {
3955 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3959 /* char, void, function, _Bool */
3965 /* push type size as known at runtime time on top of value stack. Put
3967 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
3969 if (type
->t
& VT_VLA
) {
3970 type_size(&type
->ref
->type
, a
);
3971 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3973 vpushi(type_size(type
, a
));
3977 /* return the pointed type of t */
3978 static inline CType
*pointed_type(CType
*type
)
3980 return &type
->ref
->type
;
3983 /* modify type so that its it is a pointer to type. */
3984 ST_FUNC
void mk_pointer(CType
*type
)
3987 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3988 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3992 /* return true if type1 and type2 are exactly the same (including
3995 static int is_compatible_types(CType
*type1
, CType
*type2
)
3997 return compare_types(type1
,type2
,0);
4000 /* return true if type1 and type2 are the same (ignoring qualifiers).
4002 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
4004 return compare_types(type1
,type2
,1);
4007 static void cast_error(CType
*st
, CType
*dt
)
4009 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
4012 /* verify type compatibility to store vtop in 'dt' type */
4013 static void verify_assign_cast(CType
*dt
)
4015 CType
*st
, *type1
, *type2
;
4016 int dbt
, sbt
, qualwarn
, lvl
;
4018 st
= &vtop
->type
; /* source type */
4019 dbt
= dt
->t
& VT_BTYPE
;
4020 sbt
= st
->t
& VT_BTYPE
;
4021 if (dt
->t
& VT_CONSTANT
)
4022 tcc_warning("assignment of read-only location");
4026 tcc_error("assignment to void expression");
4029 /* special cases for pointers */
4030 /* '0' can also be a pointer */
4031 if (is_null_pointer(vtop
))
4033 /* accept implicit pointer to integer cast with warning */
4034 if (is_integer_btype(sbt
)) {
4035 tcc_warning("assignment makes pointer from integer without a cast");
4038 type1
= pointed_type(dt
);
4040 type2
= pointed_type(st
);
4041 else if (sbt
== VT_FUNC
)
4042 type2
= st
; /* a function is implicitly a function pointer */
4045 if (is_compatible_types(type1
, type2
))
4047 for (qualwarn
= lvl
= 0;; ++lvl
) {
4048 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
4049 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
4051 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
4052 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
4053 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
4055 type1
= pointed_type(type1
);
4056 type2
= pointed_type(type2
);
4058 if (!is_compatible_unqualified_types(type1
, type2
)) {
4059 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
4060 /* void * can match anything */
4061 } else if (dbt
== sbt
4062 && is_integer_btype(sbt
& VT_BTYPE
)
4063 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
4064 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
4065 /* Like GCC don't warn by default for merely changes
4066 in pointer target signedness. Do warn for different
4067 base types, though, in particular for unsigned enums
4068 and signed int targets. */
4070 tcc_warning("assignment from incompatible pointer type");
4075 tcc_warning("assignment discards qualifiers from pointer target type");
4081 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
4082 tcc_warning("assignment makes integer from pointer without a cast");
4083 } else if (sbt
== VT_STRUCT
) {
4084 goto case_VT_STRUCT
;
4086 /* XXX: more tests */
4090 if (!is_compatible_unqualified_types(dt
, st
)) {
4098 static void gen_assign_cast(CType
*dt
)
4100 verify_assign_cast(dt
);
4104 /* store vtop in lvalue pushed on stack */
4105 ST_FUNC
void vstore(void)
4107 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
4109 ft
= vtop
[-1].type
.t
;
4110 sbt
= vtop
->type
.t
& VT_BTYPE
;
4111 dbt
= ft
& VT_BTYPE
;
4113 verify_assign_cast(&vtop
[-1].type
);
4115 if (sbt
== VT_STRUCT
) {
4116 /* if structure, only generate pointer */
4117 /* structure assignment : generate memcpy */
4118 /* XXX: optimize if small size */
4119 size
= type_size(&vtop
->type
, &align
);
4123 #ifdef CONFIG_TCC_BCHECK
4124 if (vtop
->r
& VT_MUSTBOUND
)
4125 gbound(); /* check would be wrong after gaddrof() */
4127 vtop
->type
.t
= VT_PTR
;
4130 /* address of memcpy() */
4133 vpush_helper_func(TOK_memmove8
);
4134 else if(!(align
& 3))
4135 vpush_helper_func(TOK_memmove4
);
4138 /* Use memmove, rather than memcpy, as dest and src may be same: */
4139 vpush_helper_func(TOK_memmove
);
4144 #ifdef CONFIG_TCC_BCHECK
4145 if (vtop
->r
& VT_MUSTBOUND
)
4148 vtop
->type
.t
= VT_PTR
;
4153 /* leave source on stack */
4155 } else if (ft
& VT_BITFIELD
) {
4156 /* bitfield store handling */
4158 /* save lvalue as expression result (example: s.b = s.a = n;) */
4159 vdup(), vtop
[-1] = vtop
[-2];
4161 bit_pos
= BIT_POS(ft
);
4162 bit_size
= BIT_SIZE(ft
);
4163 /* remove bit field info to avoid loops */
4164 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
4166 if (dbt
== VT_BOOL
) {
4167 gen_cast(&vtop
[-1].type
);
4168 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
4170 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
4171 if (dbt
!= VT_BOOL
) {
4172 gen_cast(&vtop
[-1].type
);
4173 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
4175 if (r
== VT_STRUCT
) {
4176 store_packed_bf(bit_pos
, bit_size
);
4178 unsigned long long mask
= (1ULL << bit_size
) - 1;
4179 if (dbt
!= VT_BOOL
) {
4181 if (dbt
== VT_LLONG
)
4184 vpushi((unsigned)mask
);
4191 /* duplicate destination */
4194 /* load destination, mask and or with source */
4195 if (dbt
== VT_LLONG
)
4196 vpushll(~(mask
<< bit_pos
));
4198 vpushi(~((unsigned)mask
<< bit_pos
));
4203 /* ... and discard */
4206 } else if (dbt
== VT_VOID
) {
4209 /* optimize char/short casts */
4211 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
4212 && is_integer_btype(sbt
)
4214 if ((vtop
->r
& VT_MUSTCAST
)
4215 && btype_size(dbt
) > btype_size(sbt
)
4217 force_charshort_cast();
4220 gen_cast(&vtop
[-1].type
);
4223 #ifdef CONFIG_TCC_BCHECK
4224 /* bound check case */
4225 if (vtop
[-1].r
& VT_MUSTBOUND
) {
4231 gv(RC_TYPE(dbt
)); /* generate value */
4234 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
4235 //tcc_warning("deley cast %x -> %x", sbt, dbt);
4236 vtop
->type
.t
= ft
& VT_TYPE
;
4239 /* if lvalue was saved on stack, must read it */
4240 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
4242 r
= get_reg(RC_INT
);
4243 sv
.type
.t
= VT_PTRDIFF_T
;
4244 sv
.r
= VT_LOCAL
| VT_LVAL
;
4245 sv
.c
.i
= vtop
[-1].c
.i
;
4247 vtop
[-1].r
= r
| VT_LVAL
;
4250 r
= vtop
->r
& VT_VALMASK
;
4251 /* two word case handling :
4252 store second register at word + 4 (or +8 for x86-64) */
4253 if (USING_TWO_WORDS(dbt
)) {
4254 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
4255 vtop
[-1].type
.t
= load_type
;
4258 /* convert to int to increment easily */
4259 vtop
->type
.t
= VT_PTRDIFF_T
;
4265 vtop
[-1].type
.t
= load_type
;
4266 /* XXX: it works because r2 is spilled last ! */
4267 store(vtop
->r2
, vtop
- 1);
4273 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
4277 /* post defines POST/PRE add. c is the token ++ or -- */
4278 ST_FUNC
void inc(int post
, int c
)
4281 vdup(); /* save lvalue */
4283 gv_dup(); /* duplicate value */
4288 vpushi(c
- TOK_MID
);
4290 vstore(); /* store value */
4292 vpop(); /* if post op, return saved value */
4295 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
4297 /* read the string */
4301 while (tok
== TOK_STR
) {
4302 /* XXX: add \0 handling too ? */
4303 cstr_cat(astr
, tokc
.str
.data
, -1);
4306 cstr_ccat(astr
, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.
   More generally this is the bit length of I: the 1-based position of
   the highest set bit (used to encode alignments compactly). */
ST_FUNC int exact_log2p1(int i)
{
    int ret;
    if (!i)
        return 0;
    /* reduce in 8-bit chunks first, then binary-search the remainder */
    for (ret = 1; i >= 1 << 8; ret += 8)
        i >>= 8;
    if (i >= 1 << 4)
        ret += 4, i >>= 4;
    if (i >= 1 << 2)
        ret += 2, i >>= 2;
    if (i >= 1 << 1)
        ret += 1;
    return ret;
}
4327 /* Parse __attribute__((...)) GNUC extension. */
4328 static void parse_attribute(AttributeDef
*ad
)
4334 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
4339 while (tok
!= ')') {
4340 if (tok
< TOK_IDENT
)
4341 expect("attribute name");
4353 tcc_warning("implicit declaration of function '%s'",
4354 get_tok_str(tok
, &tokc
));
4355 s
= external_global_sym(tok
, &func_old_type
);
4356 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4357 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
4358 ad
->cleanup_func
= s
;
4363 case TOK_CONSTRUCTOR1
:
4364 case TOK_CONSTRUCTOR2
:
4365 ad
->f
.func_ctor
= 1;
4367 case TOK_DESTRUCTOR1
:
4368 case TOK_DESTRUCTOR2
:
4369 ad
->f
.func_dtor
= 1;
4371 case TOK_ALWAYS_INLINE1
:
4372 case TOK_ALWAYS_INLINE2
:
4373 ad
->f
.func_alwinl
= 1;
4378 parse_mult_str(&astr
, "section name");
4379 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
4386 parse_mult_str(&astr
, "alias(\"target\")");
4387 ad
->alias_target
= /* save string as token, for later */
4388 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
4392 case TOK_VISIBILITY1
:
4393 case TOK_VISIBILITY2
:
4395 parse_mult_str(&astr
,
4396 "visibility(\"default|hidden|internal|protected\")");
4397 if (!strcmp (astr
.data
, "default"))
4398 ad
->a
.visibility
= STV_DEFAULT
;
4399 else if (!strcmp (astr
.data
, "hidden"))
4400 ad
->a
.visibility
= STV_HIDDEN
;
4401 else if (!strcmp (astr
.data
, "internal"))
4402 ad
->a
.visibility
= STV_INTERNAL
;
4403 else if (!strcmp (astr
.data
, "protected"))
4404 ad
->a
.visibility
= STV_PROTECTED
;
4406 expect("visibility(\"default|hidden|internal|protected\")");
4415 if (n
<= 0 || (n
& (n
- 1)) != 0)
4416 tcc_error("alignment must be a positive power of two");
4421 ad
->a
.aligned
= exact_log2p1(n
);
4422 if (n
!= 1 << (ad
->a
.aligned
- 1))
4423 tcc_error("alignment of %d is larger than implemented", n
);
4435 /* currently, no need to handle it because tcc does not
4436 track unused objects */
4440 ad
->f
.func_noreturn
= 1;
4445 ad
->f
.func_call
= FUNC_CDECL
;
4450 ad
->f
.func_call
= FUNC_STDCALL
;
4452 #ifdef TCC_TARGET_I386
4462 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
4468 ad
->f
.func_call
= FUNC_FASTCALLW
;
4475 ad
->attr_mode
= VT_LLONG
+ 1;
4478 ad
->attr_mode
= VT_BYTE
+ 1;
4481 ad
->attr_mode
= VT_SHORT
+ 1;
4485 ad
->attr_mode
= VT_INT
+ 1;
4488 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4495 ad
->a
.dllexport
= 1;
4497 case TOK_NODECORATE
:
4498 ad
->a
.nodecorate
= 1;
4501 ad
->a
.dllimport
= 1;
4504 if (tcc_state
->warn_unsupported
)
4505 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
4506 /* skip parameters */
4508 int parenthesis
= 0;
4512 else if (tok
== ')')
4515 } while (parenthesis
&& tok
!= -1);
/* Look up member V inside struct/union TYPE, accumulating the byte
   offset of enclosing anonymous members into *cumofs; recurses into
   anonymous struct/union members (anonymous = name >= SYM_FIRST_ANOM).
   NOTE(review): extraction fragment — interior lines (direct-match
   path, offset accumulation, return) are missing; tokens verbatim. */
4528 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
/* iterate the member list hanging off the struct's ref symbol */
4532 while ((s
= s
->next
) != NULL
) {
/* anonymous struct/union member: search inside it recursively */
4533 if ((s
->v
& SYM_FIELD
) &&
4534 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
4535 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
4536 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
/* Walk the member list of struct/union TYPE. With check != 0 a member
   whose TokenSym already carries the SYM_FIELD mark triggers a
   "duplicate member" error; the mark is then toggled, so a second pass
   with check == 0 apparently clears it again — verify against callers.
   Recurses into anonymous (unnamed) struct members.
   NOTE(review): extraction fragment — setup lines (s = type->ref) and
   closing braces are missing; visible tokens kept verbatim. */
4548 static void check_fields (CType
*type
, int check
)
4552 while ((s
= s
->next
) != NULL
) {
/* strip the SYM_FIELD bit to get the plain identifier token */
4553 int v
= s
->v
& ~SYM_FIELD
;
/* named member (anonymous ones have v >= SYM_FIRST_ANOM) */
4554 if (v
< SYM_FIRST_ANOM
) {
4555 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
4556 if (check
&& (ts
->tok
& SYM_FIELD
))
4557 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
/* toggle the per-identifier "seen" marker */
4558 ts
->tok
^= SYM_FIELD
;
4559 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
4560 check_fields (&s
->type
, check
);
4564 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4566 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4567 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4568 int pcc
= !tcc_state
->ms_bitfields
;
4569 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4576 prevbt
= VT_STRUCT
; /* make it never match */
4581 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4582 if (f
->type
.t
& VT_BITFIELD
)
4583 bit_size
= BIT_SIZE(f
->type
.t
);
4586 size
= type_size(&f
->type
, &align
);
4587 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4590 if (pcc
&& bit_size
== 0) {
4591 /* in pcc mode, packing does not affect zero-width bitfields */
4594 /* in pcc mode, attribute packed overrides if set. */
4595 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4598 /* pragma pack overrides align if lesser and packs bitfields always */
4601 if (pragma_pack
< align
)
4602 align
= pragma_pack
;
4603 /* in pcc mode pragma pack also overrides individual align */
4604 if (pcc
&& pragma_pack
< a
)
4608 /* some individual align was specified */
4612 if (type
->ref
->type
.t
== VT_UNION
) {
4613 if (pcc
&& bit_size
>= 0)
4614 size
= (bit_size
+ 7) >> 3;
4619 } else if (bit_size
< 0) {
4621 c
+= (bit_pos
+ 7) >> 3;
4622 c
= (c
+ align
- 1) & -align
;
4631 /* A bit-field. Layout is more complicated. There are two
4632 options: PCC (GCC) compatible and MS compatible */
4634 /* In PCC layout a bit-field is placed adjacent to the
4635 preceding bit-fields, except if:
4637 - an individual alignment was given
4638 - it would overflow its base type container and
4639 there is no packing */
4640 if (bit_size
== 0) {
4642 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4644 } else if (f
->a
.aligned
) {
4646 } else if (!packed
) {
4648 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4649 if (ofs
> size
/ align
)
4653 /* in pcc mode, long long bitfields have type int if they fit */
4654 if (size
== 8 && bit_size
<= 32)
4655 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4657 while (bit_pos
>= align
* 8)
4658 c
+= align
, bit_pos
-= align
* 8;
4661 /* In PCC layout named bit-fields influence the alignment
4662 of the containing struct using the base types alignment,
4663 except for packed fields (which here have correct align). */
4664 if (f
->v
& SYM_FIRST_ANOM
4665 // && bit_size // ??? gcc on ARM/rpi does that
4670 bt
= f
->type
.t
& VT_BTYPE
;
4671 if ((bit_pos
+ bit_size
> size
* 8)
4672 || (bit_size
> 0) == (bt
!= prevbt
)
4674 c
= (c
+ align
- 1) & -align
;
4677 /* In MS bitfield mode a bit-field run always uses
4678 at least as many bits as the underlying type.
4679 To start a new run it's also required that this
4680 or the last bit-field had non-zero width. */
4681 if (bit_size
|| prev_bit_size
)
4684 /* In MS layout the records alignment is normally
4685 influenced by the field, except for a zero-width
4686 field at the start of a run (but by further zero-width
4687 fields it is again). */
4688 if (bit_size
== 0 && prevbt
!= bt
)
4691 prev_bit_size
= bit_size
;
4694 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4695 | (bit_pos
<< VT_STRUCT_SHIFT
);
4696 bit_pos
+= bit_size
;
4698 if (align
> maxalign
)
4702 printf("set field %s offset %-2d size %-2d align %-2d",
4703 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4704 if (f
->type
.t
& VT_BITFIELD
) {
4705 printf(" pos %-2d bits %-2d",
4718 c
+= (bit_pos
+ 7) >> 3;
4720 /* store size and alignment */
4721 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4725 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4726 /* can happen if individual align for some member was given. In
4727 this case MSVC ignores maxalign when aligning the size */
4732 c
= (c
+ a
- 1) & -a
;
4736 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4739 /* check whether we can access bitfields by their type */
4740 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4744 if (0 == (f
->type
.t
& VT_BITFIELD
))
4748 bit_size
= BIT_SIZE(f
->type
.t
);
4751 bit_pos
= BIT_POS(f
->type
.t
);
4752 size
= type_size(&f
->type
, &align
);
4754 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4755 #ifdef TCC_TARGET_ARM
4756 && !(f
->c
& (align
- 1))
4761 /* try to access the field using a different type */
4762 c0
= -1, s
= align
= 1;
4765 px
= f
->c
* 8 + bit_pos
;
4766 cx
= (px
>> 3) & -align
;
4767 px
= px
- (cx
<< 3);
4770 s
= (px
+ bit_size
+ 7) >> 3;
4780 s
= type_size(&t
, &align
);
4784 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4785 #ifdef TCC_TARGET_ARM
4786 && !(cx
& (align
- 1))
4789 /* update offset and bit position */
4792 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4793 | (bit_pos
<< VT_STRUCT_SHIFT
);
4797 printf("FIX field %s offset %-2d size %-2d align %-2d "
4798 "pos %-2d bits %-2d\n",
4799 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4800 cx
, s
, align
, px
, bit_size
);
4803 /* fall back to load/store single-byte wise */
4804 f
->auxtype
= VT_STRUCT
;
4806 printf("FIX field %s : load byte-wise\n",
4807 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4813 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4814 static void struct_decl(CType
*type
, int u
)
4816 int v
, c
, size
, align
, flexible
;
4817 int bit_size
, bsize
, bt
;
4819 AttributeDef ad
, ad1
;
4822 memset(&ad
, 0, sizeof ad
);
4824 parse_attribute(&ad
);
4828 /* struct already defined ? return it */
4830 expect("struct/union/enum name");
4832 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4835 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4837 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4842 /* Record the original enum/struct/union token. */
4843 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4845 /* we put an undefined size for struct/union */
4846 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4847 s
->r
= 0; /* default alignment is zero as gcc */
4849 type
->t
= s
->type
.t
;
4855 tcc_error("struct/union/enum already defined");
4857 /* cannot be empty */
4858 /* non empty enums are not allowed */
4861 long long ll
= 0, pl
= 0, nl
= 0;
4864 /* enum symbols have static storage */
4865 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4869 expect("identifier");
4871 if (ss
&& !local_stack
)
4872 tcc_error("redefinition of enumerator '%s'",
4873 get_tok_str(v
, NULL
));
4877 ll
= expr_const64();
4879 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4881 *ps
= ss
, ps
= &ss
->next
;
4890 /* NOTE: we accept a trailing comma */
4895 /* set integral type of the enum */
4898 if (pl
!= (unsigned)pl
)
4899 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4901 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4902 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4903 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4905 /* set type for enum members */
4906 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4908 if (ll
== (int)ll
) /* default is int if it fits */
4910 if (t
.t
& VT_UNSIGNED
) {
4911 ss
->type
.t
|= VT_UNSIGNED
;
4912 if (ll
== (unsigned)ll
)
4915 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4916 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4921 while (tok
!= '}') {
4922 if (!parse_btype(&btype
, &ad1
)) {
4928 tcc_error("flexible array member '%s' not at the end of struct",
4929 get_tok_str(v
, NULL
));
4935 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4937 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4938 expect("identifier");
4940 int v
= btype
.ref
->v
;
4941 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4942 if (tcc_state
->ms_extensions
== 0)
4943 expect("identifier");
4947 if (type_size(&type1
, &align
) < 0) {
4948 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4951 tcc_error("field '%s' has incomplete type",
4952 get_tok_str(v
, NULL
));
4954 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4955 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4956 (type1
.t
& VT_STORAGE
))
4957 tcc_error("invalid type for '%s'",
4958 get_tok_str(v
, NULL
));
4962 bit_size
= expr_const();
4963 /* XXX: handle v = 0 case for messages */
4965 tcc_error("negative width in bit-field '%s'",
4966 get_tok_str(v
, NULL
));
4967 if (v
&& bit_size
== 0)
4968 tcc_error("zero width for bit-field '%s'",
4969 get_tok_str(v
, NULL
));
4970 parse_attribute(&ad1
);
4972 size
= type_size(&type1
, &align
);
4973 if (bit_size
>= 0) {
4974 bt
= type1
.t
& VT_BTYPE
;
4980 tcc_error("bitfields must have scalar type");
4982 if (bit_size
> bsize
) {
4983 tcc_error("width of '%s' exceeds its type",
4984 get_tok_str(v
, NULL
));
4985 } else if (bit_size
== bsize
4986 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4987 /* no need for bit fields */
4989 } else if (bit_size
== 64) {
4990 tcc_error("field width 64 not implemented");
4992 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4994 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4997 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4998 /* Remember we've seen a real field to check
4999 for placement of flexible array member. */
5002 /* If member is a struct or bit-field, enforce
5003 placing into the struct (as anonymous). */
5005 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
5010 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
5015 if (tok
== ';' || tok
== TOK_EOF
)
5022 parse_attribute(&ad
);
5023 if (ad
.cleanup_func
) {
5024 tcc_warning("attribute '__cleanup__' ignored on type");
5026 check_fields(type
, 1);
5027 check_fields(type
, 0);
5028 struct_layout(type
, &ad
);
5033 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
5035 merge_symattr(&ad
->a
, &s
->a
);
5036 merge_funcattr(&ad
->f
, &s
->f
);
5039 /* Add type qualifiers to a type. If the type is an array then the qualifiers
5040 are added to the element type, copied because it could be a typedef. */
5041 static void parse_btype_qualify(CType
*type
, int qualifiers
)
5043 while (type
->t
& VT_ARRAY
) {
5044 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
5045 type
= &type
->ref
->type
;
5047 type
->t
|= qualifiers
;
5050 /* return 0 if no type declaration. otherwise, return the basic type
5053 static int parse_btype(CType
*type
, AttributeDef
*ad
)
5055 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
5059 memset(ad
, 0, sizeof(AttributeDef
));
5069 /* currently, we really ignore extension */
5079 if (u
== VT_SHORT
|| u
== VT_LONG
) {
5080 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
5081 tmbt
: tcc_error("too many basic types");
5084 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
5089 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
5106 memset(&ad1
, 0, sizeof(AttributeDef
));
5107 if (parse_btype(&type1
, &ad1
)) {
5108 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
5110 n
= 1 << (ad1
.a
.aligned
- 1);
5112 type_size(&type1
, &n
);
5115 if (n
<= 0 || (n
& (n
- 1)) != 0)
5116 tcc_error("alignment must be a positive power of two");
5119 ad
->a
.aligned
= exact_log2p1(n
);
5123 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
5124 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
5125 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
5126 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
5133 #ifdef TCC_TARGET_ARM64
5135 /* GCC's __uint128_t appears in some Linux header files. Make it a
5136 synonym for long double to get the size and alignment right. */
5147 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
5148 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
5156 struct_decl(&type1
, VT_ENUM
);
5159 type
->ref
= type1
.ref
;
5162 struct_decl(&type1
, VT_STRUCT
);
5165 struct_decl(&type1
, VT_UNION
);
5168 /* type modifiers */
5172 parse_btype_qualify(type
, VT_ATOMIC
);
5175 parse_expr_type(&type1
);
5176 /* remove all storage modifiers except typedef */
5177 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
5179 sym_to_attr(ad
, type1
.ref
);
5187 parse_btype_qualify(type
, VT_CONSTANT
);
5195 parse_btype_qualify(type
, VT_VOLATILE
);
5202 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
5203 tcc_error("signed and unsigned modifier");
5216 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
5217 tcc_error("signed and unsigned modifier");
5218 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
5234 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
5235 tcc_error("multiple storage classes");
5247 ad
->f
.func_noreturn
= 1;
5249 /* GNUC attribute */
5250 case TOK_ATTRIBUTE1
:
5251 case TOK_ATTRIBUTE2
:
5252 parse_attribute(ad
);
5253 if (ad
->attr_mode
) {
5254 u
= ad
->attr_mode
-1;
5255 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
5263 parse_expr_type(&type1
);
5264 /* remove all storage modifiers except typedef */
5265 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
5267 sym_to_attr(ad
, type1
.ref
);
5273 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
5277 if (tok
== ':' && !in_generic
) {
5278 /* ignore if it's a label */
5283 t
&= ~(VT_BTYPE
|VT_LONG
);
5284 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
5285 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
5286 type
->ref
= s
->type
.ref
;
5288 parse_btype_qualify(type
, t
);
5290 /* get attributes from typedef */
5299 if (tcc_state
->char_is_unsigned
) {
5300 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
5303 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
5304 bt
= t
& (VT_BTYPE
|VT_LONG
);
5306 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
5307 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5308 if (bt
== VT_LDOUBLE
)
5309 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
5315 /* convert a function parameter type (array to pointer and function to
5316 function pointer) */
5317 static inline void convert_parameter_type(CType
*pt
)
/* NOTE(review): extraction fragment — the actual array-to-pointer
   transformation line and the function-to-pointer body are missing
   here; visible tokens kept verbatim. */
5319 /* remove const and volatile qualifiers (XXX: const could be used
5320 to indicate a const function parameter */
5321 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5322 /* array must be transformed to pointer according to ANSI C */
/* a function-typed parameter decays to a pointer-to-function */
5324 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
5329 ST_FUNC
void parse_asm_str(CString
*astr
)
/* Collect the asm string (one or more concatenated literals) into
   *astr; delegates to parse_mult_str with "string constant" as the
   error message. NOTE(review): fragment — opening tokens missing. */
5332 parse_mult_str(astr
, "string constant");
5335 /* Parse an asm label and return the token */
5336 static int asm_label_instr(void)
/* NOTE(review): extraction fragment — interior lines missing
   (declarations, token skipping, cleanup/return); tokens verbatim. */
/* read the quoted alias string */
5342 parse_asm_str(&astr
);
/* debug trace of the parsed alias; presumably guarded by a verbosity
   condition in the missing lines — TODO confirm */
5345 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
/* intern the string (size-1 drops the trailing NUL) as a token id */
5347 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
5352 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
5354 int n
, l
, t1
, arg_size
, align
, unused_align
;
5355 Sym
**plast
, *s
, *first
;
5360 /* function type, or recursive declarator (return if so) */
5362 if (td
&& !(td
& TYPE_ABSTRACT
))
5366 else if (parse_btype(&pt
, &ad1
))
5369 merge_attr (ad
, &ad1
);
5378 /* read param name and compute offset */
5379 if (l
!= FUNC_OLD
) {
5380 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
5382 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
5383 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
5384 tcc_error("parameter declared as void");
5388 expect("identifier");
5389 pt
.t
= VT_VOID
; /* invalid type */
5393 convert_parameter_type(&pt
);
5394 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
5395 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
5401 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
5406 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
5407 tcc_error("invalid type");
5410 /* if no parameters, then old type prototype */
5413 /* NOTE: const is ignored in returned type as it has a special
5414 meaning in gcc / C++ */
5415 type
->t
&= ~VT_CONSTANT
;
5416 /* some ancient pre-K&R C allows a function to return an array
5417 and the array brackets to be put after the arguments, such
5418 that "int c()[]" means something like "int[] c()" */
5421 skip(']'); /* only handle simple "[]" */
5424 /* we push a anonymous symbol which will contain the function prototype */
5425 ad
->f
.func_args
= arg_size
;
5426 ad
->f
.func_type
= l
;
5427 s
= sym_push(SYM_FIELD
, type
, 0, 0);
5433 } else if (tok
== '[') {
5434 int saved_nocode_wanted
= nocode_wanted
;
5435 /* array definition */
5438 /* XXX The optional type-quals and static should only be accepted
5439 in parameter decls. The '*' as well, and then even only
5440 in prototypes (not function defs). */
5442 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5457 if (!local_stack
|| (storage
& VT_STATIC
))
5458 vpushi(expr_const());
5460 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5461 length must always be evaluated, even under nocode_wanted,
5462 so that its size slot is initialized (e.g. under sizeof
5467 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5470 tcc_error("invalid array size");
5472 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5473 tcc_error("size of variable length array should be an integer");
5479 /* parse next post type */
5480 post_type(type
, ad
, storage
, 0);
5482 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5483 tcc_error("declaration of an array of functions");
5484 if ((type
->t
& VT_BTYPE
) == VT_VOID
5485 || type_size(type
, &unused_align
) < 0)
5486 tcc_error("declaration of an array of incomplete type elements");
5488 t1
|= type
->t
& VT_VLA
;
5492 tcc_error("need explicit inner array size in VLAs");
5493 loc
-= type_size(&int_type
, &align
);
5497 vla_runtime_type_size(type
, &align
);
5499 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5505 nocode_wanted
= saved_nocode_wanted
;
5507 /* we push an anonymous symbol which will contain the array
5509 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5510 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5516 /* Parse a type declarator (except basic type), and return the type
5517 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5518 expected. 'type' should contain the basic type. 'ad' is the
5519 attribute definition of the basic type. It can be modified by
5520 type_decl(). If this (possibly abstract) declarator is a pointer chain
5521 it returns the innermost pointed to type (equals *type, but is a different
5522 pointer), otherwise returns type itself, that's used for recursive calls. */
5523 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5526 int qualifiers
, storage
;
5528 /* recursive type, remove storage bits first, apply them later again */
5529 storage
= type
->t
& VT_STORAGE
;
5530 type
->t
&= ~VT_STORAGE
;
5533 while (tok
== '*') {
5539 qualifiers
|= VT_ATOMIC
;
5544 qualifiers
|= VT_CONSTANT
;
5549 qualifiers
|= VT_VOLATILE
;
5555 /* XXX: clarify attribute handling */
5556 case TOK_ATTRIBUTE1
:
5557 case TOK_ATTRIBUTE2
:
5558 parse_attribute(ad
);
5562 type
->t
|= qualifiers
;
5564 /* innermost pointed to type is the one for the first derivation */
5565 ret
= pointed_type(type
);
5569 /* This is possibly a parameter type list for abstract declarators
5570 ('int ()'), use post_type for testing this. */
5571 if (!post_type(type
, ad
, 0, td
)) {
5572 /* It's not, so it's a nested declarator, and the post operations
5573 apply to the innermost pointed to type (if any). */
5574 /* XXX: this is not correct to modify 'ad' at this point, but
5575 the syntax is not clear */
5576 parse_attribute(ad
);
5577 post
= type_decl(type
, ad
, v
, td
);
5581 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5582 /* type identifier */
5587 if (!(td
& TYPE_ABSTRACT
))
5588 expect("identifier");
5591 post_type(post
, ad
, storage
, 0);
5592 parse_attribute(ad
);
5597 /* indirection with full error checking and bound check */
5598 ST_FUNC
void indir(void)
/* Dereference the value on top of the value stack (vtop).
   NOTE(review): extraction fragment — error branch bodies, the gv()
   load, and closing braces are missing; visible tokens verbatim. */
/* operand must be a pointer; function values get special handling */
5600 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5601 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
/* if the pointer itself is an lvalue it must be loaded first */
5605 if (vtop
->r
& VT_LVAL
)
/* replace the pointer type by the pointed-to type */
5607 vtop
->type
= *pointed_type(&vtop
->type
);
5608 /* Arrays and functions are never lvalues */
5609 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5610 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5612 /* if bound checking, the referenced pointer must be checked */
5613 #ifdef CONFIG_TCC_BCHECK
5614 if (tcc_state
->do_bounds_check
)
5615 vtop
->r
|= VT_MUSTBOUND
;
5620 /* pass a parameter to a function and do type checking and casting */
5621 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
/* FUNC is the function symbol, ARG the formal parameter symbol (NULL
   when past the declared parameters). NOTE(review): extraction
   fragment — declarations, the prototyped-cast else-branch and closing
   braces are missing; visible tokens kept verbatim. */
5626 func_type
= func
->f
.func_type
;
/* old-style (K&R) prototype, or variadic part: default promotions */
5627 if (func_type
== FUNC_OLD
||
5628 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5629 /* default casting : only need to convert float to double */
5630 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5631 gen_cast_s(VT_DOUBLE
);
/* bit-field argument: promote to its underlying (possibly unsigned)
   base type before passing */
5632 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5633 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5634 type
.ref
= vtop
->type
.ref
;
/* pending char/short cast must be materialized */
5636 } else if (vtop
->r
& VT_MUSTCAST
) {
5637 force_charshort_cast();
/* prototyped call but no formal parameter left */
5639 } else if (arg
== NULL
) {
5640 tcc_error("too many arguments to function");
5643 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5644 gen_assign_cast(&type
);
5648 /* parse an expression and return its type without any side effect. */
5649 static void expr_type(CType
*type
, void (*expr_fn
)(void))
/* NOTE(review): the entire body of this function is missing from this
   extraction; only the signature is visible. EXPR_FN is the parser
   callback evaluated with code generation suppressed — TODO confirm
   against the full source. */
5658 /* parse an expression of the form '(type)' or '(expr)' and return its
5660 static void parse_expr_type(CType
*type
)
/* NOTE(review): extraction fragment — declarations, skip('(')/skip(')')
   and the else keyword are missing; visible tokens kept verbatim. */
/* leading type-specifier: it is '(type)', parse the full declarator */
5666 if (parse_btype(type
, &ad
)) {
5667 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
/* otherwise treat as '(expr)': evaluate side-effect-free for its type */
5669 expr_type(type
, gexpr
);
5674 static void parse_type(CType
*type
)
/* Parse a type-name (basic type plus abstract declarator) into *type.
   NOTE(review): extraction fragment — declarations and the error path
   taken when no basic type is present are missing; tokens verbatim. */
5679 if (!parse_btype(type
, &ad
)) {
5682 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5685 static void parse_builtin_params(int nc
, const char *args
)
/* Parse a builtin's argument list driven by the spec string ARGS (one
   char per parameter kind); NC presumably flags "no code" evaluation —
   TODO confirm. NOTE(review): extraction fragment — most branch bodies
   of the per-char switch are missing; visible tokens kept verbatim. */
5694 while ((c
= *args
++)) {
5709 type
.t
= VT_CONSTANT
;
5715 type
.t
= VT_CONSTANT
;
/* combine with the plain char type for string-ish parameters */
5717 type
.t
|= char_type
.t
;
/* check/convert the parsed argument to the expected type */
5729 gen_assign_cast(&type
);
5736 static inline int is_memory_model(const SValue
*sv
)
5740 * The memory models should better be backed by an enumeration.
5742 * const int t = sv->type.t;
5744 * if (!IS_ENUM_VAL(t))
5747 * if (!(t & VT_STATIC))
5750 * Ideally we should check whether the model matches 1:1.
5751 * If it is possible, we should check by the name of the value.
5753 return (((sv
->type
.t
& VT_BTYPE
) == VT_INT
) && (sv
->c
.i
< 6));
5756 static void parse_atomic(int atok
)
5762 char const *params
= NULL
;
5765 char const *const params
;
5769 * A -- read-only atomic
5770 * p -- pointer to memory
5771 * P -- pointer to read-only memory
5775 {TOK___c11_atomic_init
, "-av"},
5776 {TOK___c11_atomic_store
, "-avm"},
5777 {TOK___c11_atomic_load
, "am"},
5778 {TOK___c11_atomic_exchange
, "avm"},
5779 {TOK___c11_atomic_compare_exchange_strong
, "apvmm"},
5780 {TOK___c11_atomic_compare_exchange_weak
, "apvmm"},
5781 {TOK___c11_atomic_fetch_add
, "avm"},
5782 {TOK___c11_atomic_fetch_sub
, "avm"},
5783 {TOK___c11_atomic_fetch_or
, "avm"},
5784 {TOK___c11_atomic_fetch_xor
, "avm"},
5785 {TOK___c11_atomic_fetch_and
, "avm"},
5790 for (op
= 0; op
< (sizeof(ops
) / sizeof(*ops
)); ++op
) {
5791 if (ops
[op
].tok
== atok
) {
5792 params
= ops
[op
].params
;
5797 tcc_error("unknown atomic operation");
5799 argc
= strlen(params
);
5800 if (params
[0] == '-') {
5806 vpushi(0); /* function address */
5809 for (arg
= 0; arg
< argc
; ++arg
) {
5812 switch (params
[arg
]) {
5816 expect_arg("exactly one pointer to atomic", arg
);
5817 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
5818 expect_arg("pointer to atomic expected", arg
);
5819 atom
= pointed_type(&vtop
->type
);
5820 if (!(atom
->t
& VT_ATOMIC
))
5821 expect_arg("qualified pointer to atomic", arg
);
5822 if ((params
[arg
] == 'a') && (atom
->t
& VT_CONSTANT
))
5823 expect_arg("pointer to writable atomic", arg
);
5824 atom
->t
&= ~VT_ATOMIC
;
5825 switch (btype_size(atom
->t
& VT_BTYPE
)) {
5826 case 1: atok
+= 1; break;
5827 case 2: atok
+= 2; break;
5828 case 4: atok
+= 3; break;
5829 case 8: atok
+= 4; break;
5830 default: tcc_error("only integer-sized types are supported");
5834 vpush_helper_func(atok
);
5839 if (((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
5840 || !is_compatible_unqualified_types(atom
, pointed_type(&vtop
->type
)))
5841 expect_arg("pointer to compatible type", arg
);
5845 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5846 expect_arg("integer type", arg
);
5850 if (!is_memory_model(vtop
))
5851 expect_arg("memory model", arg
);
5852 vtop
->type
.t
&= ~VT_MEMMODEL
;
5856 tcc_error("unknown parameter type");
5862 if (arg
< (argc
- 1))
5863 expect("more parameters");
5864 if (arg
> (argc
- 1))
5865 expect("less parameters");
5871 ST_FUNC
void unary(void)
5873 int n
, t
, align
, size
, r
, sizeof_caller
;
5878 /* generate line number info */
5880 tcc_debug_line(tcc_state
), tcc_tcov_check_line (1);
5882 sizeof_caller
= in_sizeof
;
5885 /* XXX: GCC 2.95.3 does not generate a table although it should be
5893 #ifdef TCC_TARGET_PE
5894 t
= VT_SHORT
|VT_UNSIGNED
;
5902 vsetc(&type
, VT_CONST
, &tokc
);
5906 t
= VT_INT
| VT_UNSIGNED
;
5912 t
= VT_LLONG
| VT_UNSIGNED
;
5924 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5927 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5929 case TOK___FUNCTION__
:
5931 goto tok_identifier
;
5937 /* special function name identifier */
5938 len
= strlen(funcname
) + 1;
5939 /* generate char[len] type */
5944 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
5945 if (!NODATA_WANTED
) {
5946 ptr
= section_ptr_add(data_section
, len
);
5947 memcpy(ptr
, funcname
, len
);
5953 #ifdef TCC_TARGET_PE
5954 t
= VT_SHORT
| VT_UNSIGNED
;
5960 /* string parsing */
5962 if (tcc_state
->char_is_unsigned
)
5963 t
= VT_BYTE
| VT_UNSIGNED
;
5965 if (tcc_state
->warn_write_strings
)
5970 memset(&ad
, 0, sizeof(AttributeDef
));
5971 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5976 if (parse_btype(&type
, &ad
)) {
5977 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5979 /* check ISOC99 compound literal */
5981 /* data is allocated locally by default */
5986 /* all except arrays are lvalues */
5987 if (!(type
.t
& VT_ARRAY
))
5989 memset(&ad
, 0, sizeof(AttributeDef
));
5990 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5992 if (sizeof_caller
) {
5999 } else if (tok
== '{') {
6000 int saved_nocode_wanted
= nocode_wanted
;
6001 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
6003 if (0 == local_scope
)
6004 tcc_error("statement expression outside of function");
6005 /* save all registers */
6007 /* statement expression : we do not accept break/continue
6008 inside as GCC does. We do retain the nocode_wanted state,
6009 as statement expressions can't ever be entered from the
6010 outside, so any reactivation of code emission (from labels
6011 or loop heads) can be disabled again after the end of it. */
6013 nocode_wanted
= saved_nocode_wanted
;
6028 /* functions names must be treated as function pointers,
6029 except for unary '&' and sizeof. Since we consider that
6030 functions are not lvalues, we only have to handle it
6031 there and in function calls. */
6032 /* arrays can also be used although they are not lvalues */
6033 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
6034 !(vtop
->type
.t
& VT_ARRAY
))
6037 vtop
->sym
->a
.addrtaken
= 1;
6038 mk_pointer(&vtop
->type
);
6044 gen_test_zero(TOK_EQ
);
6055 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
6056 tcc_error("pointer not accepted for unary plus");
6057 /* In order to force cast, we add zero, except for floating point
6058 where we really need an noop (otherwise -0.0 will be transformed
6060 if (!is_float(vtop
->type
.t
)) {
6072 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
6074 if (vtop
[1].r
& VT_SYM
)
6075 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
6076 size
= type_size(&type
, &align
);
6077 if (s
&& s
->a
.aligned
)
6078 align
= 1 << (s
->a
.aligned
- 1);
6079 if (t
== TOK_SIZEOF
) {
6080 if (!(type
.t
& VT_VLA
)) {
6082 tcc_error("sizeof applied to an incomplete type");
6085 vla_runtime_type_size(&type
, &align
);
6090 vtop
->type
.t
|= VT_UNSIGNED
;
6093 case TOK_builtin_expect
:
6094 /* __builtin_expect is a no-op for now */
6095 parse_builtin_params(0, "ee");
6098 case TOK_builtin_types_compatible_p
:
6099 parse_builtin_params(0, "tt");
6100 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
6101 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
6102 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
6106 case TOK_builtin_choose_expr
:
6133 case TOK_builtin_constant_p
:
6134 parse_builtin_params(1, "e");
6135 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6136 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
6140 case TOK_builtin_frame_address
:
6141 case TOK_builtin_return_address
:
6147 if (tok
!= TOK_CINT
) {
6148 tcc_error("%s only takes positive integers",
6149 tok1
== TOK_builtin_return_address
?
6150 "__builtin_return_address" :
6151 "__builtin_frame_address");
6153 level
= (uint32_t)tokc
.i
;
6158 vset(&type
, VT_LOCAL
, 0); /* local frame */
6160 #ifdef TCC_TARGET_RISCV64
6164 mk_pointer(&vtop
->type
);
6165 indir(); /* -> parent frame */
6167 if (tok1
== TOK_builtin_return_address
) {
6168 // assume return address is just above frame pointer on stack
6169 #ifdef TCC_TARGET_ARM
6172 #elif defined TCC_TARGET_RISCV64
6179 mk_pointer(&vtop
->type
);
6184 #ifdef TCC_TARGET_RISCV64
6185 case TOK_builtin_va_start
:
6186 parse_builtin_params(0, "ee");
6187 r
= vtop
->r
& VT_VALMASK
;
6191 tcc_error("__builtin_va_start expects a local variable");
6196 #ifdef TCC_TARGET_X86_64
6197 #ifdef TCC_TARGET_PE
6198 case TOK_builtin_va_start
:
6199 parse_builtin_params(0, "ee");
6200 r
= vtop
->r
& VT_VALMASK
;
6204 tcc_error("__builtin_va_start expects a local variable");
6206 vtop
->type
= char_pointer_type
;
6211 case TOK_builtin_va_arg_types
:
6212 parse_builtin_params(0, "t");
6213 vpushi(classify_x86_64_va_arg(&vtop
->type
));
6220 #ifdef TCC_TARGET_ARM64
6221 case TOK_builtin_va_start
: {
6222 parse_builtin_params(0, "ee");
6226 vtop
->type
.t
= VT_VOID
;
6229 case TOK_builtin_va_arg
: {
6230 parse_builtin_params(0, "et");
6238 case TOK___arm64_clear_cache
: {
6239 parse_builtin_params(0, "ee");
6242 vtop
->type
.t
= VT_VOID
;
6247 /* atomic operations */
6248 case TOK___c11_atomic_init
:
6249 case TOK___c11_atomic_store
:
6250 case TOK___c11_atomic_load
:
6251 case TOK___c11_atomic_exchange
:
6252 case TOK___c11_atomic_compare_exchange_strong
:
6253 case TOK___c11_atomic_compare_exchange_weak
:
6254 case TOK___c11_atomic_fetch_add
:
6255 case TOK___c11_atomic_fetch_sub
:
6256 case TOK___c11_atomic_fetch_or
:
6257 case TOK___c11_atomic_fetch_xor
:
6258 case TOK___c11_atomic_fetch_and
:
6262 /* pre operations */
6273 if (is_float(vtop
->type
.t
)) {
6283 goto tok_identifier
;
6285 /* allow to take the address of a label */
6286 if (tok
< TOK_UIDENT
)
6287 expect("label identifier");
6288 s
= label_find(tok
);
6290 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6292 if (s
->r
== LABEL_DECLARED
)
6293 s
->r
= LABEL_FORWARD
;
6296 s
->type
.t
= VT_VOID
;
6297 mk_pointer(&s
->type
);
6298 s
->type
.t
|= VT_STATIC
;
6300 vpushsym(&s
->type
, s
);
6306 CType controlling_type
;
6307 int has_default
= 0;
6310 TokenString
*str
= NULL
;
6311 int saved_const_wanted
= const_wanted
;
6316 expr_type(&controlling_type
, expr_eq
);
6317 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
6318 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
6319 mk_pointer(&controlling_type
);
6320 const_wanted
= saved_const_wanted
;
6324 if (tok
== TOK_DEFAULT
) {
6326 tcc_error("too many 'default'");
6332 AttributeDef ad_tmp
;
6337 parse_btype(&cur_type
, &ad_tmp
);
6340 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
6341 if (compare_types(&controlling_type
, &cur_type
, 0)) {
6343 tcc_error("type match twice");
6353 skip_or_save_block(&str
);
6355 skip_or_save_block(NULL
);
6362 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
6363 tcc_error("type '%s' does not match any association", buf
);
6365 begin_macro(str
, 1);
6374 // special qnan , snan and infinity values
6379 vtop
->type
.t
= VT_FLOAT
;
6384 goto special_math_val
;
6387 goto special_math_val
;
6394 expect("identifier");
6396 if (!s
|| IS_ASM_SYM(s
)) {
6397 const char *name
= get_tok_str(t
, NULL
);
6399 tcc_error("'%s' undeclared", name
);
6400 /* for simple function calls, we tolerate undeclared
6401 external reference to int() function */
6402 if (tcc_state
->warn_implicit_function_declaration
6403 #ifdef TCC_TARGET_PE
6404 /* people must be warned about using undeclared WINAPI functions
6405 (which usually start with uppercase letter) */
6406 || (name
[0] >= 'A' && name
[0] <= 'Z')
6409 tcc_warning("implicit declaration of function '%s'", name
);
6410 s
= external_global_sym(t
, &func_old_type
);
6414 /* A symbol that has a register is a local register variable,
6415 which starts out as VT_LOCAL value. */
6416 if ((r
& VT_VALMASK
) < VT_CONST
)
6417 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
6419 vset(&s
->type
, r
, s
->c
);
6420 /* Point to s as backpointer (even without r&VT_SYM).
6421 Will be used by at least the x86 inline asm parser for
6427 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
6428 vtop
->c
.i
= s
->enum_val
;
6433 /* post operations */
6435 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
6438 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
6439 int qualifiers
, cumofs
= 0;
6441 if (tok
== TOK_ARROW
)
6443 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
6446 /* expect pointer on structure */
6447 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
6448 expect("struct or union");
6449 if (tok
== TOK_CDOUBLE
)
6450 expect("field name");
6452 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
6453 expect("field name");
6454 s
= find_field(&vtop
->type
, tok
, &cumofs
);
6456 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
6457 /* add field offset to pointer */
6458 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
6459 vpushi(cumofs
+ s
->c
);
6461 /* change type to field type, and set to lvalue */
6462 vtop
->type
= s
->type
;
6463 vtop
->type
.t
|= qualifiers
;
6464 /* an array is never an lvalue */
6465 if (!(vtop
->type
.t
& VT_ARRAY
)) {
6467 #ifdef CONFIG_TCC_BCHECK
6468 /* if bound checking, the referenced pointer must be checked */
6469 if (tcc_state
->do_bounds_check
)
6470 vtop
->r
|= VT_MUSTBOUND
;
6474 } else if (tok
== '[') {
6480 } else if (tok
== '(') {
6483 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
6486 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
6487 /* pointer test (no array accepted) */
6488 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
6489 vtop
->type
= *pointed_type(&vtop
->type
);
6490 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6494 expect("function pointer");
6497 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
6499 /* get return type */
6502 sa
= s
->next
; /* first parameter */
6503 nb_args
= regsize
= 0;
6505 /* compute first implicit argument if a structure is returned */
6506 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
6507 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
6508 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6509 &ret_align
, ®size
);
6510 if (ret_nregs
<= 0) {
6511 /* get some space for the returned structure */
6512 size
= type_size(&s
->type
, &align
);
6513 #ifdef TCC_TARGET_ARM64
6514 /* On arm64, a small struct is return in registers.
6515 It is much easier to write it to memory if we know
6516 that we are allowed to write some extra bytes, so
6517 round the allocated space up to a power of 2: */
6519 while (size
& (size
- 1))
6520 size
= (size
| (size
- 1)) + 1;
6522 loc
= (loc
- size
) & -align
;
6524 ret
.r
= VT_LOCAL
| VT_LVAL
;
6525 /* pass it as 'int' to avoid structure arg passing
6527 vseti(VT_LOCAL
, loc
);
6528 #ifdef CONFIG_TCC_BCHECK
6529 if (tcc_state
->do_bounds_check
)
6543 if (ret_nregs
> 0) {
6544 /* return in register */
6546 PUT_R_RET(&ret
, ret
.type
.t
);
6551 gfunc_param_typed(s
, sa
);
6561 tcc_error("too few arguments to function");
6563 gfunc_call(nb_args
);
6565 if (ret_nregs
< 0) {
6566 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6567 #ifdef TCC_TARGET_RISCV64
6568 arch_transfer_ret_regs(1);
6572 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6573 vsetc(&ret
.type
, r
, &ret
.c
);
6574 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6577 /* handle packed struct return */
6578 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6581 size
= type_size(&s
->type
, &align
);
6582 /* We're writing whole regs often, make sure there's enough
6583 space. Assume register size is power of 2. */
6584 if (regsize
> align
)
6586 loc
= (loc
- size
) & -align
;
6590 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6594 if (--ret_nregs
== 0)
6598 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6601 /* Promote char/short return values. This is matters only
6602 for calling function that were not compiled by TCC and
6603 only on some architectures. For those where it doesn't
6604 matter we expect things to be already promoted to int,
6606 t
= s
->type
.t
& VT_BTYPE
;
6607 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6609 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6611 vtop
->type
.t
= VT_INT
;
6615 if (s
->f
.func_noreturn
) {
6617 tcc_tcov_block_end (tcov_data
.line
);
6626 #ifndef precedence_parser /* original top-down parser */
6628 static void expr_prod(void)
6633 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6640 static void expr_sum(void)
6645 while ((t
= tok
) == '+' || t
== '-') {
6652 static void expr_shift(void)
6657 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6664 static void expr_cmp(void)
6669 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6670 t
== TOK_ULT
|| t
== TOK_UGE
) {
6677 static void expr_cmpeq(void)
6682 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6689 static void expr_and(void)
6692 while (tok
== '&') {
6699 static void expr_xor(void)
6702 while (tok
== '^') {
6709 static void expr_or(void)
6712 while (tok
== '|') {
6719 static void expr_landor(int op
);
6721 static void expr_land(void)
6724 if (tok
== TOK_LAND
)
6728 static void expr_lor(void)
6735 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6736 #else /* defined precedence_parser */
6737 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6738 # define expr_lor() unary(), expr_infix(1)
6740 static int precedence(int tok
)
6743 case TOK_LOR
: return 1;
6744 case TOK_LAND
: return 2;
6748 case TOK_EQ
: case TOK_NE
: return 6;
6749 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6750 case TOK_SHL
: case TOK_SAR
: return 8;
6751 case '+': case '-': return 9;
6752 case '*': case '/': case '%': return 10;
6754 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6759 static unsigned char prec
[256];
6760 static void init_prec(void)
6763 for (i
= 0; i
< 256; i
++)
6764 prec
[i
] = precedence(i
);
6766 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6768 static void expr_landor(int op
);
6770 static void expr_infix(int p
)
6773 while ((p2
= precedence(t
)) >= p
) {
6774 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6779 if (precedence(tok
) > p2
)
6788 /* Assuming vtop is a value used in a conditional context
6789 (i.e. compared with zero) return 0 if it's false, 1 if
6790 true and -1 if it can't be statically determined. */
6791 static int condition_3way(void)
6794 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6795 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6797 gen_cast_s(VT_BOOL
);
6804 static void expr_landor(int op
)
6806 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6808 c
= f
? i
: condition_3way();
6810 save_regs(1), cc
= 0;
6812 nocode_wanted
++, f
= 1;
6820 expr_landor_next(op
);
6832 static int is_cond_bool(SValue
*sv
)
6834 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6835 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6836 return (unsigned)sv
->c
.i
< 2;
6837 if (sv
->r
== VT_CMP
)
6842 static void expr_cond(void)
6844 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6852 c
= condition_3way();
6853 g
= (tok
== ':' && gnu_ext
);
6863 /* needed to avoid having different registers saved in
6870 ncw_prev
= nocode_wanted
;
6876 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6877 mk_pointer(&vtop
->type
);
6878 sv
= *vtop
; /* save value to handle it later */
6879 vtop
--; /* no vpop so that FP stack is not flushed */
6889 nocode_wanted
= ncw_prev
;
6895 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6896 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6897 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6898 this code jumps directly to the if's then/else branches. */
6903 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6906 nocode_wanted
= ncw_prev
;
6907 // tcc_warning("two conditions expr_cond");
6911 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6912 mk_pointer(&vtop
->type
);
6914 /* cast operands to correct type according to ISOC rules */
6915 if (!combine_types(&type
, &sv
, vtop
, '?'))
6916 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6917 "type mismatch in conditional expression (have '%s' and '%s')");
6918 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6919 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6920 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6922 /* now we convert second operand */
6926 mk_pointer(&vtop
->type
);
6928 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6932 rc
= RC_TYPE(type
.t
);
6933 /* for long longs, we use fixed registers to avoid having
6934 to handle a complicated move */
6935 if (USING_TWO_WORDS(type
.t
))
6936 rc
= RC_RET(type
.t
);
6944 nocode_wanted
= ncw_prev
;
6946 /* this is horrible, but we must also convert first
6952 mk_pointer(&vtop
->type
);
6954 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6960 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6970 static void expr_eq(void)
6975 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6983 gen_op(TOK_ASSIGN_OP(t
));
6989 ST_FUNC
void gexpr(void)
7000 /* parse a constant expression and return value in vtop. */
7001 static void expr_const1(void)
7004 nocode_wanted
+= unevalmask
+ 1;
7006 nocode_wanted
-= unevalmask
+ 1;
7010 /* parse an integer constant and return its value. */
7011 static inline int64_t expr_const64(void)
7015 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
7016 expect("constant expression");
7022 /* parse an integer constant and return its value.
7023 Complain if it doesn't fit 32bit (signed or unsigned). */
7024 ST_FUNC
int expr_const(void)
7027 int64_t wc
= expr_const64();
7029 if (c
!= wc
&& (unsigned)c
!= wc
)
7030 tcc_error("constant exceeds 32 bit");
7034 /* ------------------------------------------------------------------------- */
7035 /* return from function */
7037 #ifndef TCC_TARGET_ARM64
7038 static void gfunc_return(CType
*func_type
)
7040 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7041 CType type
, ret_type
;
7042 int ret_align
, ret_nregs
, regsize
;
7043 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
7044 &ret_align
, ®size
);
7045 if (ret_nregs
< 0) {
7046 #ifdef TCC_TARGET_RISCV64
7047 arch_transfer_ret_regs(0);
7049 } else if (0 == ret_nregs
) {
7050 /* if returning structure, must copy it to implicit
7051 first pointer arg location */
7054 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
7057 /* copy structure value to pointer */
7060 /* returning structure packed into registers */
7061 int size
, addr
, align
, rc
;
7062 size
= type_size(func_type
,&align
);
7063 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
7064 (vtop
->c
.i
& (ret_align
-1)))
7065 && (align
& (ret_align
-1))) {
7066 loc
= (loc
- size
) & -ret_align
;
7069 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
7073 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
7075 vtop
->type
= ret_type
;
7076 rc
= RC_RET(ret_type
.t
);
7084 if (--ret_nregs
== 0)
7086 /* We assume that when a structure is returned in multiple
7087 registers, their classes are consecutive values of the
7090 vtop
->c
.i
+= regsize
;
7095 gv(RC_RET(func_type
->t
));
7097 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
/* Warn (or synthesize a return value for main) when control can fall
   off the end of a non-void function.
   NOTE(review): this chunk is an extraction with interior source lines
   missing (original lines 7102, 7104, 7108, 7111, 7113+ are absent),
   so the body below is incomplete as shown — do not compile as-is. */
7101 static void check_func_return(void)
/* void functions: nothing to check (early-return line missing here) */
7103 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
/* C99: main() implicitly returns 0; emit that return explicitly */
7105 if (!strcmp (funcname
, "main")
7106 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
7107 /* main returns 0 by default */
/* presumably a vpushi(0) precedes this cast — TODO confirm against
   the unextracted source (original line 7108 is missing) */
7109 gen_assign_cast(&func_vt
);
7110 gfunc_return(&func_vt
);
/* any other non-void function: diagnose the possible missing return */
7112 tcc_warning("function might return no value: '%s'", funcname
);
7116 /* ------------------------------------------------------------------------- */
7119 static int case_cmpi(const void *pa
, const void *pb
)
7121 int64_t a
= (*(struct case_t
**) pa
)->v1
;
7122 int64_t b
= (*(struct case_t
**) pb
)->v1
;
7123 return a
< b
? -1 : a
> b
;
7126 static int case_cmpu(const void *pa
, const void *pb
)
7128 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
7129 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
7130 return a
< b
? -1 : a
> b
;
/* Emit a conditional test on vtop (jump chain t) and bind the
   resulting forward jump to the already-known address a. */
static void gtst_addr(int t, int a)
{
    int jmp;

    jmp = gvtst(0, t);
    gsym_addr(jmp, a);
}
7138 static void gcase(struct case_t
**base
, int len
, int *bsym
)
7142 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
7159 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
7161 gcase(base
, len
/2, bsym
);
7165 base
+= e
; len
-= e
;
7175 if (p
->v1
== p
->v2
) {
7177 gtst_addr(0, p
->sym
);
7187 gtst_addr(0, p
->sym
);
7191 *bsym
= gjmp(*bsym
);
7194 /* ------------------------------------------------------------------------- */
7195 /* __attribute__((cleanup(fn))) */
7197 static void try_call_scope_cleanup(Sym
*stop
)
7199 Sym
*cls
= cur_scope
->cl
.s
;
7201 for (; cls
!= stop
; cls
= cls
->ncl
) {
7202 Sym
*fs
= cls
->next
;
7203 Sym
*vs
= cls
->prev_tok
;
7205 vpushsym(&fs
->type
, fs
);
7206 vset(&vs
->type
, vs
->r
, vs
->c
);
7208 mk_pointer(&vtop
->type
);
7214 static void try_call_cleanup_goto(Sym
*cleanupstate
)
7219 if (!cur_scope
->cl
.s
)
7222 /* search NCA of both cleanup chains given parents and initial depth */
7223 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
7224 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
7226 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
7228 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
7231 try_call_scope_cleanup(cc
);
7234 /* call 'func' for each __attribute__((cleanup(func))) */
7235 static void block_cleanup(struct scope
*o
)
7239 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
7240 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
7245 try_call_scope_cleanup(o
->cl
.s
);
7246 pcl
->jnext
= gjmp(0);
7248 goto remove_pending
;
7258 try_call_scope_cleanup(o
->cl
.s
);
7261 /* ------------------------------------------------------------------------- */
/* Restore the stack pointer saved before variable-length-array
   allocation in a scope (used on scope exit / goto / loop re-entry).
   NOTE(review): extraction gap — original lines 7265-7266 are missing
   here; presumably a "if (loc)" guard precedes the call — TODO confirm
   against the unextracted source. */
7264 static void vla_restore(int loc
)
7267 gen_vla_sp_restore(loc
);
7270 static void vla_leave(struct scope
*o
)
7272 struct scope
*c
= cur_scope
, *v
= NULL
;
7273 for (; c
!= o
&& c
; c
= c
->prev
)
7277 vla_restore(v
->vla
.locorig
);
7280 /* ------------------------------------------------------------------------- */
7283 void new_scope(struct scope
*o
)
7285 /* copy and link previous scope */
7287 o
->prev
= cur_scope
;
7289 cur_scope
->vla
.num
= 0;
7291 /* record local declaration stack position */
7292 o
->lstk
= local_stack
;
7293 o
->llstk
= local_label_stack
;
7297 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
7300 void prev_scope(struct scope
*o
, int is_expr
)
7304 if (o
->cl
.s
!= o
->prev
->cl
.s
)
7305 block_cleanup(o
->prev
);
7307 /* pop locally defined labels */
7308 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
7310 /* In the is_expr case (a statement expression is finished here),
7311 vtop might refer to symbols on the local_stack. Either via the
7312 type or via vtop->sym. We can't pop those nor any that in turn
7313 might be referred to. To make it easier we don't roll back
7314 any symbols in that case; some upper level call to block() will
7315 do that. We do have to remove such symbols from the lookup
7316 tables, though. sym_pop will do that. */
7318 /* pop locally defined symbols */
7319 pop_local_syms(o
->lstk
, is_expr
);
7320 cur_scope
= o
->prev
;
7324 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
7327 /* leave a scope via break/continue(/goto) */
7328 void leave_scope(struct scope
*o
)
7332 try_call_scope_cleanup(o
->cl
.s
);
7336 /* ------------------------------------------------------------------------- */
7337 /* call block from 'for do while' loops */
7339 static void lblock(int *bsym
, int *csym
)
7341 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
7342 int *b
= co
->bsym
, *c
= co
->csym
;
7356 static void block(int is_expr
)
7358 int a
, b
, c
, d
, e
, t
;
7363 /* default return value is (void) */
7365 vtop
->type
.t
= VT_VOID
;
7370 /* If the token carries a value, next() might destroy it. Only with
7371 invalid code such as f(){"123"4;} */
7372 if (TOK_HAS_VALUE(t
))
7377 tcc_tcov_check_line (0), tcc_tcov_block_begin ();
7385 if (tok
== TOK_ELSE
) {
7390 gsym(d
); /* patch else jmp */
7395 } else if (t
== TOK_WHILE
) {
7407 } else if (t
== '{') {
7410 /* handle local labels declarations */
7411 while (tok
== TOK_LABEL
) {
7414 if (tok
< TOK_UIDENT
)
7415 expect("label identifier");
7416 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
7418 } while (tok
== ',');
7422 while (tok
!= '}') {
7431 prev_scope(&o
, is_expr
);
7434 else if (!nocode_wanted
)
7435 check_func_return();
7437 } else if (t
== TOK_RETURN
) {
7438 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
7442 gen_assign_cast(&func_vt
);
7444 if (vtop
->type
.t
!= VT_VOID
)
7445 tcc_warning("void function returns a value");
7449 tcc_warning("'return' with no value");
7452 leave_scope(root_scope
);
7454 gfunc_return(&func_vt
);
7456 /* jump unless last stmt in top-level block */
7457 if (tok
!= '}' || local_scope
!= 1)
7460 tcc_tcov_block_end (tcov_data
.line
);
7463 } else if (t
== TOK_BREAK
) {
7465 if (!cur_scope
->bsym
)
7466 tcc_error("cannot break");
7467 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
7468 leave_scope(cur_switch
->scope
);
7470 leave_scope(loop_scope
);
7471 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
7474 } else if (t
== TOK_CONTINUE
) {
7476 if (!cur_scope
->csym
)
7477 tcc_error("cannot continue");
7478 leave_scope(loop_scope
);
7479 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
7482 } else if (t
== TOK_FOR
) {
7487 /* c99 for-loop init decl? */
7488 if (!decl0(VT_LOCAL
, 1, NULL
)) {
7489 /* no, regular for-loop init expr */
7517 } else if (t
== TOK_DO
) {
7531 } else if (t
== TOK_SWITCH
) {
7532 struct switch_t
*sw
;
7534 sw
= tcc_mallocz(sizeof *sw
);
7536 sw
->scope
= cur_scope
;
7537 sw
->prev
= cur_switch
;
7543 sw
->sv
= *vtop
--; /* save switch value */
7546 b
= gjmp(0); /* jump to first case */
7548 a
= gjmp(a
); /* add implicit break */
7552 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7553 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7555 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7557 for (b
= 1; b
< sw
->n
; b
++)
7558 if (sw
->sv
.type
.t
& VT_UNSIGNED
7559 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7560 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7561 tcc_error("duplicate case value");
7565 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7568 gsym_addr(d
, sw
->def_sym
);
7574 dynarray_reset(&sw
->p
, &sw
->n
);
7575 cur_switch
= sw
->prev
;
7578 } else if (t
== TOK_CASE
) {
7579 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7582 cr
->v1
= cr
->v2
= expr_const64();
7583 if (gnu_ext
&& tok
== TOK_DOTS
) {
7585 cr
->v2
= expr_const64();
7586 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7587 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7588 tcc_warning("empty case range");
7592 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7595 goto block_after_label
;
7597 } else if (t
== TOK_DEFAULT
) {
7600 if (cur_switch
->def_sym
)
7601 tcc_error("too many 'default'");
7603 cur_switch
->def_sym
= gind();
7606 goto block_after_label
;
7608 } else if (t
== TOK_GOTO
) {
7609 if (cur_scope
->vla
.num
)
7610 vla_restore(cur_scope
->vla
.locorig
);
7611 if (tok
== '*' && gnu_ext
) {
7615 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7619 } else if (tok
>= TOK_UIDENT
) {
7620 s
= label_find(tok
);
7621 /* put forward definition if needed */
7623 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7624 else if (s
->r
== LABEL_DECLARED
)
7625 s
->r
= LABEL_FORWARD
;
7627 if (s
->r
& LABEL_FORWARD
) {
7628 /* start new goto chain for cleanups, linked via label->next */
7629 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7630 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7631 pending_gotos
->prev_tok
= s
;
7632 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7633 pending_gotos
->next
= s
;
7635 s
->jnext
= gjmp(s
->jnext
);
7637 try_call_cleanup_goto(s
->cleanupstate
);
7638 gjmp_addr(s
->jnext
);
7643 expect("label identifier");
7647 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7651 if (tok
== ':' && t
>= TOK_UIDENT
) {
7656 if (s
->r
== LABEL_DEFINED
)
7657 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7658 s
->r
= LABEL_DEFINED
;
7660 Sym
*pcl
; /* pending cleanup goto */
7661 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7663 sym_pop(&s
->next
, NULL
, 0);
7667 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7670 s
->cleanupstate
= cur_scope
->cl
.s
;
7673 vla_restore(cur_scope
->vla
.loc
);
7674 /* we accept this, but it is a mistake */
7676 tcc_warning("deprecated use of label at end of compound statement");
7682 /* expression case */
7699 tcc_tcov_check_line (0), tcc_tcov_block_end (0);
7702 /* This skips over a stream of tokens containing balanced {} and ()
7703 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7704 with a '{'). If STR then allocates and stores the skipped tokens
7705 in *STR. This doesn't check if () and {} are nested correctly,
7706 i.e. "({)}" is accepted. */
7707 static void skip_or_save_block(TokenString
**str
)
7709 int braces
= tok
== '{';
7712 *str
= tok_str_alloc();
7714 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
7716 if (tok
== TOK_EOF
) {
7717 if (str
|| level
> 0)
7718 tcc_error("unexpected end of file");
7723 tok_str_add_tok(*str
);
7726 if (t
== '{' || t
== '(') {
7728 } else if (t
== '}' || t
== ')') {
7730 if (level
== 0 && braces
&& t
== '}')
7735 tok_str_add(*str
, -1);
7736 tok_str_add(*str
, 0);
7740 #define EXPR_CONST 1
7743 static void parse_init_elem(int expr_type
)
7745 int saved_global_expr
;
7748 /* compound literals must be allocated globally in this case */
7749 saved_global_expr
= global_expr
;
7752 global_expr
= saved_global_expr
;
7753 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7754 (compound literals). */
7755 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7756 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7757 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7758 #ifdef TCC_TARGET_PE
7759 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7762 tcc_error("initializer element is not constant");
7771 static void init_assert(init_params
*p
, int offset
)
7773 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7774 : !nocode_wanted
&& offset
> p
->local_offset
)
7775 tcc_internal_error("initializer overflow");
7778 #define init_assert(sec, offset)
7781 /* put zeros for variable based init */
7782 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7784 init_assert(p
, c
+ size
);
7786 /* nothing to do because globals are already set to zero */
7788 vpush_helper_func(TOK_memset
);
7790 #ifdef TCC_TARGET_ARM
7802 #define DIF_SIZE_ONLY 2
7803 #define DIF_HAVE_ELEM 4
7806 /* delete relocations for specified range c ... c + size. Unfortunatly
7807 in very special cases, relocations may occur unordered */
7808 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7810 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7811 if (!sec
|| !sec
->reloc
)
7813 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7814 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7815 while (rel
< rel_end
) {
7816 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7817 sec
->reloc
->data_offset
-= sizeof *rel
;
7820 memcpy(rel2
, rel
, sizeof *rel
);
7827 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7829 if (ref
== p
->flex_array_ref
) {
7830 if (index
>= ref
->c
)
7832 } else if (ref
->c
< 0)
7833 tcc_error("flexible array has zero size in this context");
7836 /* t is the array or struct type. c is the array or struct
7837 address. cur_field is the pointer to the current
7838 field, for arrays the 'c' member contains the current start
7839 index. 'flags' is as in decl_initializer.
7840 'al' contains the already initialized length of the
7841 current container (starting at c). This returns the new length of that. */
7842 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7843 Sym
**cur_field
, int flags
, int al
)
7846 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7847 unsigned long corig
= c
;
7852 if (flags
& DIF_HAVE_ELEM
)
7855 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7862 /* NOTE: we only support ranges for last designator */
7863 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7865 if (!(type
->t
& VT_ARRAY
))
7866 expect("array type");
7868 index
= index_last
= expr_const();
7869 if (tok
== TOK_DOTS
&& gnu_ext
) {
7871 index_last
= expr_const();
7875 decl_design_flex(p
, s
, index_last
);
7876 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7877 tcc_error("index exceeds array bounds or range is empty");
7879 (*cur_field
)->c
= index_last
;
7880 type
= pointed_type(type
);
7881 elem_size
= type_size(type
, &align
);
7882 c
+= index
* elem_size
;
7883 nb_elems
= index_last
- index
+ 1;
7890 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7891 expect("struct/union type");
7893 f
= find_field(type
, l
, &cumofs
);
7906 } else if (!gnu_ext
) {
7911 if (type
->t
& VT_ARRAY
) {
7912 index
= (*cur_field
)->c
;
7914 decl_design_flex(p
, s
, index
);
7916 tcc_error("too many initializers");
7917 type
= pointed_type(type
);
7918 elem_size
= type_size(type
, &align
);
7919 c
+= index
* elem_size
;
7922 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7923 *cur_field
= f
= f
->next
;
7925 tcc_error("too many initializers");
7931 if (!elem_size
) /* for structs */
7932 elem_size
= type_size(type
, &align
);
7934 /* Using designators the same element can be initialized more
7935 than once. In that case we need to delete possibly already
7936 existing relocations. */
7937 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7938 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7939 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7942 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7944 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7948 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7949 /* make init_putv/vstore believe it were a struct */
7951 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7955 vpush_ref(type
, p
->sec
, c
, elem_size
);
7957 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7958 for (i
= 1; i
< nb_elems
; i
++) {
7960 init_putv(p
, type
, c
+ elem_size
* i
);
7965 c
+= nb_elems
* elem_size
;
7971 /* store a value or an expression directly in global data or in local array */
7972 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7978 Section
*sec
= p
->sec
;
7982 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7984 size
= type_size(type
, &align
);
7985 if (type
->t
& VT_BITFIELD
)
7986 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7987 init_assert(p
, c
+ size
);
7990 /* XXX: not portable */
7991 /* XXX: generate error if incorrect relocation */
7992 gen_assign_cast(&dtype
);
7993 bt
= type
->t
& VT_BTYPE
;
7995 if ((vtop
->r
& VT_SYM
)
7997 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7998 || (type
->t
& VT_BITFIELD
))
7999 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
8001 tcc_error("initializer element is not computable at load time");
8003 if (NODATA_WANTED
) {
8008 ptr
= sec
->data
+ c
;
8011 /* XXX: make code faster ? */
8012 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
8013 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
8014 /* XXX This rejects compound literals like
8015 '(void *){ptr}'. The problem is that '&sym' is
8016 represented the same way, which would be ruled out
8017 by the SYM_FIRST_ANOM check above, but also '"string"'
8018 in 'char *p = "string"' is represented the same
8019 with the type being VT_PTR and the symbol being an
8020 anonymous one. That is, there's no difference in vtop
8021 between '(void *){x}' and '&(void *){x}'. Ignore
8022 pointer typed entities here. Hopefully no real code
8023 will ever use compound literals with scalar type. */
8024 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
8025 /* These come from compound literals, memcpy stuff over. */
8029 esym
= elfsym(vtop
->sym
);
8030 ssec
= tcc_state
->sections
[esym
->st_shndx
];
8031 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
8033 /* We need to copy over all memory contents, and that
8034 includes relocations. Use the fact that relocs are
8035 created it order, so look from the end of relocs
8036 until we hit one before the copied region. */
8037 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
8038 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
8039 while (num_relocs
--) {
8041 if (rel
->r_offset
>= esym
->st_value
+ size
)
8043 if (rel
->r_offset
< esym
->st_value
)
8045 put_elf_reloca(symtab_section
, sec
,
8046 c
+ rel
->r_offset
- esym
->st_value
,
8047 ELFW(R_TYPE
)(rel
->r_info
),
8048 ELFW(R_SYM
)(rel
->r_info
),
8058 if (type
->t
& VT_BITFIELD
) {
8059 int bit_pos
, bit_size
, bits
, n
;
8060 unsigned char *p
, v
, m
;
8061 bit_pos
= BIT_POS(vtop
->type
.t
);
8062 bit_size
= BIT_SIZE(vtop
->type
.t
);
8063 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
8064 bit_pos
&= 7, bits
= 0;
8069 v
= val
>> bits
<< bit_pos
;
8070 m
= ((1 << n
) - 1) << bit_pos
;
8071 *p
= (*p
& ~m
) | (v
& m
);
8072 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
8077 *(char *)ptr
= val
!= 0;
8083 write16le(ptr
, val
);
8086 write32le(ptr
, val
);
8089 write64le(ptr
, val
);
8092 #if defined TCC_IS_NATIVE_387
8093 /* Host and target platform may be different but both have x87.
8094 On windows, tcc does not use VT_LDOUBLE, except when it is a
8095 cross compiler. In this case a mingw gcc as host compiler
8096 comes here with 10-byte long doubles, while msvc or tcc won't.
8097 tcc itself can still translate by asm.
8098 In any case we avoid possibly random bytes 11 and 12.
8100 if (sizeof (long double) >= 10)
8101 memcpy(ptr
, &vtop
->c
.ld
, 10);
8103 else if (sizeof (long double) == sizeof (double))
8104 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
8106 else if (vtop
->c
.ld
== 0.0)
8110 /* For other platforms it should work natively, but may not work
8111 for cross compilers */
8112 if (sizeof(long double) == LDOUBLE_SIZE
)
8113 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
8114 else if (sizeof(double) == LDOUBLE_SIZE
)
8115 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
8116 #ifndef TCC_CROSS_TEST
8118 tcc_error("can't cross compile long double constants");
8123 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
8126 if (vtop
->r
& VT_SYM
)
8127 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
8129 write64le(ptr
, val
);
8132 write32le(ptr
, val
);
8136 write64le(ptr
, val
);
8140 if (vtop
->r
& VT_SYM
)
8141 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
8142 write32le(ptr
, val
);
8146 //tcc_internal_error("unexpected type");
8152 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
8159 /* 't' contains the type and storage info. 'c' is the offset of the
8160 object in section 'sec'. If 'sec' is NULL, it means stack based
8161 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
8162 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
8163 size only evaluation is wanted (only for arrays). */
8164 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
8166 int len
, n
, no_oblock
, i
;
8172 /* generate line number info */
8173 if (debug_modes
&& !p
->sec
)
8174 tcc_debug_line(tcc_state
), tcc_tcov_check_line (1);
8176 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
8177 /* In case of strings we have special handling for arrays, so
8178 don't consume them as initializer value (which would commit them
8179 to some anonymous symbol). */
8180 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
8181 !(flags
& DIF_SIZE_ONLY
)) {
8182 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
8183 flags
|= DIF_HAVE_ELEM
;
8186 if ((flags
& DIF_HAVE_ELEM
) &&
8187 !(type
->t
& VT_ARRAY
) &&
8188 /* Use i_c_parameter_t, to strip toplevel qualifiers.
8189 The source type might have VT_CONSTANT set, which is
8190 of course assignable to non-const elements. */
8191 is_compatible_unqualified_types(type
, &vtop
->type
)) {
8194 } else if (type
->t
& VT_ARRAY
) {
8196 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
8204 t1
= pointed_type(type
);
8205 size1
= type_size(t1
, &align1
);
8207 /* only parse strings here if correct type (otherwise: handle
8208 them as ((w)char *) expressions */
8209 if ((tok
== TOK_LSTR
&&
8210 #ifdef TCC_TARGET_PE
8211 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
8213 (t1
->t
& VT_BTYPE
) == VT_INT
8215 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
8217 cstr_reset(&initstr
);
8218 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
8219 tcc_error("unhandled string literal merging");
8220 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8222 initstr
.size
-= size1
;
8224 len
+= tokc
.str
.size
;
8226 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
8228 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
8231 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
8232 && tok
!= TOK_EOF
) {
8233 /* Not a lone literal but part of a bigger expression. */
8234 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
8235 tokc
.str
.size
= initstr
.size
;
8236 tokc
.str
.data
= initstr
.data
;
8240 if (!(flags
& DIF_SIZE_ONLY
)) {
8245 tcc_warning("initializer-string for array is too long");
8246 /* in order to go faster for common case (char
8247 string in global variable, we handle it
8249 if (p
->sec
&& size1
== 1) {
8250 init_assert(p
, c
+ nb
);
8252 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
8256 /* only add trailing zero if enough storage (no
8257 warning in this case since it is standard) */
8258 if (flags
& DIF_CLEAR
)
8261 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
8265 } else if (size1
== 1)
8266 ch
= ((unsigned char *)initstr
.data
)[i
];
8268 ch
= ((nwchar_t
*)initstr
.data
)[i
];
8270 init_putv(p
, t1
, c
+ i
* size1
);
8274 decl_design_flex(p
, s
, len
);
8283 /* zero memory once in advance */
8284 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
8285 init_putz(p
, c
, n
*size1
);
8290 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
8291 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
8292 flags
&= ~DIF_HAVE_ELEM
;
8293 if (type
->t
& VT_ARRAY
) {
8295 /* special test for multi dimensional arrays (may not
8296 be strictly correct if designators are used at the
8298 if (no_oblock
&& len
>= n
*size1
)
8301 if (s
->type
.t
== VT_UNION
)
8305 if (no_oblock
&& f
== NULL
)
8316 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
8318 if ((flags
& DIF_FIRST
) || tok
== '{') {
8327 } else if (tok
== '{') {
8328 if (flags
& DIF_HAVE_ELEM
)
8331 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
8333 } else if ((flags
& DIF_SIZE_ONLY
)) {
8334 /* If we supported only ISO C we wouldn't have to accept calling
8335 this on anything than an array if DIF_SIZE_ONLY (and even then
8336 only on the outermost level, so no recursion would be needed),
8337 because initializing a flex array member isn't supported.
8338 But GNU C supports it, so we need to recurse even into
8339 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
8340 /* just skip expression */
8341 skip_or_save_block(NULL
);
8343 if (!(flags
& DIF_HAVE_ELEM
)) {
8344 /* This should happen only when we haven't parsed
8345 the init element above for fear of committing a
8346 string constant to memory too early. */
8347 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
8348 expect("string constant");
8349 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
8352 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
8353 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
8355 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
8359 init_putv(p
, type
, c
);
8363 /* parse an initializer for type 't' if 'has_init' is non zero, and
8364 allocate space in local or global data space ('r' is either
8365 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
8366 variable 'v' of scope 'scope' is declared before initializers
8367 are parsed. If 'v' is zero, then a reference to the new object
8368 is put in the value stack. If 'has_init' is 2, a special parsing
8369 is done to handle string constants. */
8370 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
8371 int has_init
, int v
, int scope
)
8373 int size
, align
, addr
;
8374 TokenString
*init_str
= NULL
;
8377 Sym
*flexible_array
;
8379 int saved_nocode_wanted
= nocode_wanted
;
8380 #ifdef CONFIG_TCC_BCHECK
8381 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
8383 init_params p
= {0};
8385 /* Always allocate static or global variables */
8386 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
8387 nocode_wanted
|= 0x80000000;
8389 flexible_array
= NULL
;
8390 size
= type_size(type
, &align
);
8392 /* exactly one flexible array may be initialized, either the
8393 toplevel array or the last member of the toplevel struct */
8396 /* If the base type itself was an array type of unspecified size
8397 (like in 'typedef int arr[]; arr x = {1};') then we will
8398 overwrite the unknown size by the real one for this decl.
8399 We need to unshare the ref symbol holding that size. */
8400 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
8401 p
.flex_array_ref
= type
->ref
;
8403 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
8404 Sym
*field
= type
->ref
->next
;
8407 field
= field
->next
;
8408 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
8409 flexible_array
= field
;
8410 p
.flex_array_ref
= field
->type
.ref
;
8417 /* If unknown size, do a dry-run 1st pass */
8419 tcc_error("unknown type size");
8420 if (has_init
== 2) {
8421 /* only get strings */
8422 init_str
= tok_str_alloc();
8423 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8424 tok_str_add_tok(init_str
);
8427 tok_str_add(init_str
, -1);
8428 tok_str_add(init_str
, 0);
8430 skip_or_save_block(&init_str
);
8434 begin_macro(init_str
, 1);
8436 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
8437 /* prepare second initializer parsing */
8438 macro_ptr
= init_str
->str
;
8441 /* if still unknown size, error */
8442 size
= type_size(type
, &align
);
8444 tcc_error("unknown type size");
8446 /* If there's a flex member and it was used in the initializer
8448 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
8449 size
+= flexible_array
->type
.ref
->c
8450 * pointed_size(&flexible_array
->type
);
8453 /* take into account specified alignment if bigger */
8454 if (ad
->a
.aligned
) {
8455 int speca
= 1 << (ad
->a
.aligned
- 1);
8458 } else if (ad
->a
.packed
) {
8462 if (!v
&& NODATA_WANTED
)
8463 size
= 0, align
= 1;
8465 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
8467 #ifdef CONFIG_TCC_BCHECK
8469 /* add padding between stack variables for bound checking */
8473 loc
= (loc
- size
) & -align
;
8475 p
.local_offset
= addr
+ size
;
8476 #ifdef CONFIG_TCC_BCHECK
8478 /* add padding between stack variables for bound checking */
8483 /* local variable */
8484 #ifdef CONFIG_TCC_ASM
8485 if (ad
->asm_label
) {
8486 int reg
= asm_parse_regvar(ad
->asm_label
);
8488 r
= (r
& ~VT_VALMASK
) | reg
;
8491 sym
= sym_push(v
, type
, r
, addr
);
8492 if (ad
->cleanup_func
) {
8493 Sym
*cls
= sym_push2(&all_cleanups
,
8494 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
8495 cls
->prev_tok
= sym
;
8496 cls
->next
= ad
->cleanup_func
;
8497 cls
->ncl
= cur_scope
->cl
.s
;
8498 cur_scope
->cl
.s
= cls
;
8503 /* push local reference */
8504 vset(type
, r
, addr
);
8507 if (v
&& scope
== VT_CONST
) {
8508 /* see if the symbol was already defined */
8511 patch_storage(sym
, ad
, type
);
8512 /* we accept several definitions of the same global variable. */
8513 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8518 /* allocate symbol in corresponding section */
8521 if (type
->t
& VT_CONSTANT
)
8522 sec
= data_ro_section
;
8525 else if (tcc_state
->nocommon
)
8530 addr
= section_add(sec
, size
, align
);
8531 #ifdef CONFIG_TCC_BCHECK
8532 /* add padding if bound check */
8534 section_add(sec
, 1, 1);
8537 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8538 sec
= common_section
;
8543 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8544 patch_storage(sym
, ad
, NULL
);
8546 /* update symbol definition */
8547 put_extern_sym(sym
, sec
, addr
, size
);
8549 /* push global reference */
8550 vpush_ref(type
, sec
, addr
, size
);
8555 #ifdef CONFIG_TCC_BCHECK
8556 /* handles bounds now because the symbol must be defined
8557 before for the relocation */
8561 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8562 /* then add global bound info */
8563 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8564 bounds_ptr
[0] = 0; /* relocated */
8565 bounds_ptr
[1] = size
;
8570 if (type
->t
& VT_VLA
) {
8576 /* save before-VLA stack pointer if needed */
8577 if (cur_scope
->vla
.num
== 0) {
8578 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
8579 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
8581 gen_vla_sp_save(loc
-= PTR_SIZE
);
8582 cur_scope
->vla
.locorig
= loc
;
8586 vla_runtime_type_size(type
, &a
);
8587 gen_vla_alloc(type
, a
);
8588 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8589 /* on _WIN64, because of the function args scratch area, the
8590 result of alloca differs from RSP and is returned in RAX. */
8591 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8593 gen_vla_sp_save(addr
);
8594 cur_scope
->vla
.loc
= addr
;
8595 cur_scope
->vla
.num
++;
8596 } else if (has_init
) {
8598 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8599 /* patch flexible array member size back to -1, */
8600 /* for possible subsequent similar declarations */
8602 flexible_array
->type
.ref
->c
= -1;
8606 /* restore parse state if needed */
8612 nocode_wanted
= saved_nocode_wanted
;
8615 /* parse a function defined by symbol 'sym' and generate its code in
8616 'cur_text_section' */
8617 static void gen_function(Sym
*sym
)
8619 struct scope f
= { 0 };
8620 cur_scope
= root_scope
= &f
;
8622 ind
= cur_text_section
->data_offset
;
8623 if (sym
->a
.aligned
) {
8624 size_t newoff
= section_add(cur_text_section
, 0,
8625 1 << (sym
->a
.aligned
- 1));
8626 gen_fill_nops(newoff
- ind
);
8628 /* NOTE: we patch the symbol size later */
8629 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8630 if (sym
->type
.ref
->f
.func_ctor
)
8631 add_array (tcc_state
, ".init_array", sym
->c
);
8632 if (sym
->type
.ref
->f
.func_dtor
)
8633 add_array (tcc_state
, ".fini_array", sym
->c
);
8635 funcname
= get_tok_str(sym
->v
, NULL
);
8637 func_vt
= sym
->type
.ref
->type
;
8638 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8640 /* put debug symbol */
8641 tcc_debug_funcstart(tcc_state
, sym
);
8642 /* push a dummy symbol to enable local sym storage */
8643 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8644 local_scope
= 1; /* for function parameters */
8648 clear_temp_local_var_list();
8652 /* reset local stack */
8653 pop_local_syms(NULL
, 0);
8655 cur_text_section
->data_offset
= ind
;
8657 label_pop(&global_label_stack
, NULL
, 0);
8658 sym_pop(&all_cleanups
, NULL
, 0);
8659 /* patch symbol size */
8660 elfsym(sym
)->st_size
= ind
- func_ind
;
8661 /* end of function */
8662 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8663 /* It's better to crash than to generate wrong code */
8664 cur_text_section
= NULL
;
8665 funcname
= ""; /* for safety */
8666 func_vt
.t
= VT_VOID
; /* for safety */
8667 func_var
= 0; /* for safety */
8668 ind
= 0; /* for safety */
8669 nocode_wanted
= 0x80000000;
8671 /* do this after funcend debug info */
8675 static void gen_inline_functions(TCCState
*s
)
8678 int inline_generated
, i
;
8679 struct InlineFunc
*fn
;
8681 tcc_open_bf(s
, ":inline:", 0);
8682 /* iterate while inline function are referenced */
8684 inline_generated
= 0;
8685 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8686 fn
= s
->inline_fns
[i
];
8688 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8689 /* the function was used or forced (and then not internal):
8690 generate its code and convert it to a normal function */
8692 tcc_debug_putfile(s
, fn
->filename
);
8693 begin_macro(fn
->func_str
, 1);
8695 cur_text_section
= text_section
;
8699 inline_generated
= 1;
8702 } while (inline_generated
);
8706 static void free_inline_functions(TCCState
*s
)
8709 /* free tokens of unused inline functions */
8710 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8711 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8713 tok_str_free(fn
->func_str
);
8715 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
8718 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8719 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8720 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
8722 int v
, has_init
, r
, oldint
;
8725 AttributeDef ad
, adbase
;
8728 if (tok
== TOK_STATIC_ASSERT
) {
8738 tcc_error("_Static_assert fail");
8740 goto static_assert_out
;
8744 parse_mult_str(&error_str
, "string constant");
8746 tcc_error("%s", (char *)error_str
.data
);
8747 cstr_free(&error_str
);
8755 if (!parse_btype(&btype
, &adbase
)) {
8756 if (is_for_loop_init
)
8758 /* skip redundant ';' if not in old parameter decl scope */
8759 if (tok
== ';' && l
!= VT_CMP
) {
8765 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8766 /* global asm block */
8770 if (tok
>= TOK_UIDENT
) {
8771 /* special test for old K&R protos without explicit int
8772 type. Only accepted when defining global data */
8777 expect("declaration");
8783 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8785 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8786 tcc_warning("unnamed struct/union that defines no instances");
8790 if (IS_ENUM(btype
.t
)) {
8796 while (1) { /* iterate thru each declaration */
8799 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8803 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8804 printf("type = '%s'\n", buf
);
8807 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8808 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
8809 tcc_error("function without file scope cannot be static");
8810 /* if old style function prototype, we accept a
8813 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
8814 decl0(VT_CMP
, 0, sym
);
8815 #ifdef TCC_TARGET_MACHO
8816 if (sym
->f
.func_alwinl
8817 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8818 == (VT_EXTERN
| VT_INLINE
))) {
8819 /* always_inline functions must be handled as if they
8820 don't generate multiple global defs, even if extern
8821 inline, i.e. GNU inline semantics for those. Rewrite
8822 them into static inline. */
8823 type
.t
&= ~VT_EXTERN
;
8824 type
.t
|= VT_STATIC
;
8827 /* always compile 'extern inline' */
8828 if (type
.t
& VT_EXTERN
)
8829 type
.t
&= ~VT_INLINE
;
8831 } else if (oldint
) {
8832 tcc_warning("type defaults to int");
8835 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8836 ad
.asm_label
= asm_label_instr();
8837 /* parse one last attribute list, after asm label */
8838 parse_attribute(&ad
);
8840 /* gcc does not allow __asm__("label") with function definition,
8847 #ifdef TCC_TARGET_PE
8848 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8849 if (type
.t
& VT_STATIC
)
8850 tcc_error("cannot have dll linkage with static");
8851 if (type
.t
& VT_TYPEDEF
) {
8852 tcc_warning("'%s' attribute ignored for typedef",
8853 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8854 (ad
.a
.dllexport
= 0, "dllexport"));
8855 } else if (ad
.a
.dllimport
) {
8856 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8859 type
.t
|= VT_EXTERN
;
8865 tcc_error("cannot use local functions");
8866 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8867 expect("function definition");
8869 /* reject abstract declarators in function definition
8870 make old style params without decl have int type */
8872 while ((sym
= sym
->next
) != NULL
) {
8873 if (!(sym
->v
& ~SYM_FIELD
))
8874 expect("identifier");
8875 if (sym
->type
.t
== VT_VOID
)
8876 sym
->type
= int_type
;
8879 /* apply post-declaraton attributes */
8880 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8882 /* put function symbol */
8883 type
.t
&= ~VT_EXTERN
;
8884 sym
= external_sym(v
, &type
, 0, &ad
);
8886 /* static inline functions are just recorded as a kind
8887 of macro. Their code will be emitted at the end of
8888 the compilation unit only if they are used */
8889 if (sym
->type
.t
& VT_INLINE
) {
8890 struct InlineFunc
*fn
;
8891 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8892 strcpy(fn
->filename
, file
->filename
);
8894 skip_or_save_block(&fn
->func_str
);
8895 dynarray_add(&tcc_state
->inline_fns
,
8896 &tcc_state
->nb_inline_fns
, fn
);
8898 /* compute text section */
8899 cur_text_section
= ad
.section
;
8900 if (!cur_text_section
)
8901 cur_text_section
= text_section
;
8907 /* find parameter in function parameter list */
8908 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
8909 if ((sym
->v
& ~SYM_FIELD
) == v
)
8911 tcc_error("declaration for parameter '%s' but no such parameter",
8912 get_tok_str(v
, NULL
));
8914 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8915 tcc_error("storage class specified for '%s'",
8916 get_tok_str(v
, NULL
));
8917 if (sym
->type
.t
!= VT_VOID
)
8918 tcc_error("redefinition of parameter '%s'",
8919 get_tok_str(v
, NULL
));
8920 convert_parameter_type(&type
);
8922 } else if (type
.t
& VT_TYPEDEF
) {
8923 /* save typedefed type */
8924 /* XXX: test storage specifiers ? */
8926 if (sym
&& sym
->sym_scope
== local_scope
) {
8927 if (!is_compatible_types(&sym
->type
, &type
)
8928 || !(sym
->type
.t
& VT_TYPEDEF
))
8929 tcc_error("incompatible redefinition of '%s'",
8930 get_tok_str(v
, NULL
));
8933 sym
= sym_push(v
, &type
, 0, 0);
8938 tcc_debug_typedef (tcc_state
, sym
);
8939 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8940 && !(type
.t
& VT_EXTERN
)) {
8941 tcc_error("declaration of void object");
8944 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8945 /* external function definition */
8946 /* specific case for func_call attribute */
8948 } else if (!(type
.t
& VT_ARRAY
)) {
8949 /* not lvalue if array */
8952 has_init
= (tok
== '=');
8953 if (has_init
&& (type
.t
& VT_VLA
))
8954 tcc_error("variable length array cannot be initialized");
8955 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8956 || (type
.t
& VT_BTYPE
) == VT_FUNC
8957 /* as with GCC, uninitialized global arrays with no size
8958 are considered extern: */
8959 || ((type
.t
& VT_ARRAY
) && !has_init
8960 && l
== VT_CONST
&& type
.ref
->c
< 0)
8962 /* external variable or function */
8963 type
.t
|= VT_EXTERN
;
8964 sym
= external_sym(v
, &type
, r
, &ad
);
8965 if (ad
.alias_target
) {
8966 /* Aliases need to be emitted when their target
8967 symbol is emitted, even if perhaps unreferenced.
8968 We only support the case where the base is
8969 already defined, otherwise we would need
8970 deferring to emit the aliases until the end of
8971 the compile unit. */
8972 Sym
*alias_target
= sym_find(ad
.alias_target
);
8973 ElfSym
*esym
= elfsym(alias_target
);
8975 tcc_error("unsupported forward __alias__ attribute");
8976 put_extern_sym2(sym
, esym
->st_shndx
,
8977 esym
->st_value
, esym
->st_size
, 1);
8980 if (type
.t
& VT_STATIC
)
8986 else if (l
== VT_CONST
)
8987 /* uninitialized global variables may be overridden */
8988 type
.t
|= VT_EXTERN
;
8989 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
8993 if (is_for_loop_init
)
9005 static void decl(int l
)
9010 /* ------------------------------------------------------------------------- */
9013 /* ------------------------------------------------------------------------- */