2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
49 ST_DATA
char debug_modes
;
52 static SValue _vstack
[1 + VSTACK_SIZE
];
53 #define vstack (_vstack + 1)
55 ST_DATA
int const_wanted
; /* true if constant wanted */
56 ST_DATA
int nocode_wanted
; /* no code generation wanted */
57 #define unevalmask 0xffff /* unevaluated subexpression */
58 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
59 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
61 /* Automagical code suppression ----> */
62 #define CODE_OFF() (nocode_wanted |= 0x20000000)
63 #define CODE_ON() (nocode_wanted &= ~0x20000000)
65 static void tcc_tcov_block_begin(void);
67 /* Clear 'nocode_wanted' at label if it was used */
68 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
69 static int gind(void) { int t
= ind
; CODE_ON(); if (debug_modes
) tcc_tcov_block_begin(); return t
; }
71 /* Set 'nocode_wanted' after unconditional jumps */
/* Emit an unconditional jump to the fixed address 't', then suppress
   code generation: anything emitted directly after an unconditional
   jump would be unreachable. */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit a forward jump chained onto list 't' and suppress subsequent
   code generation; returns the new head of the jump chain. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
75 /* These are #undef'd at the end of this file */
76 #define gjmp_addr gjmp_addr_acs
80 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
81 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
82 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
84 static int last_line_num
, new_file
, func_ind
; /* debug info control */
85 ST_DATA
const char *funcname
;
86 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
87 static CString initstr
;
90 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_INT
93 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T VT_LLONG
96 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
97 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
100 static struct switch_t
{
104 } **p
; int n
; /* list of case ranges */
105 int def_sym
; /* default symbol */
108 struct switch_t
*prev
;
110 } *cur_switch
; /* current switch */
112 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
113 /*list of temporary local variables on the stack in current function. */
114 static struct temp_local_variable
{
115 int location
; //offset on stack. Svalue.c.i
118 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
119 static int nb_temp_local_vars
;
121 static struct scope
{
123 struct { int loc
, locorig
, num
; } vla
;
124 struct { Sym
*s
; int n
; } cl
;
127 } *cur_scope
, *loop_scope
, *root_scope
;
136 #define precedence_parser
137 static void init_prec(void);
140 /********************************************************/
141 /* stab debug support */
143 static const struct {
146 } default_debug
[] = {
147 { VT_INT
, "int:t1=r1;-2147483648;2147483647;" },
148 { VT_BYTE
, "char:t2=r2;0;127;" },
150 { VT_LONG
| VT_INT
, "long int:t3=r3;-2147483648;2147483647;" },
152 { VT_LLONG
| VT_LONG
, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
154 { VT_INT
| VT_UNSIGNED
, "unsigned int:t4=r4;0;037777777777;" },
156 { VT_LONG
| VT_INT
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;037777777777;" },
158 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
159 { VT_LLONG
| VT_LONG
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;01777777777777777777777;" },
161 { VT_QLONG
, "__int128:t6=r6;0;-1;" },
162 { VT_QLONG
| VT_UNSIGNED
, "__int128 unsigned:t7=r7;0;-1;" },
163 { VT_LLONG
, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
164 { VT_LLONG
| VT_UNSIGNED
, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
165 { VT_SHORT
, "short int:t10=r10;-32768;32767;" },
166 { VT_SHORT
| VT_UNSIGNED
, "short unsigned int:t11=r11;0;65535;" },
167 { VT_BYTE
| VT_DEFSIGN
, "signed char:t12=r12;-128;127;" },
168 { VT_BYTE
| VT_DEFSIGN
| VT_UNSIGNED
, "unsigned char:t13=r13;0;255;" },
169 { VT_FLOAT
, "float:t14=r1;4;0;" },
170 { VT_DOUBLE
, "double:t15=r1;8;0;" },
171 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
172 { VT_DOUBLE
| VT_LONG
, "long double:t16=r1;8;0;" },
174 { VT_LDOUBLE
, "long double:t16=r1;16;0;" },
176 { -1, "_Float32:t17=r1;4;0;" },
177 { -1, "_Float64:t18=r1;8;0;" },
178 { -1, "_Float128:t19=r1;16;0;" },
179 { -1, "_Float32x:t20=r1;8;0;" },
180 { -1, "_Float64x:t21=r1;16;0;" },
181 { -1, "_Decimal32:t22=r1;4;0;" },
182 { -1, "_Decimal64:t23=r1;8;0;" },
183 { -1, "_Decimal128:t24=r1;16;0;" },
184 /* if default char is unsigned */
185 { VT_BYTE
| VT_UNSIGNED
, "unsigned char:t25=r25;0;255;" },
187 { VT_BOOL
, "bool:t26=r26;0;255;" },
188 { VT_VOID
, "void:t27=27" },
191 static int debug_next_type
;
193 static struct debug_hash
{
198 static int n_debug_hash
;
200 static struct debug_info
{
211 struct debug_info
*child
, *next
, *last
, *parent
;
212 } *debug_info
, *debug_info_root
;
215 unsigned long offset
;
216 unsigned long last_file_name
;
217 unsigned long last_func_name
;
222 /********************************************************/
223 static void gen_cast(CType
*type
);
224 static void gen_cast_s(int t
);
225 static inline CType
*pointed_type(CType
*type
);
226 static int is_compatible_types(CType
*type1
, CType
*type2
);
227 static int parse_btype(CType
*type
, AttributeDef
*ad
);
228 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
229 static void parse_expr_type(CType
*type
);
230 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
231 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
232 static void block(int is_expr
);
233 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
234 static void decl(int l
);
235 static int decl0(int l
, int is_for_loop_init
, Sym
*);
236 static void expr_eq(void);
237 static void vpush_type_size(CType
*type
, int *a
);
238 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
239 static inline int64_t expr_const64(void);
240 static void vpush64(int ty
, unsigned long long v
);
241 static void vpush(CType
*type
);
242 static int gvtst(int inv
, int t
);
243 static void gen_inline_functions(TCCState
*s
);
244 static void free_inline_functions(TCCState
*s
);
245 static void skip_or_save_block(TokenString
**str
);
246 static void gv_dup(void);
247 static int get_temp_local_var(int size
,int align
);
248 static void clear_temp_local_var_list();
249 static void cast_error(CType
*st
, CType
*dt
);
251 ST_INLN
int is_float(int t
)
253 int bt
= t
& VT_BTYPE
;
254 return bt
== VT_LDOUBLE
260 static inline int is_integer_btype(int bt
)
269 static int btype_size(int bt
)
271 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
275 bt
== VT_PTR
? PTR_SIZE
: 0;
278 /* returns function return register from type */
279 static int R_RET(int t
)
283 #ifdef TCC_TARGET_X86_64
284 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
286 #elif defined TCC_TARGET_RISCV64
287 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
293 /* returns 2nd function return register, if any */
294 static int R2_RET(int t
)
300 #elif defined TCC_TARGET_X86_64
305 #elif defined TCC_TARGET_RISCV64
312 /* returns true for two-word types */
313 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
315 /* put function return registers to stack value */
316 static void PUT_R_RET(SValue
*sv
, int t
)
318 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
321 /* returns function return register class for type t */
322 static int RC_RET(int t
)
324 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
327 /* returns generic register class for type t */
328 static int RC_TYPE(int t
)
332 #ifdef TCC_TARGET_X86_64
333 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
335 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
337 #elif defined TCC_TARGET_RISCV64
338 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
344 /* returns 2nd register class corresponding to t and rc */
345 static int RC2_TYPE(int t
, int rc
)
347 if (!USING_TWO_WORDS(t
))
362 /* we use our own 'finite' function to avoid potential problems with
363 non standard math libs */
364 /* XXX: endianness dependent */
365 ST_FUNC
int ieee_finite(double d
)
368 memcpy(p
, &d
, sizeof(double));
369 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
372 /* compiling intel long double natively */
373 #if (defined __i386__ || defined __x86_64__) \
374 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
375 # define TCC_IS_NATIVE_387
378 ST_FUNC
void test_lvalue(void)
380 if (!(vtop
->r
& VT_LVAL
))
384 ST_FUNC
void check_vstack(void)
386 if (vtop
!= vstack
- 1)
387 tcc_error("internal compiler error: vstack leak (%d)",
388 (int)(vtop
- vstack
+ 1));
391 /* ------------------------------------------------------------------------- */
392 /* vstack debugging aid */
395 void pv (const char *lbl
, int a
, int b
)
398 for (i
= a
; i
< a
+ b
; ++i
) {
399 SValue
*p
= &vtop
[-i
];
400 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
401 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
406 /* ------------------------------------------------------------------------- */
407 /* start of translation unit info */
408 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
414 /* file info: full path + filename */
415 section_sym
= put_elf_sym(symtab_section
, 0, 0,
416 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
417 text_section
->sh_num
, NULL
);
418 getcwd(buf
, sizeof(buf
));
420 normalize_slashes(buf
);
422 pstrcat(buf
, sizeof(buf
), "/");
423 put_stabs_r(s1
, buf
, N_SO
, 0, 0,
424 text_section
->data_offset
, text_section
, section_sym
);
425 put_stabs_r(s1
, file
->prev
? file
->prev
->filename
: file
->filename
,
427 text_section
->data_offset
, text_section
, section_sym
);
428 for (i
= 0; i
< sizeof (default_debug
) / sizeof (default_debug
[0]); i
++)
429 put_stabs(s1
, default_debug
[i
].name
, N_LSYM
, 0, 0, 0);
431 new_file
= last_line_num
= 0;
433 debug_next_type
= sizeof(default_debug
) / sizeof(default_debug
[0]);
437 /* we're currently 'including' the <command line> */
441 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
442 symbols can be safely used */
443 put_elf_sym(symtab_section
, 0, 0,
444 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
445 SHN_ABS
, file
->filename
);
448 /* put end of translation unit info */
449 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
453 put_stabs_r(s1
, NULL
, N_SO
, 0, 0,
454 text_section
->data_offset
, text_section
, section_sym
);
455 tcc_free(debug_hash
);
458 static BufferedFile
* put_new_file(TCCState
*s1
)
460 BufferedFile
*f
= file
;
461 /* use upper file if from inline ":asm:" */
462 if (f
->filename
[0] == ':')
465 put_stabs_r(s1
, f
->filename
, N_SOL
, 0, 0, ind
, text_section
, section_sym
);
466 new_file
= last_line_num
= 0;
471 /* put alternative filename */
472 ST_FUNC
void tcc_debug_putfile(TCCState
*s1
, const char *filename
)
474 if (0 == strcmp(file
->filename
, filename
))
476 pstrcpy(file
->filename
, sizeof(file
->filename
), filename
);
480 /* begin of #include */
481 ST_FUNC
void tcc_debug_bincl(TCCState
*s1
)
485 put_stabs(s1
, file
->filename
, N_BINCL
, 0, 0, 0);
489 /* end of #include */
490 ST_FUNC
void tcc_debug_eincl(TCCState
*s1
)
494 put_stabn(s1
, N_EINCL
, 0, 0, 0);
498 /* generate line number info */
499 static void tcc_debug_line(TCCState
*s1
)
503 || cur_text_section
!= text_section
504 || !(f
= put_new_file(s1
))
505 || last_line_num
== f
->line_num
)
507 if (func_ind
!= -1) {
508 put_stabn(s1
, N_SLINE
, 0, f
->line_num
, ind
- func_ind
);
510 /* from tcc_assemble */
511 put_stabs_r(s1
, NULL
, N_SLINE
, 0, f
->line_num
, ind
, text_section
, section_sym
);
513 last_line_num
= f
->line_num
;
516 static void tcc_debug_stabs (TCCState
*s1
, const char *str
, int type
, unsigned long value
,
517 Section
*sec
, int sym_index
)
523 (struct debug_sym
*)tcc_realloc (debug_info
->sym
,
524 sizeof(struct debug_sym
) *
525 (debug_info
->n_sym
+ 1));
526 s
= debug_info
->sym
+ debug_info
->n_sym
++;
529 s
->str
= tcc_strdup(str
);
531 s
->sym_index
= sym_index
;
534 put_stabs_r (s1
, str
, type
, 0, 0, value
, sec
, sym_index
);
536 put_stabs (s1
, str
, type
, 0, 0, value
);
539 static void tcc_debug_stabn(TCCState
*s1
, int type
, int value
)
543 if (type
== N_LBRAC
) {
544 struct debug_info
*info
=
545 (struct debug_info
*) tcc_mallocz(sizeof (*info
));
548 info
->parent
= debug_info
;
550 if (debug_info
->child
) {
551 if (debug_info
->child
->last
)
552 debug_info
->child
->last
->next
= info
;
554 debug_info
->child
->next
= info
;
555 debug_info
->child
->last
= info
;
558 debug_info
->child
= info
;
561 debug_info_root
= info
;
565 debug_info
->end
= value
;
566 debug_info
= debug_info
->parent
;
570 static void tcc_get_debug_info(TCCState
*s1
, Sym
*s
, CString
*result
)
579 type
= t
->type
.t
& ~(VT_STORAGE
| VT_CONSTANT
| VT_VOLATILE
);
580 if ((type
& VT_BTYPE
) != VT_BYTE
)
582 if (type
== VT_PTR
|| type
== (VT_PTR
| VT_ARRAY
))
583 n
++, t
= t
->type
.ref
;
587 if ((type
& VT_BTYPE
) == VT_STRUCT
) {
591 for (i
= 0; i
< n_debug_hash
; i
++) {
592 if (t
== debug_hash
[i
].type
) {
593 debug_type
= debug_hash
[i
].debug_type
;
597 if (debug_type
== -1) {
598 debug_type
= ++debug_next_type
;
599 debug_hash
= (struct debug_hash
*)
600 tcc_realloc (debug_hash
,
601 (n_debug_hash
+ 1) * sizeof(*debug_hash
));
602 debug_hash
[n_debug_hash
].debug_type
= debug_type
;
603 debug_hash
[n_debug_hash
++].type
= t
;
605 cstr_printf (&str
, "%s:T%d=%c%d",
606 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
607 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
609 IS_UNION (t
->type
.t
) ? 'u' : 's',
612 int pos
, size
, align
;
615 cstr_printf (&str
, "%s:",
616 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
617 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
618 tcc_get_debug_info (s1
, t
, &str
);
619 if (t
->type
.t
& VT_BITFIELD
) {
620 pos
= t
->c
* 8 + BIT_POS(t
->type
.t
);
621 size
= BIT_SIZE(t
->type
.t
);
625 size
= type_size(&t
->type
, &align
) * 8;
627 cstr_printf (&str
, ",%d,%d;", pos
, size
);
629 cstr_printf (&str
, ";");
630 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
634 else if (IS_ENUM(type
)) {
635 Sym
*e
= t
= t
->type
.ref
;
637 debug_type
= ++debug_next_type
;
639 cstr_printf (&str
, "%s:T%d=e",
640 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
641 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
645 cstr_printf (&str
, "%s:",
646 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
647 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
648 cstr_printf (&str
, e
->type
.t
& VT_UNSIGNED
? "%u," : "%d,",
651 cstr_printf (&str
, ";");
652 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
655 else if ((type
& VT_BTYPE
) != VT_FUNC
) {
656 type
&= ~VT_STRUCT_MASK
;
658 debug_type
<= sizeof(default_debug
) / sizeof(default_debug
[0]);
660 if (default_debug
[debug_type
- 1].type
== type
)
662 if (debug_type
> sizeof(default_debug
) / sizeof(default_debug
[0]))
666 cstr_printf (result
, "%d=", ++debug_next_type
);
669 type
= t
->type
.t
& ~(VT_STORAGE
| VT_CONSTANT
| VT_VOLATILE
);
670 if ((type
& VT_BTYPE
) != VT_BYTE
)
673 cstr_printf (result
, "%d=*", ++debug_next_type
);
674 else if (type
== (VT_PTR
| VT_ARRAY
))
675 cstr_printf (result
, "%d=ar1;0;%d;",
676 ++debug_next_type
, t
->type
.ref
->c
- 1);
677 else if (type
== VT_FUNC
) {
678 cstr_printf (result
, "%d=f", ++debug_next_type
);
679 tcc_get_debug_info (s1
, t
->type
.ref
, result
);
686 cstr_printf (result
, "%d", debug_type
);
689 static void tcc_debug_finish (TCCState
*s1
, struct debug_info
*cur
)
693 struct debug_info
*next
= cur
->next
;
695 for (i
= 0; i
< cur
->n_sym
; i
++) {
696 struct debug_sym
*s
= &cur
->sym
[i
];
699 put_stabs_r(s1
, s
->str
, s
->type
, 0, 0, s
->value
,
700 s
->sec
, s
->sym_index
);
702 put_stabs(s1
, s
->str
, s
->type
, 0, 0, s
->value
);
706 put_stabn(s1
, N_LBRAC
, 0, 0, cur
->start
);
707 tcc_debug_finish (s1
, cur
->child
);
708 put_stabn(s1
, N_RBRAC
, 0, 0, cur
->end
);
714 static void tcc_add_debug_info(TCCState
*s1
, int param
, Sym
*s
, Sym
*e
)
719 cstr_new (&debug_str
);
720 for (; s
!= e
; s
= s
->prev
) {
721 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
723 cstr_reset (&debug_str
);
724 cstr_printf (&debug_str
, "%s:%s", get_tok_str(s
->v
, NULL
), param
? "p" : "");
725 tcc_get_debug_info(s1
, s
, &debug_str
);
726 tcc_debug_stabs(s1
, debug_str
.data
, param
? N_PSYM
: N_LSYM
, s
->c
, NULL
, 0);
728 cstr_free (&debug_str
);
731 /* put function symbol */
732 static void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
738 debug_info_root
= NULL
;
740 tcc_debug_stabn(s1
, N_LBRAC
, ind
- func_ind
);
741 if (!(f
= put_new_file(s1
)))
743 cstr_new (&debug_str
);
744 cstr_printf(&debug_str
, "%s:%c", funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
745 tcc_get_debug_info(s1
, sym
->type
.ref
, &debug_str
);
746 put_stabs_r(s1
, debug_str
.data
, N_FUN
, 0, f
->line_num
, 0, cur_text_section
, sym
->c
);
747 cstr_free (&debug_str
);
752 /* put function size */
753 static void tcc_debug_funcend(TCCState
*s1
, int size
)
757 tcc_debug_stabn(s1
, N_RBRAC
, size
);
758 tcc_debug_finish (s1
, debug_info_root
);
762 static void tcc_debug_extern_sym(TCCState
*s1
, Sym
*sym
, int sh_num
, int sym_bind
, int sym_type
)
769 if (sym_type
== STT_FUNC
|| sym
->v
>= SYM_FIRST_ANOM
)
771 s
= s1
->sections
[sh_num
];
774 cstr_printf (&str
, "%s:%c",
775 get_tok_str(sym
->v
, NULL
),
776 sym_bind
== STB_GLOBAL
? 'G' : local_scope
? 'V' : 'S'
778 tcc_get_debug_info(s1
, sym
, &str
);
779 if (sym_bind
== STB_GLOBAL
)
780 tcc_debug_stabs(s1
, str
.data
, N_GSYM
, 0, NULL
, 0);
782 tcc_debug_stabs(s1
, str
.data
,
783 (sym
->type
.t
& VT_STATIC
) && data_section
== s
784 ? N_STSYM
: N_LCSYM
, 0, s
, sym
->c
);
788 static void tcc_debug_typedef(TCCState
*s1
, Sym
*sym
)
795 cstr_printf (&str
, "%s:t",
796 (sym
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
797 ? "" : get_tok_str(sym
->v
& ~SYM_FIELD
, NULL
));
798 tcc_get_debug_info(s1
, sym
, &str
);
799 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
803 /* ------------------------------------------------------------------------- */
804 /* for section layout see lib/tcov.c */
806 static void tcc_tcov_block_end(int line
);
808 static void tcc_tcov_block_begin(void)
812 unsigned long last_offset
= tcov_data
.offset
;
814 tcc_tcov_block_end (0);
815 if (tcc_state
->test_coverage
== 0 || nocode_wanted
)
818 if (tcov_data
.last_file_name
== 0 ||
819 strcmp ((const char *)(tcov_section
->data
+ tcov_data
.last_file_name
),
820 file
->true_filename
) != 0) {
824 if (tcov_data
.last_func_name
)
825 section_ptr_add(tcov_section
, 1);
826 if (tcov_data
.last_file_name
)
827 section_ptr_add(tcov_section
, 1);
828 tcov_data
.last_func_name
= 0;
830 if (file
->true_filename
[0] == '/') {
831 tcov_data
.last_file_name
= tcov_section
->data_offset
;
832 cstr_printf (&cstr
, "%s", file
->true_filename
);
835 getcwd (wd
, sizeof(wd
));
836 tcov_data
.last_file_name
= tcov_section
->data_offset
+ strlen(wd
) + 1;
837 cstr_printf (&cstr
, "%s/%s", wd
, file
->true_filename
);
839 ptr
= section_ptr_add(tcov_section
, cstr
.size
+ 1);
840 strcpy((char *)ptr
, cstr
.data
);
842 normalize_slashes((char *)ptr
);
846 if (tcov_data
.last_func_name
== 0 ||
847 strcmp ((const char *)(tcov_section
->data
+ tcov_data
.last_func_name
),
851 if (tcov_data
.last_func_name
)
852 section_ptr_add(tcov_section
, 1);
853 tcov_data
.last_func_name
= tcov_section
->data_offset
;
854 len
= strlen (funcname
);
855 ptr
= section_ptr_add(tcov_section
, len
+ 1);
856 strcpy((char *)ptr
, funcname
);
857 section_ptr_add(tcov_section
, -tcov_section
->data_offset
& 7);
858 ptr
= section_ptr_add(tcov_section
, 8);
859 write64le (ptr
, file
->line_num
);
861 if (ind
== tcov_data
.ind
&& tcov_data
.line
== file
->line_num
)
862 tcov_data
.offset
= last_offset
;
865 label
.type
.t
= VT_LLONG
| VT_STATIC
;
867 ptr
= section_ptr_add(tcov_section
, 16);
868 tcov_data
.line
= file
->line_num
;
869 write64le (ptr
, (tcov_data
.line
<< 8) | 0xff);
870 put_extern_sym(&label
, tcov_section
,
871 ((unsigned char *)ptr
- tcov_section
->data
) + 8, 0);
872 sv
.type
= label
.type
;
873 sv
.r
= VT_SYM
| VT_LVAL
| VT_CONST
;
877 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || \
878 defined TCC_TARGET_ARM || defined TCC_TARGET_ARM64 || \
879 defined TCC_TARGET_RISCV64
880 gen_increment_tcov (&sv
);
886 tcov_data
.offset
= (unsigned char *)ptr
- tcov_section
->data
;
891 static void tcc_tcov_block_end(int line
)
893 if (tcc_state
->test_coverage
== 0)
895 if (tcov_data
.offset
) {
896 void *ptr
= tcov_section
->data
+ tcov_data
.offset
;
897 unsigned long long nline
= line
? line
: file
->line_num
;
899 write64le (ptr
, (read64le (ptr
) & 0xfffffffffull
) | (nline
<< 36));
900 tcov_data
.offset
= 0;
904 static void tcc_tcov_check_line(int start
)
906 if (tcc_state
->test_coverage
== 0)
908 if (tcov_data
.line
!= file
->line_num
) {
909 if ((tcov_data
.line
+ 1) != file
->line_num
) {
910 tcc_tcov_block_end (tcov_data
.line
);
912 tcc_tcov_block_begin ();
915 tcov_data
.line
= file
->line_num
;
919 static void tcc_tcov_start(void)
921 if (tcc_state
->test_coverage
== 0)
923 memset (&tcov_data
, 0, sizeof (tcov_data
));
924 if (tcov_section
== NULL
) {
925 tcov_section
= new_section(tcc_state
, ".tcov", SHT_PROGBITS
,
926 SHF_ALLOC
| SHF_WRITE
);
927 section_ptr_add(tcov_section
, 4); // pointer to executable name
931 static void tcc_tcov_end(void)
933 if (tcc_state
->test_coverage
== 0)
935 if (tcov_data
.last_func_name
)
936 section_ptr_add(tcov_section
, 1);
937 if (tcov_data
.last_file_name
)
938 section_ptr_add(tcov_section
, 1);
941 /* ------------------------------------------------------------------------- */
942 /* initialize vstack and types. This must be done also for tcc -E */
943 ST_FUNC
void tccgen_init(TCCState
*s1
)
946 memset(vtop
, 0, sizeof *vtop
);
948 /* define some often used types */
951 char_type
.t
= VT_BYTE
;
952 if (s1
->char_is_unsigned
)
953 char_type
.t
|= VT_UNSIGNED
;
954 char_pointer_type
= char_type
;
955 mk_pointer(&char_pointer_type
);
957 func_old_type
.t
= VT_FUNC
;
958 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
959 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
960 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
961 #ifdef precedence_parser
967 ST_FUNC
int tccgen_compile(TCCState
*s1
)
969 cur_text_section
= NULL
;
971 anon_sym
= SYM_FIRST_ANOM
;
974 nocode_wanted
= 0x80000000;
976 debug_modes
= s1
->do_debug
| s1
->test_coverage
<< 1;
980 #ifdef TCC_TARGET_ARM
984 printf("%s: **** new file\n", file
->filename
);
986 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
989 gen_inline_functions(s1
);
991 /* end of translation unit info */
997 ST_FUNC
void tccgen_finish(TCCState
*s1
)
1000 free_inline_functions(s1
);
1001 sym_pop(&global_stack
, NULL
, 0);
1002 sym_pop(&local_stack
, NULL
, 0);
1003 /* free preprocessor macros */
1005 /* free sym_pools */
1006 dynarray_reset(&sym_pools
, &nb_sym_pools
);
1007 sym_free_first
= NULL
;
1010 /* ------------------------------------------------------------------------- */
1011 ST_FUNC ElfSym
*elfsym(Sym
*s
)
1015 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
1018 /* apply storage attributes to Elf symbol */
1019 ST_FUNC
void update_storage(Sym
*sym
)
1022 int sym_bind
, old_sym_bind
;
1028 if (sym
->a
.visibility
)
1029 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
1030 | sym
->a
.visibility
;
1032 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
1033 sym_bind
= STB_LOCAL
;
1034 else if (sym
->a
.weak
)
1035 sym_bind
= STB_WEAK
;
1037 sym_bind
= STB_GLOBAL
;
1038 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
1039 if (sym_bind
!= old_sym_bind
) {
1040 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
1043 #ifdef TCC_TARGET_PE
1044 if (sym
->a
.dllimport
)
1045 esym
->st_other
|= ST_PE_IMPORT
;
1046 if (sym
->a
.dllexport
)
1047 esym
->st_other
|= ST_PE_EXPORT
;
1051 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
1052 get_tok_str(sym
->v
, NULL
),
1053 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
1061 /* ------------------------------------------------------------------------- */
1062 /* update sym->c so that it points to an external symbol in section
1063 'section' with value 'value' */
1065 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
1066 addr_t value
, unsigned long size
,
1067 int can_add_underscore
)
1069 int sym_type
, sym_bind
, info
, other
, t
;
1075 name
= get_tok_str(sym
->v
, NULL
);
1077 if ((t
& VT_BTYPE
) == VT_FUNC
) {
1078 sym_type
= STT_FUNC
;
1079 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
1080 sym_type
= STT_NOTYPE
;
1081 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
1082 sym_type
= STT_FUNC
;
1084 sym_type
= STT_OBJECT
;
1086 if (t
& (VT_STATIC
| VT_INLINE
))
1087 sym_bind
= STB_LOCAL
;
1089 sym_bind
= STB_GLOBAL
;
1092 #ifdef TCC_TARGET_PE
1093 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
1094 Sym
*ref
= sym
->type
.ref
;
1095 if (ref
->a
.nodecorate
) {
1096 can_add_underscore
= 0;
1098 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
1099 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
1101 other
|= ST_PE_STDCALL
;
1102 can_add_underscore
= 0;
1107 if (sym
->asm_label
) {
1108 name
= get_tok_str(sym
->asm_label
, NULL
);
1109 can_add_underscore
= 0;
1112 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
1114 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
1118 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
1119 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
1122 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
1126 esym
->st_value
= value
;
1127 esym
->st_size
= size
;
1128 esym
->st_shndx
= sh_num
;
1130 update_storage(sym
);
1133 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
1134 addr_t value
, unsigned long size
)
1136 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
1137 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
1140 /* add a new relocation entry to symbol 'sym' in section 's' */
1141 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
1146 if (nocode_wanted
&& s
== cur_text_section
)
1151 put_extern_sym(sym
, NULL
, 0, 0);
1155 /* now we can add ELF relocation info */
1156 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
1160 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
1162 greloca(s
, sym
, offset
, type
, 0);
1166 /* ------------------------------------------------------------------------- */
1167 /* symbol allocator */
1168 static Sym
*__sym_malloc(void)
1170 Sym
*sym_pool
, *sym
, *last_sym
;
1173 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
1174 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
1176 last_sym
= sym_free_first
;
1178 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
1179 sym
->next
= last_sym
;
1183 sym_free_first
= last_sym
;
1187 static inline Sym
*sym_malloc(void)
1191 sym
= sym_free_first
;
1193 sym
= __sym_malloc();
1194 sym_free_first
= sym
->next
;
1197 sym
= tcc_malloc(sizeof(Sym
));
1202 ST_INLN
void sym_free(Sym
*sym
)
1205 sym
->next
= sym_free_first
;
1206 sym_free_first
= sym
;
1212 /* push, without hashing */
1213 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
1218 memset(s
, 0, sizeof *s
);
1228 /* find a symbol and return its associated structure. 's' is the top
1229 of the symbol stack */
1230 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
1235 else if (s
->v
== -1)
1242 /* structure lookup */
1243 ST_INLN Sym
*struct_find(int v
)
1246 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1248 return table_ident
[v
]->sym_struct
;
1251 /* find an identifier */
1252 ST_INLN Sym
*sym_find(int v
)
1255 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1257 return table_ident
[v
]->sym_identifier
;
1260 static int sym_scope(Sym
*s
)
1262 if (IS_ENUM_VAL (s
->type
.t
))
1263 return s
->type
.ref
->sym_scope
;
1265 return s
->sym_scope
;
1268 /* push a given symbol on the symbol stack */
1269 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
1278 s
= sym_push2(ps
, v
, type
->t
, c
);
1279 s
->type
.ref
= type
->ref
;
1281 /* don't record fields or anonymous symbols */
1283 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1284 /* record symbol in token array */
1285 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1287 ps
= &ts
->sym_struct
;
1289 ps
= &ts
->sym_identifier
;
1292 s
->sym_scope
= local_scope
;
1293 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
1294 tcc_error("redeclaration of '%s'",
1295 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
1300 /* push a global identifier */
1301 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
1304 s
= sym_push2(&global_stack
, v
, t
, c
);
1305 s
->r
= VT_CONST
| VT_SYM
;
1306 /* don't record anonymous symbol */
1307 if (v
< SYM_FIRST_ANOM
) {
1308 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
1309 /* modify the top most local identifier, so that sym_identifier will
1310 point to 's' when popped; happens when called from inline asm */
1311 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
1312 ps
= &(*ps
)->prev_tok
;
1319 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1320 pop them yet from the list, but do remove them from the token array. */
1321 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
1331 /* remove symbol in token array */
1333 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1334 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1336 ps
= &ts
->sym_struct
;
1338 ps
= &ts
->sym_identifier
;
1349 /* ------------------------------------------------------------------------- */
1350 static void vcheck_cmp(void)
1352 /* cannot let cpu flags if other instruction are generated. Also
1353 avoid leaving VT_JMP anywhere except on the top of the stack
1354 because it would complicate the code generator.
1356 Don't do this when nocode_wanted. vtop might come from
1357 !nocode_wanted regions (see 88_codeopt.c) and transforming
1358 it to a register without actually generating code is wrong
1359 as their value might still be used for real. All values
1360 we push under nocode_wanted will eventually be popped
1361 again, so that the VT_CMP/VT_JMP value will be in vtop
1362 when code is unsuppressed again. */
1364 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
1368 static void vsetc(CType
*type
, int r
, CValue
*vc
)
1370 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1371 tcc_error("memory full (vstack)");
1376 vtop
->r2
= VT_CONST
;
1381 ST_FUNC
void vswap(void)
1391 /* pop stack value */
1392 ST_FUNC
void vpop(void)
1395 v
= vtop
->r
& VT_VALMASK
;
1396 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1397 /* for x86, we need to pop the FP stack */
1398 if (v
== TREG_ST0
) {
1399 o(0xd8dd); /* fstp %st(0) */
1403 /* need to put correct jump if && or || without test */
1410 /* push constant of type "type" with useless value */
1411 static void vpush(CType
*type
)
1413 vset(type
, VT_CONST
, 0);
1416 /* push arbitrary 64bit constant */
1417 static void vpush64(int ty
, unsigned long long v
)
1424 vsetc(&ctype
, VT_CONST
, &cval
);
1427 /* push integer constant */
1428 ST_FUNC
void vpushi(int v
)
1433 /* push a pointer sized constant */
1434 static void vpushs(addr_t v
)
1436 vpush64(VT_SIZE_T
, v
);
1439 /* push long long constant */
1440 static inline void vpushll(long long v
)
1442 vpush64(VT_LLONG
, v
);
1445 ST_FUNC
void vset(CType
*type
, int r
, int v
)
1449 vsetc(type
, r
, &cval
);
1452 static void vseti(int r
, int v
)
1460 ST_FUNC
void vpushv(SValue
*v
)
1462 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1463 tcc_error("memory full (vstack)");
1468 static void vdup(void)
1473 /* rotate n first stack elements to the bottom
1474 I1 ... In -> I2 ... In I1 [top is right]
1476 ST_FUNC
void vrotb(int n
)
1483 for(i
=-n
+1;i
!=0;i
++)
1484 vtop
[i
] = vtop
[i
+1];
1488 /* rotate the n elements before entry e towards the top
1489 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1491 ST_FUNC
void vrote(SValue
*e
, int n
)
1498 for(i
= 0;i
< n
- 1; i
++)
1503 /* rotate n first stack elements to the top
1504 I1 ... In -> In I1 ... I(n-1) [top is right]
1506 ST_FUNC
void vrott(int n
)
1511 /* ------------------------------------------------------------------------- */
1512 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1514 /* called from generators to set the result from relational ops */
1515 ST_FUNC
void vset_VT_CMP(int op
)
1523 /* called once before asking generators to load VT_CMP to a register */
1524 static void vset_VT_JMP(void)
1526 int op
= vtop
->cmp_op
;
1528 if (vtop
->jtrue
|| vtop
->jfalse
) {
1529 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1530 int inv
= op
& (op
< 2); /* small optimization */
1531 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1533 /* otherwise convert flags (rsp. 0/1) to register */
1535 if (op
< 2) /* doesn't seem to happen */
1540 /* Set CPU Flags, doesn't yet jump */
1541 static void gvtst_set(int inv
, int t
)
1545 if (vtop
->r
!= VT_CMP
) {
1548 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1549 vset_VT_CMP(vtop
->c
.i
!= 0);
1552 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1553 *p
= gjmp_append(*p
, t
);
1556 /* Generate value test
1558 * Generate a test for any value (jump, comparison and integers) */
1559 static int gvtst(int inv
, int t
)
1564 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1566 x
= u
, u
= t
, t
= x
;
1569 /* jump to the wanted target */
1571 t
= gjmp_cond(op
^ inv
, t
);
1574 /* resolve complementary jumps to here */
1581 /* generate a zero or nozero test */
1582 static void gen_test_zero(int op
)
1584 if (vtop
->r
== VT_CMP
) {
1588 vtop
->jfalse
= vtop
->jtrue
;
1598 /* ------------------------------------------------------------------------- */
1599 /* push a symbol value of TYPE */
1600 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1604 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1608 /* Return a static symbol pointing to a section */
1609 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1615 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1616 sym
->type
.t
|= VT_STATIC
;
1617 put_extern_sym(sym
, sec
, offset
, size
);
1621 /* push a reference to a section offset by adding a dummy symbol */
1622 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1624 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1627 /* define a new external reference to a symbol 'v' of type 'u' */
1628 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1634 /* push forward reference */
1635 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1636 s
->type
.ref
= type
->ref
;
1637 } else if (IS_ASM_SYM(s
)) {
1638 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1639 s
->type
.ref
= type
->ref
;
1645 /* create an external reference with no specific type similar to asm labels.
1646 This avoids type conflicts if the symbol is used from C too */
1647 ST_FUNC Sym
*external_helper_sym(int v
)
1649 CType ct
= { VT_ASM_FUNC
, NULL
};
1650 return external_global_sym(v
, &ct
);
1653 /* push a reference to an helper function (such as memmove) */
1654 ST_FUNC
void vpush_helper_func(int v
)
1656 vpushsym(&func_old_type
, external_helper_sym(v
));
1659 /* Merge symbol attributes. */
1660 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1662 if (sa1
->aligned
&& !sa
->aligned
)
1663 sa
->aligned
= sa1
->aligned
;
1664 sa
->packed
|= sa1
->packed
;
1665 sa
->weak
|= sa1
->weak
;
1666 if (sa1
->visibility
!= STV_DEFAULT
) {
1667 int vis
= sa
->visibility
;
1668 if (vis
== STV_DEFAULT
1669 || vis
> sa1
->visibility
)
1670 vis
= sa1
->visibility
;
1671 sa
->visibility
= vis
;
1673 sa
->dllexport
|= sa1
->dllexport
;
1674 sa
->nodecorate
|= sa1
->nodecorate
;
1675 sa
->dllimport
|= sa1
->dllimport
;
1678 /* Merge function attributes. */
1679 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1681 if (fa1
->func_call
&& !fa
->func_call
)
1682 fa
->func_call
= fa1
->func_call
;
1683 if (fa1
->func_type
&& !fa
->func_type
)
1684 fa
->func_type
= fa1
->func_type
;
1685 if (fa1
->func_args
&& !fa
->func_args
)
1686 fa
->func_args
= fa1
->func_args
;
1687 if (fa1
->func_noreturn
)
1688 fa
->func_noreturn
= 1;
1695 /* Merge attributes. */
1696 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1698 merge_symattr(&ad
->a
, &ad1
->a
);
1699 merge_funcattr(&ad
->f
, &ad1
->f
);
1702 ad
->section
= ad1
->section
;
1703 if (ad1
->alias_target
)
1704 ad
->alias_target
= ad1
->alias_target
;
1706 ad
->asm_label
= ad1
->asm_label
;
1708 ad
->attr_mode
= ad1
->attr_mode
;
1711 /* Merge some type attributes. */
1712 static void patch_type(Sym
*sym
, CType
*type
)
1714 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1715 if (!(sym
->type
.t
& VT_EXTERN
))
1716 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1717 sym
->type
.t
&= ~VT_EXTERN
;
1720 if (IS_ASM_SYM(sym
)) {
1721 /* stay static if both are static */
1722 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1723 sym
->type
.ref
= type
->ref
;
1726 if (!is_compatible_types(&sym
->type
, type
)) {
1727 tcc_error("incompatible types for redefinition of '%s'",
1728 get_tok_str(sym
->v
, NULL
));
1730 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1731 int static_proto
= sym
->type
.t
& VT_STATIC
;
1732 /* warn if static follows non-static function declaration */
1733 if ((type
->t
& VT_STATIC
) && !static_proto
1734 /* XXX this test for inline shouldn't be here. Until we
1735 implement gnu-inline mode again it silences a warning for
1736 mingw caused by our workarounds. */
1737 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1738 tcc_warning("static storage ignored for redefinition of '%s'",
1739 get_tok_str(sym
->v
, NULL
));
1741 /* set 'inline' if both agree or if one has static */
1742 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1743 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1744 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1745 static_proto
|= VT_INLINE
;
1748 if (0 == (type
->t
& VT_EXTERN
)) {
1749 struct FuncAttr f
= sym
->type
.ref
->f
;
1750 /* put complete type, use static from prototype */
1751 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1752 sym
->type
.ref
= type
->ref
;
1753 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1755 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1758 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1759 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1760 sym
->type
.ref
= type
->ref
;
1764 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1765 /* set array size if it was omitted in extern declaration */
1766 sym
->type
.ref
->c
= type
->ref
->c
;
1768 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1769 tcc_warning("storage mismatch for redefinition of '%s'",
1770 get_tok_str(sym
->v
, NULL
));
1774 /* Merge some storage attributes. */
1775 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1778 patch_type(sym
, type
);
1780 #ifdef TCC_TARGET_PE
1781 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1782 tcc_error("incompatible dll linkage for redefinition of '%s'",
1783 get_tok_str(sym
->v
, NULL
));
1785 merge_symattr(&sym
->a
, &ad
->a
);
1787 sym
->asm_label
= ad
->asm_label
;
1788 update_storage(sym
);
1791 /* copy sym to other stack */
1792 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1795 s
= sym_malloc(), *s
= *s0
;
1796 s
->prev
= *ps
, *ps
= s
;
1797 if (s
->v
< SYM_FIRST_ANOM
) {
1798 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1799 s
->prev_tok
= *ps
, *ps
= s
;
1804 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1805 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1807 int bt
= s
->type
.t
& VT_BTYPE
;
1808 if (bt
== VT_FUNC
|| bt
== VT_PTR
|| (bt
== VT_STRUCT
&& s
->sym_scope
)) {
1809 Sym
**sp
= &s
->type
.ref
;
1810 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1811 Sym
*s2
= sym_copy(s
, ps
);
1812 sp
= &(*sp
= s2
)->next
;
1813 sym_copy_ref(s2
, ps
);
1818 /* define a new external reference to a symbol 'v' */
1819 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1823 /* look for global symbol */
1825 while (s
&& s
->sym_scope
)
1829 /* push forward reference */
1830 s
= global_identifier_push(v
, type
->t
, 0);
1833 s
->asm_label
= ad
->asm_label
;
1834 s
->type
.ref
= type
->ref
;
1835 /* copy type to the global stack */
1837 sym_copy_ref(s
, &global_stack
);
1839 patch_storage(s
, ad
, type
);
1841 /* push variables on local_stack if any */
1842 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1843 s
= sym_copy(s
, &local_stack
);
1847 /* save registers up to (vtop - n) stack entry */
1848 ST_FUNC
void save_regs(int n
)
1851 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1855 /* save r to the memory stack, and mark it as being free */
1856 ST_FUNC
void save_reg(int r
)
1858 save_reg_upstack(r
, 0);
1861 /* save r to the memory stack, and mark it as being free,
1862 if seen up to (vtop - n) stack entry */
1863 ST_FUNC
void save_reg_upstack(int r
, int n
)
1865 int l
, size
, align
, bt
;
1868 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1873 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1874 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1875 /* must save value on stack if not already done */
1877 bt
= p
->type
.t
& VT_BTYPE
;
1880 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1883 size
= type_size(&sv
.type
, &align
);
1884 l
= get_temp_local_var(size
,align
);
1885 sv
.r
= VT_LOCAL
| VT_LVAL
;
1887 store(p
->r
& VT_VALMASK
, &sv
);
1888 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1889 /* x86 specific: need to pop fp register ST0 if saved */
1890 if (r
== TREG_ST0
) {
1891 o(0xd8dd); /* fstp %st(0) */
1894 /* special long long case */
1895 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1900 /* mark that stack entry as being saved on the stack */
1901 if (p
->r
& VT_LVAL
) {
1902 /* also clear the bounded flag because the
1903 relocation address of the function was stored in
1905 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1907 p
->r
= VT_LVAL
| VT_LOCAL
;
1916 #ifdef TCC_TARGET_ARM
1917 /* find a register of class 'rc2' with at most one reference on stack.
1918 * If none, call get_reg(rc) */
1919 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1924 for(r
=0;r
<NB_REGS
;r
++) {
1925 if (reg_classes
[r
] & rc2
) {
1928 for(p
= vstack
; p
<= vtop
; p
++) {
1929 if ((p
->r
& VT_VALMASK
) == r
||
1941 /* find a free register of class 'rc'. If none, save one register */
1942 ST_FUNC
int get_reg(int rc
)
1947 /* find a free register */
1948 for(r
=0;r
<NB_REGS
;r
++) {
1949 if (reg_classes
[r
] & rc
) {
1952 for(p
=vstack
;p
<=vtop
;p
++) {
1953 if ((p
->r
& VT_VALMASK
) == r
||
1962 /* no register left : free the first one on the stack (VERY
1963 IMPORTANT to start from the bottom to ensure that we don't
1964 spill registers used in gen_opi()) */
1965 for(p
=vstack
;p
<=vtop
;p
++) {
1966 /* look at second register (if long long) */
1968 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1970 r
= p
->r
& VT_VALMASK
;
1971 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1977 /* Should never comes here */
1981 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1982 static int get_temp_local_var(int size
,int align
){
1984 struct temp_local_variable
*temp_var
;
1991 for(i
=0;i
<nb_temp_local_vars
;i
++){
1992 temp_var
=&arr_temp_local_vars
[i
];
1993 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1996 /*check if temp_var is free*/
1998 for(p
=vstack
;p
<=vtop
;p
++) {
2000 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
2001 if(p
->c
.i
==temp_var
->location
){
2008 found_var
=temp_var
->location
;
2014 loc
= (loc
- size
) & -align
;
2015 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
2016 temp_var
=&arr_temp_local_vars
[i
];
2017 temp_var
->location
=loc
;
2018 temp_var
->size
=size
;
2019 temp_var
->align
=align
;
2020 nb_temp_local_vars
++;
2027 static void clear_temp_local_var_list(){
2028 nb_temp_local_vars
=0;
2031 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
2033 static void move_reg(int r
, int s
, int t
)
2047 /* get address of vtop (vtop MUST BE an lvalue) */
2048 ST_FUNC
void gaddrof(void)
2050 vtop
->r
&= ~VT_LVAL
;
2051 /* tricky: if saved lvalue, then we can go back to lvalue */
2052 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
2053 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
2056 #ifdef CONFIG_TCC_BCHECK
2057 /* generate a bounded pointer addition */
2058 static void gen_bounded_ptr_add(void)
2060 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
2065 vpush_helper_func(TOK___bound_ptr_add
);
2070 /* returned pointer is in REG_IRET */
2071 vtop
->r
= REG_IRET
| VT_BOUNDED
;
2074 /* relocation offset of the bounding function call point */
2075 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
2078 /* patch pointer addition in vtop so that pointer dereferencing is
2080 static void gen_bounded_ptr_deref(void)
2090 size
= type_size(&vtop
->type
, &align
);
2092 case 1: func
= TOK___bound_ptr_indir1
; break;
2093 case 2: func
= TOK___bound_ptr_indir2
; break;
2094 case 4: func
= TOK___bound_ptr_indir4
; break;
2095 case 8: func
= TOK___bound_ptr_indir8
; break;
2096 case 12: func
= TOK___bound_ptr_indir12
; break;
2097 case 16: func
= TOK___bound_ptr_indir16
; break;
2099 /* may happen with struct member access */
2102 sym
= external_helper_sym(func
);
2104 put_extern_sym(sym
, NULL
, 0, 0);
2105 /* patch relocation */
2106 /* XXX: find a better solution ? */
2107 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
2108 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
2111 /* generate lvalue bound code */
2112 static void gbound(void)
2116 vtop
->r
&= ~VT_MUSTBOUND
;
2117 /* if lvalue, then use checking code before dereferencing */
2118 if (vtop
->r
& VT_LVAL
) {
2119 /* if not VT_BOUNDED value, then make one */
2120 if (!(vtop
->r
& VT_BOUNDED
)) {
2121 /* must save type because we must set it to int to get pointer */
2123 vtop
->type
.t
= VT_PTR
;
2126 gen_bounded_ptr_add();
2130 /* then check for dereferencing */
2131 gen_bounded_ptr_deref();
2135 /* we need to call __bound_ptr_add before we start to load function
2136 args into registers */
2137 ST_FUNC
void gbound_args(int nb_args
)
2142 for (i
= 1; i
<= nb_args
; ++i
)
2143 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
2149 sv
= vtop
- nb_args
;
2150 if (sv
->r
& VT_SYM
) {
2154 #ifndef TCC_TARGET_PE
2155 || v
== TOK_sigsetjmp
2156 || v
== TOK___sigsetjmp
2159 vpush_helper_func(TOK___bound_setjmp
);
2162 func_bound_add_epilog
= 1;
2164 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
2165 if (v
== TOK_alloca
)
2166 func_bound_add_epilog
= 1;
2169 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
2170 sv
->sym
->asm_label
= TOK___bound_longjmp
;
2175 /* Add bounds for local symbols from S to E (via ->prev) */
2176 static void add_local_bounds(Sym
*s
, Sym
*e
)
2178 for (; s
!= e
; s
= s
->prev
) {
2179 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
2181 /* Add arrays/structs/unions because we always take address */
2182 if ((s
->type
.t
& VT_ARRAY
)
2183 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
2184 || s
->a
.addrtaken
) {
2185 /* add local bound info */
2186 int align
, size
= type_size(&s
->type
, &align
);
2187 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
2188 2 * sizeof(addr_t
));
2189 bounds_ptr
[0] = s
->c
;
2190 bounds_ptr
[1] = size
;
2196 /* Wrapper around sym_pop, that potentially also registers local bounds. */
2197 static void pop_local_syms(Sym
*b
, int keep
)
2199 #ifdef CONFIG_TCC_BCHECK
2200 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
2201 add_local_bounds(local_stack
, b
);
2204 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
2205 sym_pop(&local_stack
, b
, keep
);
2208 static void incr_bf_adr(int o
)
2210 vtop
->type
= char_pointer_type
;
2214 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
2218 /* single-byte load mode for packed or otherwise unaligned bitfields */
2219 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
2222 save_reg_upstack(vtop
->r
, 1);
2223 vpush64(type
->t
& VT_BTYPE
, 0); // B X
2224 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2233 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
2235 vpushi((1 << n
) - 1), gen_op('&');
2238 vpushi(bits
), gen_op(TOK_SHL
);
2241 bits
+= n
, bit_size
-= n
, o
= 1;
2244 if (!(type
->t
& VT_UNSIGNED
)) {
2245 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
2246 vpushi(n
), gen_op(TOK_SHL
);
2247 vpushi(n
), gen_op(TOK_SAR
);
2251 /* single-byte store mode for packed or otherwise unaligned bitfields */
2252 static void store_packed_bf(int bit_pos
, int bit_size
)
2254 int bits
, n
, o
, m
, c
;
2255 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2257 save_reg_upstack(vtop
->r
, 1);
2258 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2260 incr_bf_adr(o
); // X B
2262 c
? vdup() : gv_dup(); // B V X
2265 vpushi(bits
), gen_op(TOK_SHR
);
2267 vpushi(bit_pos
), gen_op(TOK_SHL
);
2272 m
= ((1 << n
) - 1) << bit_pos
;
2273 vpushi(m
), gen_op('&'); // X B V1
2274 vpushv(vtop
-1); // X B V1 B
2275 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
2276 gen_op('&'); // X B V1 B1
2277 gen_op('|'); // X B V2
2279 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
2280 vstore(), vpop(); // X B
2281 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
2286 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
2289 if (0 == sv
->type
.ref
)
2291 t
= sv
->type
.ref
->auxtype
;
2292 if (t
!= -1 && t
!= VT_STRUCT
) {
2293 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
2299 /* store vtop a register belonging to class 'rc'. lvalues are
2300 converted to values. Cannot be used if cannot be converted to
2301 register value (such as structures). */
2302 ST_FUNC
int gv(int rc
)
2304 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
2305 int bit_pos
, bit_size
, size
, align
;
2307 /* NOTE: get_reg can modify vstack[] */
2308 if (vtop
->type
.t
& VT_BITFIELD
) {
2311 bit_pos
= BIT_POS(vtop
->type
.t
);
2312 bit_size
= BIT_SIZE(vtop
->type
.t
);
2313 /* remove bit field info to avoid loops */
2314 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
2317 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
2318 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
2319 type
.t
|= VT_UNSIGNED
;
2321 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
2323 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2328 if (r
== VT_STRUCT
) {
2329 load_packed_bf(&type
, bit_pos
, bit_size
);
2331 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
2332 /* cast to int to propagate signedness in following ops */
2334 /* generate shifts */
2335 vpushi(bits
- (bit_pos
+ bit_size
));
2337 vpushi(bits
- bit_size
);
2338 /* NOTE: transformed to SHR if unsigned */
2343 if (is_float(vtop
->type
.t
) &&
2344 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2345 /* CPUs usually cannot use float constants, so we store them
2346 generically in data segment */
2347 init_params p
= { rodata_section
};
2348 unsigned long offset
;
2349 size
= type_size(&vtop
->type
, &align
);
2351 size
= 0, align
= 1;
2352 offset
= section_add(p
.sec
, size
, align
);
2353 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
2355 init_putv(&p
, &vtop
->type
, offset
);
2358 #ifdef CONFIG_TCC_BCHECK
2359 if (vtop
->r
& VT_MUSTBOUND
)
2363 bt
= vtop
->type
.t
& VT_BTYPE
;
2365 #ifdef TCC_TARGET_RISCV64
2367 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
2370 rc2
= RC2_TYPE(bt
, rc
);
2372 /* need to reload if:
2374 - lvalue (need to dereference pointer)
2375 - already a register, but not in the right class */
2376 r
= vtop
->r
& VT_VALMASK
;
2377 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
2378 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
2380 if (!r_ok
|| !r2_ok
) {
2384 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
2385 int original_type
= vtop
->type
.t
;
2387 /* two register type load :
2388 expand to two words temporarily */
2389 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2391 unsigned long long ll
= vtop
->c
.i
;
2392 vtop
->c
.i
= ll
; /* first word */
2394 vtop
->r
= r
; /* save register value */
2395 vpushi(ll
>> 32); /* second word */
2396 } else if (vtop
->r
& VT_LVAL
) {
2397 /* We do not want to modifier the long long pointer here.
2398 So we save any other instances down the stack */
2399 save_reg_upstack(vtop
->r
, 1);
2400 /* load from memory */
2401 vtop
->type
.t
= load_type
;
2404 vtop
[-1].r
= r
; /* save register value */
2405 /* increment pointer to get second word */
2406 vtop
->type
.t
= VT_PTRDIFF_T
;
2411 vtop
->type
.t
= load_type
;
2413 /* move registers */
2416 if (r2_ok
&& vtop
->r2
< VT_CONST
)
2419 vtop
[-1].r
= r
; /* save register value */
2420 vtop
->r
= vtop
[-1].r2
;
2422 /* Allocate second register. Here we rely on the fact that
2423 get_reg() tries first to free r2 of an SValue. */
2427 /* write second register */
2430 vtop
->type
.t
= original_type
;
2432 if (vtop
->r
== VT_CMP
)
2434 /* one register type load */
2439 #ifdef TCC_TARGET_C67
2440 /* uses register pairs for doubles */
2441 if (bt
== VT_DOUBLE
)
2448 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2449 ST_FUNC
void gv2(int rc1
, int rc2
)
2451 /* generate more generic register first. But VT_JMP or VT_CMP
2452 values must be generated first in all cases to avoid possible
2454 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
2459 /* test if reload is needed for first register */
2460 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
2470 /* test if reload is needed for first register */
2471 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
2478 /* expand 64bit on stack in two ints */
2479 ST_FUNC
void lexpand(void)
2482 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
2483 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
2484 if (v
== VT_CONST
) {
2487 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
2493 vtop
[0].r
= vtop
[-1].r2
;
2494 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2496 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2501 /* build a long long from two ints */
2502 static void lbuild(int t
)
2504 gv2(RC_INT
, RC_INT
);
2505 vtop
[-1].r2
= vtop
[0].r
;
2506 vtop
[-1].type
.t
= t
;
2511 /* convert stack entry to register and duplicate its value in another
2513 static void gv_dup(void)
2519 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2520 if (t
& VT_BITFIELD
) {
2530 /* stack: H L L1 H1 */
2540 /* duplicate value */
2550 /* generate CPU independent (unsigned) long long operations */
2551 static void gen_opl(int op
)
2553 int t
, a
, b
, op1
, c
, i
;
2555 unsigned short reg_iret
= REG_IRET
;
2556 unsigned short reg_lret
= REG_IRE2
;
2562 func
= TOK___divdi3
;
2565 func
= TOK___udivdi3
;
2568 func
= TOK___moddi3
;
2571 func
= TOK___umoddi3
;
2578 /* call generic long long function */
2579 vpush_helper_func(func
);
2584 vtop
->r2
= reg_lret
;
2592 //pv("gen_opl A",0,2);
2598 /* stack: L1 H1 L2 H2 */
2603 vtop
[-2] = vtop
[-3];
2606 /* stack: H1 H2 L1 L2 */
2607 //pv("gen_opl B",0,4);
2613 /* stack: H1 H2 L1 L2 ML MH */
2616 /* stack: ML MH H1 H2 L1 L2 */
2620 /* stack: ML MH H1 L2 H2 L1 */
2625 /* stack: ML MH M1 M2 */
2628 } else if (op
== '+' || op
== '-') {
2629 /* XXX: add non carry method too (for MIPS or alpha) */
2635 /* stack: H1 H2 (L1 op L2) */
2638 gen_op(op1
+ 1); /* TOK_xxxC2 */
2641 /* stack: H1 H2 (L1 op L2) */
2644 /* stack: (L1 op L2) H1 H2 */
2646 /* stack: (L1 op L2) (H1 op H2) */
2654 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2655 t
= vtop
[-1].type
.t
;
2659 /* stack: L H shift */
2661 /* constant: simpler */
2662 /* NOTE: all comments are for SHL. the other cases are
2663 done by swapping words */
2674 if (op
!= TOK_SAR
) {
2707 /* XXX: should provide a faster fallback on x86 ? */
2710 func
= TOK___ashrdi3
;
2713 func
= TOK___lshrdi3
;
2716 func
= TOK___ashldi3
;
2722 /* compare operations */
2728 /* stack: L1 H1 L2 H2 */
2730 vtop
[-1] = vtop
[-2];
2732 /* stack: L1 L2 H1 H2 */
2736 /* when values are equal, we need to compare low words. since
2737 the jump is inverted, we invert the test too. */
2740 else if (op1
== TOK_GT
)
2742 else if (op1
== TOK_ULT
)
2744 else if (op1
== TOK_UGT
)
2754 /* generate non equal test */
2756 vset_VT_CMP(TOK_NE
);
2760 /* compare low. Always unsigned */
2764 else if (op1
== TOK_LE
)
2766 else if (op1
== TOK_GT
)
2768 else if (op1
== TOK_GE
)
2771 #if 0//def TCC_TARGET_I386
2772 if (op
== TOK_NE
) { gsym(b
); break; }
2773 if (op
== TOK_EQ
) { gsym(a
); break; }
/* Signed 64-bit division performed on uint64_t operands: divide the
   magnitudes, then negate the quotient when the operand signs differ.
   Using unsigned arithmetic avoids signed-overflow UB in the folder. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ma = (a >> 63) ? -a : a;
    uint64_t mb = (b >> 63) ? -b : b;
    uint64_t q = ma / mb;
    return ((a ^ b) >> 63) ? -q : q;
}
/* Signed '<' on uint64_t operands: flipping the sign bit makes an
   unsigned comparison order values like a signed one. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
2793 /* handle integer constant optimizations and various machine
2795 static void gen_opic(int op
)
2797 SValue
*v1
= vtop
- 1;
2799 int t1
= v1
->type
.t
& VT_BTYPE
;
2800 int t2
= v2
->type
.t
& VT_BTYPE
;
2801 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2802 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2803 uint64_t l1
= c1
? v1
->c
.i
: 0;
2804 uint64_t l2
= c2
? v2
->c
.i
: 0;
2805 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2807 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2808 l1
= ((uint32_t)l1
|
2809 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2810 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2811 l2
= ((uint32_t)l2
|
2812 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2816 case '+': l1
+= l2
; break;
2817 case '-': l1
-= l2
; break;
2818 case '&': l1
&= l2
; break;
2819 case '^': l1
^= l2
; break;
2820 case '|': l1
|= l2
; break;
2821 case '*': l1
*= l2
; break;
2828 /* if division by zero, generate explicit division */
2830 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2831 tcc_error("division by zero in constant");
2835 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2836 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2837 case TOK_UDIV
: l1
= l1
/ l2
; break;
2838 case TOK_UMOD
: l1
= l1
% l2
; break;
2841 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2842 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2844 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2847 case TOK_ULT
: l1
= l1
< l2
; break;
2848 case TOK_UGE
: l1
= l1
>= l2
; break;
2849 case TOK_EQ
: l1
= l1
== l2
; break;
2850 case TOK_NE
: l1
= l1
!= l2
; break;
2851 case TOK_ULE
: l1
= l1
<= l2
; break;
2852 case TOK_UGT
: l1
= l1
> l2
; break;
2853 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2854 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2855 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2856 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2858 case TOK_LAND
: l1
= l1
&& l2
; break;
2859 case TOK_LOR
: l1
= l1
|| l2
; break;
2863 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2864 l1
= ((uint32_t)l1
|
2865 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2869 /* if commutative ops, put c2 as constant */
2870 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2871 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2873 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2874 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2876 if (!const_wanted
&&
2878 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2879 (l1
== -1 && op
== TOK_SAR
))) {
2880 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2882 } else if (!const_wanted
&&
2883 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2885 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2886 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2887 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2892 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2895 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2896 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2899 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2900 /* filter out NOP operations like x*1, x-0, x&-1... */
2902 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2903 /* try to use shifts instead of muls or divs */
2904 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2913 else if (op
== TOK_PDIV
)
2919 } else if (c2
&& (op
== '+' || op
== '-') &&
2920 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2921 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2922 /* symbol + constant case */
2926 /* The backends can't always deal with addends to symbols
2927 larger than +-1<<31. Don't construct such. */
2934 /* call low level op generator */
2935 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2936 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2944 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2945 # define gen_negf gen_opf
2946 #elif defined TCC_TARGET_ARM
2947 void gen_negf(int op
)
2949 /* arm will detect 0-x and replace by vneg */
2950 vpushi(0), vswap(), gen_op('-');
2953 /* XXX: implement in gen_opf() for other backends too */
2954 void gen_negf(int op
)
2956 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2957 subtract(-0, x), but with them it's really a sign flip
2958 operation. We implement this with bit manipulation and have
2959 to do some type reinterpretation for this, which TCC can do
2962 int align
, size
, bt
;
2964 size
= type_size(&vtop
->type
, &align
);
2965 bt
= vtop
->type
.t
& VT_BTYPE
;
2966 save_reg(gv(RC_TYPE(bt
)));
2968 incr_bf_adr(size
- 1);
2970 vpushi(0x80); /* flip sign */
2977 /* generate a floating point operation with constant propagation */
2978 static void gen_opif(int op
)
2982 #if defined _MSC_VER && defined __x86_64__
2983 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2993 /* currently, we cannot do computations with forward symbols */
2994 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2995 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2997 if (v1
->type
.t
== VT_FLOAT
) {
3000 } else if (v1
->type
.t
== VT_DOUBLE
) {
3007 /* NOTE: we only do constant propagation if finite number (not
3008 NaN or infinity) (ANSI spec) */
3009 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !const_wanted
)
3012 case '+': f1
+= f2
; break;
3013 case '-': f1
-= f2
; break;
3014 case '*': f1
*= f2
; break;
3017 union { float f
; unsigned u
; } x1
, x2
, y
;
3018 /* If not in initializer we need to potentially generate
3019 FP exceptions at runtime, otherwise we want to fold. */
3022 /* the run-time result of 0.0/0.0 on x87, also of other compilers
3023 when used to compile the f1 /= f2 below, would be -nan */
3024 x1
.f
= f1
, x2
.f
= f2
;
3026 y
.u
= 0x7fc00000; /* nan */
3028 y
.u
= 0x7f800000; /* infinity */
3029 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
3038 /* XXX: also handles tests ? */
3044 /* XXX: overflow test ? */
3045 if (v1
->type
.t
== VT_FLOAT
) {
3047 } else if (v1
->type
.t
== VT_DOUBLE
) {
3054 if (op
== TOK_NEG
) {
3062 /* print a type. If 'varstr' is not NULL, then the variable is also
3063 printed in the type */
3065 /* XXX: add array and function pointers */
3066 static void type_to_str(char *buf
, int buf_size
,
3067 CType
*type
, const char *varstr
)
3079 pstrcat(buf
, buf_size
, "extern ");
3081 pstrcat(buf
, buf_size
, "static ");
3083 pstrcat(buf
, buf_size
, "typedef ");
3085 pstrcat(buf
, buf_size
, "inline ");
3087 if (t
& VT_VOLATILE
)
3088 pstrcat(buf
, buf_size
, "volatile ");
3089 if (t
& VT_CONSTANT
)
3090 pstrcat(buf
, buf_size
, "const ");
3092 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
3093 || ((t
& VT_UNSIGNED
)
3094 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
3097 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
3099 buf_size
-= strlen(buf
);
3135 tstr
= "long double";
3137 pstrcat(buf
, buf_size
, tstr
);
3144 pstrcat(buf
, buf_size
, tstr
);
3145 v
= type
->ref
->v
& ~SYM_STRUCT
;
3146 if (v
>= SYM_FIRST_ANOM
)
3147 pstrcat(buf
, buf_size
, "<anonymous>");
3149 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3154 if (varstr
&& '*' == *varstr
) {
3155 pstrcat(buf1
, sizeof(buf1
), "(");
3156 pstrcat(buf1
, sizeof(buf1
), varstr
);
3157 pstrcat(buf1
, sizeof(buf1
), ")");
3159 pstrcat(buf1
, buf_size
, "(");
3161 while (sa
!= NULL
) {
3163 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3164 pstrcat(buf1
, sizeof(buf1
), buf2
);
3167 pstrcat(buf1
, sizeof(buf1
), ", ");
3169 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3170 pstrcat(buf1
, sizeof(buf1
), ", ...");
3171 pstrcat(buf1
, sizeof(buf1
), ")");
3172 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3176 if (t
& (VT_ARRAY
|VT_VLA
)) {
3177 if (varstr
&& '*' == *varstr
)
3178 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3180 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3181 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3184 pstrcpy(buf1
, sizeof(buf1
), "*");
3185 if (t
& VT_CONSTANT
)
3186 pstrcat(buf1
, buf_size
, "const ");
3187 if (t
& VT_VOLATILE
)
3188 pstrcat(buf1
, buf_size
, "volatile ");
3190 pstrcat(buf1
, sizeof(buf1
), varstr
);
3191 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3195 pstrcat(buf
, buf_size
, " ");
3196 pstrcat(buf
, buf_size
, varstr
);
3201 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
3203 char buf1
[256], buf2
[256];
3204 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3205 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3206 tcc_error(fmt
, buf1
, buf2
);
3209 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
3211 char buf1
[256], buf2
[256];
3212 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3213 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3214 tcc_warning(fmt
, buf1
, buf2
);
3217 static int pointed_size(CType
*type
)
3220 return type_size(pointed_type(type
), &align
);
3223 static inline int is_null_pointer(SValue
*p
)
3225 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
3227 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
3228 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
3229 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
3230 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
3231 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
3232 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3236 /* compare function types. OLD functions match any new functions */
3237 static int is_compatible_func(CType
*type1
, CType
*type2
)
3243 if (s1
->f
.func_call
!= s2
->f
.func_call
)
3245 if (s1
->f
.func_type
!= s2
->f
.func_type
3246 && s1
->f
.func_type
!= FUNC_OLD
3247 && s2
->f
.func_type
!= FUNC_OLD
)
3250 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
3252 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
3263 /* return true if type1 and type2 are the same. If unqualified is
3264 true, qualifiers on the types are ignored.
3266 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3270 t1
= type1
->t
& VT_TYPE
;
3271 t2
= type2
->t
& VT_TYPE
;
3273 /* strip qualifiers before comparing */
3274 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3275 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3278 /* Default Vs explicit signedness only matters for char */
3279 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3283 /* XXX: bitfields ? */
3288 && !(type1
->ref
->c
< 0
3289 || type2
->ref
->c
< 0
3290 || type1
->ref
->c
== type2
->ref
->c
))
3293 /* test more complicated cases */
3294 bt1
= t1
& VT_BTYPE
;
3295 if (bt1
== VT_PTR
) {
3296 type1
= pointed_type(type1
);
3297 type2
= pointed_type(type2
);
3298 return is_compatible_types(type1
, type2
);
3299 } else if (bt1
== VT_STRUCT
) {
3300 return (type1
->ref
== type2
->ref
);
3301 } else if (bt1
== VT_FUNC
) {
3302 return is_compatible_func(type1
, type2
);
3303 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
3304 /* If both are enums then they must be the same, if only one is then
3305 t1 and t2 must be equal, which was checked above already. */
3306 return type1
->ref
== type2
->ref
;
3312 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3313 type is stored in DEST if non-null (except for pointer plus/minus) . */
3314 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
3316 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
3317 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
3323 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
3324 ret
= op
== '?' ? 1 : 0;
3325 /* NOTE: as an extension, we accept void on only one side */
3327 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3328 if (op
== '+') ; /* Handled in caller */
3329 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3330 /* If one is a null ptr constant the result type is the other. */
3331 else if (is_null_pointer (op2
)) type
= *type1
;
3332 else if (is_null_pointer (op1
)) type
= *type2
;
3333 else if (bt1
!= bt2
) {
3334 /* accept comparison or cond-expr between pointer and integer
3336 if ((op
== '?' || TOK_ISCOND(op
))
3337 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
3338 tcc_warning("pointer/integer mismatch in %s",
3339 op
== '?' ? "conditional expression" : "comparison");
3340 else if (op
!= '-' || !is_integer_btype(bt2
))
3342 type
= *(bt1
== VT_PTR
? type1
: type2
);
3344 CType
*pt1
= pointed_type(type1
);
3345 CType
*pt2
= pointed_type(type2
);
3346 int pbt1
= pt1
->t
& VT_BTYPE
;
3347 int pbt2
= pt2
->t
& VT_BTYPE
;
3348 int newquals
, copied
= 0;
3349 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
3350 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
3351 if (op
!= '?' && !TOK_ISCOND(op
))
3354 type_incompatibility_warning(type1
, type2
,
3356 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3357 : "pointer type mismatch in comparison('%s' and '%s')");
3360 /* pointers to void get preferred, otherwise the
3361 pointed to types minus qualifs should be compatible */
3362 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
3363 /* combine qualifs */
3364 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
3365 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3368 /* copy the pointer target symbol */
3369 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3372 pointed_type(&type
)->t
|= newquals
;
3374 /* pointers to incomplete arrays get converted to
3375 pointers to completed ones if possible */
3376 if (pt1
->t
& VT_ARRAY
3377 && pt2
->t
& VT_ARRAY
3378 && pointed_type(&type
)->ref
->c
< 0
3379 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
3382 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3384 pointed_type(&type
)->ref
=
3385 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
3386 0, pointed_type(&type
)->ref
->c
);
3387 pointed_type(&type
)->ref
->c
=
3388 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
3394 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
3395 if (op
!= '?' || !compare_types(type1
, type2
, 1))
3398 } else if (is_float(bt1
) || is_float(bt2
)) {
3399 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
3400 type
.t
= VT_LDOUBLE
;
3401 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
3406 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
3407 /* cast to biggest op */
3408 type
.t
= VT_LLONG
| VT_LONG
;
3409 if (bt1
== VT_LLONG
)
3411 if (bt2
== VT_LLONG
)
3413 /* convert to unsigned if it does not fit in a long long */
3414 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
3415 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
3416 type
.t
|= VT_UNSIGNED
;
3418 /* integer operations */
3419 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
3420 /* convert to unsigned if it does not fit in an integer */
3421 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
3422 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
3423 type
.t
|= VT_UNSIGNED
;
3430 /* generic gen_op: handles types problems */
3431 ST_FUNC
void gen_op(int op
)
3433 int t1
, t2
, bt1
, bt2
, t
;
3434 CType type1
, combtype
;
3437 t1
= vtop
[-1].type
.t
;
3438 t2
= vtop
[0].type
.t
;
3439 bt1
= t1
& VT_BTYPE
;
3440 bt2
= t2
& VT_BTYPE
;
3442 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
3443 if (bt2
== VT_FUNC
) {
3444 mk_pointer(&vtop
->type
);
3447 if (bt1
== VT_FUNC
) {
3449 mk_pointer(&vtop
->type
);
3454 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
3455 tcc_error_noabort("invalid operand types for binary operation");
3457 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3458 /* at least one operand is a pointer */
3459 /* relational op: must be both pointers */
3463 /* if both pointers, then it must be the '-' op */
3464 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
3466 tcc_error("cannot use pointers here");
3467 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
3470 vtop
->type
.t
= VT_PTRDIFF_T
;
3474 /* exactly one pointer : must be '+' or '-'. */
3475 if (op
!= '-' && op
!= '+')
3476 tcc_error("cannot use pointers here");
3477 /* Put pointer as first operand */
3478 if (bt2
== VT_PTR
) {
3480 t
= t1
, t1
= t2
, t2
= t
;
3483 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
3484 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3487 type1
= vtop
[-1].type
;
3488 vpush_type_size(pointed_type(&vtop
[-1].type
), &align
);
3490 #ifdef CONFIG_TCC_BCHECK
3491 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
3492 /* if bounded pointers, we generate a special code to
3499 gen_bounded_ptr_add();
3505 type1
.t
&= ~(VT_ARRAY
|VT_VLA
);
3506 /* put again type if gen_opic() swaped operands */
3510 /* floats can only be used for a few operations */
3511 if (is_float(combtype
.t
)
3512 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3514 tcc_error("invalid operands for binary operation");
3515 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
3516 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
3517 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
3519 t
|= (VT_LONG
& t1
);
3523 t
= t2
= combtype
.t
;
3524 /* XXX: currently, some unsigned operations are explicit, so
3525 we modify them here */
3526 if (t
& VT_UNSIGNED
) {
3533 else if (op
== TOK_LT
)
3535 else if (op
== TOK_GT
)
3537 else if (op
== TOK_LE
)
3539 else if (op
== TOK_GE
)
3545 /* special case for shifts and long long: we keep the shift as
3547 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
3554 if (TOK_ISCOND(op
)) {
3555 /* relational op: the result is an int */
3556 vtop
->type
.t
= VT_INT
;
3561 // Make sure that we have converted to an rvalue:
3562 if (vtop
->r
& VT_LVAL
)
3563 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3566 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3567 #define gen_cvt_itof1 gen_cvt_itof
3569 /* generic itof for unsigned long long case */
3570 static void gen_cvt_itof1(int t
)
3572 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3573 (VT_LLONG
| VT_UNSIGNED
)) {
3576 vpush_helper_func(TOK___floatundisf
);
3577 #if LDOUBLE_SIZE != 8
3578 else if (t
== VT_LDOUBLE
)
3579 vpush_helper_func(TOK___floatundixf
);
3582 vpush_helper_func(TOK___floatundidf
);
3593 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3594 #define gen_cvt_ftoi1 gen_cvt_ftoi
3596 /* generic ftoi for unsigned long long case */
3597 static void gen_cvt_ftoi1(int t
)
3600 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3601 /* not handled natively */
3602 st
= vtop
->type
.t
& VT_BTYPE
;
3604 vpush_helper_func(TOK___fixunssfdi
);
3605 #if LDOUBLE_SIZE != 8
3606 else if (st
== VT_LDOUBLE
)
3607 vpush_helper_func(TOK___fixunsxfdi
);
3610 vpush_helper_func(TOK___fixunsdfdi
);
3621 /* special delayed cast for char/short */
3622 static void force_charshort_cast(void)
3624 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3625 int dbt
= vtop
->type
.t
;
3626 vtop
->r
&= ~VT_MUSTCAST
;
3628 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3632 static void gen_cast_s(int t
)
3640 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3641 static void gen_cast(CType
*type
)
3643 int sbt
, dbt
, sf
, df
, c
;
3644 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3646 /* special delayed cast for char/short */
3647 if (vtop
->r
& VT_MUSTCAST
)
3648 force_charshort_cast();
3650 /* bitfields first get cast to ints */
3651 if (vtop
->type
.t
& VT_BITFIELD
)
3654 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3655 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3663 dbt_bt
= dbt
& VT_BTYPE
;
3664 sbt_bt
= sbt
& VT_BTYPE
;
3665 if (dbt_bt
== VT_VOID
)
3667 if (sbt_bt
== VT_VOID
) {
3669 cast_error(&vtop
->type
, type
);
3672 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3673 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3674 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3677 /* constant case: we can do it now */
3678 /* XXX: in ISOC, cannot do it if error in convert */
3679 if (sbt
== VT_FLOAT
)
3680 vtop
->c
.ld
= vtop
->c
.f
;
3681 else if (sbt
== VT_DOUBLE
)
3682 vtop
->c
.ld
= vtop
->c
.d
;
3685 if (sbt_bt
== VT_LLONG
) {
3686 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3687 vtop
->c
.ld
= vtop
->c
.i
;
3689 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3691 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3692 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3694 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3697 if (dbt
== VT_FLOAT
)
3698 vtop
->c
.f
= (float)vtop
->c
.ld
;
3699 else if (dbt
== VT_DOUBLE
)
3700 vtop
->c
.d
= (double)vtop
->c
.ld
;
3701 } else if (sf
&& dbt
== VT_BOOL
) {
3702 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3705 vtop
->c
.i
= vtop
->c
.ld
;
3706 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3708 else if (sbt
& VT_UNSIGNED
)
3709 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3711 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3713 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3715 else if (dbt
== VT_BOOL
)
3716 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3718 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3719 dbt_bt
== VT_SHORT
? 0xffff :
3722 if (!(dbt
& VT_UNSIGNED
))
3723 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3728 } else if (dbt
== VT_BOOL
3729 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3730 == (VT_CONST
| VT_SYM
)) {
3731 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3737 /* cannot generate code for global or static initializers */
3738 if (STATIC_DATA_WANTED
)
3741 /* non constant case: generate code */
3742 if (dbt
== VT_BOOL
) {
3743 gen_test_zero(TOK_NE
);
3749 /* convert from fp to fp */
3752 /* convert int to fp */
3755 /* convert fp to int */
3757 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3760 goto again
; /* may need char/short cast */
3765 ds
= btype_size(dbt_bt
);
3766 ss
= btype_size(sbt_bt
);
3767 if (ds
== 0 || ss
== 0)
3770 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3771 tcc_error("cast to incomplete type");
3773 /* same size and no sign conversion needed */
3774 if (ds
== ss
&& ds
>= 4)
3776 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3777 tcc_warning("cast between pointer and integer of different size");
3778 if (sbt_bt
== VT_PTR
) {
3779 /* put integer type to allow logical operations below */
3780 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3784 /* processor allows { int a = 0, b = *(char*)&a; }
3785 That means that if we cast to less width, we can just
3786 change the type and read it still later. */
3787 #define ALLOW_SUBTYPE_ACCESS 1
3789 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3790 /* value still in memory */
3794 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3796 goto done
; /* no 64bit envolved */
3804 /* generate high word */
3805 if (sbt
& VT_UNSIGNED
) {
3814 } else if (ss
== 8) {
3815 /* from long long: just take low order word */
3823 /* need to convert from 32bit to 64bit */
3824 if (sbt
& VT_UNSIGNED
) {
3825 #if defined(TCC_TARGET_RISCV64)
3826 /* RISC-V keeps 32bit vals in registers sign-extended.
3827 So here we need a zero-extension. */
3836 ss
= ds
, ds
= 4, dbt
= sbt
;
3837 } else if (ss
== 8) {
3838 /* RISC-V keeps 32bit vals in registers sign-extended.
3839 So here we need a sign-extension for signed types and
3840 zero-extension. for unsigned types. */
3841 #if !defined(TCC_TARGET_RISCV64)
3842 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3851 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3857 bits
= (ss
- ds
) * 8;
3858 /* for unsigned, gen_op will convert SAR to SHR */
3859 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3862 vpushi(bits
- trunc
);
3869 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3872 /* return type size as known at compile time. Put alignment at 'a' */
3873 ST_FUNC
int type_size(CType
*type
, int *a
)
3878 bt
= type
->t
& VT_BTYPE
;
3879 if (bt
== VT_STRUCT
) {
3884 } else if (bt
== VT_PTR
) {
3885 if (type
->t
& VT_ARRAY
) {
3889 ts
= type_size(&s
->type
, a
);
3891 if (ts
< 0 && s
->c
< 0)
3899 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3901 return -1; /* incomplete enum */
3902 } else if (bt
== VT_LDOUBLE
) {
3904 return LDOUBLE_SIZE
;
3905 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3906 #ifdef TCC_TARGET_I386
3907 #ifdef TCC_TARGET_PE
3912 #elif defined(TCC_TARGET_ARM)
3922 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3925 } else if (bt
== VT_SHORT
) {
3928 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3932 /* char, void, function, _Bool */
3938 /* push type size as known at runtime time on top of value stack. Put
3940 static void vpush_type_size(CType
*type
, int *a
)
3942 if (type
->t
& VT_VLA
) {
3943 type_size(&type
->ref
->type
, a
);
3944 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3946 int size
= type_size(type
, a
);
3948 tcc_error("unknown type size");
3957 /* return the pointed type of t */
3958 static inline CType
*pointed_type(CType
*type
)
3960 return &type
->ref
->type
;
3963 /* modify type so that its it is a pointer to type. */
3964 ST_FUNC
void mk_pointer(CType
*type
)
3967 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3968 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3972 /* return true if type1 and type2 are exactly the same (including
3975 static int is_compatible_types(CType
*type1
, CType
*type2
)
3977 return compare_types(type1
,type2
,0);
3980 /* return true if type1 and type2 are the same (ignoring qualifiers).
3982 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3984 return compare_types(type1
,type2
,1);
3987 static void cast_error(CType
*st
, CType
*dt
)
3989 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3992 /* verify type compatibility to store vtop in 'dt' type */
3993 static void verify_assign_cast(CType
*dt
)
3995 CType
*st
, *type1
, *type2
;
3996 int dbt
, sbt
, qualwarn
, lvl
;
3998 st
= &vtop
->type
; /* source type */
3999 dbt
= dt
->t
& VT_BTYPE
;
4000 sbt
= st
->t
& VT_BTYPE
;
4001 if (dt
->t
& VT_CONSTANT
)
4002 tcc_warning("assignment of read-only location");
4006 tcc_error("assignment to void expression");
4009 /* special cases for pointers */
4010 /* '0' can also be a pointer */
4011 if (is_null_pointer(vtop
))
4013 /* accept implicit pointer to integer cast with warning */
4014 if (is_integer_btype(sbt
)) {
4015 tcc_warning("assignment makes pointer from integer without a cast");
4018 type1
= pointed_type(dt
);
4020 type2
= pointed_type(st
);
4021 else if (sbt
== VT_FUNC
)
4022 type2
= st
; /* a function is implicitly a function pointer */
4025 if (is_compatible_types(type1
, type2
))
4027 for (qualwarn
= lvl
= 0;; ++lvl
) {
4028 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
4029 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
4031 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
4032 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
4033 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
4035 type1
= pointed_type(type1
);
4036 type2
= pointed_type(type2
);
4038 if (!is_compatible_unqualified_types(type1
, type2
)) {
4039 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
4040 /* void * can match anything */
4041 } else if (dbt
== sbt
4042 && is_integer_btype(sbt
& VT_BTYPE
)
4043 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
4044 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
4045 /* Like GCC don't warn by default for merely changes
4046 in pointer target signedness. Do warn for different
4047 base types, though, in particular for unsigned enums
4048 and signed int targets. */
4050 tcc_warning("assignment from incompatible pointer type");
4055 tcc_warning_c(warn_discarded_qualifiers
)("assignment discards qualifiers from pointer target type");
4061 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
4062 tcc_warning("assignment makes integer from pointer without a cast");
4063 } else if (sbt
== VT_STRUCT
) {
4064 goto case_VT_STRUCT
;
4066 /* XXX: more tests */
4070 if (!is_compatible_unqualified_types(dt
, st
)) {
4078 static void gen_assign_cast(CType
*dt
)
4080 verify_assign_cast(dt
);
4084 /* store vtop in lvalue pushed on stack */
4085 ST_FUNC
void vstore(void)
4087 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
4089 ft
= vtop
[-1].type
.t
;
4090 sbt
= vtop
->type
.t
& VT_BTYPE
;
4091 dbt
= ft
& VT_BTYPE
;
4093 verify_assign_cast(&vtop
[-1].type
);
4095 if (sbt
== VT_STRUCT
) {
4096 /* if structure, only generate pointer */
4097 /* structure assignment : generate memcpy */
4098 /* XXX: optimize if small size */
4099 size
= type_size(&vtop
->type
, &align
);
4103 #ifdef CONFIG_TCC_BCHECK
4104 if (vtop
->r
& VT_MUSTBOUND
)
4105 gbound(); /* check would be wrong after gaddrof() */
4107 vtop
->type
.t
= VT_PTR
;
4110 /* address of memcpy() */
4113 vpush_helper_func(TOK_memmove8
);
4114 else if(!(align
& 3))
4115 vpush_helper_func(TOK_memmove4
);
4118 /* Use memmove, rather than memcpy, as dest and src may be same: */
4119 vpush_helper_func(TOK_memmove
);
4124 #ifdef CONFIG_TCC_BCHECK
4125 if (vtop
->r
& VT_MUSTBOUND
)
4128 vtop
->type
.t
= VT_PTR
;
4133 /* leave source on stack */
4135 } else if (ft
& VT_BITFIELD
) {
4136 /* bitfield store handling */
4138 /* save lvalue as expression result (example: s.b = s.a = n;) */
4139 vdup(), vtop
[-1] = vtop
[-2];
4141 bit_pos
= BIT_POS(ft
);
4142 bit_size
= BIT_SIZE(ft
);
4143 /* remove bit field info to avoid loops */
4144 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
4146 if (dbt
== VT_BOOL
) {
4147 gen_cast(&vtop
[-1].type
);
4148 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
4150 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
4151 if (dbt
!= VT_BOOL
) {
4152 gen_cast(&vtop
[-1].type
);
4153 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
4155 if (r
== VT_STRUCT
) {
4156 store_packed_bf(bit_pos
, bit_size
);
4158 unsigned long long mask
= (1ULL << bit_size
) - 1;
4159 if (dbt
!= VT_BOOL
) {
4161 if (dbt
== VT_LLONG
)
4164 vpushi((unsigned)mask
);
4171 /* duplicate destination */
4174 /* load destination, mask and or with source */
4175 if (dbt
== VT_LLONG
)
4176 vpushll(~(mask
<< bit_pos
));
4178 vpushi(~((unsigned)mask
<< bit_pos
));
4183 /* ... and discard */
4186 } else if (dbt
== VT_VOID
) {
4189 /* optimize char/short casts */
4191 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
4192 && is_integer_btype(sbt
)
4194 if ((vtop
->r
& VT_MUSTCAST
)
4195 && btype_size(dbt
) > btype_size(sbt
)
4197 force_charshort_cast();
4200 gen_cast(&vtop
[-1].type
);
4203 #ifdef CONFIG_TCC_BCHECK
4204 /* bound check case */
4205 if (vtop
[-1].r
& VT_MUSTBOUND
) {
4211 gv(RC_TYPE(dbt
)); /* generate value */
4214 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
4215 //tcc_warning("deley cast %x -> %x", sbt, dbt);
4216 vtop
->type
.t
= ft
& VT_TYPE
;
4219 /* if lvalue was saved on stack, must read it */
4220 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
4222 r
= get_reg(RC_INT
);
4223 sv
.type
.t
= VT_PTRDIFF_T
;
4224 sv
.r
= VT_LOCAL
| VT_LVAL
;
4225 sv
.c
.i
= vtop
[-1].c
.i
;
4227 vtop
[-1].r
= r
| VT_LVAL
;
4230 r
= vtop
->r
& VT_VALMASK
;
4231 /* two word case handling :
4232 store second register at word + 4 (or +8 for x86-64) */
4233 if (USING_TWO_WORDS(dbt
)) {
4234 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
4235 vtop
[-1].type
.t
= load_type
;
4238 /* convert to int to increment easily */
4239 vtop
->type
.t
= VT_PTRDIFF_T
;
4245 vtop
[-1].type
.t
= load_type
;
4246 /* XXX: it works because r2 is spilled last ! */
4247 store(vtop
->r2
, vtop
- 1);
4253 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
4257 /* post defines POST/PRE add. c is the token ++ or -- */
4258 ST_FUNC
void inc(int post
, int c
)
4261 vdup(); /* save lvalue */
4263 gv_dup(); /* duplicate value */
4268 vpushi(c
- TOK_MID
);
4270 vstore(); /* store value */
4272 vpop(); /* if post op, return saved value */
4275 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
4277 /* read the string */
4281 while (tok
== TOK_STR
) {
4282 /* XXX: add \0 handling too ? */
4283 cstr_cat(astr
, tokc
.str
.data
, -1);
4286 cstr_ccat(astr
, '\0');
4289 /* If I is >= 1 and a power of two, returns log2(i)+1.
4290 If I is 0 returns 0. */
4291 ST_FUNC
int exact_log2p1(int i
)
4296 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
4307 /* Parse __attribute__((...)) GNUC extension. */
4308 static void parse_attribute(AttributeDef
*ad
)
4314 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
4319 while (tok
!= ')') {
4320 if (tok
< TOK_IDENT
)
4321 expect("attribute name");
4333 tcc_warning_c(warn_implicit_function_declaration
)(
4334 "implicit declaration of function '%s'", get_tok_str(tok
, &tokc
));
4335 s
= external_global_sym(tok
, &func_old_type
);
4336 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4337 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
4338 ad
->cleanup_func
= s
;
4343 case TOK_CONSTRUCTOR1
:
4344 case TOK_CONSTRUCTOR2
:
4345 ad
->f
.func_ctor
= 1;
4347 case TOK_DESTRUCTOR1
:
4348 case TOK_DESTRUCTOR2
:
4349 ad
->f
.func_dtor
= 1;
4351 case TOK_ALWAYS_INLINE1
:
4352 case TOK_ALWAYS_INLINE2
:
4353 ad
->f
.func_alwinl
= 1;
4358 parse_mult_str(&astr
, "section name");
4359 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
4366 parse_mult_str(&astr
, "alias(\"target\")");
4367 ad
->alias_target
= /* save string as token, for later */
4368 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
4372 case TOK_VISIBILITY1
:
4373 case TOK_VISIBILITY2
:
4375 parse_mult_str(&astr
,
4376 "visibility(\"default|hidden|internal|protected\")");
4377 if (!strcmp (astr
.data
, "default"))
4378 ad
->a
.visibility
= STV_DEFAULT
;
4379 else if (!strcmp (astr
.data
, "hidden"))
4380 ad
->a
.visibility
= STV_HIDDEN
;
4381 else if (!strcmp (astr
.data
, "internal"))
4382 ad
->a
.visibility
= STV_INTERNAL
;
4383 else if (!strcmp (astr
.data
, "protected"))
4384 ad
->a
.visibility
= STV_PROTECTED
;
4386 expect("visibility(\"default|hidden|internal|protected\")");
4395 if (n
<= 0 || (n
& (n
- 1)) != 0)
4396 tcc_error("alignment must be a positive power of two");
4401 ad
->a
.aligned
= exact_log2p1(n
);
4402 if (n
!= 1 << (ad
->a
.aligned
- 1))
4403 tcc_error("alignment of %d is larger than implemented", n
);
4415 /* currently, no need to handle it because tcc does not
4416 track unused objects */
4420 ad
->f
.func_noreturn
= 1;
4425 ad
->f
.func_call
= FUNC_CDECL
;
4430 ad
->f
.func_call
= FUNC_STDCALL
;
4432 #ifdef TCC_TARGET_I386
4442 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
4448 ad
->f
.func_call
= FUNC_FASTCALLW
;
4455 ad
->attr_mode
= VT_LLONG
+ 1;
4458 ad
->attr_mode
= VT_BYTE
+ 1;
4461 ad
->attr_mode
= VT_SHORT
+ 1;
4465 ad
->attr_mode
= VT_INT
+ 1;
4468 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4475 ad
->a
.dllexport
= 1;
4477 case TOK_NODECORATE
:
4478 ad
->a
.nodecorate
= 1;
4481 ad
->a
.dllimport
= 1;
4484 tcc_warning_c(warn_unsupported
)("'%s' attribute ignored", get_tok_str(t
, NULL
));
4485 /* skip parameters */
4487 int parenthesis
= 0;
4491 else if (tok
== ')')
4494 } while (parenthesis
&& tok
!= -1);
4507 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
4511 while ((s
= s
->next
) != NULL
) {
4512 if ((s
->v
& SYM_FIELD
) &&
4513 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
4514 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
4515 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
4527 static void check_fields (CType
*type
, int check
)
4531 while ((s
= s
->next
) != NULL
) {
4532 int v
= s
->v
& ~SYM_FIELD
;
4533 if (v
< SYM_FIRST_ANOM
) {
4534 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
4535 if (check
&& (ts
->tok
& SYM_FIELD
))
4536 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
4537 ts
->tok
^= SYM_FIELD
;
4538 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
4539 check_fields (&s
->type
, check
);
4543 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4545 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4546 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4547 int pcc
= !tcc_state
->ms_bitfields
;
4548 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4555 prevbt
= VT_STRUCT
; /* make it never match */
4560 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4561 if (f
->type
.t
& VT_BITFIELD
)
4562 bit_size
= BIT_SIZE(f
->type
.t
);
4565 size
= type_size(&f
->type
, &align
);
4566 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4569 if (pcc
&& bit_size
== 0) {
4570 /* in pcc mode, packing does not affect zero-width bitfields */
4573 /* in pcc mode, attribute packed overrides if set. */
4574 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4577 /* pragma pack overrides align if lesser and packs bitfields always */
4580 if (pragma_pack
< align
)
4581 align
= pragma_pack
;
4582 /* in pcc mode pragma pack also overrides individual align */
4583 if (pcc
&& pragma_pack
< a
)
4587 /* some individual align was specified */
4591 if (type
->ref
->type
.t
== VT_UNION
) {
4592 if (pcc
&& bit_size
>= 0)
4593 size
= (bit_size
+ 7) >> 3;
4598 } else if (bit_size
< 0) {
4600 c
+= (bit_pos
+ 7) >> 3;
4601 c
= (c
+ align
- 1) & -align
;
4610 /* A bit-field. Layout is more complicated. There are two
4611 options: PCC (GCC) compatible and MS compatible */
4613 /* In PCC layout a bit-field is placed adjacent to the
4614 preceding bit-fields, except if:
4616 - an individual alignment was given
4617 - it would overflow its base type container and
4618 there is no packing */
4619 if (bit_size
== 0) {
4621 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4623 } else if (f
->a
.aligned
) {
4625 } else if (!packed
) {
4627 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4628 if (ofs
> size
/ align
)
4632 /* in pcc mode, long long bitfields have type int if they fit */
4633 if (size
== 8 && bit_size
<= 32)
4634 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4636 while (bit_pos
>= align
* 8)
4637 c
+= align
, bit_pos
-= align
* 8;
4640 /* In PCC layout named bit-fields influence the alignment
4641 of the containing struct using the base types alignment,
4642 except for packed fields (which here have correct align). */
4643 if (f
->v
& SYM_FIRST_ANOM
4644 // && bit_size // ??? gcc on ARM/rpi does that
4649 bt
= f
->type
.t
& VT_BTYPE
;
4650 if ((bit_pos
+ bit_size
> size
* 8)
4651 || (bit_size
> 0) == (bt
!= prevbt
)
4653 c
= (c
+ align
- 1) & -align
;
4656 /* In MS bitfield mode a bit-field run always uses
4657 at least as many bits as the underlying type.
4658 To start a new run it's also required that this
4659 or the last bit-field had non-zero width. */
4660 if (bit_size
|| prev_bit_size
)
4663 /* In MS layout the records alignment is normally
4664 influenced by the field, except for a zero-width
4665 field at the start of a run (but by further zero-width
4666 fields it is again). */
4667 if (bit_size
== 0 && prevbt
!= bt
)
4670 prev_bit_size
= bit_size
;
4673 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4674 | (bit_pos
<< VT_STRUCT_SHIFT
);
4675 bit_pos
+= bit_size
;
4677 if (align
> maxalign
)
4681 printf("set field %s offset %-2d size %-2d align %-2d",
4682 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4683 if (f
->type
.t
& VT_BITFIELD
) {
4684 printf(" pos %-2d bits %-2d",
4697 c
+= (bit_pos
+ 7) >> 3;
4699 /* store size and alignment */
4700 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4704 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4705 /* can happen if individual align for some member was given. In
4706 this case MSVC ignores maxalign when aligning the size */
4711 c
= (c
+ a
- 1) & -a
;
4715 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4718 /* check whether we can access bitfields by their type */
4719 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4723 if (0 == (f
->type
.t
& VT_BITFIELD
))
4727 bit_size
= BIT_SIZE(f
->type
.t
);
4730 bit_pos
= BIT_POS(f
->type
.t
);
4731 size
= type_size(&f
->type
, &align
);
4733 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4734 #ifdef TCC_TARGET_ARM
4735 && !(f
->c
& (align
- 1))
4740 /* try to access the field using a different type */
4741 c0
= -1, s
= align
= 1;
4744 px
= f
->c
* 8 + bit_pos
;
4745 cx
= (px
>> 3) & -align
;
4746 px
= px
- (cx
<< 3);
4749 s
= (px
+ bit_size
+ 7) >> 3;
4759 s
= type_size(&t
, &align
);
4763 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4764 #ifdef TCC_TARGET_ARM
4765 && !(cx
& (align
- 1))
4768 /* update offset and bit position */
4771 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4772 | (bit_pos
<< VT_STRUCT_SHIFT
);
4776 printf("FIX field %s offset %-2d size %-2d align %-2d "
4777 "pos %-2d bits %-2d\n",
4778 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4779 cx
, s
, align
, px
, bit_size
);
4782 /* fall back to load/store single-byte wise */
4783 f
->auxtype
= VT_STRUCT
;
4785 printf("FIX field %s : load byte-wise\n",
4786 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4792 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4793 static void struct_decl(CType
*type
, int u
)
4795 int v
, c
, size
, align
, flexible
;
4796 int bit_size
, bsize
, bt
;
4798 AttributeDef ad
, ad1
;
4801 memset(&ad
, 0, sizeof ad
);
4803 parse_attribute(&ad
);
4807 /* struct already defined ? return it */
4809 expect("struct/union/enum name");
4811 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4814 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4816 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4821 /* Record the original enum/struct/union token. */
4822 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4824 /* we put an undefined size for struct/union */
4825 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4826 s
->r
= 0; /* default alignment is zero as gcc */
4828 type
->t
= s
->type
.t
;
4834 tcc_error("struct/union/enum already defined");
4836 /* cannot be empty */
4837 /* non empty enums are not allowed */
4840 long long ll
= 0, pl
= 0, nl
= 0;
4843 /* enum symbols have static storage */
4844 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4848 expect("identifier");
4850 if (ss
&& !local_stack
)
4851 tcc_error("redefinition of enumerator '%s'",
4852 get_tok_str(v
, NULL
));
4856 ll
= expr_const64();
4858 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4860 *ps
= ss
, ps
= &ss
->next
;
4869 /* NOTE: we accept a trailing comma */
4874 /* set integral type of the enum */
4877 if (pl
!= (unsigned)pl
)
4878 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4880 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4881 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4882 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4884 /* set type for enum members */
4885 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4887 if (ll
== (int)ll
) /* default is int if it fits */
4889 if (t
.t
& VT_UNSIGNED
) {
4890 ss
->type
.t
|= VT_UNSIGNED
;
4891 if (ll
== (unsigned)ll
)
4894 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4895 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4900 while (tok
!= '}') {
4901 if (!parse_btype(&btype
, &ad1
)) {
4907 tcc_error("flexible array member '%s' not at the end of struct",
4908 get_tok_str(v
, NULL
));
4914 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4916 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4917 expect("identifier");
4919 int v
= btype
.ref
->v
;
4920 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4921 if (tcc_state
->ms_extensions
== 0)
4922 expect("identifier");
4926 if (type_size(&type1
, &align
) < 0) {
4927 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4930 tcc_error("field '%s' has incomplete type",
4931 get_tok_str(v
, NULL
));
4933 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4934 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4935 (type1
.t
& VT_STORAGE
))
4936 tcc_error("invalid type for '%s'",
4937 get_tok_str(v
, NULL
));
4941 bit_size
= expr_const();
4942 /* XXX: handle v = 0 case for messages */
4944 tcc_error("negative width in bit-field '%s'",
4945 get_tok_str(v
, NULL
));
4946 if (v
&& bit_size
== 0)
4947 tcc_error("zero width for bit-field '%s'",
4948 get_tok_str(v
, NULL
));
4949 parse_attribute(&ad1
);
4951 size
= type_size(&type1
, &align
);
4952 if (bit_size
>= 0) {
4953 bt
= type1
.t
& VT_BTYPE
;
4959 tcc_error("bitfields must have scalar type");
4961 if (bit_size
> bsize
) {
4962 tcc_error("width of '%s' exceeds its type",
4963 get_tok_str(v
, NULL
));
4964 } else if (bit_size
== bsize
4965 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4966 /* no need for bit fields */
4968 } else if (bit_size
== 64) {
4969 tcc_error("field width 64 not implemented");
4971 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4973 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4976 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4977 /* Remember we've seen a real field to check
4978 for placement of flexible array member. */
4981 /* If member is a struct or bit-field, enforce
4982 placing into the struct (as anonymous). */
4984 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4989 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4994 if (tok
== ';' || tok
== TOK_EOF
)
5001 parse_attribute(&ad
);
5002 if (ad
.cleanup_func
) {
5003 tcc_warning("attribute '__cleanup__' ignored on type");
5005 check_fields(type
, 1);
5006 check_fields(type
, 0);
5007 struct_layout(type
, &ad
);
5012 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
5014 merge_symattr(&ad
->a
, &s
->a
);
5015 merge_funcattr(&ad
->f
, &s
->f
);
5018 /* Add type qualifiers to a type. If the type is an array then the qualifiers
5019 are added to the element type, copied because it could be a typedef. */
5020 static void parse_btype_qualify(CType
*type
, int qualifiers
)
5022 while (type
->t
& VT_ARRAY
) {
5023 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
5024 type
= &type
->ref
->type
;
5026 type
->t
|= qualifiers
;
5029 /* return 0 if no type declaration. otherwise, return the basic type
5032 static int parse_btype(CType
*type
, AttributeDef
*ad
)
5034 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
5038 memset(ad
, 0, sizeof(AttributeDef
));
5048 /* currently, we really ignore extension */
5058 if (u
== VT_SHORT
|| u
== VT_LONG
) {
5059 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
5060 tmbt
: tcc_error("too many basic types");
5063 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
5068 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
5085 memset(&ad1
, 0, sizeof(AttributeDef
));
5086 if (parse_btype(&type1
, &ad1
)) {
5087 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
5089 n
= 1 << (ad1
.a
.aligned
- 1);
5091 type_size(&type1
, &n
);
5094 if (n
<= 0 || (n
& (n
- 1)) != 0)
5095 tcc_error("alignment must be a positive power of two");
5098 ad
->a
.aligned
= exact_log2p1(n
);
5102 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
5103 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
5104 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
5105 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
5112 #ifdef TCC_TARGET_ARM64
5114 /* GCC's __uint128_t appears in some Linux header files. Make it a
5115 synonym for long double to get the size and alignment right. */
5126 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
5127 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
5135 struct_decl(&type1
, VT_ENUM
);
5138 type
->ref
= type1
.ref
;
5141 struct_decl(&type1
, VT_STRUCT
);
5144 struct_decl(&type1
, VT_UNION
);
5147 /* type modifiers */
5151 parse_btype_qualify(type
, VT_ATOMIC
);
5154 parse_expr_type(&type1
);
5155 /* remove all storage modifiers except typedef */
5156 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
5158 sym_to_attr(ad
, type1
.ref
);
5166 parse_btype_qualify(type
, VT_CONSTANT
);
5174 parse_btype_qualify(type
, VT_VOLATILE
);
5181 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
5182 tcc_error("signed and unsigned modifier");
5195 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
5196 tcc_error("signed and unsigned modifier");
5197 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
5213 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
5214 tcc_error("multiple storage classes");
5226 ad
->f
.func_noreturn
= 1;
5228 /* GNUC attribute */
5229 case TOK_ATTRIBUTE1
:
5230 case TOK_ATTRIBUTE2
:
5231 parse_attribute(ad
);
5232 if (ad
->attr_mode
) {
5233 u
= ad
->attr_mode
-1;
5234 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
5242 parse_expr_type(&type1
);
5243 /* remove all storage modifiers except typedef */
5244 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
5246 sym_to_attr(ad
, type1
.ref
);
5252 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
5256 if (tok
== ':' && !in_generic
) {
5257 /* ignore if it's a label */
5262 t
&= ~(VT_BTYPE
|VT_LONG
);
5263 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
5264 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
5265 type
->ref
= s
->type
.ref
;
5267 parse_btype_qualify(type
, t
);
5269 /* get attributes from typedef */
5278 if (tcc_state
->char_is_unsigned
) {
5279 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
5282 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
5283 bt
= t
& (VT_BTYPE
|VT_LONG
);
5285 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
5286 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5287 if (bt
== VT_LDOUBLE
)
5288 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
5294 /* convert a function parameter type (array to pointer and function to
5295 function pointer) */
5296 static inline void convert_parameter_type(CType
*pt
)
5298 /* remove const and volatile qualifiers (XXX: const could be used
5299 to indicate a const function parameter */
5300 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5301 /* array must be transformed to pointer according to ANSI C */
5303 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
5308 ST_FUNC
void parse_asm_str(CString
*astr
)
5311 parse_mult_str(astr
, "string constant");
5314 /* Parse an asm label and return the token */
5315 static int asm_label_instr(void)
5321 parse_asm_str(&astr
);
5324 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
5326 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
5331 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
5333 int n
, l
, t1
, arg_size
, align
, unused_align
;
5334 Sym
**plast
, *s
, *first
;
5337 TokenString
*vla_array_tok
= NULL
;
5338 int *vla_array_str
= NULL
;
5341 /* function type, or recursive declarator (return if so) */
5343 if (TYPE_DIRECT
== (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)))
5347 else if (parse_btype(&pt
, &ad1
))
5349 else if (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)) {
5350 merge_attr (ad
, &ad1
);
5361 /* read param name and compute offset */
5362 if (l
!= FUNC_OLD
) {
5363 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
5365 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
| TYPE_PARAM
);
5366 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
5367 tcc_error("parameter declared as void");
5372 pt
.t
= VT_VOID
; /* invalid type */
5377 expect("identifier");
5378 convert_parameter_type(&pt
);
5379 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
5380 s
= sym_push(n
, &pt
, 0, 0);
5386 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
5391 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
5392 tcc_error("invalid type");
5395 /* if no parameters, then old type prototype */
5398 /* remove parameter symbols from token table, keep on stack */
5400 sym_pop(local_stack
? &local_stack
: &global_stack
, first
->prev
, 1);
5401 for (s
= first
; s
; s
= s
->next
)
5405 /* NOTE: const is ignored in returned type as it has a special
5406 meaning in gcc / C++ */
5407 type
->t
&= ~VT_CONSTANT
;
5408 /* some ancient pre-K&R C allows a function to return an array
5409 and the array brackets to be put after the arguments, such
5410 that "int c()[]" means something like "int[] c()" */
5413 skip(']'); /* only handle simple "[]" */
5416 /* we push a anonymous symbol which will contain the function prototype */
5417 ad
->f
.func_args
= arg_size
;
5418 ad
->f
.func_type
= l
;
5419 s
= sym_push(SYM_FIELD
, type
, 0, 0);
5425 } else if (tok
== '[') {
5426 int saved_nocode_wanted
= nocode_wanted
;
5427 /* array definition */
5431 if (td
& TYPE_PARAM
) while (1) {
5432 /* XXX The optional type-quals and static should only be accepted
5433 in parameter decls. The '*' as well, and then even only
5434 in prototypes (not function defs). */
5436 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5449 /* Code generation is not done now but has to be done
5450 at start of function. Save code here for later use. */
5452 vla_array_tok
= tok_str_alloc();
5461 tok_str_add_tok(vla_array_tok
);
5465 tok_str_add(vla_array_tok
, -1);
5466 tok_str_add(vla_array_tok
, 0);
5467 vla_array_str
= vla_array_tok
->str
;
5468 begin_macro(vla_array_tok
, 2);
5477 } else if (tok
!= ']') {
5478 if (!local_stack
|| (storage
& VT_STATIC
))
5479 vpushi(expr_const());
5481 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5482 length must always be evaluated, even under nocode_wanted,
5483 so that its size slot is initialized (e.g. under sizeof
5489 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5492 tcc_error("invalid array size");
5494 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5495 tcc_error("size of variable length array should be an integer");
5501 /* parse next post type */
5502 post_type(type
, ad
, storage
, (td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
)) | TYPE_NEST
);
5504 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5505 tcc_error("declaration of an array of functions");
5506 if ((type
->t
& VT_BTYPE
) == VT_VOID
5507 || type_size(type
, &unused_align
) < 0)
5508 tcc_error("declaration of an array of incomplete type elements");
5510 t1
|= type
->t
& VT_VLA
;
5515 tcc_error("need explicit inner array size in VLAs");
5518 loc
-= type_size(&int_type
, &align
);
5522 vpush_type_size(type
, &align
);
5524 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5531 nocode_wanted
= saved_nocode_wanted
;
5533 /* we push an anonymous symbol which will contain the array
5535 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5536 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5538 if (vla_array_str
) {
5540 s
->vla_array_str
= vla_array_str
;
5542 tok_str_free_str(vla_array_str
);
5548 /* Parse a type declarator (except basic type), and return the type
5549 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5550 expected. 'type' should contain the basic type. 'ad' is the
5551 attribute definition of the basic type. It can be modified by
5552 type_decl(). If this (possibly abstract) declarator is a pointer chain
5553 it returns the innermost pointed to type (equals *type, but is a different
5554 pointer), otherwise returns type itself, that's used for recursive calls. */
5555 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5558 int qualifiers
, storage
;
5560 /* recursive type, remove storage bits first, apply them later again */
5561 storage
= type
->t
& VT_STORAGE
;
5562 type
->t
&= ~VT_STORAGE
;
5565 while (tok
== '*') {
5571 qualifiers
|= VT_ATOMIC
;
5576 qualifiers
|= VT_CONSTANT
;
5581 qualifiers
|= VT_VOLATILE
;
5587 /* XXX: clarify attribute handling */
5588 case TOK_ATTRIBUTE1
:
5589 case TOK_ATTRIBUTE2
:
5590 parse_attribute(ad
);
5594 type
->t
|= qualifiers
;
5596 /* innermost pointed to type is the one for the first derivation */
5597 ret
= pointed_type(type
);
5601 /* This is possibly a parameter type list for abstract declarators
5602 ('int ()'), use post_type for testing this. */
5603 if (!post_type(type
, ad
, 0, td
)) {
5604 /* It's not, so it's a nested declarator, and the post operations
5605 apply to the innermost pointed to type (if any). */
5606 /* XXX: this is not correct to modify 'ad' at this point, but
5607 the syntax is not clear */
5608 parse_attribute(ad
);
5609 post
= type_decl(type
, ad
, v
, td
);
5613 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5614 /* type identifier */
5619 if (!(td
& TYPE_ABSTRACT
))
5620 expect("identifier");
5623 post_type(post
, ad
, storage
, td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5624 parse_attribute(ad
);
5629 /* indirection with full error checking and bound check */
5630 ST_FUNC
void indir(void)
5632 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5633 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5637 if (vtop
->r
& VT_LVAL
)
5639 vtop
->type
= *pointed_type(&vtop
->type
);
5640 /* Arrays and functions are never lvalues */
5641 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5642 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5644 /* if bound checking, the referenced pointer must be checked */
5645 #ifdef CONFIG_TCC_BCHECK
5646 if (tcc_state
->do_bounds_check
)
5647 vtop
->r
|= VT_MUSTBOUND
;
5652 /* pass a parameter to a function and do type checking and casting */
5653 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5658 func_type
= func
->f
.func_type
;
5659 if (func_type
== FUNC_OLD
||
5660 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5661 /* default casting : only need to convert float to double */
5662 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5663 gen_cast_s(VT_DOUBLE
);
5664 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5665 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5666 type
.ref
= vtop
->type
.ref
;
5668 } else if (vtop
->r
& VT_MUSTCAST
) {
5669 force_charshort_cast();
5671 } else if (arg
== NULL
) {
5672 tcc_error("too many arguments to function");
5675 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5676 gen_assign_cast(&type
);
5680 /* parse an expression and return its type without any side effect. */
5681 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5690 /* parse an expression of the form '(type)' or '(expr)' and return its
5692 static void parse_expr_type(CType
*type
)
5698 if (parse_btype(type
, &ad
)) {
5699 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5701 expr_type(type
, gexpr
);
5706 static void parse_type(CType
*type
)
5711 if (!parse_btype(type
, &ad
)) {
5714 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5717 static void parse_builtin_params(int nc
, const char *args
)
5726 while ((c
= *args
++)) {
5741 type
.t
= VT_CONSTANT
;
5747 type
.t
= VT_CONSTANT
;
5749 type
.t
|= char_type
.t
;
5761 gen_assign_cast(&type
);
5768 static void parse_atomic(int atok
)
5770 int size
, align
, arg
;
5771 CType
*atom
, *atom_ptr
, ct
= {0};
5773 static const char *const templates
[] = {
5775 * Each entry consists of callback and function template.
5776 * The template represents argument types and return type.
5778 * ? void (return-only)
5781 * A read-only atomic
5782 * p pointer to memory
5787 /* keep in order of appearance in tcctok.h: */
5788 /* __atomic_store */ "avm.?",
5789 /* __atomic_load */ "Am.v",
5790 /* __atomic_exchange */ "avm.v",
5791 /* __atomic_compare_exchange */ "apvbmm.b",
5792 /* __atomic_fetch_add */ "avm.v",
5793 /* __atomic_fetch_sub */ "avm.v",
5794 /* __atomic_fetch_or */ "avm.v",
5795 /* __atomic_fetch_xor */ "avm.v",
5796 /* __atomic_fetch_and */ "avm.v"
5798 const char *template = templates
[(atok
- TOK___atomic_store
)];
5800 atom
= atom_ptr
= NULL
;
5801 size
= 0; /* pacify compiler */
5806 switch (template[arg
]) {
5809 atom_ptr
= &vtop
->type
;
5810 if ((atom_ptr
->t
& VT_BTYPE
) != VT_PTR
)
5812 atom
= pointed_type(atom_ptr
);
5813 size
= type_size(atom
, &align
);
5815 || (size
& (size
- 1))
5816 || (atok
> TOK___atomic_compare_exchange
5817 && (0 == btype_size(atom
->t
& VT_BTYPE
)
5818 || (atom
->t
& VT_BTYPE
) == VT_PTR
)))
5819 expect("integral or integer-sized pointer target type");
5820 /* GCC does not care either: */
5821 /* if (!(atom->t & VT_ATOMIC))
5822 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5826 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
5827 || type_size(pointed_type(&vtop
->type
), &align
) != size
)
5828 tcc_error("pointer target type mismatch in argument %d", arg
+ 1);
5829 gen_assign_cast(atom_ptr
);
5832 gen_assign_cast(atom
);
5835 gen_assign_cast(&int_type
);
5839 gen_assign_cast(&ct
);
5842 if ('.' == template[++arg
])
5849 switch (template[arg
+ 1]) {
5858 sprintf(buf
, "%s_%d", get_tok_str(atok
, 0), size
);
5859 vpush_helper_func(tok_alloc_const(buf
));
5864 PUT_R_RET(vtop
, ct
.t
);
5865 if (ct
.t
== VT_BOOL
) {
5867 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5869 vtop
->type
.t
= VT_INT
;
5874 ST_FUNC
void unary(void)
5876 int n
, t
, align
, size
, r
, sizeof_caller
;
5881 /* generate line number info */
5883 tcc_debug_line(tcc_state
), tcc_tcov_check_line (1);
5885 sizeof_caller
= in_sizeof
;
5888 /* XXX: GCC 2.95.3 does not generate a table although it should be
5896 #ifdef TCC_TARGET_PE
5897 t
= VT_SHORT
|VT_UNSIGNED
;
5905 vsetc(&type
, VT_CONST
, &tokc
);
5909 t
= VT_INT
| VT_UNSIGNED
;
5915 t
= VT_LLONG
| VT_UNSIGNED
;
5927 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5930 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5932 case TOK___FUNCTION__
:
5934 goto tok_identifier
;
5940 /* special function name identifier */
5941 len
= strlen(funcname
) + 1;
5942 /* generate char[len] type */
5943 type
.t
= char_type
.t
;
5944 if (tcc_state
->warn_write_strings
& WARN_ON
)
5945 type
.t
|= VT_CONSTANT
;
5949 sec
= rodata_section
;
5950 vpush_ref(&type
, sec
, sec
->data_offset
, len
);
5952 memcpy(section_ptr_add(sec
, len
), funcname
, len
);
5957 #ifdef TCC_TARGET_PE
5958 t
= VT_SHORT
| VT_UNSIGNED
;
5964 /* string parsing */
5967 if (tcc_state
->warn_write_strings
& WARN_ON
)
5972 memset(&ad
, 0, sizeof(AttributeDef
));
5973 ad
.section
= rodata_section
;
5974 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5979 if (parse_btype(&type
, &ad
)) {
5980 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5982 /* check ISOC99 compound literal */
5984 /* data is allocated locally by default */
5989 /* all except arrays are lvalues */
5990 if (!(type
.t
& VT_ARRAY
))
5992 memset(&ad
, 0, sizeof(AttributeDef
));
5993 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5995 if (sizeof_caller
) {
6002 } else if (tok
== '{') {
6003 int saved_nocode_wanted
= nocode_wanted
;
6004 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
6006 if (0 == local_scope
)
6007 tcc_error("statement expression outside of function");
6008 /* save all registers */
6010 /* statement expression : we do not accept break/continue
6011 inside as GCC does. We do retain the nocode_wanted state,
6012 as statement expressions can't ever be entered from the
6013 outside, so any reactivation of code emission (from labels
6014 or loop heads) can be disabled again after the end of it. */
6016 /* or'ing to keep however possible CODE_OFF() from e.g. "return 0;"
6017 in the statement expression */
6018 nocode_wanted
|= saved_nocode_wanted
;
6033 /* functions names must be treated as function pointers,
6034 except for unary '&' and sizeof. Since we consider that
6035 functions are not lvalues, we only have to handle it
6036 there and in function calls. */
6037 /* arrays can also be used although they are not lvalues */
6038 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
6039 !(vtop
->type
.t
& VT_ARRAY
))
6042 vtop
->sym
->a
.addrtaken
= 1;
6043 mk_pointer(&vtop
->type
);
6049 gen_test_zero(TOK_EQ
);
6060 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
6061 tcc_error("pointer not accepted for unary plus");
6062 /* In order to force cast, we add zero, except for floating point
6063 where we really need an noop (otherwise -0.0 will be transformed
6065 if (!is_float(vtop
->type
.t
)) {
6077 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
6078 if (t
== TOK_SIZEOF
) {
6079 vpush_type_size(&type
, &align
);
6080 gen_cast_s(VT_SIZE_T
);
6082 type_size(&type
, &align
);
6084 if (vtop
[1].r
& VT_SYM
)
6085 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
6086 if (s
&& s
->a
.aligned
)
6087 align
= 1 << (s
->a
.aligned
- 1);
6092 case TOK_builtin_expect
:
6093 /* __builtin_expect is a no-op for now */
6094 parse_builtin_params(0, "ee");
6097 case TOK_builtin_types_compatible_p
:
6098 parse_builtin_params(0, "tt");
6099 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
6100 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
6101 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
6105 case TOK_builtin_choose_expr
:
6132 case TOK_builtin_constant_p
:
6133 parse_builtin_params(1, "e");
6134 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6135 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
6139 case TOK_builtin_frame_address
:
6140 case TOK_builtin_return_address
:
6146 if (tok
!= TOK_CINT
) {
6147 tcc_error("%s only takes positive integers",
6148 tok1
== TOK_builtin_return_address
?
6149 "__builtin_return_address" :
6150 "__builtin_frame_address");
6152 level
= (uint32_t)tokc
.i
;
6157 vset(&type
, VT_LOCAL
, 0); /* local frame */
6159 #ifdef TCC_TARGET_RISCV64
6163 mk_pointer(&vtop
->type
);
6164 indir(); /* -> parent frame */
6166 if (tok1
== TOK_builtin_return_address
) {
6167 // assume return address is just above frame pointer on stack
6168 #ifdef TCC_TARGET_ARM
6171 #elif defined TCC_TARGET_RISCV64
6178 mk_pointer(&vtop
->type
);
6183 #ifdef TCC_TARGET_RISCV64
6184 case TOK_builtin_va_start
:
6185 parse_builtin_params(0, "ee");
6186 r
= vtop
->r
& VT_VALMASK
;
6190 tcc_error("__builtin_va_start expects a local variable");
6195 #ifdef TCC_TARGET_X86_64
6196 #ifdef TCC_TARGET_PE
6197 case TOK_builtin_va_start
:
6198 parse_builtin_params(0, "ee");
6199 r
= vtop
->r
& VT_VALMASK
;
6203 tcc_error("__builtin_va_start expects a local variable");
6205 vtop
->type
= char_pointer_type
;
6210 case TOK_builtin_va_arg_types
:
6211 parse_builtin_params(0, "t");
6212 vpushi(classify_x86_64_va_arg(&vtop
->type
));
6219 #ifdef TCC_TARGET_ARM64
6220 case TOK_builtin_va_start
: {
6221 parse_builtin_params(0, "ee");
6225 vtop
->type
.t
= VT_VOID
;
6228 case TOK_builtin_va_arg
: {
6229 parse_builtin_params(0, "et");
6237 case TOK___arm64_clear_cache
: {
6238 parse_builtin_params(0, "ee");
6241 vtop
->type
.t
= VT_VOID
;
6246 /* atomic operations */
6247 case TOK___atomic_store
:
6248 case TOK___atomic_load
:
6249 case TOK___atomic_exchange
:
6250 case TOK___atomic_compare_exchange
:
6251 case TOK___atomic_fetch_add
:
6252 case TOK___atomic_fetch_sub
:
6253 case TOK___atomic_fetch_or
:
6254 case TOK___atomic_fetch_xor
:
6255 case TOK___atomic_fetch_and
:
6259 /* pre operations */
6270 if (is_float(vtop
->type
.t
)) {
6280 goto tok_identifier
;
6282 /* allow to take the address of a label */
6283 if (tok
< TOK_UIDENT
)
6284 expect("label identifier");
6285 s
= label_find(tok
);
6287 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6289 if (s
->r
== LABEL_DECLARED
)
6290 s
->r
= LABEL_FORWARD
;
6293 s
->type
.t
= VT_VOID
;
6294 mk_pointer(&s
->type
);
6295 s
->type
.t
|= VT_STATIC
;
6297 vpushsym(&s
->type
, s
);
6303 CType controlling_type
;
6304 int has_default
= 0;
6307 TokenString
*str
= NULL
;
6308 int saved_const_wanted
= const_wanted
;
6313 expr_type(&controlling_type
, expr_eq
);
6314 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
6315 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
6316 mk_pointer(&controlling_type
);
6317 const_wanted
= saved_const_wanted
;
6321 if (tok
== TOK_DEFAULT
) {
6323 tcc_error("too many 'default'");
6329 AttributeDef ad_tmp
;
6334 parse_btype(&cur_type
, &ad_tmp
);
6337 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
6338 if (compare_types(&controlling_type
, &cur_type
, 0)) {
6340 tcc_error("type match twice");
6350 skip_or_save_block(&str
);
6352 skip_or_save_block(NULL
);
6359 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
6360 tcc_error("type '%s' does not match any association", buf
);
6362 begin_macro(str
, 1);
6371 // special qnan , snan and infinity values
6376 vtop
->type
.t
= VT_FLOAT
;
6381 goto special_math_val
;
6384 goto special_math_val
;
6391 expect("identifier");
6393 if (!s
|| IS_ASM_SYM(s
)) {
6394 const char *name
= get_tok_str(t
, NULL
);
6396 tcc_error("'%s' undeclared", name
);
6397 /* for simple function calls, we tolerate undeclared
6398 external reference to int() function */
6399 tcc_warning_c(warn_implicit_function_declaration
)(
6400 "implicit declaration of function '%s'", name
);
6401 s
= external_global_sym(t
, &func_old_type
);
6405 /* A symbol that has a register is a local register variable,
6406 which starts out as VT_LOCAL value. */
6407 if ((r
& VT_VALMASK
) < VT_CONST
)
6408 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
6410 vset(&s
->type
, r
, s
->c
);
6411 /* Point to s as backpointer (even without r&VT_SYM).
6412 Will be used by at least the x86 inline asm parser for
6418 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
6419 vtop
->c
.i
= s
->enum_val
;
6424 /* post operations */
6426 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
6429 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
6430 int qualifiers
, cumofs
= 0;
6432 if (tok
== TOK_ARROW
)
6434 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
6437 /* expect pointer on structure */
6438 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
6439 expect("struct or union");
6440 if (tok
== TOK_CDOUBLE
)
6441 expect("field name");
6443 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
6444 expect("field name");
6445 s
= find_field(&vtop
->type
, tok
, &cumofs
);
6447 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
6448 /* add field offset to pointer */
6449 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
6450 vpushi(cumofs
+ s
->c
);
6452 /* change type to field type, and set to lvalue */
6453 vtop
->type
= s
->type
;
6454 vtop
->type
.t
|= qualifiers
;
6455 /* an array is never an lvalue */
6456 if (!(vtop
->type
.t
& VT_ARRAY
)) {
6458 #ifdef CONFIG_TCC_BCHECK
6459 /* if bound checking, the referenced pointer must be checked */
6460 if (tcc_state
->do_bounds_check
)
6461 vtop
->r
|= VT_MUSTBOUND
;
6465 } else if (tok
== '[') {
6471 } else if (tok
== '(') {
6474 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
6477 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
6478 /* pointer test (no array accepted) */
6479 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
6480 vtop
->type
= *pointed_type(&vtop
->type
);
6481 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6485 expect("function pointer");
6488 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
6490 /* get return type */
6493 sa
= s
->next
; /* first parameter */
6494 nb_args
= regsize
= 0;
6496 /* compute first implicit argument if a structure is returned */
6497 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
6498 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
6499 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6500 &ret_align
, ®size
);
6501 if (ret_nregs
<= 0) {
6502 /* get some space for the returned structure */
6503 size
= type_size(&s
->type
, &align
);
6504 #ifdef TCC_TARGET_ARM64
6505 /* On arm64, a small struct is return in registers.
6506 It is much easier to write it to memory if we know
6507 that we are allowed to write some extra bytes, so
6508 round the allocated space up to a power of 2: */
6510 while (size
& (size
- 1))
6511 size
= (size
| (size
- 1)) + 1;
6513 loc
= (loc
- size
) & -align
;
6515 ret
.r
= VT_LOCAL
| VT_LVAL
;
6516 /* pass it as 'int' to avoid structure arg passing
6518 vseti(VT_LOCAL
, loc
);
6519 #ifdef CONFIG_TCC_BCHECK
6520 if (tcc_state
->do_bounds_check
)
6534 if (ret_nregs
> 0) {
6535 /* return in register */
6537 PUT_R_RET(&ret
, ret
.type
.t
);
6542 gfunc_param_typed(s
, sa
);
6552 tcc_error("too few arguments to function");
6554 gfunc_call(nb_args
);
6556 if (ret_nregs
< 0) {
6557 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6558 #ifdef TCC_TARGET_RISCV64
6559 arch_transfer_ret_regs(1);
6563 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6564 vsetc(&ret
.type
, r
, &ret
.c
);
6565 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6568 /* handle packed struct return */
6569 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6572 size
= type_size(&s
->type
, &align
);
6573 /* We're writing whole regs often, make sure there's enough
6574 space. Assume register size is power of 2. */
6575 if (regsize
> align
)
6577 loc
= (loc
- size
) & -align
;
6581 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6585 if (--ret_nregs
== 0)
6589 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6592 /* Promote char/short return values. This is matters only
6593 for calling function that were not compiled by TCC and
6594 only on some architectures. For those where it doesn't
6595 matter we expect things to be already promoted to int,
6597 t
= s
->type
.t
& VT_BTYPE
;
6598 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6600 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6602 vtop
->type
.t
= VT_INT
;
6606 if (s
->f
.func_noreturn
) {
6608 tcc_tcov_block_end (tcov_data
.line
);
6617 #ifndef precedence_parser /* original top-down parser */
6619 static void expr_prod(void)
6624 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6631 static void expr_sum(void)
6636 while ((t
= tok
) == '+' || t
== '-') {
6643 static void expr_shift(void)
6648 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6655 static void expr_cmp(void)
6660 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6661 t
== TOK_ULT
|| t
== TOK_UGE
) {
6668 static void expr_cmpeq(void)
6673 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6680 static void expr_and(void)
6683 while (tok
== '&') {
6690 static void expr_xor(void)
6693 while (tok
== '^') {
6700 static void expr_or(void)
6703 while (tok
== '|') {
6710 static void expr_landor(int op
);
6712 static void expr_land(void)
6715 if (tok
== TOK_LAND
)
6719 static void expr_lor(void)
6726 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6727 #else /* defined precedence_parser */
6728 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6729 # define expr_lor() unary(), expr_infix(1)
6731 static int precedence(int tok
)
6734 case TOK_LOR
: return 1;
6735 case TOK_LAND
: return 2;
6739 case TOK_EQ
: case TOK_NE
: return 6;
6740 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6741 case TOK_SHL
: case TOK_SAR
: return 8;
6742 case '+': case '-': return 9;
6743 case '*': case '/': case '%': return 10;
6745 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6750 static unsigned char prec
[256];
6751 static void init_prec(void)
6754 for (i
= 0; i
< 256; i
++)
6755 prec
[i
] = precedence(i
);
6757 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6759 static void expr_landor(int op
);
6761 static void expr_infix(int p
)
6764 while ((p2
= precedence(t
)) >= p
) {
6765 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6770 if (precedence(tok
) > p2
)
6779 /* Assuming vtop is a value used in a conditional context
6780 (i.e. compared with zero) return 0 if it's false, 1 if
6781 true and -1 if it can't be statically determined. */
6782 static int condition_3way(void)
6785 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6786 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6788 gen_cast_s(VT_BOOL
);
6795 static void expr_landor(int op
)
6797 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6799 c
= f
? i
: condition_3way();
6801 save_regs(1), cc
= 0;
6803 nocode_wanted
++, f
= 1;
6811 expr_landor_next(op
);
6823 static int is_cond_bool(SValue
*sv
)
6825 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6826 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6827 return (unsigned)sv
->c
.i
< 2;
6828 if (sv
->r
== VT_CMP
)
6833 static void expr_cond(void)
6835 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6843 c
= condition_3way();
6844 g
= (tok
== ':' && gnu_ext
);
6854 /* needed to avoid having different registers saved in
6861 ncw_prev
= nocode_wanted
;
6867 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6868 mk_pointer(&vtop
->type
);
6869 sv
= *vtop
; /* save value to handle it later */
6870 vtop
--; /* no vpop so that FP stack is not flushed */
6880 nocode_wanted
= ncw_prev
;
6886 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6887 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6888 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6889 this code jumps directly to the if's then/else branches. */
6894 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6897 nocode_wanted
= ncw_prev
;
6898 // tcc_warning("two conditions expr_cond");
6902 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6903 mk_pointer(&vtop
->type
);
6905 /* cast operands to correct type according to ISOC rules */
6906 if (!combine_types(&type
, &sv
, vtop
, '?'))
6907 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6908 "type mismatch in conditional expression (have '%s' and '%s')");
6909 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6910 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6911 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6913 /* now we convert second operand */
6917 mk_pointer(&vtop
->type
);
6919 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6923 rc
= RC_TYPE(type
.t
);
6924 /* for long longs, we use fixed registers to avoid having
6925 to handle a complicated move */
6926 if (USING_TWO_WORDS(type
.t
))
6927 rc
= RC_RET(type
.t
);
6935 nocode_wanted
= ncw_prev
;
6937 /* this is horrible, but we must also convert first
6943 mk_pointer(&vtop
->type
);
6945 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6951 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6961 static void expr_eq(void)
6966 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6974 gen_op(TOK_ASSIGN_OP(t
));
6980 ST_FUNC
void gexpr(void)
6991 /* parse a constant expression and return value in vtop. */
6992 static void expr_const1(void)
6995 nocode_wanted
+= unevalmask
+ 1;
6997 nocode_wanted
-= unevalmask
+ 1;
7001 /* parse an integer constant and return its value. */
7002 static inline int64_t expr_const64(void)
7006 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
7007 expect("constant expression");
7013 /* parse an integer constant and return its value.
7014 Complain if it doesn't fit 32bit (signed or unsigned). */
7015 ST_FUNC
int expr_const(void)
7018 int64_t wc
= expr_const64();
7020 if (c
!= wc
&& (unsigned)c
!= wc
)
7021 tcc_error("constant exceeds 32 bit");
7025 /* ------------------------------------------------------------------------- */
7026 /* return from function */
7028 #ifndef TCC_TARGET_ARM64
7029 static void gfunc_return(CType
*func_type
)
7031 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7032 CType type
, ret_type
;
7033 int ret_align
, ret_nregs
, regsize
;
7034 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
7035 &ret_align
, ®size
);
7036 if (ret_nregs
< 0) {
7037 #ifdef TCC_TARGET_RISCV64
7038 arch_transfer_ret_regs(0);
7040 } else if (0 == ret_nregs
) {
7041 /* if returning structure, must copy it to implicit
7042 first pointer arg location */
7045 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
7048 /* copy structure value to pointer */
7051 /* returning structure packed into registers */
7052 int size
, addr
, align
, rc
;
7053 size
= type_size(func_type
,&align
);
7054 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
7055 (vtop
->c
.i
& (ret_align
-1)))
7056 && (align
& (ret_align
-1))) {
7057 loc
= (loc
- size
) & -ret_align
;
7060 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
7064 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
7066 vtop
->type
= ret_type
;
7067 rc
= RC_RET(ret_type
.t
);
7075 if (--ret_nregs
== 0)
7077 /* We assume that when a structure is returned in multiple
7078 registers, their classes are consecutive values of the
7081 vtop
->c
.i
+= regsize
;
7086 gv(RC_RET(func_type
->t
));
7088 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
7092 static void check_func_return(void)
7094 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
7096 if (!strcmp (funcname
, "main")
7097 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
7098 /* main returns 0 by default */
7100 gen_assign_cast(&func_vt
);
7101 gfunc_return(&func_vt
);
7103 tcc_warning("function might return no value: '%s'", funcname
);
7107 /* ------------------------------------------------------------------------- */
7110 static int case_cmpi(const void *pa
, const void *pb
)
7112 int64_t a
= (*(struct case_t
**) pa
)->v1
;
7113 int64_t b
= (*(struct case_t
**) pb
)->v1
;
7114 return a
< b
? -1 : a
> b
;
7117 static int case_cmpu(const void *pa
, const void *pb
)
7119 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
7120 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
7121 return a
< b
? -1 : a
> b
;
7124 static void gtst_addr(int t
, int a
)
7126 gsym_addr(gvtst(0, t
), a
);
7129 static void gcase(struct case_t
**base
, int len
, int *bsym
)
7133 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
7150 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
7152 gcase(base
, len
/2, bsym
);
7156 base
+= e
; len
-= e
;
7166 if (p
->v1
== p
->v2
) {
7168 gtst_addr(0, p
->sym
);
7178 gtst_addr(0, p
->sym
);
7182 *bsym
= gjmp(*bsym
);
7185 /* ------------------------------------------------------------------------- */
7186 /* __attribute__((cleanup(fn))) */
7188 static void try_call_scope_cleanup(Sym
*stop
)
7190 Sym
*cls
= cur_scope
->cl
.s
;
7192 for (; cls
!= stop
; cls
= cls
->ncl
) {
7193 Sym
*fs
= cls
->next
;
7194 Sym
*vs
= cls
->prev_tok
;
7196 vpushsym(&fs
->type
, fs
);
7197 vset(&vs
->type
, vs
->r
, vs
->c
);
7199 mk_pointer(&vtop
->type
);
7205 static void try_call_cleanup_goto(Sym
*cleanupstate
)
7210 if (!cur_scope
->cl
.s
)
7213 /* search NCA of both cleanup chains given parents and initial depth */
7214 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
7215 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
7217 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
7219 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
7222 try_call_scope_cleanup(cc
);
7225 /* call 'func' for each __attribute__((cleanup(func))) */
7226 static void block_cleanup(struct scope
*o
)
7230 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
7231 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
7236 try_call_scope_cleanup(o
->cl
.s
);
7237 pcl
->jnext
= gjmp(0);
7239 goto remove_pending
;
7249 try_call_scope_cleanup(o
->cl
.s
);
7252 /* ------------------------------------------------------------------------- */
7255 static void vla_restore(int loc
)
7258 gen_vla_sp_restore(loc
);
7261 static void vla_leave(struct scope
*o
)
7263 struct scope
*c
= cur_scope
, *v
= NULL
;
7264 for (; c
!= o
&& c
; c
= c
->prev
)
7268 vla_restore(v
->vla
.locorig
);
7271 /* ------------------------------------------------------------------------- */
7274 static void new_scope(struct scope
*o
)
7276 /* copy and link previous scope */
7278 o
->prev
= cur_scope
;
7280 cur_scope
->vla
.num
= 0;
7282 /* record local declaration stack position */
7283 o
->lstk
= local_stack
;
7284 o
->llstk
= local_label_stack
;
7288 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
7291 static void prev_scope(struct scope
*o
, int is_expr
)
7295 if (o
->cl
.s
!= o
->prev
->cl
.s
)
7296 block_cleanup(o
->prev
);
7298 /* pop locally defined labels */
7299 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
7301 /* In the is_expr case (a statement expression is finished here),
7302 vtop might refer to symbols on the local_stack. Either via the
7303 type or via vtop->sym. We can't pop those nor any that in turn
7304 might be referred to. To make it easier we don't roll back
7305 any symbols in that case; some upper level call to block() will
7306 do that. We do have to remove such symbols from the lookup
7307 tables, though. sym_pop will do that. */
7309 /* pop locally defined symbols */
7310 pop_local_syms(o
->lstk
, is_expr
);
7311 cur_scope
= o
->prev
;
7315 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
7318 /* leave a scope via break/continue(/goto) */
7319 static void leave_scope(struct scope
*o
)
7323 try_call_scope_cleanup(o
->cl
.s
);
7327 /* ------------------------------------------------------------------------- */
7328 /* call block from 'for do while' loops */
7330 static void lblock(int *bsym
, int *csym
)
7332 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
7333 int *b
= co
->bsym
, *c
= co
->csym
;
7347 static void block(int is_expr
)
7349 int a
, b
, c
, d
, e
, t
;
7354 /* default return value is (void) */
7356 vtop
->type
.t
= VT_VOID
;
7361 /* If the token carries a value, next() might destroy it. Only with
7362 invalid code such as f(){"123"4;} */
7363 if (TOK_HAS_VALUE(t
))
7368 tcc_tcov_check_line (0), tcc_tcov_block_begin ();
7376 if (tok
== TOK_ELSE
) {
7381 gsym(d
); /* patch else jmp */
7386 } else if (t
== TOK_WHILE
) {
7398 } else if (t
== '{') {
7401 /* handle local labels declarations */
7402 while (tok
== TOK_LABEL
) {
7405 if (tok
< TOK_UIDENT
)
7406 expect("label identifier");
7407 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
7409 } while (tok
== ',');
7413 while (tok
!= '}') {
7422 prev_scope(&o
, is_expr
);
7425 else if (!nocode_wanted
)
7426 check_func_return();
7428 } else if (t
== TOK_RETURN
) {
7429 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
7433 gen_assign_cast(&func_vt
);
7435 if (vtop
->type
.t
!= VT_VOID
)
7436 tcc_warning("void function returns a value");
7440 tcc_warning("'return' with no value");
7443 leave_scope(root_scope
);
7445 gfunc_return(&func_vt
);
7447 /* jump unless last stmt in top-level block */
7448 if (tok
!= '}' || local_scope
!= 1)
7451 tcc_tcov_block_end (tcov_data
.line
);
7454 } else if (t
== TOK_BREAK
) {
7456 if (!cur_scope
->bsym
)
7457 tcc_error("cannot break");
7458 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
7459 leave_scope(cur_switch
->scope
);
7461 leave_scope(loop_scope
);
7462 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
7465 } else if (t
== TOK_CONTINUE
) {
7467 if (!cur_scope
->csym
)
7468 tcc_error("cannot continue");
7469 leave_scope(loop_scope
);
7470 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
7473 } else if (t
== TOK_FOR
) {
7478 /* c99 for-loop init decl? */
7479 if (!decl0(VT_LOCAL
, 1, NULL
)) {
7480 /* no, regular for-loop init expr */
7508 } else if (t
== TOK_DO
) {
7522 } else if (t
== TOK_SWITCH
) {
7523 struct switch_t
*sw
;
7525 sw
= tcc_mallocz(sizeof *sw
);
7527 sw
->scope
= cur_scope
;
7528 sw
->prev
= cur_switch
;
7534 sw
->sv
= *vtop
--; /* save switch value */
7537 b
= gjmp(0); /* jump to first case */
7539 a
= gjmp(a
); /* add implicit break */
7543 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7544 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7546 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7548 for (b
= 1; b
< sw
->n
; b
++)
7549 if (sw
->sv
.type
.t
& VT_UNSIGNED
7550 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7551 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7552 tcc_error("duplicate case value");
7556 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7559 gsym_addr(d
, sw
->def_sym
);
7565 dynarray_reset(&sw
->p
, &sw
->n
);
7566 cur_switch
= sw
->prev
;
7569 } else if (t
== TOK_CASE
) {
7570 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7573 cr
->v1
= cr
->v2
= expr_const64();
7574 if (gnu_ext
&& tok
== TOK_DOTS
) {
7576 cr
->v2
= expr_const64();
7577 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7578 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7579 tcc_warning("empty case range");
7583 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7586 goto block_after_label
;
7588 } else if (t
== TOK_DEFAULT
) {
7591 if (cur_switch
->def_sym
)
7592 tcc_error("too many 'default'");
7594 cur_switch
->def_sym
= gind();
7597 goto block_after_label
;
7599 } else if (t
== TOK_GOTO
) {
7600 if (cur_scope
->vla
.num
)
7601 vla_restore(cur_scope
->vla
.locorig
);
7602 if (tok
== '*' && gnu_ext
) {
7606 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7610 } else if (tok
>= TOK_UIDENT
) {
7611 s
= label_find(tok
);
7612 /* put forward definition if needed */
7614 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7615 else if (s
->r
== LABEL_DECLARED
)
7616 s
->r
= LABEL_FORWARD
;
7618 if (s
->r
& LABEL_FORWARD
) {
7619 /* start new goto chain for cleanups, linked via label->next */
7620 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7621 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7622 pending_gotos
->prev_tok
= s
;
7623 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7624 pending_gotos
->next
= s
;
7626 s
->jnext
= gjmp(s
->jnext
);
7628 try_call_cleanup_goto(s
->cleanupstate
);
7629 gjmp_addr(s
->jnext
);
7634 expect("label identifier");
7638 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7642 if (tok
== ':' && t
>= TOK_UIDENT
) {
7647 if (s
->r
== LABEL_DEFINED
)
7648 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7649 s
->r
= LABEL_DEFINED
;
7651 Sym
*pcl
; /* pending cleanup goto */
7652 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7654 sym_pop(&s
->next
, NULL
, 0);
7658 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7661 s
->cleanupstate
= cur_scope
->cl
.s
;
7664 vla_restore(cur_scope
->vla
.loc
);
7667 /* we accept this, but it is a mistake */
7668 tcc_warning_c(warn_all
)("deprecated use of label at end of compound statement");
7671 /* expression case */
7688 tcc_tcov_check_line (0), tcc_tcov_block_end (0);
7691 /* This skips over a stream of tokens containing balanced {} and ()
7692 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7693 with a '{'). If STR then allocates and stores the skipped tokens
7694 in *STR. This doesn't check if () and {} are nested correctly,
7695 i.e. "({)}" is accepted. */
7696 static void skip_or_save_block(TokenString
**str
)
7698 int braces
= tok
== '{';
7701 *str
= tok_str_alloc();
7703 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
7705 if (tok
== TOK_EOF
) {
7706 if (str
|| level
> 0)
7707 tcc_error("unexpected end of file");
7712 tok_str_add_tok(*str
);
7715 if (t
== '{' || t
== '(') {
7717 } else if (t
== '}' || t
== ')') {
7719 if (level
== 0 && braces
&& t
== '}')
7724 tok_str_add(*str
, -1);
7725 tok_str_add(*str
, 0);
7729 #define EXPR_CONST 1
7732 static void parse_init_elem(int expr_type
)
7734 int saved_global_expr
;
7737 /* compound literals must be allocated globally in this case */
7738 saved_global_expr
= global_expr
;
7741 global_expr
= saved_global_expr
;
7742 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7743 (compound literals). */
7744 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7745 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7746 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7747 #ifdef TCC_TARGET_PE
7748 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7751 tcc_error("initializer element is not constant");
7760 static void init_assert(init_params
*p
, int offset
)
7762 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7763 : !nocode_wanted
&& offset
> p
->local_offset
)
7764 tcc_internal_error("initializer overflow");
7767 #define init_assert(sec, offset)
7770 /* put zeros for variable based init */
7771 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7773 init_assert(p
, c
+ size
);
7775 /* nothing to do because globals are already set to zero */
7777 vpush_helper_func(TOK_memset
);
7779 #ifdef TCC_TARGET_ARM
7791 #define DIF_SIZE_ONLY 2
7792 #define DIF_HAVE_ELEM 4
7795 /* delete relocations for specified range c ... c + size. Unfortunatly
7796 in very special cases, relocations may occur unordered */
7797 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7799 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7800 if (!sec
|| !sec
->reloc
)
7802 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7803 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7804 while (rel
< rel_end
) {
7805 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7806 sec
->reloc
->data_offset
-= sizeof *rel
;
7809 memcpy(rel2
, rel
, sizeof *rel
);
7816 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7818 if (ref
== p
->flex_array_ref
) {
7819 if (index
>= ref
->c
)
7821 } else if (ref
->c
< 0)
7822 tcc_error("flexible array has zero size in this context");
7825 /* t is the array or struct type. c is the array or struct
7826 address. cur_field is the pointer to the current
7827 field, for arrays the 'c' member contains the current start
7828 index. 'flags' is as in decl_initializer.
7829 'al' contains the already initialized length of the
7830 current container (starting at c). This returns the new length of that. */
7831 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7832 Sym
**cur_field
, int flags
, int al
)
7835 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7836 unsigned long corig
= c
;
7841 if (flags
& DIF_HAVE_ELEM
)
7844 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7851 /* NOTE: we only support ranges for last designator */
7852 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7854 if (!(type
->t
& VT_ARRAY
))
7855 expect("array type");
7857 index
= index_last
= expr_const();
7858 if (tok
== TOK_DOTS
&& gnu_ext
) {
7860 index_last
= expr_const();
7864 decl_design_flex(p
, s
, index_last
);
7865 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7866 tcc_error("index exceeds array bounds or range is empty");
7868 (*cur_field
)->c
= index_last
;
7869 type
= pointed_type(type
);
7870 elem_size
= type_size(type
, &align
);
7871 c
+= index
* elem_size
;
7872 nb_elems
= index_last
- index
+ 1;
7879 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7880 expect("struct/union type");
7882 f
= find_field(type
, l
, &cumofs
);
7895 } else if (!gnu_ext
) {
7900 if (type
->t
& VT_ARRAY
) {
7901 index
= (*cur_field
)->c
;
7903 decl_design_flex(p
, s
, index
);
7905 tcc_error("too many initializers");
7906 type
= pointed_type(type
);
7907 elem_size
= type_size(type
, &align
);
7908 c
+= index
* elem_size
;
7911 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7912 *cur_field
= f
= f
->next
;
7914 tcc_error("too many initializers");
7920 if (!elem_size
) /* for structs */
7921 elem_size
= type_size(type
, &align
);
7923 /* Using designators the same element can be initialized more
7924 than once. In that case we need to delete possibly already
7925 existing relocations. */
7926 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7927 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7928 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7931 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7933 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7937 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7938 /* make init_putv/vstore believe it were a struct */
7940 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7944 vpush_ref(type
, p
->sec
, c
, elem_size
);
7946 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7947 for (i
= 1; i
< nb_elems
; i
++) {
7949 init_putv(p
, type
, c
+ elem_size
* i
);
7954 c
+= nb_elems
* elem_size
;
7960 /* store a value or an expression directly in global data or in local array */
7961 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7967 Section
*sec
= p
->sec
;
7971 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7973 size
= type_size(type
, &align
);
7974 if (type
->t
& VT_BITFIELD
)
7975 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7976 init_assert(p
, c
+ size
);
7979 /* XXX: not portable */
7980 /* XXX: generate error if incorrect relocation */
7981 gen_assign_cast(&dtype
);
7982 bt
= type
->t
& VT_BTYPE
;
7984 if ((vtop
->r
& VT_SYM
)
7986 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7987 || (type
->t
& VT_BITFIELD
))
7988 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7990 tcc_error("initializer element is not computable at load time");
7992 if (NODATA_WANTED
) {
7997 ptr
= sec
->data
+ c
;
8000 /* XXX: make code faster ? */
8001 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
8002 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
8003 /* XXX This rejects compound literals like
8004 '(void *){ptr}'. The problem is that '&sym' is
8005 represented the same way, which would be ruled out
8006 by the SYM_FIRST_ANOM check above, but also '"string"'
8007 in 'char *p = "string"' is represented the same
8008 with the type being VT_PTR and the symbol being an
8009 anonymous one. That is, there's no difference in vtop
8010 between '(void *){x}' and '&(void *){x}'. Ignore
8011 pointer typed entities here. Hopefully no real code
8012 will ever use compound literals with scalar type. */
8013 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
8014 /* These come from compound literals, memcpy stuff over. */
8018 esym
= elfsym(vtop
->sym
);
8019 ssec
= tcc_state
->sections
[esym
->st_shndx
];
8020 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
8022 /* We need to copy over all memory contents, and that
8023 includes relocations. Use the fact that relocs are
8024 created it order, so look from the end of relocs
8025 until we hit one before the copied region. */
8026 unsigned long relofs
= ssec
->reloc
->data_offset
;
8027 while (relofs
>= sizeof(*rel
)) {
8028 relofs
-= sizeof(*rel
);
8029 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ relofs
);
8030 if (rel
->r_offset
>= esym
->st_value
+ size
)
8032 if (rel
->r_offset
< esym
->st_value
)
8034 put_elf_reloca(symtab_section
, sec
,
8035 c
+ rel
->r_offset
- esym
->st_value
,
8036 ELFW(R_TYPE
)(rel
->r_info
),
8037 ELFW(R_SYM
)(rel
->r_info
),
8047 if (type
->t
& VT_BITFIELD
) {
8048 int bit_pos
, bit_size
, bits
, n
;
8049 unsigned char *p
, v
, m
;
8050 bit_pos
= BIT_POS(vtop
->type
.t
);
8051 bit_size
= BIT_SIZE(vtop
->type
.t
);
8052 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
8053 bit_pos
&= 7, bits
= 0;
8058 v
= val
>> bits
<< bit_pos
;
8059 m
= ((1 << n
) - 1) << bit_pos
;
8060 *p
= (*p
& ~m
) | (v
& m
);
8061 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
8066 *(char *)ptr
= val
!= 0;
8072 write16le(ptr
, val
);
8075 write32le(ptr
, val
);
8078 write64le(ptr
, val
);
8081 #if defined TCC_IS_NATIVE_387
8082 /* Host and target platform may be different but both have x87.
8083 On windows, tcc does not use VT_LDOUBLE, except when it is a
8084 cross compiler. In this case a mingw gcc as host compiler
8085 comes here with 10-byte long doubles, while msvc or tcc won't.
8086 tcc itself can still translate by asm.
8087 In any case we avoid possibly random bytes 11 and 12.
8089 if (sizeof (long double) >= 10)
8090 memcpy(ptr
, &vtop
->c
.ld
, 10);
8092 else if (sizeof (long double) == sizeof (double))
8093 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
8095 else if (vtop
->c
.ld
== 0.0)
8099 /* For other platforms it should work natively, but may not work
8100 for cross compilers */
8101 if (sizeof(long double) == LDOUBLE_SIZE
)
8102 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
8103 else if (sizeof(double) == LDOUBLE_SIZE
)
8104 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
8105 #ifndef TCC_CROSS_TEST
8107 tcc_error("can't cross compile long double constants");
8112 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
8115 if (vtop
->r
& VT_SYM
)
8116 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
8118 write64le(ptr
, val
);
8121 write32le(ptr
, val
);
8125 write64le(ptr
, val
);
8129 if (vtop
->r
& VT_SYM
)
8130 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
8131 write32le(ptr
, val
);
8135 //tcc_internal_error("unexpected type");
8141 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
8148 /* 't' contains the type and storage info. 'c' is the offset of the
8149 object in section 'sec'. If 'sec' is NULL, it means stack based
8150 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
8151 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
8152 size only evaluation is wanted (only for arrays). */
8153 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
8155 int len
, n
, no_oblock
, i
;
8161 /* generate line number info */
8162 if (debug_modes
&& !p
->sec
)
8163 tcc_debug_line(tcc_state
), tcc_tcov_check_line (1);
8165 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
8166 /* In case of strings we have special handling for arrays, so
8167 don't consume them as initializer value (which would commit them
8168 to some anonymous symbol). */
8169 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
8170 (!(flags
& DIF_SIZE_ONLY
)
8171 /* a struct may be initialized from a struct of same type, as in
8172 struct {int x,y;} a = {1,2}, b = {3,4}, c[] = {a,b};
8173 In that case we need to parse the element in order to check
8174 it for compatibility below */
8175 || (type
->t
& VT_BTYPE
) == VT_STRUCT
)
8177 int ncw_prev
= nocode_wanted
;
8178 if ((flags
& DIF_SIZE_ONLY
) && !p
->sec
)
8180 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
8181 nocode_wanted
= ncw_prev
;
8182 flags
|= DIF_HAVE_ELEM
;
8185 if (type
->t
& VT_ARRAY
) {
8187 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
8195 t1
= pointed_type(type
);
8196 size1
= type_size(t1
, &align1
);
8198 /* only parse strings here if correct type (otherwise: handle
8199 them as ((w)char *) expressions */
8200 if ((tok
== TOK_LSTR
&&
8201 #ifdef TCC_TARGET_PE
8202 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
8204 (t1
->t
& VT_BTYPE
) == VT_INT
8206 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
8208 cstr_reset(&initstr
);
8209 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
8210 tcc_error("unhandled string literal merging");
8211 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8213 initstr
.size
-= size1
;
8215 len
+= tokc
.str
.size
;
8217 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
8219 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
8222 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
8223 && tok
!= TOK_EOF
) {
8224 /* Not a lone literal but part of a bigger expression. */
8225 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
8226 tokc
.str
.size
= initstr
.size
;
8227 tokc
.str
.data
= initstr
.data
;
8231 decl_design_flex(p
, s
, len
);
8232 if (!(flags
& DIF_SIZE_ONLY
)) {
8237 tcc_warning("initializer-string for array is too long");
8238 /* in order to go faster for common case (char
8239 string in global variable, we handle it
8241 if (p
->sec
&& size1
== 1) {
8242 init_assert(p
, c
+ nb
);
8244 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
8248 /* only add trailing zero if enough storage (no
8249 warning in this case since it is standard) */
8250 if (flags
& DIF_CLEAR
)
8253 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
8257 } else if (size1
== 1)
8258 ch
= ((unsigned char *)initstr
.data
)[i
];
8260 ch
= ((nwchar_t
*)initstr
.data
)[i
];
8262 init_putv(p
, t1
, c
+ i
* size1
);
8273 /* zero memory once in advance */
8274 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
8275 init_putz(p
, c
, n
*size1
);
8280 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
8281 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
8282 flags
&= ~DIF_HAVE_ELEM
;
8283 if (type
->t
& VT_ARRAY
) {
8285 /* special test for multi dimensional arrays (may not
8286 be strictly correct if designators are used at the
8288 if (no_oblock
&& len
>= n
*size1
)
8291 if (s
->type
.t
== VT_UNION
)
8295 if (no_oblock
&& f
== NULL
)
8307 } else if ((flags
& DIF_HAVE_ELEM
)
8308 /* Use i_c_parameter_t, to strip toplevel qualifiers.
8309 The source type might have VT_CONSTANT set, which is
8310 of course assignable to non-const elements. */
8311 && is_compatible_unqualified_types(type
, &vtop
->type
)) {
8314 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
8316 if ((flags
& DIF_FIRST
) || tok
== '{') {
8326 } else if (tok
== '{') {
8327 if (flags
& DIF_HAVE_ELEM
)
8330 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
8333 } else one_elem
: if ((flags
& DIF_SIZE_ONLY
)) {
8334 /* If we supported only ISO C we wouldn't have to accept calling
8335 this on anything than an array if DIF_SIZE_ONLY (and even then
8336 only on the outermost level, so no recursion would be needed),
8337 because initializing a flex array member isn't supported.
8338 But GNU C supports it, so we need to recurse even into
8339 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
8340 /* just skip expression */
8341 if (flags
& DIF_HAVE_ELEM
)
8344 skip_or_save_block(NULL
);
8347 if (!(flags
& DIF_HAVE_ELEM
)) {
8348 /* This should happen only when we haven't parsed
8349 the init element above for fear of committing a
8350 string constant to memory too early. */
8351 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
8352 expect("string constant");
8353 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
8355 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
8356 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
8358 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
8362 init_putv(p
, type
, c
);
8366 /* parse an initializer for type 't' if 'has_init' is non zero, and
8367 allocate space in local or global data space ('r' is either
8368 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
8369 variable 'v' of scope 'scope' is declared before initializers
8370 are parsed. If 'v' is zero, then a reference to the new object
8371 is put in the value stack. If 'has_init' is 2, a special parsing
8372 is done to handle string constants. */
8373 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
8374 int has_init
, int v
, int scope
)
8376 int size
, align
, addr
;
8377 TokenString
*init_str
= NULL
;
8380 Sym
*flexible_array
;
8382 int saved_nocode_wanted
= nocode_wanted
;
8383 #ifdef CONFIG_TCC_BCHECK
8384 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
8386 init_params p
= {0};
8388 /* Always allocate static or global variables */
8389 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
8390 nocode_wanted
|= 0x80000000;
8392 flexible_array
= NULL
;
8393 size
= type_size(type
, &align
);
8395 /* exactly one flexible array may be initialized, either the
8396 toplevel array or the last member of the toplevel struct */
8399 /* If the base type itself was an array type of unspecified size
8400 (like in 'typedef int arr[]; arr x = {1};') then we will
8401 overwrite the unknown size by the real one for this decl.
8402 We need to unshare the ref symbol holding that size. */
8403 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
8404 p
.flex_array_ref
= type
->ref
;
8406 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
8407 Sym
*field
= type
->ref
->next
;
8410 field
= field
->next
;
8411 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
8412 flexible_array
= field
;
8413 p
.flex_array_ref
= field
->type
.ref
;
8420 /* If unknown size, do a dry-run 1st pass */
8422 tcc_error("unknown type size");
8423 if (has_init
== 2) {
8424 /* only get strings */
8425 init_str
= tok_str_alloc();
8426 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8427 tok_str_add_tok(init_str
);
8430 tok_str_add(init_str
, -1);
8431 tok_str_add(init_str
, 0);
8433 skip_or_save_block(&init_str
);
8437 begin_macro(init_str
, 1);
8439 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
8440 /* prepare second initializer parsing */
8441 macro_ptr
= init_str
->str
;
8444 /* if still unknown size, error */
8445 size
= type_size(type
, &align
);
8447 tcc_error("unknown type size");
8449 /* If there's a flex member and it was used in the initializer
8451 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
8452 size
+= flexible_array
->type
.ref
->c
8453 * pointed_size(&flexible_array
->type
);
8456 /* take into account specified alignment if bigger */
8457 if (ad
->a
.aligned
) {
8458 int speca
= 1 << (ad
->a
.aligned
- 1);
8461 } else if (ad
->a
.packed
) {
8465 if (!v
&& NODATA_WANTED
)
8466 size
= 0, align
= 1;
8468 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
8470 #ifdef CONFIG_TCC_BCHECK
8472 /* add padding between stack variables for bound checking */
8476 loc
= (loc
- size
) & -align
;
8478 p
.local_offset
= addr
+ size
;
8479 #ifdef CONFIG_TCC_BCHECK
8481 /* add padding between stack variables for bound checking */
8486 /* local variable */
8487 #ifdef CONFIG_TCC_ASM
8488 if (ad
->asm_label
) {
8489 int reg
= asm_parse_regvar(ad
->asm_label
);
8491 r
= (r
& ~VT_VALMASK
) | reg
;
8494 sym
= sym_push(v
, type
, r
, addr
);
8495 if (ad
->cleanup_func
) {
8496 Sym
*cls
= sym_push2(&all_cleanups
,
8497 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
8498 cls
->prev_tok
= sym
;
8499 cls
->next
= ad
->cleanup_func
;
8500 cls
->ncl
= cur_scope
->cl
.s
;
8501 cur_scope
->cl
.s
= cls
;
8506 /* push local reference */
8507 vset(type
, r
, addr
);
8511 if (v
&& scope
== VT_CONST
) {
8512 /* see if the symbol was already defined */
8515 if (p
.flex_array_ref
&& (sym
->type
.t
& type
->t
& VT_ARRAY
)
8516 && sym
->type
.ref
->c
> type
->ref
->c
) {
8517 /* flex array was already declared with explicit size
8519 int arr[] = { 1,2,3 }; */
8520 type
->ref
->c
= sym
->type
.ref
->c
;
8521 size
= type_size(type
, &align
);
8523 patch_storage(sym
, ad
, type
);
8524 /* we accept several definitions of the same global variable. */
8525 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8530 /* allocate symbol in corresponding section */
8534 while ((tp
->t
& (VT_BTYPE
|VT_ARRAY
)) == (VT_PTR
|VT_ARRAY
))
8535 tp
= &tp
->ref
->type
;
8536 if (tp
->t
& VT_CONSTANT
) {
8537 sec
= rodata_section
;
8538 } else if (has_init
) {
8540 /*if (tcc_state->g_debug & 4)
8541 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8542 } else if (tcc_state
->nocommon
)
8547 addr
= section_add(sec
, size
, align
);
8548 #ifdef CONFIG_TCC_BCHECK
8549 /* add padding if bound check */
8551 section_add(sec
, 1, 1);
8554 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8555 sec
= common_section
;
8560 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8561 patch_storage(sym
, ad
, NULL
);
8563 /* update symbol definition */
8564 put_extern_sym(sym
, sec
, addr
, size
);
8566 /* push global reference */
8567 vpush_ref(type
, sec
, addr
, size
);
8572 #ifdef CONFIG_TCC_BCHECK
8573 /* handles bounds now because the symbol must be defined
8574 before for the relocation */
8578 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8579 /* then add global bound info */
8580 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8581 bounds_ptr
[0] = 0; /* relocated */
8582 bounds_ptr
[1] = size
;
8587 if (type
->t
& VT_VLA
) {
8593 /* save before-VLA stack pointer if needed */
8594 if (cur_scope
->vla
.num
== 0) {
8595 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
8596 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
8598 gen_vla_sp_save(loc
-= PTR_SIZE
);
8599 cur_scope
->vla
.locorig
= loc
;
8603 vpush_type_size(type
, &a
);
8604 gen_vla_alloc(type
, a
);
8605 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8606 /* on _WIN64, because of the function args scratch area, the
8607 result of alloca differs from RSP and is returned in RAX. */
8608 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8610 gen_vla_sp_save(addr
);
8611 cur_scope
->vla
.loc
= addr
;
8612 cur_scope
->vla
.num
++;
8613 } else if (has_init
) {
8615 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8616 /* patch flexible array member size back to -1, */
8617 /* for possible subsequent similar declarations */
8619 flexible_array
->type
.ref
->c
= -1;
8623 /* restore parse state if needed */
8629 nocode_wanted
= saved_nocode_wanted
;
8632 /* generate vla code saved in post_type() */
8633 static void func_vla_arg_code(Sym
*arg
)
8636 TokenString
*vla_array_tok
= NULL
;
8639 func_vla_arg_code(arg
->type
.ref
);
8641 if (arg
->type
.t
& VT_VLA
) {
8642 loc
-= type_size(&int_type
, &align
);
8644 arg
->type
.ref
->c
= loc
;
8647 vla_array_tok
= tok_str_alloc();
8648 vla_array_tok
->str
= arg
->type
.ref
->vla_array_str
;
8649 begin_macro(vla_array_tok
, 1);
8654 vpush_type_size(&arg
->type
.ref
->type
, &align
);
8656 vset(&int_type
, VT_LOCAL
|VT_LVAL
, arg
->type
.ref
->c
);
8663 static void func_vla_arg(Sym
*sym
)
8667 for (arg
= sym
->type
.ref
->next
; arg
; arg
= arg
->next
)
8668 if (arg
->type
.t
& VT_VLA
)
8669 func_vla_arg_code(arg
);
8672 /* parse a function defined by symbol 'sym' and generate its code in
8673 'cur_text_section' */
8674 static void gen_function(Sym
*sym
)
8676 struct scope f
= { 0 };
8677 cur_scope
= root_scope
= &f
;
8679 ind
= cur_text_section
->data_offset
;
8680 if (sym
->a
.aligned
) {
8681 size_t newoff
= section_add(cur_text_section
, 0,
8682 1 << (sym
->a
.aligned
- 1));
8683 gen_fill_nops(newoff
- ind
);
8685 /* NOTE: we patch the symbol size later */
8686 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8687 if (sym
->type
.ref
->f
.func_ctor
)
8688 add_array (tcc_state
, ".init_array", sym
->c
);
8689 if (sym
->type
.ref
->f
.func_dtor
)
8690 add_array (tcc_state
, ".fini_array", sym
->c
);
8692 funcname
= get_tok_str(sym
->v
, NULL
);
8694 func_vt
= sym
->type
.ref
->type
;
8695 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8697 /* put debug symbol */
8698 tcc_debug_funcstart(tcc_state
, sym
);
8699 /* push a dummy symbol to enable local sym storage */
8700 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8701 local_scope
= 1; /* for function parameters */
8705 clear_temp_local_var_list();
8710 /* reset local stack */
8711 pop_local_syms(NULL
, 0);
8713 cur_text_section
->data_offset
= ind
;
8715 label_pop(&global_label_stack
, NULL
, 0);
8716 sym_pop(&all_cleanups
, NULL
, 0);
8717 /* patch symbol size */
8718 elfsym(sym
)->st_size
= ind
- func_ind
;
8719 /* end of function */
8720 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8721 /* It's better to crash than to generate wrong code */
8722 cur_text_section
= NULL
;
8723 funcname
= ""; /* for safety */
8724 func_vt
.t
= VT_VOID
; /* for safety */
8725 func_var
= 0; /* for safety */
8726 ind
= 0; /* for safety */
8727 nocode_wanted
= 0x80000000;
8729 /* do this after funcend debug info */
8733 static void gen_inline_functions(TCCState
*s
)
8736 int inline_generated
, i
;
8737 struct InlineFunc
*fn
;
8739 tcc_open_bf(s
, ":inline:", 0);
8740 /* iterate while inline function are referenced */
8742 inline_generated
= 0;
8743 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8744 fn
= s
->inline_fns
[i
];
8746 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8747 /* the function was used or forced (and then not internal):
8748 generate its code and convert it to a normal function */
8750 tcc_debug_putfile(s
, fn
->filename
);
8751 begin_macro(fn
->func_str
, 1);
8753 cur_text_section
= text_section
;
8757 inline_generated
= 1;
8760 } while (inline_generated
);
8764 static void free_inline_functions(TCCState
*s
)
8767 /* free tokens of unused inline functions */
8768 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8769 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8771 tok_str_free(fn
->func_str
);
8773 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
8776 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8777 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8778 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
8780 int v
, has_init
, r
, oldint
;
8783 AttributeDef ad
, adbase
;
8786 if (tok
== TOK_STATIC_ASSERT
) {
8796 tcc_error("_Static_assert fail");
8798 goto static_assert_out
;
8802 parse_mult_str(&error_str
, "string constant");
8804 tcc_error("%s", (char *)error_str
.data
);
8805 cstr_free(&error_str
);
8813 if (!parse_btype(&btype
, &adbase
)) {
8814 if (is_for_loop_init
)
8816 /* skip redundant ';' if not in old parameter decl scope */
8817 if (tok
== ';' && l
!= VT_CMP
) {
8823 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8824 /* global asm block */
8828 if (tok
>= TOK_UIDENT
) {
8829 /* special test for old K&R protos without explicit int
8830 type. Only accepted when defining global data */
8835 expect("declaration");
8841 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8843 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8844 tcc_warning("unnamed struct/union that defines no instances");
8848 if (IS_ENUM(btype
.t
)) {
8854 while (1) { /* iterate thru each declaration */
8857 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8861 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8862 printf("type = '%s'\n", buf
);
8865 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8866 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
8867 tcc_error("function without file scope cannot be static");
8868 /* if old style function prototype, we accept a
8871 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
8872 decl0(VT_CMP
, 0, sym
);
8873 #ifdef TCC_TARGET_MACHO
8874 if (sym
->f
.func_alwinl
8875 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8876 == (VT_EXTERN
| VT_INLINE
))) {
8877 /* always_inline functions must be handled as if they
8878 don't generate multiple global defs, even if extern
8879 inline, i.e. GNU inline semantics for those. Rewrite
8880 them into static inline. */
8881 type
.t
&= ~VT_EXTERN
;
8882 type
.t
|= VT_STATIC
;
8885 /* always compile 'extern inline' */
8886 if (type
.t
& VT_EXTERN
)
8887 type
.t
&= ~VT_INLINE
;
8889 } else if (oldint
) {
8890 tcc_warning("type defaults to int");
8893 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8894 ad
.asm_label
= asm_label_instr();
8895 /* parse one last attribute list, after asm label */
8896 parse_attribute(&ad
);
8898 /* gcc does not allow __asm__("label") with function definition,
8905 #ifdef TCC_TARGET_PE
8906 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8907 if (type
.t
& VT_STATIC
)
8908 tcc_error("cannot have dll linkage with static");
8909 if (type
.t
& VT_TYPEDEF
) {
8910 tcc_warning("'%s' attribute ignored for typedef",
8911 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8912 (ad
.a
.dllexport
= 0, "dllexport"));
8913 } else if (ad
.a
.dllimport
) {
8914 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8917 type
.t
|= VT_EXTERN
;
8923 tcc_error("cannot use local functions");
8924 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8925 expect("function definition");
8927 /* reject abstract declarators in function definition
8928 make old style params without decl have int type */
8930 while ((sym
= sym
->next
) != NULL
) {
8931 if (!(sym
->v
& ~SYM_FIELD
))
8932 expect("identifier");
8933 if (sym
->type
.t
== VT_VOID
)
8934 sym
->type
= int_type
;
8937 /* apply post-declaraton attributes */
8938 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8940 /* put function symbol */
8941 type
.t
&= ~VT_EXTERN
;
8942 sym
= external_sym(v
, &type
, 0, &ad
);
8944 /* static inline functions are just recorded as a kind
8945 of macro. Their code will be emitted at the end of
8946 the compilation unit only if they are used */
8947 if (sym
->type
.t
& VT_INLINE
) {
8948 struct InlineFunc
*fn
;
8949 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8950 strcpy(fn
->filename
, file
->filename
);
8952 skip_or_save_block(&fn
->func_str
);
8953 dynarray_add(&tcc_state
->inline_fns
,
8954 &tcc_state
->nb_inline_fns
, fn
);
8956 /* compute text section */
8957 cur_text_section
= ad
.section
;
8958 if (!cur_text_section
)
8959 cur_text_section
= text_section
;
8965 /* find parameter in function parameter list */
8966 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
8967 if ((sym
->v
& ~SYM_FIELD
) == v
)
8969 tcc_error("declaration for parameter '%s' but no such parameter",
8970 get_tok_str(v
, NULL
));
8972 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8973 tcc_error("storage class specified for '%s'",
8974 get_tok_str(v
, NULL
));
8975 if (sym
->type
.t
!= VT_VOID
)
8976 tcc_error("redefinition of parameter '%s'",
8977 get_tok_str(v
, NULL
));
8978 convert_parameter_type(&type
);
8980 } else if (type
.t
& VT_TYPEDEF
) {
8981 /* save typedefed type */
8982 /* XXX: test storage specifiers ? */
8984 if (sym
&& sym
->sym_scope
== local_scope
) {
8985 if (!is_compatible_types(&sym
->type
, &type
)
8986 || !(sym
->type
.t
& VT_TYPEDEF
))
8987 tcc_error("incompatible redefinition of '%s'",
8988 get_tok_str(v
, NULL
));
8991 sym
= sym_push(v
, &type
, 0, 0);
8996 tcc_debug_typedef (tcc_state
, sym
);
8997 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8998 && !(type
.t
& VT_EXTERN
)) {
8999 tcc_error("declaration of void object");
9002 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
9003 /* external function definition */
9004 /* specific case for func_call attribute */
9006 } else if (!(type
.t
& VT_ARRAY
)) {
9007 /* not lvalue if array */
9010 has_init
= (tok
== '=');
9011 if (has_init
&& (type
.t
& VT_VLA
))
9012 tcc_error("variable length array cannot be initialized");
9013 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
9014 || (type
.t
& VT_BTYPE
) == VT_FUNC
9015 /* as with GCC, uninitialized global arrays with no size
9016 are considered extern: */
9017 || ((type
.t
& VT_ARRAY
) && !has_init
9018 && l
== VT_CONST
&& type
.ref
->c
< 0)
9020 /* external variable or function */
9021 type
.t
|= VT_EXTERN
;
9022 sym
= external_sym(v
, &type
, r
, &ad
);
9023 if (ad
.alias_target
) {
9024 /* Aliases need to be emitted when their target
9025 symbol is emitted, even if perhaps unreferenced.
9026 We only support the case where the base is
9027 already defined, otherwise we would need
9028 deferring to emit the aliases until the end of
9029 the compile unit. */
9030 Sym
*alias_target
= sym_find(ad
.alias_target
);
9031 ElfSym
*esym
= elfsym(alias_target
);
9033 tcc_error("unsupported forward __alias__ attribute");
9034 put_extern_sym2(sym
, esym
->st_shndx
,
9035 esym
->st_value
, esym
->st_size
, 1);
9038 if (type
.t
& VT_STATIC
)
9044 else if (l
== VT_CONST
)
9045 /* uninitialized global variables may be overridden */
9046 type
.t
|= VT_EXTERN
;
9047 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
9051 if (is_for_loop_init
)
9063 static void decl(int l
)
9068 /* ------------------------------------------------------------------------- */
9071 /* ------------------------------------------------------------------------- */