2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
49 ST_DATA
char debug_modes
;
52 static SValue _vstack
[1 + VSTACK_SIZE
];
53 #define vstack (_vstack + 1)
55 ST_DATA
int const_wanted
; /* true if constant wanted */
56 ST_DATA
int nocode_wanted
; /* no code generation wanted */
57 #define unevalmask 0xffff /* unevaluated subexpression */
58 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
59 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
61 /* Automagical code suppression ----> */
62 #define CODE_OFF() (nocode_wanted |= 0x20000000)
63 #define CODE_ON() (nocode_wanted &= ~0x20000000)
65 static void tcc_tcov_block_begin(void);
67 /* Clear 'nocode_wanted' at label if it was used */
68 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
69 static int gind(void) { int t
= ind
; CODE_ON(); if (debug_modes
) tcc_tcov_block_begin(); return t
; }
71 /* Set 'nocode_wanted' after unconditional jumps */
/* Emit an unconditional jump to address 't', then suppress code
   generation for the now-unreachable fallthrough path. */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit a forward jump chained onto 't'; code after an unconditional
   jump is unreachable, so turn code generation off. Returns the new
   head of the jump chain. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
75 /* These are #undef'd at the end of this file */
76 #define gjmp_addr gjmp_addr_acs
80 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializer parsing) */
81 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
82 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
84 static int last_line_num
, new_file
, func_ind
; /* debug info control */
85 ST_DATA
const char *funcname
;
86 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
87 static CString initstr
;
90 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_INT
93 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T VT_LLONG
96 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
97 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
100 static struct switch_t
{
104 } **p
; int n
; /* list of case ranges */
105 int def_sym
; /* default symbol */
108 struct switch_t
*prev
;
110 } *cur_switch
; /* current switch */
112 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
113 /*list of temporary local variables on the stack in current function. */
114 static struct temp_local_variable
{
115 int location
; //offset on stack. Svalue.c.i
118 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
119 static int nb_temp_local_vars
;
121 static struct scope
{
123 struct { int loc
, locorig
, num
; } vla
;
124 struct { Sym
*s
; int n
; } cl
;
127 } *cur_scope
, *loop_scope
, *root_scope
;
136 #define precedence_parser
137 static void init_prec(void);
140 /********************************************************/
141 /* stab debug support */
143 static const struct {
146 } default_debug
[] = {
147 { VT_INT
, "int:t1=r1;-2147483648;2147483647;" },
148 { VT_BYTE
, "char:t2=r2;0;127;" },
150 { VT_LONG
| VT_INT
, "long int:t3=r3;-2147483648;2147483647;" },
152 { VT_LLONG
| VT_LONG
, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
154 { VT_INT
| VT_UNSIGNED
, "unsigned int:t4=r4;0;037777777777;" },
156 { VT_LONG
| VT_INT
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;037777777777;" },
158 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
159 { VT_LLONG
| VT_LONG
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;01777777777777777777777;" },
161 { VT_QLONG
, "__int128:t6=r6;0;-1;" },
162 { VT_QLONG
| VT_UNSIGNED
, "__int128 unsigned:t7=r7;0;-1;" },
163 { VT_LLONG
, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
164 { VT_LLONG
| VT_UNSIGNED
, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
165 { VT_SHORT
, "short int:t10=r10;-32768;32767;" },
166 { VT_SHORT
| VT_UNSIGNED
, "short unsigned int:t11=r11;0;65535;" },
167 { VT_BYTE
| VT_DEFSIGN
, "signed char:t12=r12;-128;127;" },
168 { VT_BYTE
| VT_DEFSIGN
| VT_UNSIGNED
, "unsigned char:t13=r13;0;255;" },
169 { VT_FLOAT
, "float:t14=r1;4;0;" },
170 { VT_DOUBLE
, "double:t15=r1;8;0;" },
171 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
172 { VT_DOUBLE
| VT_LONG
, "long double:t16=r1;8;0;" },
174 { VT_LDOUBLE
, "long double:t16=r1;16;0;" },
176 { -1, "_Float32:t17=r1;4;0;" },
177 { -1, "_Float64:t18=r1;8;0;" },
178 { -1, "_Float128:t19=r1;16;0;" },
179 { -1, "_Float32x:t20=r1;8;0;" },
180 { -1, "_Float64x:t21=r1;16;0;" },
181 { -1, "_Decimal32:t22=r1;4;0;" },
182 { -1, "_Decimal64:t23=r1;8;0;" },
183 { -1, "_Decimal128:t24=r1;16;0;" },
184 /* if default char is unsigned */
185 { VT_BYTE
| VT_UNSIGNED
, "unsigned char:t25=r25;0;255;" },
187 { VT_BOOL
, "bool:t26=r26;0;255;" },
188 { VT_VOID
, "void:t27=27" },
191 static int debug_next_type
;
193 static struct debug_hash
{
198 static int n_debug_hash
;
200 static struct debug_info
{
211 struct debug_info
*child
, *next
, *last
, *parent
;
212 } *debug_info
, *debug_info_root
;
215 unsigned long offset
;
216 unsigned long last_file_name
;
217 unsigned long last_func_name
;
222 /********************************************************/
223 static void gen_cast(CType
*type
);
224 static void gen_cast_s(int t
);
225 static inline CType
*pointed_type(CType
*type
);
226 static int is_compatible_types(CType
*type1
, CType
*type2
);
227 static int parse_btype(CType
*type
, AttributeDef
*ad
);
228 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
229 static void parse_expr_type(CType
*type
);
230 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
231 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
232 static void block(int is_expr
);
233 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
234 static void decl(int l
);
235 static int decl0(int l
, int is_for_loop_init
, Sym
*);
236 static void expr_eq(void);
237 static void vla_runtime_type_size(CType
*type
, int *a
);
238 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
239 static inline int64_t expr_const64(void);
240 static void vpush64(int ty
, unsigned long long v
);
241 static void vpush(CType
*type
);
242 static int gvtst(int inv
, int t
);
243 static void gen_inline_functions(TCCState
*s
);
244 static void free_inline_functions(TCCState
*s
);
245 static void skip_or_save_block(TokenString
**str
);
246 static void gv_dup(void);
247 static int get_temp_local_var(int size
,int align
);
248 static void clear_temp_local_var_list();
249 static void cast_error(CType
*st
, CType
*dt
);
251 ST_INLN
int is_float(int t
)
253 int bt
= t
& VT_BTYPE
;
254 return bt
== VT_LDOUBLE
260 static inline int is_integer_btype(int bt
)
269 static int btype_size(int bt
)
271 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
275 bt
== VT_PTR
? PTR_SIZE
: 0;
278 /* returns function return register from type */
279 static int R_RET(int t
)
283 #ifdef TCC_TARGET_X86_64
284 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
286 #elif defined TCC_TARGET_RISCV64
287 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
293 /* returns 2nd function return register, if any */
294 static int R2_RET(int t
)
300 #elif defined TCC_TARGET_X86_64
305 #elif defined TCC_TARGET_RISCV64
312 /* returns true for two-word types */
313 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
315 /* put function return registers to stack value */
316 static void PUT_R_RET(SValue
*sv
, int t
)
318 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
321 /* returns function return register class for type t */
322 static int RC_RET(int t
)
324 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
327 /* returns generic register class for type t */
328 static int RC_TYPE(int t
)
332 #ifdef TCC_TARGET_X86_64
333 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
335 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
337 #elif defined TCC_TARGET_RISCV64
338 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
344 /* returns 2nd register class corresponding to t and rc */
345 static int RC2_TYPE(int t
, int rc
)
347 if (!USING_TWO_WORDS(t
))
362 /* we use our own 'finite' function to avoid potential problems with
363 non standard math libs */
364 /* XXX: endianness dependent */
365 ST_FUNC
int ieee_finite(double d
)
368 memcpy(p
, &d
, sizeof(double));
369 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
372 /* compiling intel long double natively */
373 #if (defined __i386__ || defined __x86_64__) \
374 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
375 # define TCC_IS_NATIVE_387
378 ST_FUNC
void test_lvalue(void)
380 if (!(vtop
->r
& VT_LVAL
))
384 ST_FUNC
void check_vstack(void)
386 if (vtop
!= vstack
- 1)
387 tcc_error("internal compiler error: vstack leak (%d)",
388 (int)(vtop
- vstack
+ 1));
391 /* ------------------------------------------------------------------------- */
392 /* vstack debugging aid */
395 void pv (const char *lbl
, int a
, int b
)
398 for (i
= a
; i
< a
+ b
; ++i
) {
399 SValue
*p
= &vtop
[-i
];
400 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
401 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
406 /* ------------------------------------------------------------------------- */
407 /* start of translation unit info */
408 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
414 /* file info: full path + filename */
415 section_sym
= put_elf_sym(symtab_section
, 0, 0,
416 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
417 text_section
->sh_num
, NULL
);
418 getcwd(buf
, sizeof(buf
));
420 normalize_slashes(buf
);
422 pstrcat(buf
, sizeof(buf
), "/");
423 put_stabs_r(s1
, buf
, N_SO
, 0, 0,
424 text_section
->data_offset
, text_section
, section_sym
);
425 put_stabs_r(s1
, file
->prev
? file
->prev
->filename
: file
->filename
,
427 text_section
->data_offset
, text_section
, section_sym
);
428 for (i
= 0; i
< sizeof (default_debug
) / sizeof (default_debug
[0]); i
++)
429 put_stabs(s1
, default_debug
[i
].name
, N_LSYM
, 0, 0, 0);
431 new_file
= last_line_num
= 0;
433 debug_next_type
= sizeof(default_debug
) / sizeof(default_debug
[0]);
437 /* we're currently 'including' the <command line> */
441 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
442 symbols can be safely used */
443 put_elf_sym(symtab_section
, 0, 0,
444 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
445 SHN_ABS
, file
->filename
);
448 /* put end of translation unit info */
449 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
453 put_stabs_r(s1
, NULL
, N_SO
, 0, 0,
454 text_section
->data_offset
, text_section
, section_sym
);
455 tcc_free(debug_hash
);
458 static BufferedFile
* put_new_file(TCCState
*s1
)
460 BufferedFile
*f
= file
;
461 /* use upper file if from inline ":asm:" */
462 if (f
->filename
[0] == ':')
465 put_stabs_r(s1
, f
->filename
, N_SOL
, 0, 0, ind
, text_section
, section_sym
);
466 new_file
= last_line_num
= 0;
471 /* put alternative filename */
472 ST_FUNC
void tcc_debug_putfile(TCCState
*s1
, const char *filename
)
474 if (0 == strcmp(file
->filename
, filename
))
476 pstrcpy(file
->filename
, sizeof(file
->filename
), filename
);
480 /* begin of #include */
481 ST_FUNC
void tcc_debug_bincl(TCCState
*s1
)
485 put_stabs(s1
, file
->filename
, N_BINCL
, 0, 0, 0);
489 /* end of #include */
490 ST_FUNC
void tcc_debug_eincl(TCCState
*s1
)
494 put_stabn(s1
, N_EINCL
, 0, 0, 0);
498 /* generate line number info */
499 static void tcc_debug_line(TCCState
*s1
)
503 || cur_text_section
!= text_section
504 || !(f
= put_new_file(s1
))
505 || last_line_num
== f
->line_num
)
507 if (func_ind
!= -1) {
508 put_stabn(s1
, N_SLINE
, 0, f
->line_num
, ind
- func_ind
);
510 /* from tcc_assemble */
511 put_stabs_r(s1
, NULL
, N_SLINE
, 0, f
->line_num
, ind
, text_section
, section_sym
);
513 last_line_num
= f
->line_num
;
516 static void tcc_debug_stabs (TCCState
*s1
, const char *str
, int type
, unsigned long value
,
517 Section
*sec
, int sym_index
)
523 (struct debug_sym
*)tcc_realloc (debug_info
->sym
,
524 sizeof(struct debug_sym
) *
525 (debug_info
->n_sym
+ 1));
526 s
= debug_info
->sym
+ debug_info
->n_sym
++;
529 s
->str
= tcc_strdup(str
);
531 s
->sym_index
= sym_index
;
534 put_stabs_r (s1
, str
, type
, 0, 0, value
, sec
, sym_index
);
536 put_stabs (s1
, str
, type
, 0, 0, value
);
539 static void tcc_debug_stabn(TCCState
*s1
, int type
, int value
)
543 if (type
== N_LBRAC
) {
544 struct debug_info
*info
=
545 (struct debug_info
*) tcc_mallocz(sizeof (*info
));
548 info
->parent
= debug_info
;
550 if (debug_info
->child
) {
551 if (debug_info
->child
->last
)
552 debug_info
->child
->last
->next
= info
;
554 debug_info
->child
->next
= info
;
555 debug_info
->child
->last
= info
;
558 debug_info
->child
= info
;
561 debug_info_root
= info
;
565 debug_info
->end
= value
;
566 debug_info
= debug_info
->parent
;
570 static void tcc_get_debug_info(TCCState
*s1
, Sym
*s
, CString
*result
)
579 type
= t
->type
.t
& ~(VT_STORAGE
| VT_CONSTANT
| VT_VOLATILE
);
580 if ((type
& VT_BTYPE
) != VT_BYTE
)
582 if (type
== VT_PTR
|| type
== (VT_PTR
| VT_ARRAY
))
583 n
++, t
= t
->type
.ref
;
587 if ((type
& VT_BTYPE
) == VT_STRUCT
) {
591 for (i
= 0; i
< n_debug_hash
; i
++) {
592 if (t
== debug_hash
[i
].type
) {
593 debug_type
= debug_hash
[i
].debug_type
;
597 if (debug_type
== -1) {
598 debug_type
= ++debug_next_type
;
599 debug_hash
= (struct debug_hash
*)
600 tcc_realloc (debug_hash
,
601 (n_debug_hash
+ 1) * sizeof(*debug_hash
));
602 debug_hash
[n_debug_hash
].debug_type
= debug_type
;
603 debug_hash
[n_debug_hash
++].type
= t
;
605 cstr_printf (&str
, "%s:T%d=%c%d",
606 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
607 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
609 IS_UNION (t
->type
.t
) ? 'u' : 's',
612 int pos
, size
, align
;
615 cstr_printf (&str
, "%s:",
616 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
617 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
618 tcc_get_debug_info (s1
, t
, &str
);
619 if (t
->type
.t
& VT_BITFIELD
) {
620 pos
= t
->c
* 8 + BIT_POS(t
->type
.t
);
621 size
= BIT_SIZE(t
->type
.t
);
625 size
= type_size(&t
->type
, &align
) * 8;
627 cstr_printf (&str
, ",%d,%d;", pos
, size
);
629 cstr_printf (&str
, ";");
630 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
634 else if (IS_ENUM(type
)) {
635 Sym
*e
= t
= t
->type
.ref
;
637 debug_type
= ++debug_next_type
;
639 cstr_printf (&str
, "%s:T%d=e",
640 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
641 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
645 cstr_printf (&str
, "%s:",
646 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
647 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
648 cstr_printf (&str
, e
->type
.t
& VT_UNSIGNED
? "%u," : "%d,",
651 cstr_printf (&str
, ";");
652 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
655 else if ((type
& VT_BTYPE
) != VT_FUNC
) {
656 type
&= ~VT_STRUCT_MASK
;
658 debug_type
<= sizeof(default_debug
) / sizeof(default_debug
[0]);
660 if (default_debug
[debug_type
- 1].type
== type
)
662 if (debug_type
> sizeof(default_debug
) / sizeof(default_debug
[0]))
666 cstr_printf (result
, "%d=", ++debug_next_type
);
669 type
= t
->type
.t
& ~(VT_STORAGE
| VT_CONSTANT
| VT_VOLATILE
);
670 if ((type
& VT_BTYPE
) != VT_BYTE
)
673 cstr_printf (result
, "%d=*", ++debug_next_type
);
674 else if (type
== (VT_PTR
| VT_ARRAY
))
675 cstr_printf (result
, "%d=ar1;0;%d;",
676 ++debug_next_type
, t
->type
.ref
->c
- 1);
677 else if (type
== VT_FUNC
) {
678 cstr_printf (result
, "%d=f", ++debug_next_type
);
679 tcc_get_debug_info (s1
, t
->type
.ref
, result
);
686 cstr_printf (result
, "%d", debug_type
);
689 static void tcc_debug_finish (TCCState
*s1
, struct debug_info
*cur
)
693 struct debug_info
*next
= cur
->next
;
695 for (i
= 0; i
< cur
->n_sym
; i
++) {
696 struct debug_sym
*s
= &cur
->sym
[i
];
699 put_stabs_r(s1
, s
->str
, s
->type
, 0, 0, s
->value
,
700 s
->sec
, s
->sym_index
);
702 put_stabs(s1
, s
->str
, s
->type
, 0, 0, s
->value
);
706 put_stabn(s1
, N_LBRAC
, 0, 0, cur
->start
);
707 tcc_debug_finish (s1
, cur
->child
);
708 put_stabn(s1
, N_RBRAC
, 0, 0, cur
->end
);
714 static void tcc_add_debug_info(TCCState
*s1
, int param
, Sym
*s
, Sym
*e
)
719 cstr_new (&debug_str
);
720 for (; s
!= e
; s
= s
->prev
) {
721 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
723 cstr_reset (&debug_str
);
724 cstr_printf (&debug_str
, "%s:%s", get_tok_str(s
->v
, NULL
), param
? "p" : "");
725 tcc_get_debug_info(s1
, s
, &debug_str
);
726 tcc_debug_stabs(s1
, debug_str
.data
, param
? N_PSYM
: N_LSYM
, s
->c
, NULL
, 0);
728 cstr_free (&debug_str
);
731 /* put function symbol */
732 static void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
738 debug_info_root
= NULL
;
740 tcc_debug_stabn(s1
, N_LBRAC
, ind
- func_ind
);
741 if (!(f
= put_new_file(s1
)))
743 cstr_new (&debug_str
);
744 cstr_printf(&debug_str
, "%s:%c", funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
745 tcc_get_debug_info(s1
, sym
->type
.ref
, &debug_str
);
746 put_stabs_r(s1
, debug_str
.data
, N_FUN
, 0, f
->line_num
, 0, cur_text_section
, sym
->c
);
747 cstr_free (&debug_str
);
752 /* put function size */
753 static void tcc_debug_funcend(TCCState
*s1
, int size
)
757 tcc_debug_stabn(s1
, N_RBRAC
, size
);
758 tcc_debug_finish (s1
, debug_info_root
);
762 static void tcc_debug_extern_sym(TCCState
*s1
, Sym
*sym
, int sh_num
, int sym_bind
, int sym_type
)
769 if (sym_type
== STT_FUNC
|| sym
->v
>= SYM_FIRST_ANOM
)
771 s
= s1
->sections
[sh_num
];
774 cstr_printf (&str
, "%s:%c",
775 get_tok_str(sym
->v
, NULL
),
776 sym_bind
== STB_GLOBAL
? 'G' : local_scope
? 'V' : 'S'
778 tcc_get_debug_info(s1
, sym
, &str
);
779 if (sym_bind
== STB_GLOBAL
)
780 tcc_debug_stabs(s1
, str
.data
, N_GSYM
, 0, NULL
, 0);
782 tcc_debug_stabs(s1
, str
.data
,
783 (sym
->type
.t
& VT_STATIC
) && data_section
== s
784 ? N_STSYM
: N_LCSYM
, 0, s
, sym
->c
);
788 static void tcc_debug_typedef(TCCState
*s1
, Sym
*sym
)
795 cstr_printf (&str
, "%s:t",
796 (sym
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
797 ? "" : get_tok_str(sym
->v
& ~SYM_FIELD
, NULL
));
798 tcc_get_debug_info(s1
, sym
, &str
);
799 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
803 /* ------------------------------------------------------------------------- */
804 /* for section layout see lib/tcov.c */
806 static void tcc_tcov_block_end(int line
);
808 static void tcc_tcov_block_begin(void)
812 unsigned long last_offset
= tcov_data
.offset
;
814 tcc_tcov_block_end (0);
815 if (tcc_state
->test_coverage
== 0 || nocode_wanted
)
818 if (tcov_data
.last_file_name
== 0 ||
819 strcmp ((const char *)(tcov_section
->data
+ tcov_data
.last_file_name
),
820 file
->true_filename
) != 0) {
824 if (tcov_data
.last_func_name
)
825 section_ptr_add(tcov_section
, 1);
826 if (tcov_data
.last_file_name
)
827 section_ptr_add(tcov_section
, 1);
828 tcov_data
.last_func_name
= 0;
830 if (file
->true_filename
[0] == '/') {
831 tcov_data
.last_file_name
= tcov_section
->data_offset
;
832 cstr_printf (&cstr
, "%s", file
->true_filename
);
835 getcwd (wd
, sizeof(wd
));
836 tcov_data
.last_file_name
= tcov_section
->data_offset
+ strlen(wd
) + 1;
837 cstr_printf (&cstr
, "%s/%s", wd
, file
->true_filename
);
839 ptr
= section_ptr_add(tcov_section
, cstr
.size
+ 1);
840 strcpy((char *)ptr
, cstr
.data
);
842 normalize_slashes((char *)ptr
);
846 if (tcov_data
.last_func_name
== 0 ||
847 strcmp ((const char *)(tcov_section
->data
+ tcov_data
.last_func_name
),
851 if (tcov_data
.last_func_name
)
852 section_ptr_add(tcov_section
, 1);
853 tcov_data
.last_func_name
= tcov_section
->data_offset
;
854 len
= strlen (funcname
);
855 ptr
= section_ptr_add(tcov_section
, len
+ 1);
856 strcpy((char *)ptr
, funcname
);
857 section_ptr_add(tcov_section
, -tcov_section
->data_offset
& 7);
858 ptr
= section_ptr_add(tcov_section
, 8);
859 write64le (ptr
, file
->line_num
);
861 if (ind
== tcov_data
.ind
&& tcov_data
.line
== file
->line_num
)
862 tcov_data
.offset
= last_offset
;
865 label
.type
.t
= VT_LLONG
| VT_STATIC
;
867 ptr
= section_ptr_add(tcov_section
, 16);
868 tcov_data
.line
= file
->line_num
;
869 write64le (ptr
, (tcov_data
.line
<< 8) | 0xff);
870 put_extern_sym(&label
, tcov_section
,
871 ((unsigned char *)ptr
- tcov_section
->data
) + 8, 0);
872 sv
.type
= label
.type
;
873 sv
.r
= VT_SYM
| VT_LVAL
| VT_CONST
;
877 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || \
878 defined TCC_TARGET_ARM || defined TCC_TARGET_ARM64 || \
879 defined TCC_TARGET_RISCV64
880 gen_increment_tcov (&sv
);
886 tcov_data
.offset
= (unsigned char *)ptr
- tcov_section
->data
;
891 static void tcc_tcov_block_end(int line
)
893 if (tcc_state
->test_coverage
== 0)
895 if (tcov_data
.offset
) {
896 void *ptr
= tcov_section
->data
+ tcov_data
.offset
;
897 unsigned long long nline
= line
? line
: file
->line_num
;
899 write64le (ptr
, (read64le (ptr
) & 0xfffffffffull
) | (nline
<< 36));
900 tcov_data
.offset
= 0;
904 static void tcc_tcov_check_line(int start
)
906 if (tcc_state
->test_coverage
== 0)
908 if (tcov_data
.line
!= file
->line_num
) {
909 if ((tcov_data
.line
+ 1) != file
->line_num
) {
910 tcc_tcov_block_end (tcov_data
.line
);
912 tcc_tcov_block_begin ();
915 tcov_data
.line
= file
->line_num
;
919 static void tcc_tcov_start(void)
921 if (tcc_state
->test_coverage
== 0)
923 memset (&tcov_data
, 0, sizeof (tcov_data
));
924 if (tcov_section
== NULL
) {
925 tcov_section
= new_section(tcc_state
, ".tcov", SHT_PROGBITS
,
926 SHF_ALLOC
| SHF_WRITE
);
927 section_ptr_add(tcov_section
, 4); // pointer to executable name
931 static void tcc_tcov_end(void)
933 if (tcc_state
->test_coverage
== 0)
935 if (tcov_data
.last_func_name
)
936 section_ptr_add(tcov_section
, 1);
937 if (tcov_data
.last_file_name
)
938 section_ptr_add(tcov_section
, 1);
941 /* ------------------------------------------------------------------------- */
942 /* initialize vstack and types. This must be done also for tcc -E */
943 ST_FUNC
void tccgen_init(TCCState
*s1
)
946 memset(vtop
, 0, sizeof *vtop
);
948 /* define some often used types */
951 char_type
.t
= VT_BYTE
;
952 if (s1
->char_is_unsigned
)
953 char_type
.t
|= VT_UNSIGNED
;
954 char_pointer_type
= char_type
;
955 mk_pointer(&char_pointer_type
);
957 func_old_type
.t
= VT_FUNC
;
958 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
959 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
960 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
961 #ifdef precedence_parser
967 ST_FUNC
int tccgen_compile(TCCState
*s1
)
969 cur_text_section
= NULL
;
971 anon_sym
= SYM_FIRST_ANOM
;
974 nocode_wanted
= 0x80000000;
976 debug_modes
= s1
->do_debug
| s1
->test_coverage
<< 1;
980 #ifdef TCC_TARGET_ARM
984 printf("%s: **** new file\n", file
->filename
);
986 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
989 gen_inline_functions(s1
);
991 /* end of translation unit info */
997 ST_FUNC
void tccgen_finish(TCCState
*s1
)
1000 free_inline_functions(s1
);
1001 sym_pop(&global_stack
, NULL
, 0);
1002 sym_pop(&local_stack
, NULL
, 0);
1003 /* free preprocessor macros */
1005 /* free sym_pools */
1006 dynarray_reset(&sym_pools
, &nb_sym_pools
);
1007 sym_free_first
= NULL
;
1010 /* ------------------------------------------------------------------------- */
1011 ST_FUNC ElfSym
*elfsym(Sym
*s
)
1015 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
1018 /* apply storage attributes to Elf symbol */
1019 ST_FUNC
void update_storage(Sym
*sym
)
1022 int sym_bind
, old_sym_bind
;
1028 if (sym
->a
.visibility
)
1029 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
1030 | sym
->a
.visibility
;
1032 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
1033 sym_bind
= STB_LOCAL
;
1034 else if (sym
->a
.weak
)
1035 sym_bind
= STB_WEAK
;
1037 sym_bind
= STB_GLOBAL
;
1038 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
1039 if (sym_bind
!= old_sym_bind
) {
1040 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
1043 #ifdef TCC_TARGET_PE
1044 if (sym
->a
.dllimport
)
1045 esym
->st_other
|= ST_PE_IMPORT
;
1046 if (sym
->a
.dllexport
)
1047 esym
->st_other
|= ST_PE_EXPORT
;
1051 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
1052 get_tok_str(sym
->v
, NULL
),
1053 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
1061 /* ------------------------------------------------------------------------- */
1062 /* update sym->c so that it points to an external symbol in section
1063 'section' with value 'value' */
1065 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
1066 addr_t value
, unsigned long size
,
1067 int can_add_underscore
)
1069 int sym_type
, sym_bind
, info
, other
, t
;
1075 name
= get_tok_str(sym
->v
, NULL
);
1077 if ((t
& VT_BTYPE
) == VT_FUNC
) {
1078 sym_type
= STT_FUNC
;
1079 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
1080 sym_type
= STT_NOTYPE
;
1081 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
1082 sym_type
= STT_FUNC
;
1084 sym_type
= STT_OBJECT
;
1086 if (t
& (VT_STATIC
| VT_INLINE
))
1087 sym_bind
= STB_LOCAL
;
1089 sym_bind
= STB_GLOBAL
;
1092 #ifdef TCC_TARGET_PE
1093 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
1094 Sym
*ref
= sym
->type
.ref
;
1095 if (ref
->a
.nodecorate
) {
1096 can_add_underscore
= 0;
1098 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
1099 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
1101 other
|= ST_PE_STDCALL
;
1102 can_add_underscore
= 0;
1107 if (sym
->asm_label
) {
1108 name
= get_tok_str(sym
->asm_label
, NULL
);
1109 can_add_underscore
= 0;
1112 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
1114 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
1118 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
1119 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
1122 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
, sym_type
);
1126 esym
->st_value
= value
;
1127 esym
->st_size
= size
;
1128 esym
->st_shndx
= sh_num
;
1130 update_storage(sym
);
1133 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
1134 addr_t value
, unsigned long size
)
1136 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
1137 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
1140 /* add a new relocation entry to symbol 'sym' in section 's' */
1141 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
1146 if (nocode_wanted
&& s
== cur_text_section
)
1151 put_extern_sym(sym
, NULL
, 0, 0);
1155 /* now we can add ELF relocation info */
1156 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
1160 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
1162 greloca(s
, sym
, offset
, type
, 0);
1166 /* ------------------------------------------------------------------------- */
1167 /* symbol allocator */
1168 static Sym
*__sym_malloc(void)
1170 Sym
*sym_pool
, *sym
, *last_sym
;
1173 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
1174 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
1176 last_sym
= sym_free_first
;
1178 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
1179 sym
->next
= last_sym
;
1183 sym_free_first
= last_sym
;
1187 static inline Sym
*sym_malloc(void)
1191 sym
= sym_free_first
;
1193 sym
= __sym_malloc();
1194 sym_free_first
= sym
->next
;
1197 sym
= tcc_malloc(sizeof(Sym
));
1202 ST_INLN
void sym_free(Sym
*sym
)
1205 sym
->next
= sym_free_first
;
1206 sym_free_first
= sym
;
1212 /* push, without hashing */
1213 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
1218 memset(s
, 0, sizeof *s
);
1228 /* find a symbol and return its associated structure. 's' is the top
1229 of the symbol stack */
1230 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
1235 else if (s
->v
== -1)
1242 /* structure lookup */
1243 ST_INLN Sym
*struct_find(int v
)
1246 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1248 return table_ident
[v
]->sym_struct
;
1251 /* find an identifier */
1252 ST_INLN Sym
*sym_find(int v
)
1255 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1257 return table_ident
[v
]->sym_identifier
;
1260 static int sym_scope(Sym
*s
)
1262 if (IS_ENUM_VAL (s
->type
.t
))
1263 return s
->type
.ref
->sym_scope
;
1265 return s
->sym_scope
;
1268 /* push a given symbol on the symbol stack */
1269 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
1278 s
= sym_push2(ps
, v
, type
->t
, c
);
1279 s
->type
.ref
= type
->ref
;
1281 /* don't record fields or anonymous symbols */
1283 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1284 /* record symbol in token array */
1285 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1287 ps
= &ts
->sym_struct
;
1289 ps
= &ts
->sym_identifier
;
1292 s
->sym_scope
= local_scope
;
1293 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
1294 tcc_error("redeclaration of '%s'",
1295 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
1300 /* push a global identifier */
1301 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
1304 s
= sym_push2(&global_stack
, v
, t
, c
);
1305 s
->r
= VT_CONST
| VT_SYM
;
1306 /* don't record anonymous symbol */
1307 if (v
< SYM_FIRST_ANOM
) {
1308 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
1309 /* modify the top most local identifier, so that sym_identifier will
1310 point to 's' when popped; happens when called from inline asm */
1311 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
1312 ps
= &(*ps
)->prev_tok
;
1319 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1320 pop them yet from the list, but do remove them from the token array. */
1321 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
1331 /* remove symbol in token array */
1333 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1334 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1336 ps
= &ts
->sym_struct
;
1338 ps
= &ts
->sym_identifier
;
1349 /* ------------------------------------------------------------------------- */
1350 static void vcheck_cmp(void)
1352 /* cannot let cpu flags if other instruction are generated. Also
1353 avoid leaving VT_JMP anywhere except on the top of the stack
1354 because it would complicate the code generator.
1356 Don't do this when nocode_wanted. vtop might come from
1357 !nocode_wanted regions (see 88_codeopt.c) and transforming
1358 it to a register without actually generating code is wrong
1359 as their value might still be used for real. All values
1360 we push under nocode_wanted will eventually be popped
1361 again, so that the VT_CMP/VT_JMP value will be in vtop
1362 when code is unsuppressed again. */
1364 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
1368 static void vsetc(CType
*type
, int r
, CValue
*vc
)
1370 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1371 tcc_error("memory full (vstack)");
1376 vtop
->r2
= VT_CONST
;
1381 ST_FUNC
void vswap(void)
1391 /* pop stack value */
1392 ST_FUNC
void vpop(void)
1395 v
= vtop
->r
& VT_VALMASK
;
1396 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1397 /* for x86, we need to pop the FP stack */
1398 if (v
== TREG_ST0
) {
1399 o(0xd8dd); /* fstp %st(0) */
1403 /* need to put correct jump if && or || without test */
1410 /* push constant of type "type" with useless value */
1411 static void vpush(CType
*type
)
1413 vset(type
, VT_CONST
, 0);
1416 /* push arbitrary 64bit constant */
1417 static void vpush64(int ty
, unsigned long long v
)
1424 vsetc(&ctype
, VT_CONST
, &cval
);
1427 /* push integer constant */
1428 ST_FUNC
void vpushi(int v
)
1433 /* push a pointer sized constant */
1434 static void vpushs(addr_t v
)
1436 vpush64(VT_SIZE_T
, v
);
1439 /* push long long constant */
1440 static inline void vpushll(long long v
)
1442 vpush64(VT_LLONG
, v
);
1445 ST_FUNC
void vset(CType
*type
, int r
, int v
)
1449 vsetc(type
, r
, &cval
);
1452 static void vseti(int r
, int v
)
1460 ST_FUNC
void vpushv(SValue
*v
)
1462 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1463 tcc_error("memory full (vstack)");
1468 static void vdup(void)
1473 /* rotate n first stack elements to the bottom
1474 I1 ... In -> I2 ... In I1 [top is right]
1476 ST_FUNC
void vrotb(int n
)
1483 for(i
=-n
+1;i
!=0;i
++)
1484 vtop
[i
] = vtop
[i
+1];
1488 /* rotate the n elements before entry e towards the top
1489 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1491 ST_FUNC
void vrote(SValue
*e
, int n
)
1498 for(i
= 0;i
< n
- 1; i
++)
1503 /* rotate n first stack elements to the top
1504 I1 ... In -> In I1 ... I(n-1) [top is right]
1506 ST_FUNC
void vrott(int n
)
1511 /* ------------------------------------------------------------------------- */
1512 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1514 /* called from generators to set the result from relational ops */
1515 ST_FUNC
void vset_VT_CMP(int op
)
1523 /* called once before asking generators to load VT_CMP to a register */
1524 static void vset_VT_JMP(void)
1526 int op
= vtop
->cmp_op
;
1528 if (vtop
->jtrue
|| vtop
->jfalse
) {
1529 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1530 int inv
= op
& (op
< 2); /* small optimization */
1531 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1533 /* otherwise convert flags (rsp. 0/1) to register */
1535 if (op
< 2) /* doesn't seem to happen */
1540 /* Set CPU Flags, doesn't yet jump */
1541 static void gvtst_set(int inv
, int t
)
1545 if (vtop
->r
!= VT_CMP
) {
1548 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1549 vset_VT_CMP(vtop
->c
.i
!= 0);
1552 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1553 *p
= gjmp_append(*p
, t
);
1556 /* Generate value test
1558 * Generate a test for any value (jump, comparison and integers) */
1559 static int gvtst(int inv
, int t
)
1564 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1566 x
= u
, u
= t
, t
= x
;
1569 /* jump to the wanted target */
1571 t
= gjmp_cond(op
^ inv
, t
);
1574 /* resolve complementary jumps to here */
1581 /* generate a zero or nozero test */
1582 static void gen_test_zero(int op
)
1584 if (vtop
->r
== VT_CMP
) {
1588 vtop
->jfalse
= vtop
->jtrue
;
1598 /* ------------------------------------------------------------------------- */
1599 /* push a symbol value of TYPE */
1600 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1604 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1608 /* Return a static symbol pointing to a section */
1609 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1615 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1616 sym
->type
.t
|= VT_STATIC
;
1617 put_extern_sym(sym
, sec
, offset
, size
);
1621 /* push a reference to a section offset by adding a dummy symbol */
1622 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1624 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1627 /* define a new external reference to a symbol 'v' of type 'u' */
1628 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1634 /* push forward reference */
1635 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1636 s
->type
.ref
= type
->ref
;
1637 } else if (IS_ASM_SYM(s
)) {
1638 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1639 s
->type
.ref
= type
->ref
;
1645 /* create an external reference with no specific type similar to asm labels.
1646 This avoids type conflicts if the symbol is used from C too */
1647 ST_FUNC Sym
*external_helper_sym(int v
)
1649 CType ct
= { VT_ASM_FUNC
, NULL
};
1650 return external_global_sym(v
, &ct
);
1653 /* push a reference to an helper function (such as memmove) */
1654 ST_FUNC
void vpush_helper_func(int v
)
1656 vpushsym(&func_old_type
, external_helper_sym(v
));
1659 /* Merge symbol attributes. */
1660 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1662 if (sa1
->aligned
&& !sa
->aligned
)
1663 sa
->aligned
= sa1
->aligned
;
1664 sa
->packed
|= sa1
->packed
;
1665 sa
->weak
|= sa1
->weak
;
1666 if (sa1
->visibility
!= STV_DEFAULT
) {
1667 int vis
= sa
->visibility
;
1668 if (vis
== STV_DEFAULT
1669 || vis
> sa1
->visibility
)
1670 vis
= sa1
->visibility
;
1671 sa
->visibility
= vis
;
1673 sa
->dllexport
|= sa1
->dllexport
;
1674 sa
->nodecorate
|= sa1
->nodecorate
;
1675 sa
->dllimport
|= sa1
->dllimport
;
1678 /* Merge function attributes. */
1679 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1681 if (fa1
->func_call
&& !fa
->func_call
)
1682 fa
->func_call
= fa1
->func_call
;
1683 if (fa1
->func_type
&& !fa
->func_type
)
1684 fa
->func_type
= fa1
->func_type
;
1685 if (fa1
->func_args
&& !fa
->func_args
)
1686 fa
->func_args
= fa1
->func_args
;
1687 if (fa1
->func_noreturn
)
1688 fa
->func_noreturn
= 1;
1695 /* Merge attributes. */
1696 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1698 merge_symattr(&ad
->a
, &ad1
->a
);
1699 merge_funcattr(&ad
->f
, &ad1
->f
);
1702 ad
->section
= ad1
->section
;
1703 if (ad1
->alias_target
)
1704 ad
->alias_target
= ad1
->alias_target
;
1706 ad
->asm_label
= ad1
->asm_label
;
1708 ad
->attr_mode
= ad1
->attr_mode
;
1711 /* Merge some type attributes. */
1712 static void patch_type(Sym
*sym
, CType
*type
)
1714 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1715 if (!(sym
->type
.t
& VT_EXTERN
))
1716 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1717 sym
->type
.t
&= ~VT_EXTERN
;
1720 if (IS_ASM_SYM(sym
)) {
1721 /* stay static if both are static */
1722 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1723 sym
->type
.ref
= type
->ref
;
1726 if (!is_compatible_types(&sym
->type
, type
)) {
1727 tcc_error("incompatible types for redefinition of '%s'",
1728 get_tok_str(sym
->v
, NULL
));
1730 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1731 int static_proto
= sym
->type
.t
& VT_STATIC
;
1732 /* warn if static follows non-static function declaration */
1733 if ((type
->t
& VT_STATIC
) && !static_proto
1734 /* XXX this test for inline shouldn't be here. Until we
1735 implement gnu-inline mode again it silences a warning for
1736 mingw caused by our workarounds. */
1737 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1738 tcc_warning("static storage ignored for redefinition of '%s'",
1739 get_tok_str(sym
->v
, NULL
));
1741 /* set 'inline' if both agree or if one has static */
1742 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1743 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1744 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1745 static_proto
|= VT_INLINE
;
1748 if (0 == (type
->t
& VT_EXTERN
)) {
1749 struct FuncAttr f
= sym
->type
.ref
->f
;
1750 /* put complete type, use static from prototype */
1751 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1752 sym
->type
.ref
= type
->ref
;
1753 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1755 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1758 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1759 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1760 sym
->type
.ref
= type
->ref
;
1764 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1765 /* set array size if it was omitted in extern declaration */
1766 sym
->type
.ref
->c
= type
->ref
->c
;
1768 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1769 tcc_warning("storage mismatch for redefinition of '%s'",
1770 get_tok_str(sym
->v
, NULL
));
1774 /* Merge some storage attributes. */
1775 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1778 patch_type(sym
, type
);
1780 #ifdef TCC_TARGET_PE
1781 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1782 tcc_error("incompatible dll linkage for redefinition of '%s'",
1783 get_tok_str(sym
->v
, NULL
));
1785 merge_symattr(&sym
->a
, &ad
->a
);
1787 sym
->asm_label
= ad
->asm_label
;
1788 update_storage(sym
);
1791 /* copy sym to other stack */
1792 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1795 s
= sym_malloc(), *s
= *s0
;
1796 s
->prev
= *ps
, *ps
= s
;
1797 if (s
->v
< SYM_FIRST_ANOM
) {
1798 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1799 s
->prev_tok
= *ps
, *ps
= s
;
1804 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1805 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1807 int bt
= s
->type
.t
& VT_BTYPE
;
1808 if (bt
== VT_FUNC
|| bt
== VT_PTR
|| (bt
== VT_STRUCT
&& s
->sym_scope
)) {
1809 Sym
**sp
= &s
->type
.ref
;
1810 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1811 Sym
*s2
= sym_copy(s
, ps
);
1812 sp
= &(*sp
= s2
)->next
;
1813 sym_copy_ref(s2
, ps
);
1818 /* define a new external reference to a symbol 'v' */
1819 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1823 /* look for global symbol */
1825 while (s
&& s
->sym_scope
)
1829 /* push forward reference */
1830 s
= global_identifier_push(v
, type
->t
, 0);
1833 s
->asm_label
= ad
->asm_label
;
1834 s
->type
.ref
= type
->ref
;
1835 /* copy type to the global stack */
1837 sym_copy_ref(s
, &global_stack
);
1839 patch_storage(s
, ad
, type
);
1841 /* push variables on local_stack if any */
1842 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1843 s
= sym_copy(s
, &local_stack
);
1847 /* save registers up to (vtop - n) stack entry */
1848 ST_FUNC
void save_regs(int n
)
1851 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1855 /* save r to the memory stack, and mark it as being free */
1856 ST_FUNC
void save_reg(int r
)
1858 save_reg_upstack(r
, 0);
1861 /* save r to the memory stack, and mark it as being free,
1862 if seen up to (vtop - n) stack entry */
1863 ST_FUNC
void save_reg_upstack(int r
, int n
)
1865 int l
, size
, align
, bt
;
1868 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1873 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1874 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1875 /* must save value on stack if not already done */
1877 bt
= p
->type
.t
& VT_BTYPE
;
1880 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1883 size
= type_size(&sv
.type
, &align
);
1884 l
= get_temp_local_var(size
,align
);
1885 sv
.r
= VT_LOCAL
| VT_LVAL
;
1887 store(p
->r
& VT_VALMASK
, &sv
);
1888 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1889 /* x86 specific: need to pop fp register ST0 if saved */
1890 if (r
== TREG_ST0
) {
1891 o(0xd8dd); /* fstp %st(0) */
1894 /* special long long case */
1895 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1900 /* mark that stack entry as being saved on the stack */
1901 if (p
->r
& VT_LVAL
) {
1902 /* also clear the bounded flag because the
1903 relocation address of the function was stored in
1905 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1907 p
->r
= VT_LVAL
| VT_LOCAL
;
1916 #ifdef TCC_TARGET_ARM
1917 /* find a register of class 'rc2' with at most one reference on stack.
1918 * If none, call get_reg(rc) */
1919 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1924 for(r
=0;r
<NB_REGS
;r
++) {
1925 if (reg_classes
[r
] & rc2
) {
1928 for(p
= vstack
; p
<= vtop
; p
++) {
1929 if ((p
->r
& VT_VALMASK
) == r
||
1941 /* find a free register of class 'rc'. If none, save one register */
1942 ST_FUNC
int get_reg(int rc
)
1947 /* find a free register */
1948 for(r
=0;r
<NB_REGS
;r
++) {
1949 if (reg_classes
[r
] & rc
) {
1952 for(p
=vstack
;p
<=vtop
;p
++) {
1953 if ((p
->r
& VT_VALMASK
) == r
||
1962 /* no register left : free the first one on the stack (VERY
1963 IMPORTANT to start from the bottom to ensure that we don't
1964 spill registers used in gen_opi()) */
1965 for(p
=vstack
;p
<=vtop
;p
++) {
1966 /* look at second register (if long long) */
1968 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1970 r
= p
->r
& VT_VALMASK
;
1971 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1977 /* Should never comes here */
1981 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1982 static int get_temp_local_var(int size
,int align
){
1984 struct temp_local_variable
*temp_var
;
1991 for(i
=0;i
<nb_temp_local_vars
;i
++){
1992 temp_var
=&arr_temp_local_vars
[i
];
1993 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1996 /*check if temp_var is free*/
1998 for(p
=vstack
;p
<=vtop
;p
++) {
2000 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
2001 if(p
->c
.i
==temp_var
->location
){
2008 found_var
=temp_var
->location
;
2014 loc
= (loc
- size
) & -align
;
2015 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
2016 temp_var
=&arr_temp_local_vars
[i
];
2017 temp_var
->location
=loc
;
2018 temp_var
->size
=size
;
2019 temp_var
->align
=align
;
2020 nb_temp_local_vars
++;
2027 static void clear_temp_local_var_list(){
2028 nb_temp_local_vars
=0;
2031 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
2033 static void move_reg(int r
, int s
, int t
)
2047 /* get address of vtop (vtop MUST BE an lvalue) */
2048 ST_FUNC
void gaddrof(void)
2050 vtop
->r
&= ~VT_LVAL
;
2051 /* tricky: if saved lvalue, then we can go back to lvalue */
2052 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
2053 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
2056 #ifdef CONFIG_TCC_BCHECK
2057 /* generate a bounded pointer addition */
2058 static void gen_bounded_ptr_add(void)
2060 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
2065 vpush_helper_func(TOK___bound_ptr_add
);
2070 /* returned pointer is in REG_IRET */
2071 vtop
->r
= REG_IRET
| VT_BOUNDED
;
2074 /* relocation offset of the bounding function call point */
2075 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
2078 /* patch pointer addition in vtop so that pointer dereferencing is
2080 static void gen_bounded_ptr_deref(void)
2090 size
= type_size(&vtop
->type
, &align
);
2092 case 1: func
= TOK___bound_ptr_indir1
; break;
2093 case 2: func
= TOK___bound_ptr_indir2
; break;
2094 case 4: func
= TOK___bound_ptr_indir4
; break;
2095 case 8: func
= TOK___bound_ptr_indir8
; break;
2096 case 12: func
= TOK___bound_ptr_indir12
; break;
2097 case 16: func
= TOK___bound_ptr_indir16
; break;
2099 /* may happen with struct member access */
2102 sym
= external_helper_sym(func
);
2104 put_extern_sym(sym
, NULL
, 0, 0);
2105 /* patch relocation */
2106 /* XXX: find a better solution ? */
2107 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
2108 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
2111 /* generate lvalue bound code */
2112 static void gbound(void)
2116 vtop
->r
&= ~VT_MUSTBOUND
;
2117 /* if lvalue, then use checking code before dereferencing */
2118 if (vtop
->r
& VT_LVAL
) {
2119 /* if not VT_BOUNDED value, then make one */
2120 if (!(vtop
->r
& VT_BOUNDED
)) {
2121 /* must save type because we must set it to int to get pointer */
2123 vtop
->type
.t
= VT_PTR
;
2126 gen_bounded_ptr_add();
2130 /* then check for dereferencing */
2131 gen_bounded_ptr_deref();
2135 /* we need to call __bound_ptr_add before we start to load function
2136 args into registers */
2137 ST_FUNC
void gbound_args(int nb_args
)
2142 for (i
= 1; i
<= nb_args
; ++i
)
2143 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
2149 sv
= vtop
- nb_args
;
2150 if (sv
->r
& VT_SYM
) {
2154 #ifndef TCC_TARGET_PE
2155 || v
== TOK_sigsetjmp
2156 || v
== TOK___sigsetjmp
2159 vpush_helper_func(TOK___bound_setjmp
);
2162 func_bound_add_epilog
= 1;
2164 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
2165 if (v
== TOK_alloca
)
2166 func_bound_add_epilog
= 1;
2169 if (v
== TOK_longjmp
) /* undo rename to __longjmp14 */
2170 sv
->sym
->asm_label
= TOK___bound_longjmp
;
2175 /* Add bounds for local symbols from S to E (via ->prev) */
2176 static void add_local_bounds(Sym
*s
, Sym
*e
)
2178 for (; s
!= e
; s
= s
->prev
) {
2179 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
2181 /* Add arrays/structs/unions because we always take address */
2182 if ((s
->type
.t
& VT_ARRAY
)
2183 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
2184 || s
->a
.addrtaken
) {
2185 /* add local bound info */
2186 int align
, size
= type_size(&s
->type
, &align
);
2187 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
2188 2 * sizeof(addr_t
));
2189 bounds_ptr
[0] = s
->c
;
2190 bounds_ptr
[1] = size
;
2196 /* Wrapper around sym_pop, that potentially also registers local bounds. */
2197 static void pop_local_syms(Sym
*b
, int keep
)
2199 #ifdef CONFIG_TCC_BCHECK
2200 if (tcc_state
->do_bounds_check
&& !keep
&& (local_scope
|| !func_var
))
2201 add_local_bounds(local_stack
, b
);
2204 tcc_add_debug_info (tcc_state
, !local_scope
, local_stack
, b
);
2205 sym_pop(&local_stack
, b
, keep
);
2208 static void incr_bf_adr(int o
)
2210 vtop
->type
= char_pointer_type
;
2214 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
2218 /* single-byte load mode for packed or otherwise unaligned bitfields */
2219 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
2222 save_reg_upstack(vtop
->r
, 1);
2223 vpush64(type
->t
& VT_BTYPE
, 0); // B X
2224 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2233 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
2235 vpushi((1 << n
) - 1), gen_op('&');
2238 vpushi(bits
), gen_op(TOK_SHL
);
2241 bits
+= n
, bit_size
-= n
, o
= 1;
2244 if (!(type
->t
& VT_UNSIGNED
)) {
2245 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
2246 vpushi(n
), gen_op(TOK_SHL
);
2247 vpushi(n
), gen_op(TOK_SAR
);
2251 /* single-byte store mode for packed or otherwise unaligned bitfields */
2252 static void store_packed_bf(int bit_pos
, int bit_size
)
2254 int bits
, n
, o
, m
, c
;
2255 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2257 save_reg_upstack(vtop
->r
, 1);
2258 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2260 incr_bf_adr(o
); // X B
2262 c
? vdup() : gv_dup(); // B V X
2265 vpushi(bits
), gen_op(TOK_SHR
);
2267 vpushi(bit_pos
), gen_op(TOK_SHL
);
2272 m
= ((1 << n
) - 1) << bit_pos
;
2273 vpushi(m
), gen_op('&'); // X B V1
2274 vpushv(vtop
-1); // X B V1 B
2275 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
2276 gen_op('&'); // X B V1 B1
2277 gen_op('|'); // X B V2
2279 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
2280 vstore(), vpop(); // X B
2281 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
2286 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
2289 if (0 == sv
->type
.ref
)
2291 t
= sv
->type
.ref
->auxtype
;
2292 if (t
!= -1 && t
!= VT_STRUCT
) {
2293 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
2299 /* store vtop a register belonging to class 'rc'. lvalues are
2300 converted to values. Cannot be used if cannot be converted to
2301 register value (such as structures). */
2302 ST_FUNC
int gv(int rc
)
2304 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
2305 int bit_pos
, bit_size
, size
, align
;
2307 /* NOTE: get_reg can modify vstack[] */
2308 if (vtop
->type
.t
& VT_BITFIELD
) {
2311 bit_pos
= BIT_POS(vtop
->type
.t
);
2312 bit_size
= BIT_SIZE(vtop
->type
.t
);
2313 /* remove bit field info to avoid loops */
2314 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
2317 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
2318 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
2319 type
.t
|= VT_UNSIGNED
;
2321 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
2323 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2328 if (r
== VT_STRUCT
) {
2329 load_packed_bf(&type
, bit_pos
, bit_size
);
2331 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
2332 /* cast to int to propagate signedness in following ops */
2334 /* generate shifts */
2335 vpushi(bits
- (bit_pos
+ bit_size
));
2337 vpushi(bits
- bit_size
);
2338 /* NOTE: transformed to SHR if unsigned */
2343 if (is_float(vtop
->type
.t
) &&
2344 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2345 /* CPUs usually cannot use float constants, so we store them
2346 generically in data segment */
2347 init_params p
= { rodata_section
};
2348 unsigned long offset
;
2349 size
= type_size(&vtop
->type
, &align
);
2351 size
= 0, align
= 1;
2352 offset
= section_add(p
.sec
, size
, align
);
2353 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
2355 init_putv(&p
, &vtop
->type
, offset
);
2358 #ifdef CONFIG_TCC_BCHECK
2359 if (vtop
->r
& VT_MUSTBOUND
)
2363 bt
= vtop
->type
.t
& VT_BTYPE
;
2365 #ifdef TCC_TARGET_RISCV64
2367 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
2370 rc2
= RC2_TYPE(bt
, rc
);
2372 /* need to reload if:
2374 - lvalue (need to dereference pointer)
2375 - already a register, but not in the right class */
2376 r
= vtop
->r
& VT_VALMASK
;
2377 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
2378 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
2380 if (!r_ok
|| !r2_ok
) {
2384 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
2385 int original_type
= vtop
->type
.t
;
2387 /* two register type load :
2388 expand to two words temporarily */
2389 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2391 unsigned long long ll
= vtop
->c
.i
;
2392 vtop
->c
.i
= ll
; /* first word */
2394 vtop
->r
= r
; /* save register value */
2395 vpushi(ll
>> 32); /* second word */
2396 } else if (vtop
->r
& VT_LVAL
) {
2397 /* We do not want to modifier the long long pointer here.
2398 So we save any other instances down the stack */
2399 save_reg_upstack(vtop
->r
, 1);
2400 /* load from memory */
2401 vtop
->type
.t
= load_type
;
2404 vtop
[-1].r
= r
; /* save register value */
2405 /* increment pointer to get second word */
2406 vtop
->type
.t
= VT_PTRDIFF_T
;
2411 vtop
->type
.t
= load_type
;
2413 /* move registers */
2416 if (r2_ok
&& vtop
->r2
< VT_CONST
)
2419 vtop
[-1].r
= r
; /* save register value */
2420 vtop
->r
= vtop
[-1].r2
;
2422 /* Allocate second register. Here we rely on the fact that
2423 get_reg() tries first to free r2 of an SValue. */
2427 /* write second register */
2430 vtop
->type
.t
= original_type
;
2432 if (vtop
->r
== VT_CMP
)
2434 /* one register type load */
2439 #ifdef TCC_TARGET_C67
2440 /* uses register pairs for doubles */
2441 if (bt
== VT_DOUBLE
)
2448 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2449 ST_FUNC
void gv2(int rc1
, int rc2
)
2451 /* generate more generic register first. But VT_JMP or VT_CMP
2452 values must be generated first in all cases to avoid possible
2454 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
2459 /* test if reload is needed for first register */
2460 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
2470 /* test if reload is needed for first register */
2471 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
2478 /* expand 64bit on stack in two ints */
2479 ST_FUNC
void lexpand(void)
2482 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
2483 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
2484 if (v
== VT_CONST
) {
2487 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
2493 vtop
[0].r
= vtop
[-1].r2
;
2494 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2496 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2501 /* build a long long from two ints */
2502 static void lbuild(int t
)
2504 gv2(RC_INT
, RC_INT
);
2505 vtop
[-1].r2
= vtop
[0].r
;
2506 vtop
[-1].type
.t
= t
;
2511 /* convert stack entry to register and duplicate its value in another
2513 static void gv_dup(void)
2519 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2520 if (t
& VT_BITFIELD
) {
2530 /* stack: H L L1 H1 */
2540 /* duplicate value */
2550 /* generate CPU independent (unsigned) long long operations */
2551 static void gen_opl(int op
)
2553 int t
, a
, b
, op1
, c
, i
;
2555 unsigned short reg_iret
= REG_IRET
;
2556 unsigned short reg_lret
= REG_IRE2
;
2562 func
= TOK___divdi3
;
2565 func
= TOK___udivdi3
;
2568 func
= TOK___moddi3
;
2571 func
= TOK___umoddi3
;
2578 /* call generic long long function */
2579 vpush_helper_func(func
);
2584 vtop
->r2
= reg_lret
;
2592 //pv("gen_opl A",0,2);
2598 /* stack: L1 H1 L2 H2 */
2603 vtop
[-2] = vtop
[-3];
2606 /* stack: H1 H2 L1 L2 */
2607 //pv("gen_opl B",0,4);
2613 /* stack: H1 H2 L1 L2 ML MH */
2616 /* stack: ML MH H1 H2 L1 L2 */
2620 /* stack: ML MH H1 L2 H2 L1 */
2625 /* stack: ML MH M1 M2 */
2628 } else if (op
== '+' || op
== '-') {
2629 /* XXX: add non carry method too (for MIPS or alpha) */
2635 /* stack: H1 H2 (L1 op L2) */
2638 gen_op(op1
+ 1); /* TOK_xxxC2 */
2641 /* stack: H1 H2 (L1 op L2) */
2644 /* stack: (L1 op L2) H1 H2 */
2646 /* stack: (L1 op L2) (H1 op H2) */
2654 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2655 t
= vtop
[-1].type
.t
;
2659 /* stack: L H shift */
2661 /* constant: simpler */
2662 /* NOTE: all comments are for SHL. the other cases are
2663 done by swapping words */
2674 if (op
!= TOK_SAR
) {
2707 /* XXX: should provide a faster fallback on x86 ? */
2710 func
= TOK___ashrdi3
;
2713 func
= TOK___lshrdi3
;
2716 func
= TOK___ashldi3
;
2722 /* compare operations */
2728 /* stack: L1 H1 L2 H2 */
2730 vtop
[-1] = vtop
[-2];
2732 /* stack: L1 L2 H1 H2 */
2736 /* when values are equal, we need to compare low words. since
2737 the jump is inverted, we invert the test too. */
2740 else if (op1
== TOK_GT
)
2742 else if (op1
== TOK_ULT
)
2744 else if (op1
== TOK_UGT
)
2754 /* generate non equal test */
2756 vset_VT_CMP(TOK_NE
);
2760 /* compare low. Always unsigned */
2764 else if (op1
== TOK_LE
)
2766 else if (op1
== TOK_GT
)
2768 else if (op1
== TOK_GE
)
2771 #if 0//def TCC_TARGET_I386
2772 if (op
== TOK_NE
) { gsym(b
); break; }
2773 if (op
== TOK_EQ
) { gsym(a
); break; }
/* signed 64-bit division implemented on unsigned operands:
   divide the magnitudes, then reapply the sign of the quotient
   (two's-complement negation via unsigned wraparound is well defined) */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t num = a >> 63 ? 0 - a : a;
    uint64_t den = b >> 63 ? 0 - b : b;
    uint64_t quot = num / den;

    /* result is negative exactly when the operand signs differ */
    return (a ^ b) >> 63 ? 0 - quot : quot;
}
/* signed 64-bit "less than" on unsigned operands: flipping the sign bit
   biases both values so the signed order maps onto the unsigned order */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;

    return (a ^ sign_bit) < (b ^ sign_bit);
}
2793 /* handle integer constant optimizations and various machine
2795 static void gen_opic(int op
)
2797 SValue
*v1
= vtop
- 1;
2799 int t1
= v1
->type
.t
& VT_BTYPE
;
2800 int t2
= v2
->type
.t
& VT_BTYPE
;
2801 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2802 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2803 uint64_t l1
= c1
? v1
->c
.i
: 0;
2804 uint64_t l2
= c2
? v2
->c
.i
: 0;
2805 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2807 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2808 l1
= ((uint32_t)l1
|
2809 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2810 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2811 l2
= ((uint32_t)l2
|
2812 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2816 case '+': l1
+= l2
; break;
2817 case '-': l1
-= l2
; break;
2818 case '&': l1
&= l2
; break;
2819 case '^': l1
^= l2
; break;
2820 case '|': l1
|= l2
; break;
2821 case '*': l1
*= l2
; break;
2828 /* if division by zero, generate explicit division */
2830 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2831 tcc_error("division by zero in constant");
2835 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2836 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2837 case TOK_UDIV
: l1
= l1
/ l2
; break;
2838 case TOK_UMOD
: l1
= l1
% l2
; break;
2841 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2842 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2844 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2847 case TOK_ULT
: l1
= l1
< l2
; break;
2848 case TOK_UGE
: l1
= l1
>= l2
; break;
2849 case TOK_EQ
: l1
= l1
== l2
; break;
2850 case TOK_NE
: l1
= l1
!= l2
; break;
2851 case TOK_ULE
: l1
= l1
<= l2
; break;
2852 case TOK_UGT
: l1
= l1
> l2
; break;
2853 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2854 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2855 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2856 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2858 case TOK_LAND
: l1
= l1
&& l2
; break;
2859 case TOK_LOR
: l1
= l1
|| l2
; break;
2863 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2864 l1
= ((uint32_t)l1
|
2865 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2869 /* if commutative ops, put c2 as constant */
2870 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2871 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2873 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2874 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2876 if (!const_wanted
&&
2878 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2879 (l1
== -1 && op
== TOK_SAR
))) {
2880 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2882 } else if (!const_wanted
&&
2883 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2885 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2886 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2887 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2892 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2895 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2896 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2899 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2900 /* filter out NOP operations like x*1, x-0, x&-1... */
2902 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2903 /* try to use shifts instead of muls or divs */
2904 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2913 else if (op
== TOK_PDIV
)
2919 } else if (c2
&& (op
== '+' || op
== '-') &&
2920 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2921 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2922 /* symbol + constant case */
2926 /* The backends can't always deal with addends to symbols
2927 larger than +-1<<31. Don't construct such. */
2934 /* call low level op generator */
2935 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2936 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2944 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2945 # define gen_negf gen_opf
2946 #elif defined TCC_TARGET_ARM
2947 void gen_negf(int op
)
2949 /* arm will detect 0-x and replace by vneg */
2950 vpushi(0), vswap(), gen_op('-');
2953 /* XXX: implement in gen_opf() for other backends too */
2954 void gen_negf(int op
)
2956 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2957 subtract(-0, x), but with them it's really a sign flip
2958 operation. We implement this with bit manipulation and have
2959 to do some type reinterpretation for this, which TCC can do
2962 int align
, size
, bt
;
2964 size
= type_size(&vtop
->type
, &align
);
2965 bt
= vtop
->type
.t
& VT_BTYPE
;
2966 save_reg(gv(RC_TYPE(bt
)));
2968 incr_bf_adr(size
- 1);
2970 vpushi(0x80); /* flip sign */
2977 /* generate a floating point operation with constant propagation */
2978 static void gen_opif(int op
)
2982 #if defined _MSC_VER && defined __x86_64__
2983 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2993 /* currently, we cannot do computations with forward symbols */
2994 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2995 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2997 if (v1
->type
.t
== VT_FLOAT
) {
3000 } else if (v1
->type
.t
== VT_DOUBLE
) {
3007 /* NOTE: we only do constant propagation if finite number (not
3008 NaN or infinity) (ANSI spec) */
3009 if (!(ieee_finite(f1
) || !ieee_finite(f2
)) && !const_wanted
)
3012 case '+': f1
+= f2
; break;
3013 case '-': f1
-= f2
; break;
3014 case '*': f1
*= f2
; break;
3017 union { float f
; unsigned u
; } x1
, x2
, y
;
3018 /* If not in initializer we need to potentially generate
3019 FP exceptions at runtime, otherwise we want to fold. */
3022 /* the run-time result of 0.0/0.0 on x87, also of other compilers
3023 when used to compile the f1 /= f2 below, would be -nan */
3024 x1
.f
= f1
, x2
.f
= f2
;
3026 y
.u
= 0x7fc00000; /* nan */
3028 y
.u
= 0x7f800000; /* infinity */
3029 y
.u
|= (x1
.u
^ x2
.u
) & 0x80000000; /* set sign */
3038 /* XXX: also handles tests ? */
3044 /* XXX: overflow test ? */
3045 if (v1
->type
.t
== VT_FLOAT
) {
3047 } else if (v1
->type
.t
== VT_DOUBLE
) {
3054 if (op
== TOK_NEG
) {
3062 /* print a type. If 'varstr' is not NULL, then the variable is also
3063 printed in the type */
3065 /* XXX: add array and function pointers */
3066 static void type_to_str(char *buf
, int buf_size
,
3067 CType
*type
, const char *varstr
)
3079 pstrcat(buf
, buf_size
, "extern ");
3081 pstrcat(buf
, buf_size
, "static ");
3083 pstrcat(buf
, buf_size
, "typedef ");
3085 pstrcat(buf
, buf_size
, "inline ");
3087 if (t
& VT_VOLATILE
)
3088 pstrcat(buf
, buf_size
, "volatile ");
3089 if (t
& VT_CONSTANT
)
3090 pstrcat(buf
, buf_size
, "const ");
3092 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
3093 || ((t
& VT_UNSIGNED
)
3094 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
3097 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
3099 buf_size
-= strlen(buf
);
3135 tstr
= "long double";
3137 pstrcat(buf
, buf_size
, tstr
);
3144 pstrcat(buf
, buf_size
, tstr
);
3145 v
= type
->ref
->v
& ~SYM_STRUCT
;
3146 if (v
>= SYM_FIRST_ANOM
)
3147 pstrcat(buf
, buf_size
, "<anonymous>");
3149 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
3154 if (varstr
&& '*' == *varstr
) {
3155 pstrcat(buf1
, sizeof(buf1
), "(");
3156 pstrcat(buf1
, sizeof(buf1
), varstr
);
3157 pstrcat(buf1
, sizeof(buf1
), ")");
3159 pstrcat(buf1
, buf_size
, "(");
3161 while (sa
!= NULL
) {
3163 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
3164 pstrcat(buf1
, sizeof(buf1
), buf2
);
3167 pstrcat(buf1
, sizeof(buf1
), ", ");
3169 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
3170 pstrcat(buf1
, sizeof(buf1
), ", ...");
3171 pstrcat(buf1
, sizeof(buf1
), ")");
3172 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3177 if (varstr
&& '*' == *varstr
)
3178 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
3180 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
3181 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3184 pstrcpy(buf1
, sizeof(buf1
), "*");
3185 if (t
& VT_CONSTANT
)
3186 pstrcat(buf1
, buf_size
, "const ");
3187 if (t
& VT_VOLATILE
)
3188 pstrcat(buf1
, buf_size
, "volatile ");
3190 pstrcat(buf1
, sizeof(buf1
), varstr
);
3191 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
3195 pstrcat(buf
, buf_size
, " ");
3196 pstrcat(buf
, buf_size
, varstr
);
3201 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
3203 char buf1
[256], buf2
[256];
3204 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3205 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3206 tcc_error(fmt
, buf1
, buf2
);
3209 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
3211 char buf1
[256], buf2
[256];
3212 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3213 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3214 tcc_warning(fmt
, buf1
, buf2
);
3217 static int pointed_size(CType
*type
)
3220 return type_size(pointed_type(type
), &align
);
3223 static void vla_runtime_pointed_size(CType
*type
)
3226 vla_runtime_type_size(pointed_type(type
), &align
);
3229 static inline int is_null_pointer(SValue
*p
)
3231 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
3233 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
3234 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
3235 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
3236 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
3237 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
3238 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3242 /* compare function types. OLD functions match any new functions */
3243 static int is_compatible_func(CType
*type1
, CType
*type2
)
3249 if (s1
->f
.func_call
!= s2
->f
.func_call
)
3251 if (s1
->f
.func_type
!= s2
->f
.func_type
3252 && s1
->f
.func_type
!= FUNC_OLD
3253 && s2
->f
.func_type
!= FUNC_OLD
)
3256 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
3258 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
3269 /* return true if type1 and type2 are the same. If unqualified is
3270 true, qualifiers on the types are ignored.
3272 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3276 t1
= type1
->t
& VT_TYPE
;
3277 t2
= type2
->t
& VT_TYPE
;
3279 /* strip qualifiers before comparing */
3280 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3281 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3284 /* Default Vs explicit signedness only matters for char */
3285 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3289 /* XXX: bitfields ? */
3294 && !(type1
->ref
->c
< 0
3295 || type2
->ref
->c
< 0
3296 || type1
->ref
->c
== type2
->ref
->c
))
3299 /* test more complicated cases */
3300 bt1
= t1
& VT_BTYPE
;
3301 if (bt1
== VT_PTR
) {
3302 type1
= pointed_type(type1
);
3303 type2
= pointed_type(type2
);
3304 return is_compatible_types(type1
, type2
);
3305 } else if (bt1
== VT_STRUCT
) {
3306 return (type1
->ref
== type2
->ref
);
3307 } else if (bt1
== VT_FUNC
) {
3308 return is_compatible_func(type1
, type2
);
3309 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
3310 /* If both are enums then they must be the same, if only one is then
3311 t1 and t2 must be equal, which was checked above already. */
3312 return type1
->ref
== type2
->ref
;
3318 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3319 type is stored in DEST if non-null (except for pointer plus/minus) . */
3320 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
3322 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
3323 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
3329 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
3330 ret
= op
== '?' ? 1 : 0;
3331 /* NOTE: as an extension, we accept void on only one side */
3333 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3334 if (op
== '+') ; /* Handled in caller */
3335 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3336 /* If one is a null ptr constant the result type is the other. */
3337 else if (is_null_pointer (op2
)) type
= *type1
;
3338 else if (is_null_pointer (op1
)) type
= *type2
;
3339 else if (bt1
!= bt2
) {
3340 /* accept comparison or cond-expr between pointer and integer
3342 if ((op
== '?' || TOK_ISCOND(op
))
3343 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
3344 tcc_warning("pointer/integer mismatch in %s",
3345 op
== '?' ? "conditional expression" : "comparison");
3346 else if (op
!= '-' || !is_integer_btype(bt2
))
3348 type
= *(bt1
== VT_PTR
? type1
: type2
);
3350 CType
*pt1
= pointed_type(type1
);
3351 CType
*pt2
= pointed_type(type2
);
3352 int pbt1
= pt1
->t
& VT_BTYPE
;
3353 int pbt2
= pt2
->t
& VT_BTYPE
;
3354 int newquals
, copied
= 0;
3355 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
3356 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
3357 if (op
!= '?' && !TOK_ISCOND(op
))
3360 type_incompatibility_warning(type1
, type2
,
3362 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3363 : "pointer type mismatch in comparison('%s' and '%s')");
3366 /* pointers to void get preferred, otherwise the
3367 pointed to types minus qualifs should be compatible */
3368 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
3369 /* combine qualifs */
3370 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
3371 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3374 /* copy the pointer target symbol */
3375 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3378 pointed_type(&type
)->t
|= newquals
;
3380 /* pointers to incomplete arrays get converted to
3381 pointers to completed ones if possible */
3382 if (pt1
->t
& VT_ARRAY
3383 && pt2
->t
& VT_ARRAY
3384 && pointed_type(&type
)->ref
->c
< 0
3385 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
3388 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3390 pointed_type(&type
)->ref
=
3391 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
3392 0, pointed_type(&type
)->ref
->c
);
3393 pointed_type(&type
)->ref
->c
=
3394 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
3400 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
3401 if (op
!= '?' || !compare_types(type1
, type2
, 1))
3404 } else if (is_float(bt1
) || is_float(bt2
)) {
3405 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
3406 type
.t
= VT_LDOUBLE
;
3407 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
3412 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
3413 /* cast to biggest op */
3414 type
.t
= VT_LLONG
| VT_LONG
;
3415 if (bt1
== VT_LLONG
)
3417 if (bt2
== VT_LLONG
)
3419 /* convert to unsigned if it does not fit in a long long */
3420 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
3421 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
3422 type
.t
|= VT_UNSIGNED
;
3424 /* integer operations */
3425 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
3426 /* convert to unsigned if it does not fit in an integer */
3427 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
3428 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
3429 type
.t
|= VT_UNSIGNED
;
3436 /* generic gen_op: handles types problems */
3437 ST_FUNC
void gen_op(int op
)
3439 int u
, t1
, t2
, bt1
, bt2
, t
;
3440 CType type1
, combtype
;
3443 t1
= vtop
[-1].type
.t
;
3444 t2
= vtop
[0].type
.t
;
3445 bt1
= t1
& VT_BTYPE
;
3446 bt2
= t2
& VT_BTYPE
;
3448 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
3449 if (bt2
== VT_FUNC
) {
3450 mk_pointer(&vtop
->type
);
3453 if (bt1
== VT_FUNC
) {
3455 mk_pointer(&vtop
->type
);
3460 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
3461 tcc_error_noabort("invalid operand types for binary operation");
3463 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3464 /* at least one operand is a pointer */
3465 /* relational op: must be both pointers */
3468 /* if both pointers, then it must be the '-' op */
3469 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
3471 tcc_error("cannot use pointers here");
3472 if (vtop
[-1].type
.t
& VT_VLA
) {
3473 vla_runtime_pointed_size(&vtop
[-1].type
);
3475 vpushi(pointed_size(&vtop
[-1].type
));
3479 vtop
->type
.t
= VT_PTRDIFF_T
;
3483 /* exactly one pointer : must be '+' or '-'. */
3484 if (op
!= '-' && op
!= '+')
3485 tcc_error("cannot use pointers here");
3486 /* Put pointer as first operand */
3487 if (bt2
== VT_PTR
) {
3489 t
= t1
, t1
= t2
, t2
= t
;
3492 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
3493 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3496 type1
= vtop
[-1].type
;
3497 if (vtop
[-1].type
.ref
->type
.t
& VT_VLA
)
3498 vla_runtime_pointed_size(&vtop
[-1].type
);
3500 u
= pointed_size(&vtop
[-1].type
);
3502 tcc_error("unknown array element size");
3506 /* XXX: cast to int ? (long long case) */
3511 #ifdef CONFIG_TCC_BCHECK
3512 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
3513 /* if bounded pointers, we generate a special code to
3520 gen_bounded_ptr_add();
3526 type1
.t
&= ~VT_ARRAY
;
3527 /* put again type if gen_opic() swaped operands */
3531 /* floats can only be used for a few operations */
3532 if (is_float(combtype
.t
)
3533 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3535 tcc_error("invalid operands for binary operation");
3536 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
3537 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
3538 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
3540 t
|= (VT_LONG
& t1
);
3544 t
= t2
= combtype
.t
;
3545 /* XXX: currently, some unsigned operations are explicit, so
3546 we modify them here */
3547 if (t
& VT_UNSIGNED
) {
3554 else if (op
== TOK_LT
)
3556 else if (op
== TOK_GT
)
3558 else if (op
== TOK_LE
)
3560 else if (op
== TOK_GE
)
3566 /* special case for shifts and long long: we keep the shift as
3568 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
3575 if (TOK_ISCOND(op
)) {
3576 /* relational op: the result is an int */
3577 vtop
->type
.t
= VT_INT
;
3582 // Make sure that we have converted to an rvalue:
3583 if (vtop
->r
& VT_LVAL
)
3584 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3587 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3588 #define gen_cvt_itof1 gen_cvt_itof
3590 /* generic itof for unsigned long long case */
3591 static void gen_cvt_itof1(int t
)
3593 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3594 (VT_LLONG
| VT_UNSIGNED
)) {
3597 vpush_helper_func(TOK___floatundisf
);
3598 #if LDOUBLE_SIZE != 8
3599 else if (t
== VT_LDOUBLE
)
3600 vpush_helper_func(TOK___floatundixf
);
3603 vpush_helper_func(TOK___floatundidf
);
3614 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3615 #define gen_cvt_ftoi1 gen_cvt_ftoi
3617 /* generic ftoi for unsigned long long case */
3618 static void gen_cvt_ftoi1(int t
)
3621 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3622 /* not handled natively */
3623 st
= vtop
->type
.t
& VT_BTYPE
;
3625 vpush_helper_func(TOK___fixunssfdi
);
3626 #if LDOUBLE_SIZE != 8
3627 else if (st
== VT_LDOUBLE
)
3628 vpush_helper_func(TOK___fixunsxfdi
);
3631 vpush_helper_func(TOK___fixunsdfdi
);
3642 /* special delayed cast for char/short */
3643 static void force_charshort_cast(void)
3645 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3646 int dbt
= vtop
->type
.t
;
3647 vtop
->r
&= ~VT_MUSTCAST
;
3649 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3653 static void gen_cast_s(int t
)
3661 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3662 static void gen_cast(CType
*type
)
3664 int sbt
, dbt
, sf
, df
, c
;
3665 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3667 /* special delayed cast for char/short */
3668 if (vtop
->r
& VT_MUSTCAST
)
3669 force_charshort_cast();
3671 /* bitfields first get cast to ints */
3672 if (vtop
->type
.t
& VT_BITFIELD
)
3675 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3676 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3684 dbt_bt
= dbt
& VT_BTYPE
;
3685 sbt_bt
= sbt
& VT_BTYPE
;
3686 if (dbt_bt
== VT_VOID
)
3688 if (sbt_bt
== VT_VOID
) {
3690 cast_error(&vtop
->type
, type
);
3693 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3694 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3695 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3698 /* constant case: we can do it now */
3699 /* XXX: in ISOC, cannot do it if error in convert */
3700 if (sbt
== VT_FLOAT
)
3701 vtop
->c
.ld
= vtop
->c
.f
;
3702 else if (sbt
== VT_DOUBLE
)
3703 vtop
->c
.ld
= vtop
->c
.d
;
3706 if (sbt_bt
== VT_LLONG
) {
3707 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3708 vtop
->c
.ld
= vtop
->c
.i
;
3710 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3712 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3713 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3715 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3718 if (dbt
== VT_FLOAT
)
3719 vtop
->c
.f
= (float)vtop
->c
.ld
;
3720 else if (dbt
== VT_DOUBLE
)
3721 vtop
->c
.d
= (double)vtop
->c
.ld
;
3722 } else if (sf
&& dbt
== VT_BOOL
) {
3723 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3726 vtop
->c
.i
= vtop
->c
.ld
;
3727 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3729 else if (sbt
& VT_UNSIGNED
)
3730 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3732 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3734 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3736 else if (dbt
== VT_BOOL
)
3737 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3739 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3740 dbt_bt
== VT_SHORT
? 0xffff :
3743 if (!(dbt
& VT_UNSIGNED
))
3744 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3749 } else if (dbt
== VT_BOOL
3750 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3751 == (VT_CONST
| VT_SYM
)) {
3752 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3758 /* cannot generate code for global or static initializers */
3759 if (STATIC_DATA_WANTED
)
3762 /* non constant case: generate code */
3763 if (dbt
== VT_BOOL
) {
3764 gen_test_zero(TOK_NE
);
3770 /* convert from fp to fp */
3773 /* convert int to fp */
3776 /* convert fp to int */
3778 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3781 goto again
; /* may need char/short cast */
3786 ds
= btype_size(dbt_bt
);
3787 ss
= btype_size(sbt_bt
);
3788 if (ds
== 0 || ss
== 0)
3791 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3792 tcc_error("cast to incomplete type");
3794 /* same size and no sign conversion needed */
3795 if (ds
== ss
&& ds
>= 4)
3797 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3798 tcc_warning("cast between pointer and integer of different size");
3799 if (sbt_bt
== VT_PTR
) {
3800 /* put integer type to allow logical operations below */
3801 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3805 /* processor allows { int a = 0, b = *(char*)&a; }
3806 That means that if we cast to less width, we can just
3807 change the type and read it still later. */
3808 #define ALLOW_SUBTYPE_ACCESS 1
3810 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3811 /* value still in memory */
3815 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3817 goto done
; /* no 64bit envolved */
3825 /* generate high word */
3826 if (sbt
& VT_UNSIGNED
) {
3835 } else if (ss
== 8) {
3836 /* from long long: just take low order word */
3844 /* need to convert from 32bit to 64bit */
3845 if (sbt
& VT_UNSIGNED
) {
3846 #if defined(TCC_TARGET_RISCV64)
3847 /* RISC-V keeps 32bit vals in registers sign-extended.
3848 So here we need a zero-extension. */
3857 ss
= ds
, ds
= 4, dbt
= sbt
;
3858 } else if (ss
== 8) {
3859 /* RISC-V keeps 32bit vals in registers sign-extended.
3860 So here we need a sign-extension for signed types and
3861 zero-extension. for unsigned types. */
3862 #if !defined(TCC_TARGET_RISCV64)
3863 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3872 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3878 bits
= (ss
- ds
) * 8;
3879 /* for unsigned, gen_op will convert SAR to SHR */
3880 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3883 vpushi(bits
- trunc
);
3890 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3893 /* return type size as known at compile time. Put alignment at 'a' */
3894 ST_FUNC
int type_size(CType
*type
, int *a
)
3899 bt
= type
->t
& VT_BTYPE
;
3900 if (bt
== VT_STRUCT
) {
3905 } else if (bt
== VT_PTR
) {
3906 if (type
->t
& VT_ARRAY
) {
3910 ts
= type_size(&s
->type
, a
);
3912 if (ts
< 0 && s
->c
< 0)
3920 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3921 return -1; /* incomplete enum */
3922 } else if (bt
== VT_LDOUBLE
) {
3924 return LDOUBLE_SIZE
;
3925 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3926 #ifdef TCC_TARGET_I386
3927 #ifdef TCC_TARGET_PE
3932 #elif defined(TCC_TARGET_ARM)
3942 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3945 } else if (bt
== VT_SHORT
) {
3948 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3952 /* char, void, function, _Bool */
3958 /* push type size as known at runtime time on top of value stack. Put
3960 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
3962 if (type
->t
& VT_VLA
) {
3963 type_size(&type
->ref
->type
, a
);
3964 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3966 vpushi(type_size(type
, a
));
3970 /* return the pointed type of t */
3971 static inline CType
*pointed_type(CType
*type
)
3973 return &type
->ref
->type
;
3976 /* modify type so that its it is a pointer to type. */
3977 ST_FUNC
void mk_pointer(CType
*type
)
3980 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3981 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3985 /* return true if type1 and type2 are exactly the same (including
3988 static int is_compatible_types(CType
*type1
, CType
*type2
)
3990 return compare_types(type1
,type2
,0);
3993 /* return true if type1 and type2 are the same (ignoring qualifiers).
3995 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3997 return compare_types(type1
,type2
,1);
4000 static void cast_error(CType
*st
, CType
*dt
)
4002 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
4005 /* verify type compatibility to store vtop in 'dt' type */
4006 static void verify_assign_cast(CType
*dt
)
4008 CType
*st
, *type1
, *type2
;
4009 int dbt
, sbt
, qualwarn
, lvl
;
4011 st
= &vtop
->type
; /* source type */
4012 dbt
= dt
->t
& VT_BTYPE
;
4013 sbt
= st
->t
& VT_BTYPE
;
4014 if (dt
->t
& VT_CONSTANT
)
4015 tcc_warning("assignment of read-only location");
4019 tcc_error("assignment to void expression");
4022 /* special cases for pointers */
4023 /* '0' can also be a pointer */
4024 if (is_null_pointer(vtop
))
4026 /* accept implicit pointer to integer cast with warning */
4027 if (is_integer_btype(sbt
)) {
4028 tcc_warning("assignment makes pointer from integer without a cast");
4031 type1
= pointed_type(dt
);
4033 type2
= pointed_type(st
);
4034 else if (sbt
== VT_FUNC
)
4035 type2
= st
; /* a function is implicitly a function pointer */
4038 if (is_compatible_types(type1
, type2
))
4040 for (qualwarn
= lvl
= 0;; ++lvl
) {
4041 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
4042 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
4044 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
4045 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
4046 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
4048 type1
= pointed_type(type1
);
4049 type2
= pointed_type(type2
);
4051 if (!is_compatible_unqualified_types(type1
, type2
)) {
4052 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
4053 /* void * can match anything */
4054 } else if (dbt
== sbt
4055 && is_integer_btype(sbt
& VT_BTYPE
)
4056 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
4057 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
4058 /* Like GCC don't warn by default for merely changes
4059 in pointer target signedness. Do warn for different
4060 base types, though, in particular for unsigned enums
4061 and signed int targets. */
4063 tcc_warning("assignment from incompatible pointer type");
4068 tcc_warning_c(warn_discarded_qualifiers
)("assignment discards qualifiers from pointer target type");
4074 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
4075 tcc_warning("assignment makes integer from pointer without a cast");
4076 } else if (sbt
== VT_STRUCT
) {
4077 goto case_VT_STRUCT
;
4079 /* XXX: more tests */
4083 if (!is_compatible_unqualified_types(dt
, st
)) {
4091 static void gen_assign_cast(CType
*dt
)
4093 verify_assign_cast(dt
);
4097 /* store vtop in lvalue pushed on stack */
4098 ST_FUNC
void vstore(void)
4100 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
4102 ft
= vtop
[-1].type
.t
;
4103 sbt
= vtop
->type
.t
& VT_BTYPE
;
4104 dbt
= ft
& VT_BTYPE
;
4106 verify_assign_cast(&vtop
[-1].type
);
4108 if (sbt
== VT_STRUCT
) {
4109 /* if structure, only generate pointer */
4110 /* structure assignment : generate memcpy */
4111 /* XXX: optimize if small size */
4112 size
= type_size(&vtop
->type
, &align
);
4116 #ifdef CONFIG_TCC_BCHECK
4117 if (vtop
->r
& VT_MUSTBOUND
)
4118 gbound(); /* check would be wrong after gaddrof() */
4120 vtop
->type
.t
= VT_PTR
;
4123 /* address of memcpy() */
4126 vpush_helper_func(TOK_memmove8
);
4127 else if(!(align
& 3))
4128 vpush_helper_func(TOK_memmove4
);
4131 /* Use memmove, rather than memcpy, as dest and src may be same: */
4132 vpush_helper_func(TOK_memmove
);
4137 #ifdef CONFIG_TCC_BCHECK
4138 if (vtop
->r
& VT_MUSTBOUND
)
4141 vtop
->type
.t
= VT_PTR
;
4146 /* leave source on stack */
4148 } else if (ft
& VT_BITFIELD
) {
4149 /* bitfield store handling */
4151 /* save lvalue as expression result (example: s.b = s.a = n;) */
4152 vdup(), vtop
[-1] = vtop
[-2];
4154 bit_pos
= BIT_POS(ft
);
4155 bit_size
= BIT_SIZE(ft
);
4156 /* remove bit field info to avoid loops */
4157 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
4159 if (dbt
== VT_BOOL
) {
4160 gen_cast(&vtop
[-1].type
);
4161 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
4163 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
4164 if (dbt
!= VT_BOOL
) {
4165 gen_cast(&vtop
[-1].type
);
4166 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
4168 if (r
== VT_STRUCT
) {
4169 store_packed_bf(bit_pos
, bit_size
);
4171 unsigned long long mask
= (1ULL << bit_size
) - 1;
4172 if (dbt
!= VT_BOOL
) {
4174 if (dbt
== VT_LLONG
)
4177 vpushi((unsigned)mask
);
4184 /* duplicate destination */
4187 /* load destination, mask and or with source */
4188 if (dbt
== VT_LLONG
)
4189 vpushll(~(mask
<< bit_pos
));
4191 vpushi(~((unsigned)mask
<< bit_pos
));
4196 /* ... and discard */
4199 } else if (dbt
== VT_VOID
) {
4202 /* optimize char/short casts */
4204 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
4205 && is_integer_btype(sbt
)
4207 if ((vtop
->r
& VT_MUSTCAST
)
4208 && btype_size(dbt
) > btype_size(sbt
)
4210 force_charshort_cast();
4213 gen_cast(&vtop
[-1].type
);
4216 #ifdef CONFIG_TCC_BCHECK
4217 /* bound check case */
4218 if (vtop
[-1].r
& VT_MUSTBOUND
) {
4224 gv(RC_TYPE(dbt
)); /* generate value */
4227 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
4228 //tcc_warning("deley cast %x -> %x", sbt, dbt);
4229 vtop
->type
.t
= ft
& VT_TYPE
;
4232 /* if lvalue was saved on stack, must read it */
4233 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
4235 r
= get_reg(RC_INT
);
4236 sv
.type
.t
= VT_PTRDIFF_T
;
4237 sv
.r
= VT_LOCAL
| VT_LVAL
;
4238 sv
.c
.i
= vtop
[-1].c
.i
;
4240 vtop
[-1].r
= r
| VT_LVAL
;
4243 r
= vtop
->r
& VT_VALMASK
;
4244 /* two word case handling :
4245 store second register at word + 4 (or +8 for x86-64) */
4246 if (USING_TWO_WORDS(dbt
)) {
4247 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
4248 vtop
[-1].type
.t
= load_type
;
4251 /* convert to int to increment easily */
4252 vtop
->type
.t
= VT_PTRDIFF_T
;
4258 vtop
[-1].type
.t
= load_type
;
4259 /* XXX: it works because r2 is spilled last ! */
4260 store(vtop
->r2
, vtop
- 1);
4266 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
4270 /* post defines POST/PRE add. c is the token ++ or -- */
4271 ST_FUNC
void inc(int post
, int c
)
4274 vdup(); /* save lvalue */
4276 gv_dup(); /* duplicate value */
4281 vpushi(c
- TOK_MID
);
4283 vstore(); /* store value */
4285 vpop(); /* if post op, return saved value */
4288 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
4290 /* read the string */
4294 while (tok
== TOK_STR
) {
4295 /* XXX: add \0 handling too ? */
4296 cstr_cat(astr
, tokc
.str
.data
, -1);
4299 cstr_ccat(astr
, '\0');
4302 /* If I is >= 1 and a power of two, returns log2(i)+1.
4303 If I is 0 returns 0. */
4304 ST_FUNC
int exact_log2p1(int i
)
4309 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
4320 /* Parse __attribute__((...)) GNUC extension. */
4321 static void parse_attribute(AttributeDef
*ad
)
4327 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
4332 while (tok
!= ')') {
4333 if (tok
< TOK_IDENT
)
4334 expect("attribute name");
4346 tcc_warning_c(warn_implicit_function_declaration
)(
4347 "implicit declaration of function '%s'", get_tok_str(tok
, &tokc
));
4348 s
= external_global_sym(tok
, &func_old_type
);
4349 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4350 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
4351 ad
->cleanup_func
= s
;
4356 case TOK_CONSTRUCTOR1
:
4357 case TOK_CONSTRUCTOR2
:
4358 ad
->f
.func_ctor
= 1;
4360 case TOK_DESTRUCTOR1
:
4361 case TOK_DESTRUCTOR2
:
4362 ad
->f
.func_dtor
= 1;
4364 case TOK_ALWAYS_INLINE1
:
4365 case TOK_ALWAYS_INLINE2
:
4366 ad
->f
.func_alwinl
= 1;
4371 parse_mult_str(&astr
, "section name");
4372 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
4379 parse_mult_str(&astr
, "alias(\"target\")");
4380 ad
->alias_target
= /* save string as token, for later */
4381 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
4385 case TOK_VISIBILITY1
:
4386 case TOK_VISIBILITY2
:
4388 parse_mult_str(&astr
,
4389 "visibility(\"default|hidden|internal|protected\")");
4390 if (!strcmp (astr
.data
, "default"))
4391 ad
->a
.visibility
= STV_DEFAULT
;
4392 else if (!strcmp (astr
.data
, "hidden"))
4393 ad
->a
.visibility
= STV_HIDDEN
;
4394 else if (!strcmp (astr
.data
, "internal"))
4395 ad
->a
.visibility
= STV_INTERNAL
;
4396 else if (!strcmp (astr
.data
, "protected"))
4397 ad
->a
.visibility
= STV_PROTECTED
;
4399 expect("visibility(\"default|hidden|internal|protected\")");
4408 if (n
<= 0 || (n
& (n
- 1)) != 0)
4409 tcc_error("alignment must be a positive power of two");
4414 ad
->a
.aligned
= exact_log2p1(n
);
4415 if (n
!= 1 << (ad
->a
.aligned
- 1))
4416 tcc_error("alignment of %d is larger than implemented", n
);
4428 /* currently, no need to handle it because tcc does not
4429 track unused objects */
4433 ad
->f
.func_noreturn
= 1;
4438 ad
->f
.func_call
= FUNC_CDECL
;
4443 ad
->f
.func_call
= FUNC_STDCALL
;
4445 #ifdef TCC_TARGET_I386
4455 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
4461 ad
->f
.func_call
= FUNC_FASTCALLW
;
4468 ad
->attr_mode
= VT_LLONG
+ 1;
4471 ad
->attr_mode
= VT_BYTE
+ 1;
4474 ad
->attr_mode
= VT_SHORT
+ 1;
4478 ad
->attr_mode
= VT_INT
+ 1;
4481 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4488 ad
->a
.dllexport
= 1;
4490 case TOK_NODECORATE
:
4491 ad
->a
.nodecorate
= 1;
4494 ad
->a
.dllimport
= 1;
4497 tcc_warning_c(warn_unsupported
)("'%s' attribute ignored", get_tok_str(t
, NULL
));
4498 /* skip parameters */
4500 int parenthesis
= 0;
4504 else if (tok
== ')')
4507 } while (parenthesis
&& tok
!= -1);
4520 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
4524 while ((s
= s
->next
) != NULL
) {
4525 if ((s
->v
& SYM_FIELD
) &&
4526 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
4527 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
4528 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
4540 static void check_fields (CType
*type
, int check
)
4544 while ((s
= s
->next
) != NULL
) {
4545 int v
= s
->v
& ~SYM_FIELD
;
4546 if (v
< SYM_FIRST_ANOM
) {
4547 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
4548 if (check
&& (ts
->tok
& SYM_FIELD
))
4549 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
4550 ts
->tok
^= SYM_FIELD
;
4551 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
4552 check_fields (&s
->type
, check
);
4556 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4558 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4559 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4560 int pcc
= !tcc_state
->ms_bitfields
;
4561 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4568 prevbt
= VT_STRUCT
; /* make it never match */
4573 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4574 if (f
->type
.t
& VT_BITFIELD
)
4575 bit_size
= BIT_SIZE(f
->type
.t
);
4578 size
= type_size(&f
->type
, &align
);
4579 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4582 if (pcc
&& bit_size
== 0) {
4583 /* in pcc mode, packing does not affect zero-width bitfields */
4586 /* in pcc mode, attribute packed overrides if set. */
4587 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4590 /* pragma pack overrides align if lesser and packs bitfields always */
4593 if (pragma_pack
< align
)
4594 align
= pragma_pack
;
4595 /* in pcc mode pragma pack also overrides individual align */
4596 if (pcc
&& pragma_pack
< a
)
4600 /* some individual align was specified */
4604 if (type
->ref
->type
.t
== VT_UNION
) {
4605 if (pcc
&& bit_size
>= 0)
4606 size
= (bit_size
+ 7) >> 3;
4611 } else if (bit_size
< 0) {
4613 c
+= (bit_pos
+ 7) >> 3;
4614 c
= (c
+ align
- 1) & -align
;
4623 /* A bit-field. Layout is more complicated. There are two
4624 options: PCC (GCC) compatible and MS compatible */
4626 /* In PCC layout a bit-field is placed adjacent to the
4627 preceding bit-fields, except if:
4629 - an individual alignment was given
4630 - it would overflow its base type container and
4631 there is no packing */
4632 if (bit_size
== 0) {
4634 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4636 } else if (f
->a
.aligned
) {
4638 } else if (!packed
) {
4640 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4641 if (ofs
> size
/ align
)
4645 /* in pcc mode, long long bitfields have type int if they fit */
4646 if (size
== 8 && bit_size
<= 32)
4647 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4649 while (bit_pos
>= align
* 8)
4650 c
+= align
, bit_pos
-= align
* 8;
4653 /* In PCC layout named bit-fields influence the alignment
4654 of the containing struct using the base types alignment,
4655 except for packed fields (which here have correct align). */
4656 if (f
->v
& SYM_FIRST_ANOM
4657 // && bit_size // ??? gcc on ARM/rpi does that
4662 bt
= f
->type
.t
& VT_BTYPE
;
4663 if ((bit_pos
+ bit_size
> size
* 8)
4664 || (bit_size
> 0) == (bt
!= prevbt
)
4666 c
= (c
+ align
- 1) & -align
;
4669 /* In MS bitfield mode a bit-field run always uses
4670 at least as many bits as the underlying type.
4671 To start a new run it's also required that this
4672 or the last bit-field had non-zero width. */
4673 if (bit_size
|| prev_bit_size
)
4676 /* In MS layout the records alignment is normally
4677 influenced by the field, except for a zero-width
4678 field at the start of a run (but by further zero-width
4679 fields it is again). */
4680 if (bit_size
== 0 && prevbt
!= bt
)
4683 prev_bit_size
= bit_size
;
4686 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4687 | (bit_pos
<< VT_STRUCT_SHIFT
);
4688 bit_pos
+= bit_size
;
4690 if (align
> maxalign
)
4694 printf("set field %s offset %-2d size %-2d align %-2d",
4695 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4696 if (f
->type
.t
& VT_BITFIELD
) {
4697 printf(" pos %-2d bits %-2d",
4710 c
+= (bit_pos
+ 7) >> 3;
4712 /* store size and alignment */
4713 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4717 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4718 /* can happen if individual align for some member was given. In
4719 this case MSVC ignores maxalign when aligning the size */
4724 c
= (c
+ a
- 1) & -a
;
4728 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4731 /* check whether we can access bitfields by their type */
4732 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4736 if (0 == (f
->type
.t
& VT_BITFIELD
))
4740 bit_size
= BIT_SIZE(f
->type
.t
);
4743 bit_pos
= BIT_POS(f
->type
.t
);
4744 size
= type_size(&f
->type
, &align
);
4746 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
4747 #ifdef TCC_TARGET_ARM
4748 && !(f
->c
& (align
- 1))
4753 /* try to access the field using a different type */
4754 c0
= -1, s
= align
= 1;
4757 px
= f
->c
* 8 + bit_pos
;
4758 cx
= (px
>> 3) & -align
;
4759 px
= px
- (cx
<< 3);
4762 s
= (px
+ bit_size
+ 7) >> 3;
4772 s
= type_size(&t
, &align
);
4776 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
4777 #ifdef TCC_TARGET_ARM
4778 && !(cx
& (align
- 1))
4781 /* update offset and bit position */
4784 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4785 | (bit_pos
<< VT_STRUCT_SHIFT
);
4789 printf("FIX field %s offset %-2d size %-2d align %-2d "
4790 "pos %-2d bits %-2d\n",
4791 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4792 cx
, s
, align
, px
, bit_size
);
4795 /* fall back to load/store single-byte wise */
4796 f
->auxtype
= VT_STRUCT
;
4798 printf("FIX field %s : load byte-wise\n",
4799 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4805 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4806 static void struct_decl(CType
*type
, int u
)
4808 int v
, c
, size
, align
, flexible
;
4809 int bit_size
, bsize
, bt
;
4811 AttributeDef ad
, ad1
;
4814 memset(&ad
, 0, sizeof ad
);
4816 parse_attribute(&ad
);
4820 /* struct already defined ? return it */
4822 expect("struct/union/enum name");
4824 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4827 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4829 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4834 /* Record the original enum/struct/union token. */
4835 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4837 /* we put an undefined size for struct/union */
4838 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4839 s
->r
= 0; /* default alignment is zero as gcc */
4841 type
->t
= s
->type
.t
;
4847 tcc_error("struct/union/enum already defined");
4849 /* cannot be empty */
4850 /* non empty enums are not allowed */
4853 long long ll
= 0, pl
= 0, nl
= 0;
4856 /* enum symbols have static storage */
4857 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4861 expect("identifier");
4863 if (ss
&& !local_stack
)
4864 tcc_error("redefinition of enumerator '%s'",
4865 get_tok_str(v
, NULL
));
4869 ll
= expr_const64();
4871 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4873 *ps
= ss
, ps
= &ss
->next
;
4882 /* NOTE: we accept a trailing comma */
4887 /* set integral type of the enum */
4890 if (pl
!= (unsigned)pl
)
4891 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4893 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4894 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4895 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4897 /* set type for enum members */
4898 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4900 if (ll
== (int)ll
) /* default is int if it fits */
4902 if (t
.t
& VT_UNSIGNED
) {
4903 ss
->type
.t
|= VT_UNSIGNED
;
4904 if (ll
== (unsigned)ll
)
4907 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4908 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4913 while (tok
!= '}') {
4914 if (!parse_btype(&btype
, &ad1
)) {
4920 tcc_error("flexible array member '%s' not at the end of struct",
4921 get_tok_str(v
, NULL
));
4927 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4929 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4930 expect("identifier");
4932 int v
= btype
.ref
->v
;
4933 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4934 if (tcc_state
->ms_extensions
== 0)
4935 expect("identifier");
4939 if (type_size(&type1
, &align
) < 0) {
4940 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4943 tcc_error("field '%s' has incomplete type",
4944 get_tok_str(v
, NULL
));
4946 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4947 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4948 (type1
.t
& VT_STORAGE
))
4949 tcc_error("invalid type for '%s'",
4950 get_tok_str(v
, NULL
));
4954 bit_size
= expr_const();
4955 /* XXX: handle v = 0 case for messages */
4957 tcc_error("negative width in bit-field '%s'",
4958 get_tok_str(v
, NULL
));
4959 if (v
&& bit_size
== 0)
4960 tcc_error("zero width for bit-field '%s'",
4961 get_tok_str(v
, NULL
));
4962 parse_attribute(&ad1
);
4964 size
= type_size(&type1
, &align
);
4965 if (bit_size
>= 0) {
4966 bt
= type1
.t
& VT_BTYPE
;
4972 tcc_error("bitfields must have scalar type");
4974 if (bit_size
> bsize
) {
4975 tcc_error("width of '%s' exceeds its type",
4976 get_tok_str(v
, NULL
));
4977 } else if (bit_size
== bsize
4978 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4979 /* no need for bit fields */
4981 } else if (bit_size
== 64) {
4982 tcc_error("field width 64 not implemented");
4984 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4986 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4989 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4990 /* Remember we've seen a real field to check
4991 for placement of flexible array member. */
4994 /* If member is a struct or bit-field, enforce
4995 placing into the struct (as anonymous). */
4997 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
5002 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
5007 if (tok
== ';' || tok
== TOK_EOF
)
5014 parse_attribute(&ad
);
5015 if (ad
.cleanup_func
) {
5016 tcc_warning("attribute '__cleanup__' ignored on type");
5018 check_fields(type
, 1);
5019 check_fields(type
, 0);
5020 struct_layout(type
, &ad
);
5025 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
5027 merge_symattr(&ad
->a
, &s
->a
);
5028 merge_funcattr(&ad
->f
, &s
->f
);
5031 /* Add type qualifiers to a type. If the type is an array then the qualifiers
5032 are added to the element type, copied because it could be a typedef. */
5033 static void parse_btype_qualify(CType
*type
, int qualifiers
)
5035 while (type
->t
& VT_ARRAY
) {
5036 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
5037 type
= &type
->ref
->type
;
5039 type
->t
|= qualifiers
;
5042 /* return 0 if no type declaration. otherwise, return the basic type
5045 static int parse_btype(CType
*type
, AttributeDef
*ad
)
5047 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
5051 memset(ad
, 0, sizeof(AttributeDef
));
5061 /* currently, we really ignore extension */
5071 if (u
== VT_SHORT
|| u
== VT_LONG
) {
5072 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
5073 tmbt
: tcc_error("too many basic types");
5076 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
5081 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
5098 memset(&ad1
, 0, sizeof(AttributeDef
));
5099 if (parse_btype(&type1
, &ad1
)) {
5100 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
5102 n
= 1 << (ad1
.a
.aligned
- 1);
5104 type_size(&type1
, &n
);
5107 if (n
<= 0 || (n
& (n
- 1)) != 0)
5108 tcc_error("alignment must be a positive power of two");
5111 ad
->a
.aligned
= exact_log2p1(n
);
5115 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
5116 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
5117 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
5118 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
5125 #ifdef TCC_TARGET_ARM64
5127 /* GCC's __uint128_t appears in some Linux header files. Make it a
5128 synonym for long double to get the size and alignment right. */
5139 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
5140 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
5148 struct_decl(&type1
, VT_ENUM
);
5151 type
->ref
= type1
.ref
;
5154 struct_decl(&type1
, VT_STRUCT
);
5157 struct_decl(&type1
, VT_UNION
);
5160 /* type modifiers */
5164 parse_btype_qualify(type
, VT_ATOMIC
);
5167 parse_expr_type(&type1
);
5168 /* remove all storage modifiers except typedef */
5169 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
5171 sym_to_attr(ad
, type1
.ref
);
5179 parse_btype_qualify(type
, VT_CONSTANT
);
5187 parse_btype_qualify(type
, VT_VOLATILE
);
5194 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
5195 tcc_error("signed and unsigned modifier");
5208 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
5209 tcc_error("signed and unsigned modifier");
5210 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
5226 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
5227 tcc_error("multiple storage classes");
5239 ad
->f
.func_noreturn
= 1;
5241 /* GNUC attribute */
5242 case TOK_ATTRIBUTE1
:
5243 case TOK_ATTRIBUTE2
:
5244 parse_attribute(ad
);
5245 if (ad
->attr_mode
) {
5246 u
= ad
->attr_mode
-1;
5247 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
5255 parse_expr_type(&type1
);
5256 /* remove all storage modifiers except typedef */
5257 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
5259 sym_to_attr(ad
, type1
.ref
);
5265 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
5269 if (tok
== ':' && !in_generic
) {
5270 /* ignore if it's a label */
5275 t
&= ~(VT_BTYPE
|VT_LONG
);
5276 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
5277 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
5278 type
->ref
= s
->type
.ref
;
5280 parse_btype_qualify(type
, t
);
5282 /* get attributes from typedef */
5291 if (tcc_state
->char_is_unsigned
) {
5292 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
5295 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
5296 bt
= t
& (VT_BTYPE
|VT_LONG
);
5298 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
5299 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5300 if (bt
== VT_LDOUBLE
)
5301 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
5307 /* convert a function parameter type (array to pointer and function to
5308 function pointer) */
5309 static inline void convert_parameter_type(CType
*pt
)
5311 /* remove const and volatile qualifiers (XXX: const could be used
5312 to indicate a const function parameter */
5313 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5314 /* array must be transformed to pointer according to ANSI C */
5316 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
5321 ST_FUNC
void parse_asm_str(CString
*astr
)
5324 parse_mult_str(astr
, "string constant");
5327 /* Parse an asm label and return the token */
5328 static int asm_label_instr(void)
5334 parse_asm_str(&astr
);
5337 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
5339 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
5344 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
5346 int n
, l
, t1
, arg_size
, align
, unused_align
;
5347 Sym
**plast
, *s
, *first
;
5352 /* function type, or recursive declarator (return if so) */
5354 if (TYPE_DIRECT
== (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)))
5358 else if (parse_btype(&pt
, &ad1
))
5360 else if (td
& (TYPE_DIRECT
|TYPE_ABSTRACT
)) {
5361 merge_attr (ad
, &ad1
);
5372 /* read param name and compute offset */
5373 if (l
!= FUNC_OLD
) {
5374 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
5376 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
| TYPE_PARAM
);
5377 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
5378 tcc_error("parameter declared as void");
5383 pt
.t
= VT_VOID
; /* invalid type */
5388 expect("identifier");
5389 convert_parameter_type(&pt
);
5390 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
5391 s
= sym_push(n
, &pt
, 0, 0);
5397 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
5402 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
5403 tcc_error("invalid type");
5406 /* if no parameters, then old type prototype */
5409 /* remove parameter symbols from token table, keep on stack */
5411 sym_pop(local_stack
? &local_stack
: &global_stack
, first
->prev
, 1);
5412 for (s
= first
; s
; s
= s
->next
)
5416 /* NOTE: const is ignored in returned type as it has a special
5417 meaning in gcc / C++ */
5418 type
->t
&= ~VT_CONSTANT
;
5419 /* some ancient pre-K&R C allows a function to return an array
5420 and the array brackets to be put after the arguments, such
5421 that "int c()[]" means something like "int[] c()" */
5424 skip(']'); /* only handle simple "[]" */
5427 /* we push a anonymous symbol which will contain the function prototype */
5428 ad
->f
.func_args
= arg_size
;
5429 ad
->f
.func_type
= l
;
5430 s
= sym_push(SYM_FIELD
, type
, 0, 0);
5436 } else if (tok
== '[') {
5437 int saved_nocode_wanted
= nocode_wanted
;
5438 /* array definition */
5442 if (td
& TYPE_PARAM
) while (1) {
5443 /* XXX The optional type-quals and static should only be accepted
5444 in parameter decls. The '*' as well, and then even only
5445 in prototypes (not function defs). */
5447 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5463 } else if (tok
!= ']') {
5464 if (!local_stack
|| (storage
& VT_STATIC
))
5465 vpushi(expr_const());
5467 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5468 length must always be evaluated, even under nocode_wanted,
5469 so that its size slot is initialized (e.g. under sizeof
5474 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5477 tcc_error("invalid array size");
5479 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5480 tcc_error("size of variable length array should be an integer");
5486 /* parse next post type */
5487 post_type(type
, ad
, storage
, td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5489 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5490 tcc_error("declaration of an array of functions");
5491 if ((type
->t
& VT_BTYPE
) == VT_VOID
5492 || type_size(type
, &unused_align
) < 0)
5493 tcc_error("declaration of an array of incomplete type elements");
5495 t1
|= type
->t
& VT_VLA
;
5499 tcc_error("need explicit inner array size in VLAs");
5500 loc
-= type_size(&int_type
, &align
);
5504 vla_runtime_type_size(type
, &align
);
5506 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5512 nocode_wanted
= saved_nocode_wanted
;
5514 /* we push an anonymous symbol which will contain the array
5516 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5517 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5523 /* Parse a type declarator (except basic type), and return the type
5524 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5525 expected. 'type' should contain the basic type. 'ad' is the
5526 attribute definition of the basic type. It can be modified by
5527 type_decl(). If this (possibly abstract) declarator is a pointer chain
5528 it returns the innermost pointed to type (equals *type, but is a different
5529 pointer), otherwise returns type itself, that's used for recursive calls. */
5530 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5533 int qualifiers
, storage
;
5535 /* recursive type, remove storage bits first, apply them later again */
5536 storage
= type
->t
& VT_STORAGE
;
5537 type
->t
&= ~VT_STORAGE
;
5540 while (tok
== '*') {
5546 qualifiers
|= VT_ATOMIC
;
5551 qualifiers
|= VT_CONSTANT
;
5556 qualifiers
|= VT_VOLATILE
;
5562 /* XXX: clarify attribute handling */
5563 case TOK_ATTRIBUTE1
:
5564 case TOK_ATTRIBUTE2
:
5565 parse_attribute(ad
);
5569 type
->t
|= qualifiers
;
5571 /* innermost pointed to type is the one for the first derivation */
5572 ret
= pointed_type(type
);
5576 /* This is possibly a parameter type list for abstract declarators
5577 ('int ()'), use post_type for testing this. */
5578 if (!post_type(type
, ad
, 0, td
)) {
5579 /* It's not, so it's a nested declarator, and the post operations
5580 apply to the innermost pointed to type (if any). */
5581 /* XXX: this is not correct to modify 'ad' at this point, but
5582 the syntax is not clear */
5583 parse_attribute(ad
);
5584 post
= type_decl(type
, ad
, v
, td
);
5588 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5589 /* type identifier */
5594 if (!(td
& TYPE_ABSTRACT
))
5595 expect("identifier");
5598 post_type(post
, ad
, storage
, td
& ~(TYPE_DIRECT
|TYPE_ABSTRACT
));
5599 parse_attribute(ad
);
5604 /* indirection with full error checking and bound check */
5605 ST_FUNC
void indir(void)
5607 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5608 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5612 if (vtop
->r
& VT_LVAL
)
5614 vtop
->type
= *pointed_type(&vtop
->type
);
5615 /* Arrays and functions are never lvalues */
5616 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5617 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5619 /* if bound checking, the referenced pointer must be checked */
5620 #ifdef CONFIG_TCC_BCHECK
5621 if (tcc_state
->do_bounds_check
)
5622 vtop
->r
|= VT_MUSTBOUND
;
5627 /* pass a parameter to a function and do type checking and casting */
5628 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5633 func_type
= func
->f
.func_type
;
5634 if (func_type
== FUNC_OLD
||
5635 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5636 /* default casting : only need to convert float to double */
5637 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5638 gen_cast_s(VT_DOUBLE
);
5639 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5640 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5641 type
.ref
= vtop
->type
.ref
;
5643 } else if (vtop
->r
& VT_MUSTCAST
) {
5644 force_charshort_cast();
5646 } else if (arg
== NULL
) {
5647 tcc_error("too many arguments to function");
5650 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5651 gen_assign_cast(&type
);
5655 /* parse an expression and return its type without any side effect. */
5656 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5665 /* parse an expression of the form '(type)' or '(expr)' and return its
5667 static void parse_expr_type(CType
*type
)
5673 if (parse_btype(type
, &ad
)) {
5674 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5676 expr_type(type
, gexpr
);
5681 static void parse_type(CType
*type
)
5686 if (!parse_btype(type
, &ad
)) {
5689 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5692 static void parse_builtin_params(int nc
, const char *args
)
5701 while ((c
= *args
++)) {
5716 type
.t
= VT_CONSTANT
;
5722 type
.t
= VT_CONSTANT
;
5724 type
.t
|= char_type
.t
;
5736 gen_assign_cast(&type
);
5743 static void parse_atomic(int atok
)
5745 int size
, align
, arg
;
5746 CType
*atom
, *atom_ptr
, ct
= {0};
5748 static const char *const templates
[] = {
5750 * Each entry consists of callback and function template.
5751 * The template represents argument types and return type.
5753 * ? void (return-only)
5756 * A read-only atomic
5757 * p pointer to memory
5762 /* keep in order of appearance in tcctok.h: */
5763 /* __atomic_store */ "avm.?",
5764 /* __atomic_load */ "Am.v",
5765 /* __atomic_exchange */ "avm.v",
5766 /* __atomic_compare_exchange */ "apvbmm.b",
5767 /* __atomic_fetch_add */ "avm.v",
5768 /* __atomic_fetch_sub */ "avm.v",
5769 /* __atomic_fetch_or */ "avm.v",
5770 /* __atomic_fetch_xor */ "avm.v",
5771 /* __atomic_fetch_and */ "avm.v"
5773 const char *template = templates
[(atok
- TOK___atomic_store
)];
5775 atom
= atom_ptr
= NULL
;
5776 size
= 0; /* pacify compiler */
5781 switch (template[arg
]) {
5784 atom_ptr
= &vtop
->type
;
5785 if ((atom_ptr
->t
& VT_BTYPE
) != VT_PTR
)
5787 atom
= pointed_type(atom_ptr
);
5788 size
= type_size(atom
, &align
);
5790 || (size
& (size
- 1))
5791 || (atok
> TOK___atomic_compare_exchange
5792 && (0 == btype_size(atom
->t
& VT_BTYPE
)
5793 || (atom
->t
& VT_BTYPE
) == VT_PTR
)))
5794 expect("integral or integer-sized pointer target type");
5795 /* GCC does not care either: */
5796 /* if (!(atom->t & VT_ATOMIC))
5797 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5801 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
5802 || type_size(pointed_type(&vtop
->type
), &align
) != size
)
5803 tcc_error("pointer target type mismatch in argument %d", arg
+ 1);
5804 gen_assign_cast(atom_ptr
);
5807 gen_assign_cast(atom
);
5810 gen_assign_cast(&int_type
);
5814 gen_assign_cast(&ct
);
5817 if ('.' == template[++arg
])
5824 switch (template[arg
+ 1]) {
5833 sprintf(buf
, "%s_%d", get_tok_str(atok
, 0), size
);
5834 vpush_helper_func(tok_alloc_const(buf
));
5839 PUT_R_RET(vtop
, ct
.t
);
5840 if (ct
.t
== VT_BOOL
) {
5842 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
5844 vtop
->type
.t
= VT_INT
;
5849 ST_FUNC
void unary(void)
5851 int n
, t
, align
, size
, r
, sizeof_caller
;
5856 /* generate line number info */
5858 tcc_debug_line(tcc_state
), tcc_tcov_check_line (1);
5860 sizeof_caller
= in_sizeof
;
5863 /* XXX: GCC 2.95.3 does not generate a table although it should be
5871 #ifdef TCC_TARGET_PE
5872 t
= VT_SHORT
|VT_UNSIGNED
;
5880 vsetc(&type
, VT_CONST
, &tokc
);
5884 t
= VT_INT
| VT_UNSIGNED
;
5890 t
= VT_LLONG
| VT_UNSIGNED
;
5902 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5905 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5907 case TOK___FUNCTION__
:
5909 goto tok_identifier
;
5915 /* special function name identifier */
5916 len
= strlen(funcname
) + 1;
5917 /* generate char[len] type */
5918 type
.t
= char_type
.t
;
5919 if (tcc_state
->warn_write_strings
& WARN_ON
)
5920 type
.t
|= VT_CONSTANT
;
5924 sec
= rodata_section
;
5925 vpush_ref(&type
, sec
, sec
->data_offset
, len
);
5927 memcpy(section_ptr_add(sec
, len
), funcname
, len
);
5932 #ifdef TCC_TARGET_PE
5933 t
= VT_SHORT
| VT_UNSIGNED
;
5939 /* string parsing */
5942 if (tcc_state
->warn_write_strings
& WARN_ON
)
5947 memset(&ad
, 0, sizeof(AttributeDef
));
5948 ad
.section
= rodata_section
;
5949 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5954 if (parse_btype(&type
, &ad
)) {
5955 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5957 /* check ISOC99 compound literal */
5959 /* data is allocated locally by default */
5964 /* all except arrays are lvalues */
5965 if (!(type
.t
& VT_ARRAY
))
5967 memset(&ad
, 0, sizeof(AttributeDef
));
5968 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5970 if (sizeof_caller
) {
5977 } else if (tok
== '{') {
5978 int saved_nocode_wanted
= nocode_wanted
;
5979 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5981 if (0 == local_scope
)
5982 tcc_error("statement expression outside of function");
5983 /* save all registers */
5985 /* statement expression : we do not accept break/continue
5986 inside as GCC does. We do retain the nocode_wanted state,
5987 as statement expressions can't ever be entered from the
5988 outside, so any reactivation of code emission (from labels
5989 or loop heads) can be disabled again after the end of it. */
5991 /* or'ing to keep however possible CODE_OFF() from e.g. "return 0;"
5992 in the statement expression */
5993 nocode_wanted
|= saved_nocode_wanted
;
6008 /* functions names must be treated as function pointers,
6009 except for unary '&' and sizeof. Since we consider that
6010 functions are not lvalues, we only have to handle it
6011 there and in function calls. */
6012 /* arrays can also be used although they are not lvalues */
6013 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
6014 !(vtop
->type
.t
& VT_ARRAY
))
6017 vtop
->sym
->a
.addrtaken
= 1;
6018 mk_pointer(&vtop
->type
);
6024 gen_test_zero(TOK_EQ
);
6035 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
6036 tcc_error("pointer not accepted for unary plus");
6037 /* In order to force cast, we add zero, except for floating point
6038 where we really need an noop (otherwise -0.0 will be transformed
6040 if (!is_float(vtop
->type
.t
)) {
6052 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
6054 if (vtop
[1].r
& VT_SYM
)
6055 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
6056 size
= type_size(&type
, &align
);
6057 if (s
&& s
->a
.aligned
)
6058 align
= 1 << (s
->a
.aligned
- 1);
6059 if (t
== TOK_SIZEOF
) {
6060 if (!(type
.t
& VT_VLA
)) {
6062 tcc_error("sizeof applied to an incomplete type");
6065 vla_runtime_type_size(&type
, &align
);
6070 vtop
->type
.t
|= VT_UNSIGNED
;
6073 case TOK_builtin_expect
:
6074 /* __builtin_expect is a no-op for now */
6075 parse_builtin_params(0, "ee");
6078 case TOK_builtin_types_compatible_p
:
6079 parse_builtin_params(0, "tt");
6080 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
6081 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
6082 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
6086 case TOK_builtin_choose_expr
:
6113 case TOK_builtin_constant_p
:
6114 parse_builtin_params(1, "e");
6115 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6116 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
6120 case TOK_builtin_frame_address
:
6121 case TOK_builtin_return_address
:
6127 if (tok
!= TOK_CINT
) {
6128 tcc_error("%s only takes positive integers",
6129 tok1
== TOK_builtin_return_address
?
6130 "__builtin_return_address" :
6131 "__builtin_frame_address");
6133 level
= (uint32_t)tokc
.i
;
6138 vset(&type
, VT_LOCAL
, 0); /* local frame */
6140 #ifdef TCC_TARGET_RISCV64
6144 mk_pointer(&vtop
->type
);
6145 indir(); /* -> parent frame */
6147 if (tok1
== TOK_builtin_return_address
) {
6148 // assume return address is just above frame pointer on stack
6149 #ifdef TCC_TARGET_ARM
6152 #elif defined TCC_TARGET_RISCV64
6159 mk_pointer(&vtop
->type
);
6164 #ifdef TCC_TARGET_RISCV64
6165 case TOK_builtin_va_start
:
6166 parse_builtin_params(0, "ee");
6167 r
= vtop
->r
& VT_VALMASK
;
6171 tcc_error("__builtin_va_start expects a local variable");
6176 #ifdef TCC_TARGET_X86_64
6177 #ifdef TCC_TARGET_PE
6178 case TOK_builtin_va_start
:
6179 parse_builtin_params(0, "ee");
6180 r
= vtop
->r
& VT_VALMASK
;
6184 tcc_error("__builtin_va_start expects a local variable");
6186 vtop
->type
= char_pointer_type
;
6191 case TOK_builtin_va_arg_types
:
6192 parse_builtin_params(0, "t");
6193 vpushi(classify_x86_64_va_arg(&vtop
->type
));
6200 #ifdef TCC_TARGET_ARM64
6201 case TOK_builtin_va_start
: {
6202 parse_builtin_params(0, "ee");
6206 vtop
->type
.t
= VT_VOID
;
6209 case TOK_builtin_va_arg
: {
6210 parse_builtin_params(0, "et");
6218 case TOK___arm64_clear_cache
: {
6219 parse_builtin_params(0, "ee");
6222 vtop
->type
.t
= VT_VOID
;
6227 /* atomic operations */
6228 case TOK___atomic_store
:
6229 case TOK___atomic_load
:
6230 case TOK___atomic_exchange
:
6231 case TOK___atomic_compare_exchange
:
6232 case TOK___atomic_fetch_add
:
6233 case TOK___atomic_fetch_sub
:
6234 case TOK___atomic_fetch_or
:
6235 case TOK___atomic_fetch_xor
:
6236 case TOK___atomic_fetch_and
:
6240 /* pre operations */
6251 if (is_float(vtop
->type
.t
)) {
6261 goto tok_identifier
;
6263 /* allow to take the address of a label */
6264 if (tok
< TOK_UIDENT
)
6265 expect("label identifier");
6266 s
= label_find(tok
);
6268 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
6270 if (s
->r
== LABEL_DECLARED
)
6271 s
->r
= LABEL_FORWARD
;
6274 s
->type
.t
= VT_VOID
;
6275 mk_pointer(&s
->type
);
6276 s
->type
.t
|= VT_STATIC
;
6278 vpushsym(&s
->type
, s
);
6284 CType controlling_type
;
6285 int has_default
= 0;
6288 TokenString
*str
= NULL
;
6289 int saved_const_wanted
= const_wanted
;
6294 expr_type(&controlling_type
, expr_eq
);
6295 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
6296 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
6297 mk_pointer(&controlling_type
);
6298 const_wanted
= saved_const_wanted
;
6302 if (tok
== TOK_DEFAULT
) {
6304 tcc_error("too many 'default'");
6310 AttributeDef ad_tmp
;
6315 parse_btype(&cur_type
, &ad_tmp
);
6318 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
6319 if (compare_types(&controlling_type
, &cur_type
, 0)) {
6321 tcc_error("type match twice");
6331 skip_or_save_block(&str
);
6333 skip_or_save_block(NULL
);
6340 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
6341 tcc_error("type '%s' does not match any association", buf
);
6343 begin_macro(str
, 1);
6352 // special qnan , snan and infinity values
6357 vtop
->type
.t
= VT_FLOAT
;
6362 goto special_math_val
;
6365 goto special_math_val
;
6372 expect("identifier");
6374 if (!s
|| IS_ASM_SYM(s
)) {
6375 const char *name
= get_tok_str(t
, NULL
);
6377 tcc_error("'%s' undeclared", name
);
6378 /* for simple function calls, we tolerate undeclared
6379 external reference to int() function */
6380 tcc_warning_c(warn_implicit_function_declaration
)(
6381 "implicit declaration of function '%s'", name
);
6382 s
= external_global_sym(t
, &func_old_type
);
6386 /* A symbol that has a register is a local register variable,
6387 which starts out as VT_LOCAL value. */
6388 if ((r
& VT_VALMASK
) < VT_CONST
)
6389 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
6391 vset(&s
->type
, r
, s
->c
);
6392 /* Point to s as backpointer (even without r&VT_SYM).
6393 Will be used by at least the x86 inline asm parser for
6399 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
6400 vtop
->c
.i
= s
->enum_val
;
6405 /* post operations */
6407 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
6410 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
6411 int qualifiers
, cumofs
= 0;
6413 if (tok
== TOK_ARROW
)
6415 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
6418 /* expect pointer on structure */
6419 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
6420 expect("struct or union");
6421 if (tok
== TOK_CDOUBLE
)
6422 expect("field name");
6424 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
6425 expect("field name");
6426 s
= find_field(&vtop
->type
, tok
, &cumofs
);
6428 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
6429 /* add field offset to pointer */
6430 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
6431 vpushi(cumofs
+ s
->c
);
6433 /* change type to field type, and set to lvalue */
6434 vtop
->type
= s
->type
;
6435 vtop
->type
.t
|= qualifiers
;
6436 /* an array is never an lvalue */
6437 if (!(vtop
->type
.t
& VT_ARRAY
)) {
6439 #ifdef CONFIG_TCC_BCHECK
6440 /* if bound checking, the referenced pointer must be checked */
6441 if (tcc_state
->do_bounds_check
)
6442 vtop
->r
|= VT_MUSTBOUND
;
6446 } else if (tok
== '[') {
6452 } else if (tok
== '(') {
6455 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
6458 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
6459 /* pointer test (no array accepted) */
6460 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
6461 vtop
->type
= *pointed_type(&vtop
->type
);
6462 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6466 expect("function pointer");
6469 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
6471 /* get return type */
6474 sa
= s
->next
; /* first parameter */
6475 nb_args
= regsize
= 0;
6477 /* compute first implicit argument if a structure is returned */
6478 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
6479 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
6480 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6481 &ret_align
, ®size
);
6482 if (ret_nregs
<= 0) {
6483 /* get some space for the returned structure */
6484 size
= type_size(&s
->type
, &align
);
6485 #ifdef TCC_TARGET_ARM64
6486 /* On arm64, a small struct is return in registers.
6487 It is much easier to write it to memory if we know
6488 that we are allowed to write some extra bytes, so
6489 round the allocated space up to a power of 2: */
6491 while (size
& (size
- 1))
6492 size
= (size
| (size
- 1)) + 1;
6494 loc
= (loc
- size
) & -align
;
6496 ret
.r
= VT_LOCAL
| VT_LVAL
;
6497 /* pass it as 'int' to avoid structure arg passing
6499 vseti(VT_LOCAL
, loc
);
6500 #ifdef CONFIG_TCC_BCHECK
6501 if (tcc_state
->do_bounds_check
)
6515 if (ret_nregs
> 0) {
6516 /* return in register */
6518 PUT_R_RET(&ret
, ret
.type
.t
);
6523 gfunc_param_typed(s
, sa
);
6533 tcc_error("too few arguments to function");
6535 gfunc_call(nb_args
);
6537 if (ret_nregs
< 0) {
6538 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6539 #ifdef TCC_TARGET_RISCV64
6540 arch_transfer_ret_regs(1);
6544 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6545 vsetc(&ret
.type
, r
, &ret
.c
);
6546 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6549 /* handle packed struct return */
6550 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6553 size
= type_size(&s
->type
, &align
);
6554 /* We're writing whole regs often, make sure there's enough
6555 space. Assume register size is power of 2. */
6556 if (regsize
> align
)
6558 loc
= (loc
- size
) & -align
;
6562 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6566 if (--ret_nregs
== 0)
6570 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6573 /* Promote char/short return values. This is matters only
6574 for calling function that were not compiled by TCC and
6575 only on some architectures. For those where it doesn't
6576 matter we expect things to be already promoted to int,
6578 t
= s
->type
.t
& VT_BTYPE
;
6579 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6581 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6583 vtop
->type
.t
= VT_INT
;
6587 if (s
->f
.func_noreturn
) {
6589 tcc_tcov_block_end (tcov_data
.line
);
6598 #ifndef precedence_parser /* original top-down parser */
/* Parse left-associative multiplicative operators '*', '/', '%'
   (one level of the top-down precedence chain; loop body elided here). */
6600 static void expr_prod(void)
6605 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
/* Parse left-associative additive operators '+' and '-'. */
6612 static void expr_sum(void)
6617 while ((t
= tok
) == '+' || t
== '-') {
/* Parse left-associative shift operators '<<' (TOK_SHL) and '>>' (TOK_SAR). */
6624 static void expr_shift(void)
6629 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
/* Parse relational operators: the TOK_ULE..TOK_GT token range plus
   the unsigned variants TOK_ULT / TOK_UGE. */
6636 static void expr_cmp(void)
6641 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6642 t
== TOK_ULT
|| t
== TOK_UGE
) {
/* Parse equality operators '==' (TOK_EQ) and '!=' (TOK_NE). */
6649 static void expr_cmpeq(void)
6654 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
/* Parse bitwise AND '&' (left-associative). */
6661 static void expr_and(void)
6664 while (tok
== '&') {
/* Parse bitwise XOR '^' (left-associative). */
6671 static void expr_xor(void)
6674 while (tok
== '^') {
/* Parse bitwise OR '|' (left-associative). */
6681 static void expr_or(void)
6684 while (tok
== '|') {
6691 static void expr_landor(int op
);
/* Parse logical AND '&&'; delegates to expr_landor when TOK_LAND is seen. */
6693 static void expr_land(void)
6696 if (tok
== TOK_LAND
)
/* Parse logical OR '||' — top of the binary-operator chain (body elided here). */
6700 static void expr_lor(void)
6707 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6708 #else /* defined precedence_parser */
6709 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6710 # define expr_lor() unary(), expr_infix(1)
/* Map an operator token to its binding strength for the table-driven
   (precedence_parser) variant: || = 1 (weakest) ... * / % = 10 (strongest).
   Non-operator tokens fall through and return 0 (terminates expr_infix). */
6712 static int precedence(int tok
)
6715 case TOK_LOR
: return 1;
6716 case TOK_LAND
: return 2;
6720 case TOK_EQ
: case TOK_NE
: return 6;
/* relational operators, including the unsigned comparison tokens */
6721 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6722 case TOK_SHL
: case TOK_SAR
: return 8;
6723 case '+': case '-': return 9;
6724 case '*': case '/': case '%': return 10;
/* the TOK_ULE..TOK_GT range is also relational — route it to 'relat' */
6726 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6731 static unsigned char prec
[256];
/* Precompute the 256-entry precedence lookup table used by the
   precedence(i) macro fast path for single-byte tokens. */
6732 static void init_prec(void)
6735 for (i
= 0; i
< 256; i
++)
6736 prec
[i
] = precedence(i
);
6738 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6740 static void expr_landor(int op
);
/* Operator-precedence loop: consume operators whose precedence p2 is at
   least p; '||'/'&&' are special-cased (short-circuit codegen elsewhere).
   Recurses for tighter-binding right-hand operators. */
6742 static void expr_infix(int p
)
6745 while ((p2
= precedence(t
)) >= p
) {
6746 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6751 if (precedence(tok
) > p2
)
6760 /* Assuming vtop is a value used in a conditional context
6761 (i.e. compared with zero) return 0 if it's false, 1 if
6762 true and -1 if it can't be statically determined. */
/* Assuming vtop is used in a conditional context, return 0 (statically
   false), 1 (statically true), or -1 (not determinable at compile time).
   Only a non-weak VT_CONST value can be decided statically — weak symbols
   may resolve to NULL at link time. */
6763 static int condition_3way(void)
6766 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6767 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
/* normalize the constant to 0/1 before inspecting it */
6769 gen_cast_s(VT_BOOL
);
/* Generate code for '&&'/'||' with short-circuit evaluation.
   i is the operator's neutral-result flag (1 for &&), c the 3-way
   verdict of the current operand; once the result is decided (f set),
   remaining operands are parsed with code generation suppressed
   via nocode_wanted (see CODE_OFF machinery in the file header). */
6776 static void expr_landor(int op
)
6778 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6780 c
= f
? i
: condition_3way();
/* non-constant operand: spill registers before the conditional jump */
6782 save_regs(1), cc
= 0;
/* result decided early: evaluate the rest without emitting code */
6784 nocode_wanted
++, f
= 1;
6792 expr_landor_next(op
);
/* Return nonzero if *sv is already a boolean-like value: either an int
   constant 0/1, or a VT_CMP (comparison flags) value.  Used by expr_cond
   to avoid materializing 0/1 before a conditional jump. */
6804 static int is_cond_bool(SValue
*sv
)
6806 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6807 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6808 return (unsigned)sv
->c
.i
< 2;
6809 if (sv
->r
== VT_CMP
)
6814 static void expr_cond(void)
6816 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6824 c
= condition_3way();
6825 g
= (tok
== ':' && gnu_ext
);
6835 /* needed to avoid having different registers saved in
6842 ncw_prev
= nocode_wanted
;
6848 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6849 mk_pointer(&vtop
->type
);
6850 sv
= *vtop
; /* save value to handle it later */
6851 vtop
--; /* no vpop so that FP stack is not flushed */
6861 nocode_wanted
= ncw_prev
;
6867 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6868 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6869 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6870 this code jumps directly to the if's then/else branches. */
6875 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6878 nocode_wanted
= ncw_prev
;
6879 // tcc_warning("two conditions expr_cond");
6883 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6884 mk_pointer(&vtop
->type
);
6886 /* cast operands to correct type according to ISOC rules */
6887 if (!combine_types(&type
, &sv
, vtop
, '?'))
6888 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6889 "type mismatch in conditional expression (have '%s' and '%s')");
6890 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6891 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6892 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6894 /* now we convert second operand */
6898 mk_pointer(&vtop
->type
);
6900 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6904 rc
= RC_TYPE(type
.t
);
6905 /* for long longs, we use fixed registers to avoid having
6906 to handle a complicated move */
6907 if (USING_TWO_WORDS(type
.t
))
6908 rc
= RC_RET(type
.t
);
6916 nocode_wanted
= ncw_prev
;
6918 /* this is horrible, but we must also convert first
6924 mk_pointer(&vtop
->type
);
6926 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6932 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
/* Parse an assignment expression: plain '=' or a compound assignment
   token (TOK_ASSIGN); compound forms are lowered via gen_op on the
   underlying operator (TOK_ASSIGN_OP). */
6942 static void expr_eq(void)
6947 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6955 gen_op(TOK_ASSIGN_OP(t
));
/* Parse a full (comma) expression; body elided in this excerpt. */
6961 ST_FUNC
void gexpr(void)
6972 /* parse a constant expression and return value in vtop. */
/* Parse a constant expression, leaving the value in vtop.  Evaluation
   happens with code generation suppressed: unevalmask + 1 raises
   nocode_wanted past the "unevaluated subexpression" mask (see header). */
6973 static void expr_const1(void)
6976 nocode_wanted
+= unevalmask
+ 1;
6978 nocode_wanted
-= unevalmask
+ 1;
6982 /* parse an integer constant and return its value. */
/* Parse an integer constant expression and return its 64-bit value;
   errors out unless vtop is a pure compile-time constant (no lvalue,
   no relocation symbol). */
6983 static inline int64_t expr_const64(void)
6987 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6988 expect("constant expression");
6994 /* parse an integer constant and return its value.
6995 Complain if it doesn't fit 32bit (signed or unsigned). */
/* Parse an integer constant and return it as int; the 64-bit value must
   be representable as either signed or unsigned 32-bit, else error. */
6996 ST_FUNC
int expr_const(void)
6999 int64_t wc
= expr_const64();
7001 if (c
!= wc
&& (unsigned)c
!= wc
)
7002 tcc_error("constant exceeds 32 bit");
7006 /* ------------------------------------------------------------------------- */
7007 /* return from function */
7009 #ifndef TCC_TARGET_ARM64
/* Emit code returning the value on vtop from the current function.
   Struct returns dispatch on gfunc_sret's verdict: <0 target-specific
   register transfer, ==0 copy through the hidden pointer arg (func_vc),
   >0 the struct is packed into ret_nregs return registers.
   Scalar returns simply load vtop into the return register class. */
7010 static void gfunc_return(CType
*func_type
)
7012 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7013 CType type
, ret_type
;
7014 int ret_align
, ret_nregs
, regsize
;
7015 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
7016 &ret_align
, ®size
);
7017 if (ret_nregs
< 0) {
7018 #ifdef TCC_TARGET_RISCV64
7019 arch_transfer_ret_regs(0);
7021 } else if (0 == ret_nregs
) {
7022 /* if returning structure, must copy it to implicit
7023 first pointer arg location */
7026 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
7029 /* copy structure value to pointer */
7032 /* returning structure packed into registers */
7033 int size
, addr
, align
, rc
;
7034 size
= type_size(func_type
,&align
);
/* if the value is misaligned for whole-register stores, realign it
   in a fresh stack slot (loc is the local variable index, see header) */
7035 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
7036 (vtop
->c
.i
& (ret_align
-1)))
7037 && (align
& (ret_align
-1))) {
7038 loc
= (loc
- size
) & -ret_align
;
7041 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
7045 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
7047 vtop
->type
= ret_type
;
7048 rc
= RC_RET(ret_type
.t
);
7056 if (--ret_nregs
== 0)
7058 /* We assume that when a structure is returned in multiple
7059 registers, their classes are consecutive values of the
7062 vtop
->c
.i
+= regsize
;
7067 gv(RC_RET(func_type
->t
));
7069 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
/* At the end of a non-void function body with no explicit return:
   main() gets an implicit 'return 0' (per the C standard); any other
   function only earns a warning. */
7073 static void check_func_return(void)
7075 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
7077 if (!strcmp (funcname
, "main")
7078 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
7079 /* main returns 0 by default */
7081 gen_assign_cast(&func_vt
);
7082 gfunc_return(&func_vt
);
7084 tcc_warning("function might return no value: '%s'", funcname
);
7088 /* ------------------------------------------------------------------------- */
/* qsort comparator for switch case entries: signed ordering by v1.
   Returns -1 / 0 / 1 ((a > b) evaluates to 1 or 0 for the non-less case). */
7091 static int case_cmpi(const void *pa
, const void *pb
)
7093 int64_t a
= (*(struct case_t
**) pa
)->v1
;
7094 int64_t b
= (*(struct case_t
**) pb
)->v1
;
7095 return a
< b
? -1 : a
> b
;
/* qsort comparator for switch case entries: unsigned ordering by v1
   (used when the switch value's type is VT_UNSIGNED). */
7098 static int case_cmpu(const void *pa
, const void *pb
)
7100 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
7101 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
7102 return a
< b
? -1 : a
> b
;
7105 static void gtst_addr(int t
, int a
)
7107 gsym_addr(gvtst(0, t
), a
);
/* Emit the dispatch code for a sorted switch case table base[0..len-1]:
   binary-search style — recurse on the lower half, then continue with
   the upper half; single values (v1 == v2) use one equality test,
   ranges use a v1 <= x <= v2 test.  *bsym collects the fall-through
   jump.  ll marks a 64-bit (VT_LLONG) switch value.
   (Interior lines elided in this excerpt.) */
7110 static void gcase(struct case_t
**base
, int len
, int *bsym
)
7114 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
7131 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
7133 gcase(base
, len
/2, bsym
);
/* continue with the upper half of the table */
7137 base
+= e
; len
-= e
;
7147 if (p
->v1
== p
->v2
) {
7149 gtst_addr(0, p
->sym
);
7159 gtst_addr(0, p
->sym
);
/* no case matched: chain the jump to the break/default target */
7163 *bsym
= gjmp(*bsym
);
7166 /* ------------------------------------------------------------------------- */
7167 /* __attribute__((cleanup(fn))) */
/* Run __attribute__((cleanup(fn))) handlers for the current scope's
   cleanup chain, stopping at 'stop' (exclusive).  Each chain entry
   carries the cleanup function (cls->next) and the guarded variable
   (cls->prev_tok); the call is built as fn(&var). */
7169 static void try_call_scope_cleanup(Sym
*stop
)
7171 Sym
*cls
= cur_scope
->cl
.s
;
7173 for (; cls
!= stop
; cls
= cls
->ncl
) {
7174 Sym
*fs
= cls
->next
;
7175 Sym
*vs
= cls
->prev_tok
;
/* push the cleanup function, then the variable's address as argument */
7177 vpushsym(&fs
->type
, fs
);
7178 vset(&vs
->type
, vs
->r
, vs
->c
);
7180 mk_pointer(&vtop
->type
);
/* For a goto out of cleanup-protected scopes: find the nearest common
   ancestor (NCA) of the current cleanup chain and the label's recorded
   chain (cleanupstate), then run every cleanup between here and the NCA.
   ocd/ccd are the depths of the two chains (depth stored in sym->v,
   masked with ~SYM_FIELD). */
7186 static void try_call_cleanup_goto(Sym
*cleanupstate
)
7191 if (!cur_scope
->cl
.s
)
7194 /* search NCA of both cleanup chains given parents and initial depth */
7195 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
/* walk the deeper chain up until both depths are equal */
7196 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
7198 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
/* now advance both in lockstep until they meet at the NCA */
7200 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
7203 try_call_scope_cleanup(cc
);
7206 /* call 'func' for each __attribute__((cleanup(func))) */
/* On leaving a scope, resolve pending gotos that jumped over cleanups:
   for each pending goto deeper than the target scope o (g->c > o->cl.n),
   run the cleanups down to o's chain and re-emit the jump; forward
   labels are re-chained, resolved ones removed from the pending list.
   Finally run o's own scope cleanups.  (Interior lines elided here.) */
7207 static void block_cleanup(struct scope
*o
)
7211 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
7212 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
7217 try_call_scope_cleanup(o
->cl
.s
);
/* start a fresh jump for this pending goto after the cleanups ran */
7218 pcl
->jnext
= gjmp(0);
7220 goto remove_pending
;
7230 try_call_scope_cleanup(o
->cl
.s
);
7233 /* ------------------------------------------------------------------------- */
/* Restore the stack pointer from the saved location 'loc' when leaving
   a scope that allocated variable-length arrays. */
7236 static void vla_restore(int loc
)
7239 gen_vla_sp_restore(loc
);
/* Walk scopes from cur_scope up to (but not including) o, remembering
   the outermost scope v that holds VLAs, and restore the stack pointer
   to that scope's original level.  (Interior lines elided here.) */
7242 static void vla_leave(struct scope
*o
)
7244 struct scope
*c
= cur_scope
, *v
= NULL
;
7245 for (; c
!= o
&& c
; c
= c
->prev
)
7249 vla_restore(v
->vla
.locorig
);
7252 /* ------------------------------------------------------------------------- */
/* Enter a new block scope: link o onto the scope chain, reset its VLA
   count, snapshot the local symbol and local label stacks so they can
   be rolled back in prev_scope, and emit an N_LBRAC stab for debug
   info (ind - func_ind is the code offset; see header globals). */
7255 static void new_scope(struct scope
*o
)
7257 /* copy and link previous scope */
7259 o
->prev
= cur_scope
;
7261 cur_scope
->vla
.num
= 0;
7263 /* record local declaration stack position */
7264 o
->lstk
= local_stack
;
7265 o
->llstk
= local_label_stack
;
7269 tcc_debug_stabn(tcc_state
, N_LBRAC
, ind
- func_ind
);
/* Leave a block scope: resolve pending-goto cleanups if this scope added
   any, pop locally declared labels and symbols back to the snapshots
   taken in new_scope, unlink the scope, and emit the closing N_RBRAC
   debug stab. */
7272 static void prev_scope(struct scope
*o
, int is_expr
)
7276 if (o
->cl
.s
!= o
->prev
->cl
.s
)
7277 block_cleanup(o
->prev
);
7279 /* pop locally defined labels */
7280 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
7282 /* In the is_expr case (a statement expression is finished here),
7283 vtop might refer to symbols on the local_stack. Either via the
7284 type or via vtop->sym. We can't pop those nor any that in turn
7285 might be referred to. To make it easier we don't roll back
7286 any symbols in that case; some upper level call to block() will
7287 do that. We do have to remove such symbols from the lookup
7288 tables, though. sym_pop will do that. */
7290 /* pop locally defined symbols */
7291 pop_local_syms(o
->lstk
, is_expr
);
7292 cur_scope
= o
->prev
;
7296 tcc_debug_stabn(tcc_state
, N_RBRAC
, ind
- func_ind
);
7299 /* leave a scope via break/continue(/goto) */
/* Leave a scope via break/continue(/goto): only the cleanup handlers
   run — symbol stacks are untouched since parsing continues inside. */
7300 static void leave_scope(struct scope
*o
)
7304 try_call_scope_cleanup(o
->cl
.s
);
7308 /* ------------------------------------------------------------------------- */
7309 /* call block from 'for do while' loops */
/* Parse a loop body: temporarily install bsym/csym as the current
   scope's break/continue jump chains (saving the old ones in b/c and
   the old loop_scope in lo) around the call to block().
   (Interior lines elided in this excerpt.) */
7311 static void lblock(int *bsym
, int *csym
)
7313 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
7314 int *b
= co
->bsym
, *c
= co
->csym
;
7328 static void block(int is_expr
)
7330 int a
, b
, c
, d
, e
, t
;
7335 /* default return value is (void) */
7337 vtop
->type
.t
= VT_VOID
;
7342 /* If the token carries a value, next() might destroy it. Only with
7343 invalid code such as f(){"123"4;} */
7344 if (TOK_HAS_VALUE(t
))
7349 tcc_tcov_check_line (0), tcc_tcov_block_begin ();
7357 if (tok
== TOK_ELSE
) {
7362 gsym(d
); /* patch else jmp */
7367 } else if (t
== TOK_WHILE
) {
7379 } else if (t
== '{') {
7382 /* handle local labels declarations */
7383 while (tok
== TOK_LABEL
) {
7386 if (tok
< TOK_UIDENT
)
7387 expect("label identifier");
7388 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
7390 } while (tok
== ',');
7394 while (tok
!= '}') {
7403 prev_scope(&o
, is_expr
);
7406 else if (!nocode_wanted
)
7407 check_func_return();
7409 } else if (t
== TOK_RETURN
) {
7410 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
7414 gen_assign_cast(&func_vt
);
7416 if (vtop
->type
.t
!= VT_VOID
)
7417 tcc_warning("void function returns a value");
7421 tcc_warning("'return' with no value");
7424 leave_scope(root_scope
);
7426 gfunc_return(&func_vt
);
7428 /* jump unless last stmt in top-level block */
7429 if (tok
!= '}' || local_scope
!= 1)
7432 tcc_tcov_block_end (tcov_data
.line
);
7435 } else if (t
== TOK_BREAK
) {
7437 if (!cur_scope
->bsym
)
7438 tcc_error("cannot break");
7439 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
7440 leave_scope(cur_switch
->scope
);
7442 leave_scope(loop_scope
);
7443 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
7446 } else if (t
== TOK_CONTINUE
) {
7448 if (!cur_scope
->csym
)
7449 tcc_error("cannot continue");
7450 leave_scope(loop_scope
);
7451 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
7454 } else if (t
== TOK_FOR
) {
7459 /* c99 for-loop init decl? */
7460 if (!decl0(VT_LOCAL
, 1, NULL
)) {
7461 /* no, regular for-loop init expr */
7489 } else if (t
== TOK_DO
) {
7503 } else if (t
== TOK_SWITCH
) {
7504 struct switch_t
*sw
;
7506 sw
= tcc_mallocz(sizeof *sw
);
7508 sw
->scope
= cur_scope
;
7509 sw
->prev
= cur_switch
;
7515 sw
->sv
= *vtop
--; /* save switch value */
7518 b
= gjmp(0); /* jump to first case */
7520 a
= gjmp(a
); /* add implicit break */
7524 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7525 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7527 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7529 for (b
= 1; b
< sw
->n
; b
++)
7530 if (sw
->sv
.type
.t
& VT_UNSIGNED
7531 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7532 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7533 tcc_error("duplicate case value");
7537 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7540 gsym_addr(d
, sw
->def_sym
);
7546 dynarray_reset(&sw
->p
, &sw
->n
);
7547 cur_switch
= sw
->prev
;
7550 } else if (t
== TOK_CASE
) {
7551 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7554 cr
->v1
= cr
->v2
= expr_const64();
7555 if (gnu_ext
&& tok
== TOK_DOTS
) {
7557 cr
->v2
= expr_const64();
7558 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7559 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7560 tcc_warning("empty case range");
7564 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7567 goto block_after_label
;
7569 } else if (t
== TOK_DEFAULT
) {
7572 if (cur_switch
->def_sym
)
7573 tcc_error("too many 'default'");
7575 cur_switch
->def_sym
= gind();
7578 goto block_after_label
;
7580 } else if (t
== TOK_GOTO
) {
7581 if (cur_scope
->vla
.num
)
7582 vla_restore(cur_scope
->vla
.locorig
);
7583 if (tok
== '*' && gnu_ext
) {
7587 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7591 } else if (tok
>= TOK_UIDENT
) {
7592 s
= label_find(tok
);
7593 /* put forward definition if needed */
7595 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7596 else if (s
->r
== LABEL_DECLARED
)
7597 s
->r
= LABEL_FORWARD
;
7599 if (s
->r
& LABEL_FORWARD
) {
7600 /* start new goto chain for cleanups, linked via label->next */
7601 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7602 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7603 pending_gotos
->prev_tok
= s
;
7604 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7605 pending_gotos
->next
= s
;
7607 s
->jnext
= gjmp(s
->jnext
);
7609 try_call_cleanup_goto(s
->cleanupstate
);
7610 gjmp_addr(s
->jnext
);
7615 expect("label identifier");
7619 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7623 if (tok
== ':' && t
>= TOK_UIDENT
) {
7628 if (s
->r
== LABEL_DEFINED
)
7629 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7630 s
->r
= LABEL_DEFINED
;
7632 Sym
*pcl
; /* pending cleanup goto */
7633 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7635 sym_pop(&s
->next
, NULL
, 0);
7639 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7642 s
->cleanupstate
= cur_scope
->cl
.s
;
7645 vla_restore(cur_scope
->vla
.loc
);
7648 /* we accept this, but it is a mistake */
7649 tcc_warning_c(warn_all
)("deprecated use of label at end of compound statement");
7652 /* expression case */
7669 tcc_tcov_check_line (0), tcc_tcov_block_end (0);
7672 /* This skips over a stream of tokens containing balanced {} and ()
7673 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7674 with a '{'). If STR then allocates and stores the skipped tokens
7675 in *STR. This doesn't check if () and {} are nested correctly,
7676 i.e. "({)}" is accepted. */
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or the matching '}' if we
   started with a '{').  If str != NULL the skipped tokens are recorded
   into a newly allocated TokenString (terminated with -1 and 0).
   Nesting of () vs {} is not checked: "({)}" is accepted. */
7677 static void skip_or_save_block(TokenString
**str
)
7679 int braces
= tok
== '{';
7682 *str
= tok_str_alloc();
7684 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
/* EOF inside a recorded or nested block is an error; bare EOF stops */
7686 if (tok
== TOK_EOF
) {
7687 if (str
|| level
> 0)
7688 tcc_error("unexpected end of file");
7693 tok_str_add_tok(*str
);
/* track nesting depth across both bracket kinds */
7696 if (t
== '{' || t
== '(') {
7698 } else if (t
== '}' || t
== ')') {
7700 if (level
== 0 && braces
&& t
== '}')
/* terminate the saved token string */
7705 tok_str_add(*str
, -1);
7706 tok_str_add(*str
, 0);
7710 #define EXPR_CONST 1
/* Parse one initializer element.  In the constant case (EXPR_CONST) the
   result must be a compile-time constant or the address of an anonymous
   symbol (compound literal); on PE, dllimport symbols are rejected too
   since their address is only known at run time. */
7713 static void parse_init_elem(int expr_type
)
7715 int saved_global_expr
;
7718 /* compound literals must be allocated globally in this case */
7719 saved_global_expr
= global_expr
;
7722 global_expr
= saved_global_expr
;
7723 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7724 (compound literals). */
7725 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7726 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7727 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7728 #ifdef TCC_TARGET_PE
7729 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7732 tcc_error("initializer element is not constant");
/* Sanity check for initializer writes: 'offset' must not exceed the
   space reserved so far (section data_offset for static data, the
   local frame offset otherwise).  A violation is a compiler bug,
   hence tcc_internal_error. */
7741 static void init_assert(init_params
*p
, int offset
)
7743 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7744 : !nocode_wanted
&& offset
> p
->local_offset
)
7745 tcc_internal_error("initializer overflow");
7748 #define init_assert(sec, offset)
7751 /* put zeros for variable based init */
/* Zero-fill 'size' bytes at offset c of the object being initialized.
   Static storage needs no code (sections start zeroed); for locals a
   call to the memset helper is generated.  (Tail elided here; an ARM
   variant follows under #ifdef.) */
7752 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7754 init_assert(p
, c
+ size
);
7756 /* nothing to do because globals are already set to zero */
7758 vpush_helper_func(TOK_memset
);
7760 #ifdef TCC_TARGET_ARM
7772 #define DIF_SIZE_ONLY 2
7773 #define DIF_HAVE_ELEM 4
7776 /* delete relocations for specified range c ... c + size. Unfortunatly
7777 in very special cases, relocations may occur unordered */
/* delete relocations for specified range c ... c + size. Unfortunately
   in very special cases, relocations may occur unordered — hence the
   full scan with compaction: surviving entries are copied down to rel2
   and the reloc section's data_offset shrinks per deleted entry. */
7778 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7780 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7781 if (!sec
|| !sec
->reloc
)
7783 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7784 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7785 while (rel
< rel_end
) {
/* relocation falls inside the overwritten range: drop it */
7786 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7787 sec
->reloc
->data_offset
-= sizeof *rel
;
/* keep this relocation: compact it toward the front */
7790 memcpy(rel2
, rel
, sizeof *rel
);
/* Adjust a flexible array member's element count while initializing:
   if 'ref' is the flex array being filled (p->flex_array_ref), grow its
   recorded size to cover 'index'; otherwise a negative count means a
   flex array in a context where it cannot be initialized → error. */
7797 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7799 if (ref
== p
->flex_array_ref
) {
7800 if (index
>= ref
->c
)
7802 } else if (ref
->c
< 0)
7803 tcc_error("flexible array has zero size in this context");
/* NOTE(review): garbled partial extraction — many original source lines
   are missing below; the surviving fragments are preserved verbatim.
   From what is visible: parses one initializer designator ([index],
   GNU [a ... b] ranges, .field), advances 'c' to the designated element,
   recurses into decl_initializer() for it, replays the stored value for
   range designators, and returns the updated initialized length. */
7806 /* t is the array or struct type. c is the array or struct
7807 address. cur_field is the pointer to the current
7808 field, for arrays the 'c' member contains the current start
7809 index. 'flags' is as in decl_initializer.
7810 'al' contains the already initialized length of the
7811 current container (starting at c). This returns the new length of that. */
7812 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7813 Sym
**cur_field
, int flags
, int al
)
7816 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7817 unsigned long corig
= c
;
7822 if (flags
& DIF_HAVE_ELEM
)
7825 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
/* explicit designator list: [const-expr] or [a ... b] or .field */
7832 /* NOTE: we only support ranges for last designator */
7833 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7835 if (!(type
->t
& VT_ARRAY
))
7836 expect("array type");
7838 index
= index_last
= expr_const();
7839 if (tok
== TOK_DOTS
&& gnu_ext
) {
7841 index_last
= expr_const();
7845 decl_design_flex(p
, s
, index_last
);
7846 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7847 tcc_error("index exceeds array bounds or range is empty");
7849 (*cur_field
)->c
= index_last
;
7850 type
= pointed_type(type
);
7851 elem_size
= type_size(type
, &align
);
7852 c
+= index
* elem_size
;
7853 nb_elems
= index_last
- index
+ 1;
7860 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7861 expect("struct/union type");
7863 f
= find_field(type
, l
, &cumofs
);
7876 } else if (!gnu_ext
) {
/* no designator: positional initialization of the next element/field */
7881 if (type
->t
& VT_ARRAY
) {
7882 index
= (*cur_field
)->c
;
7884 decl_design_flex(p
, s
, index
);
7886 tcc_error("too many initializers");
7887 type
= pointed_type(type
);
7888 elem_size
= type_size(type
, &align
);
7889 c
+= index
* elem_size
;
/* skip anonymous bitfield padding members */
7892 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7893 *cur_field
= f
= f
->next
;
7895 tcc_error("too many initializers");
7901 if (!elem_size
) /* for structs */
7902 elem_size
= type_size(type
, &align
);
7904 /* Using designators the same element can be initialized more
7905 than once. In that case we need to delete possibly already
7906 existing relocations. */
7907 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7908 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7909 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7912 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
/* GNU range designator: replicate the stored element over the range */
7914 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7918 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7919 /* make init_putv/vstore believe it were a struct */
7921 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7925 vpush_ref(type
, p
->sec
, c
, elem_size
);
7927 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7928 for (i
= 1; i
< nb_elems
; i
++) {
7930 init_putv(p
, type
, c
+ elem_size
* i
);
7935 c
+= nb_elems
* elem_size
;
/* NOTE(review): garbled partial extraction — switch scaffolding and
   several statements are missing below; surviving fragments preserved
   verbatim. From what is visible: stores the value on top of the value
   stack either directly into section data at offset 'c' (constants,
   emitting relocations for symbolic addresses and copying compound
   literals together with their relocations) or, for stack objects,
   generates a normal store via vset()/vstore. */
7941 /* store a value or an expression directly in global data or in local array */
7942 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7948 Section
*sec
= p
->sec
;
7952 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7954 size
= type_size(type
, &align
);
7955 if (type
->t
& VT_BITFIELD
)
7956 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7957 init_assert(p
, c
+ size
);
7960 /* XXX: not portable */
7961 /* XXX: generate error if incorrect relocation */
7962 gen_assign_cast(&dtype
);
7963 bt
= type
->t
& VT_BTYPE
;
/* symbolic constants are only storable load-time into pointer-sized,
   non-bitfield slots */
7965 if ((vtop
->r
& VT_SYM
)
7967 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7968 || (type
->t
& VT_BITFIELD
))
7969 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7971 tcc_error("initializer element is not computable at load time");
7973 if (NODATA_WANTED
) {
7978 ptr
= sec
->data
+ c
;
7981 /* XXX: make code faster ? */
7982 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7983 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7984 /* XXX This rejects compound literals like
7985 '(void *){ptr}'. The problem is that '&sym' is
7986 represented the same way, which would be ruled out
7987 by the SYM_FIRST_ANOM check above, but also '"string"'
7988 in 'char *p = "string"' is represented the same
7989 with the type being VT_PTR and the symbol being an
7990 anonymous one. That is, there's no difference in vtop
7991 between '(void *){x}' and '&(void *){x}'. Ignore
7992 pointer typed entities here. Hopefully no real code
7993 will ever use compound literals with scalar type. */
7994 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7995 /* These come from compound literals, memcpy stuff over. */
7999 esym
= elfsym(vtop
->sym
);
8000 ssec
= tcc_state
->sections
[esym
->st_shndx
];
8001 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
8003 /* We need to copy over all memory contents, and that
8004 includes relocations. Use the fact that relocs are
8005 created it order, so look from the end of relocs
8006 until we hit one before the copied region. */
8007 unsigned long relofs
= ssec
->reloc
->data_offset
;
8008 while (relofs
>= sizeof(*rel
)) {
8009 relofs
-= sizeof(*rel
);
8010 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ relofs
);
8011 if (rel
->r_offset
>= esym
->st_value
+ size
)
8013 if (rel
->r_offset
< esym
->st_value
)
8015 put_elf_reloca(symtab_section
, sec
,
8016 c
+ rel
->r_offset
- esym
->st_value
,
8017 ELFW(R_TYPE
)(rel
->r_info
),
8018 ELFW(R_SYM
)(rel
->r_info
),
/* scalar constant: write bytes directly, per base type */
8028 if (type
->t
& VT_BITFIELD
) {
8029 int bit_pos
, bit_size
, bits
, n
;
8030 unsigned char *p
, v
, m
;
8031 bit_pos
= BIT_POS(vtop
->type
.t
);
8032 bit_size
= BIT_SIZE(vtop
->type
.t
);
8033 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
8034 bit_pos
&= 7, bits
= 0;
8039 v
= val
>> bits
<< bit_pos
;
8040 m
= ((1 << n
) - 1) << bit_pos
;
8041 *p
= (*p
& ~m
) | (v
& m
);
8042 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
8047 *(char *)ptr
= val
!= 0;
8053 write16le(ptr
, val
);
8056 write32le(ptr
, val
);
8059 write64le(ptr
, val
);
8062 #if defined TCC_IS_NATIVE_387
8063 /* Host and target platform may be different but both have x87.
8064 On windows, tcc does not use VT_LDOUBLE, except when it is a
8065 cross compiler. In this case a mingw gcc as host compiler
8066 comes here with 10-byte long doubles, while msvc or tcc won't.
8067 tcc itself can still translate by asm.
8068 In any case we avoid possibly random bytes 11 and 12.
8070 if (sizeof (long double) >= 10)
8071 memcpy(ptr
, &vtop
->c
.ld
, 10);
8073 else if (sizeof (long double) == sizeof (double))
8074 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
8076 else if (vtop
->c
.ld
== 0.0)
8080 /* For other platforms it should work natively, but may not work
8081 for cross compilers */
8082 if (sizeof(long double) == LDOUBLE_SIZE
)
8083 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
8084 else if (sizeof(double) == LDOUBLE_SIZE
)
8085 memcpy(ptr
, &vtop
->c
.ld
, LDOUBLE_SIZE
);
8086 #ifndef TCC_CROSS_TEST
8088 tcc_error("can't cross compile long double constants");
8093 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
8096 if (vtop
->r
& VT_SYM
)
8097 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
8099 write64le(ptr
, val
);
8102 write32le(ptr
, val
);
8106 write64le(ptr
, val
);
8110 if (vtop
->r
& VT_SYM
)
8111 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
8112 write32le(ptr
, val
);
8116 //tcc_internal_error("unexpected type");
/* stack object: emit an ordinary store through a local lvalue */
8122 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
/* NOTE(review): garbled partial extraction — braces, loop scaffolding
   and several statements are missing below; surviving fragments are
   preserved verbatim. From what is visible: recursively parses one
   initializer for 'type' at offset 'c', with special handling for
   string-literal initialization of (w)char arrays, brace-enclosed
   aggregate initialization driven by decl_designator(), a size-only
   dry-run mode (DIF_SIZE_ONLY), and scalar element storage through
   init_putv(). */
8129 /* 't' contains the type and storage info. 'c' is the offset of the
8130 object in section 'sec'. If 'sec' is NULL, it means stack based
8131 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
8132 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
8133 size only evaluation is wanted (only for arrays). */
8134 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
8136 int len
, n
, no_oblock
, i
;
8142 /* generate line number info */
8143 if (debug_modes
&& !p
->sec
)
8144 tcc_debug_line(tcc_state
), tcc_tcov_check_line (1);
8146 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
8147 /* In case of strings we have special handling for arrays, so
8148 don't consume them as initializer value (which would commit them
8149 to some anonymous symbol). */
8150 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
8151 !(flags
& DIF_SIZE_ONLY
)) {
8152 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
8153 flags
|= DIF_HAVE_ELEM
;
/* scalar element of exactly matching (unqualified) type: store it */
8156 if ((flags
& DIF_HAVE_ELEM
) &&
8157 !(type
->t
& VT_ARRAY
) &&
8158 /* Use i_c_parameter_t, to strip toplevel qualifiers.
8159 The source type might have VT_CONSTANT set, which is
8160 of course assignable to non-const elements. */
8161 is_compatible_unqualified_types(type
, &vtop
->type
)) {
8164 } else if (type
->t
& VT_ARRAY
) {
8166 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
8174 t1
= pointed_type(type
);
8175 size1
= type_size(t1
, &align1
);
8177 /* only parse strings here if correct type (otherwise: handle
8178 them as ((w)char *) expressions */
8179 if ((tok
== TOK_LSTR
&&
8180 #ifdef TCC_TARGET_PE
8181 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
8183 (t1
->t
& VT_BTYPE
) == VT_INT
8185 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
/* merge adjacent string literals into 'initstr' */
8187 cstr_reset(&initstr
);
8188 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
8189 tcc_error("unhandled string literal merging");
8190 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8192 initstr
.size
-= size1
;
8194 len
+= tokc
.str
.size
;
8196 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
8198 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
8201 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
8202 && tok
!= TOK_EOF
) {
8203 /* Not a lone literal but part of a bigger expression. */
8204 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
8205 tokc
.str
.size
= initstr
.size
;
8206 tokc
.str
.data
= initstr
.data
;
8210 decl_design_flex(p
, s
, len
);
8211 if (!(flags
& DIF_SIZE_ONLY
)) {
8216 tcc_warning("initializer-string for array is too long");
8217 /* in order to go faster for common case (char
8218 string in global variable, we handle it
8220 if (p
->sec
&& size1
== 1) {
8221 init_assert(p
, c
+ nb
);
8223 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
8227 /* only add trailing zero if enough storage (no
8228 warning in this case since it is standard) */
8229 if (flags
& DIF_CLEAR
)
8232 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
8236 } else if (size1
== 1)
8237 ch
= ((unsigned char *)initstr
.data
)[i
];
8239 ch
= ((nwchar_t
*)initstr
.data
)[i
];
8241 init_putv(p
, t1
, c
+ i
* size1
);
8252 /* zero memory once in advance */
8253 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
8254 init_putz(p
, c
, n
*size1
);
/* iterate over the brace-enclosed initializer list */
8259 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
8260 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
8261 flags
&= ~DIF_HAVE_ELEM
;
8262 if (type
->t
& VT_ARRAY
) {
8264 /* special test for multi dimensional arrays (may not
8265 be strictly correct if designators are used at the
8267 if (no_oblock
&& len
>= n
*size1
)
8270 if (s
->type
.t
== VT_UNION
)
8274 if (no_oblock
&& f
== NULL
)
8285 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
8287 if ((flags
& DIF_FIRST
) || tok
== '{') {
8296 } else if (tok
== '{') {
8297 if (flags
& DIF_HAVE_ELEM
)
8300 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
8302 } else if ((flags
& DIF_SIZE_ONLY
)) {
8303 /* If we supported only ISO C we wouldn't have to accept calling
8304 this on anything than an array if DIF_SIZE_ONLY (and even then
8305 only on the outermost level, so no recursion would be needed),
8306 because initializing a flex array member isn't supported.
8307 But GNU C supports it, so we need to recurse even into
8308 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
8309 /* just skip expression */
8310 skip_or_save_block(NULL
);
8312 if (!(flags
& DIF_HAVE_ELEM
)) {
8313 /* This should happen only when we haven't parsed
8314 the init element above for fear of committing a
8315 string constant to memory too early. */
8316 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
8317 expect("string constant");
8318 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
8321 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
8322 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
8324 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
8328 init_putv(p
, type
, c
);
/* NOTE(review): garbled partial extraction — braces and several
   statements are missing below; surviving fragments are preserved
   verbatim. From what is visible: allocates storage for a declaration
   (stack slot, section data, or SHN_COMMON), handles flexible array
   members via a size-only first parse, VLA allocation, cleanup-attribute
   registration, bounds-check padding, and finally parses the real
   initializer with decl_initializer(). */
8332 /* parse an initializer for type 't' if 'has_init' is non zero, and
8333 allocate space in local or global data space ('r' is either
8334 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
8335 variable 'v' of scope 'scope' is declared before initializers
8336 are parsed. If 'v' is zero, then a reference to the new object
8337 is put in the value stack. If 'has_init' is 2, a special parsing
8338 is done to handle string constants. */
8339 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
8340 int has_init
, int v
, int scope
)
8342 int size
, align
, addr
;
8343 TokenString
*init_str
= NULL
;
8346 Sym
*flexible_array
;
8348 int saved_nocode_wanted
= nocode_wanted
;
8349 #ifdef CONFIG_TCC_BCHECK
8350 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
8352 init_params p
= {0};
8354 /* Always allocate static or global variables */
8355 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
8356 nocode_wanted
|= 0x80000000;
8358 flexible_array
= NULL
;
8359 size
= type_size(type
, &align
);
8361 /* exactly one flexible array may be initialized, either the
8362 toplevel array or the last member of the toplevel struct */
8365 /* If the base type itself was an array type of unspecified size
8366 (like in 'typedef int arr[]; arr x = {1};') then we will
8367 overwrite the unknown size by the real one for this decl.
8368 We need to unshare the ref symbol holding that size. */
8369 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
8370 p
.flex_array_ref
= type
->ref
;
8372 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
8373 Sym
*field
= type
->ref
->next
;
8376 field
= field
->next
;
8377 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
8378 flexible_array
= field
;
8379 p
.flex_array_ref
= field
->type
.ref
;
8386 /* If unknown size, do a dry-run 1st pass */
8388 tcc_error("unknown type size");
8389 if (has_init
== 2) {
8390 /* only get strings */
8391 init_str
= tok_str_alloc();
8392 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8393 tok_str_add_tok(init_str
);
8396 tok_str_add(init_str
, -1);
8397 tok_str_add(init_str
, 0);
8399 skip_or_save_block(&init_str
);
8403 begin_macro(init_str
, 1);
8405 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
8406 /* prepare second initializer parsing */
8407 macro_ptr
= init_str
->str
;
8410 /* if still unknown size, error */
8411 size
= type_size(type
, &align
);
8413 tcc_error("unknown type size");
8415 /* If there's a flex member and it was used in the initializer
8417 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
8418 size
+= flexible_array
->type
.ref
->c
8419 * pointed_size(&flexible_array
->type
);
8422 /* take into account specified alignment if bigger */
8423 if (ad
->a
.aligned
) {
8424 int speca
= 1 << (ad
->a
.aligned
- 1);
8427 } else if (ad
->a
.packed
) {
8431 if (!v
&& NODATA_WANTED
)
8432 size
= 0, align
= 1;
/* stack-based allocation */
8434 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
8436 #ifdef CONFIG_TCC_BCHECK
8438 /* add padding between stack variables for bound checking */
8442 loc
= (loc
- size
) & -align
;
8444 p
.local_offset
= addr
+ size
;
8445 #ifdef CONFIG_TCC_BCHECK
8447 /* add padding between stack variables for bound checking */
8452 /* local variable */
8453 #ifdef CONFIG_TCC_ASM
8454 if (ad
->asm_label
) {
8455 int reg
= asm_parse_regvar(ad
->asm_label
);
8457 r
= (r
& ~VT_VALMASK
) | reg
;
8460 sym
= sym_push(v
, type
, r
, addr
);
8461 if (ad
->cleanup_func
) {
8462 Sym
*cls
= sym_push2(&all_cleanups
,
8463 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
8464 cls
->prev_tok
= sym
;
8465 cls
->next
= ad
->cleanup_func
;
8466 cls
->ncl
= cur_scope
->cl
.s
;
8467 cur_scope
->cl
.s
= cls
;
8472 /* push local reference */
8473 vset(type
, r
, addr
);
/* static/global allocation */
8477 if (v
&& scope
== VT_CONST
) {
8478 /* see if the symbol was already defined */
8481 if (p
.flex_array_ref
&& (sym
->type
.t
& type
->t
& VT_ARRAY
)
8482 && sym
->type
.ref
->c
> type
->ref
->c
) {
8483 /* flex array was already declared with explicit size
8485 int arr[] = { 1,2,3 }; */
8486 type
->ref
->c
= sym
->type
.ref
->c
;
8487 size
= type_size(type
, &align
);
8489 patch_storage(sym
, ad
, type
);
8490 /* we accept several definitions of the same global variable. */
8491 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8496 /* allocate symbol in corresponding section */
8500 while ((tp
->t
& (VT_BTYPE
|VT_ARRAY
)) == (VT_PTR
|VT_ARRAY
))
8501 tp
= &tp
->ref
->type
;
8502 if (tp
->t
& VT_CONSTANT
) {
8503 sec
= rodata_section
;
8504 } else if (has_init
) {
8506 /*if (tcc_state->g_debug & 4)
8507 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8508 } else if (tcc_state
->nocommon
)
8513 addr
= section_add(sec
, size
, align
);
8514 #ifdef CONFIG_TCC_BCHECK
8515 /* add padding if bound check */
8517 section_add(sec
, 1, 1);
8520 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8521 sec
= common_section
;
8526 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8527 patch_storage(sym
, ad
, NULL
);
8529 /* update symbol definition */
8530 put_extern_sym(sym
, sec
, addr
, size
);
8532 /* push global reference */
8533 vpush_ref(type
, sec
, addr
, size
);
8538 #ifdef CONFIG_TCC_BCHECK
8539 /* handles bounds now because the symbol must be defined
8540 before for the relocation */
8544 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8545 /* then add global bound info */
8546 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8547 bounds_ptr
[0] = 0; /* relocated */
8548 bounds_ptr
[1] = size
;
/* variable-length array: allocate at run time */
8553 if (type
->t
& VT_VLA
) {
8559 /* save before-VLA stack pointer if needed */
8560 if (cur_scope
->vla
.num
== 0) {
8561 if (cur_scope
->prev
&& cur_scope
->prev
->vla
.num
) {
8562 cur_scope
->vla
.locorig
= cur_scope
->prev
->vla
.loc
;
8564 gen_vla_sp_save(loc
-= PTR_SIZE
);
8565 cur_scope
->vla
.locorig
= loc
;
8569 vla_runtime_type_size(type
, &a
);
8570 gen_vla_alloc(type
, a
);
8571 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8572 /* on _WIN64, because of the function args scratch area, the
8573 result of alloca differs from RSP and is returned in RAX. */
8574 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8576 gen_vla_sp_save(addr
);
8577 cur_scope
->vla
.loc
= addr
;
8578 cur_scope
->vla
.num
++;
8579 } else if (has_init
) {
8581 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8582 /* patch flexible array member size back to -1, */
8583 /* for possible subsequent similar declarations */
8585 flexible_array
->type
.ref
->c
= -1;
8589 /* restore parse state if needed */
8595 nocode_wanted
= saved_nocode_wanted
;
/* NOTE(review): garbled partial extraction — the body-parsing lines
   between the prologue and epilogue are missing; surviving fragments
   preserved verbatim. From what is visible: sets up a root scope, aligns
   and registers the function symbol (including .init_array/.fini_array
   registration for ctor/dtor attributes), emits debug info, compiles the
   body, then pops locals/labels/cleanups, patches the ELF symbol size and
   resets per-function globals to safe values. */
8598 /* parse a function defined by symbol 'sym' and generate its code in
8599 'cur_text_section' */
8600 static void gen_function(Sym
*sym
)
8602 struct scope f
= { 0 };
8603 cur_scope
= root_scope
= &f
;
8605 ind
= cur_text_section
->data_offset
;
8606 if (sym
->a
.aligned
) {
8607 size_t newoff
= section_add(cur_text_section
, 0,
8608 1 << (sym
->a
.aligned
- 1));
8609 gen_fill_nops(newoff
- ind
);
8611 /* NOTE: we patch the symbol size later */
8612 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8613 if (sym
->type
.ref
->f
.func_ctor
)
8614 add_array (tcc_state
, ".init_array", sym
->c
);
8615 if (sym
->type
.ref
->f
.func_dtor
)
8616 add_array (tcc_state
, ".fini_array", sym
->c
);
8618 funcname
= get_tok_str(sym
->v
, NULL
);
8620 func_vt
= sym
->type
.ref
->type
;
8621 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8623 /* put debug symbol */
8624 tcc_debug_funcstart(tcc_state
, sym
);
8625 /* push a dummy symbol to enable local sym storage */
8626 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8627 local_scope
= 1; /* for function parameters */
8631 clear_temp_local_var_list();
8635 /* reset local stack */
8636 pop_local_syms(NULL
, 0);
8638 cur_text_section
->data_offset
= ind
;
8640 label_pop(&global_label_stack
, NULL
, 0);
8641 sym_pop(&all_cleanups
, NULL
, 0);
8642 /* patch symbol size */
8643 elfsym(sym
)->st_size
= ind
- func_ind
;
8644 /* end of function */
8645 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8646 /* It's better to crash than to generate wrong code */
8647 cur_text_section
= NULL
;
8648 funcname
= ""; /* for safety */
8649 func_vt
.t
= VT_VOID
; /* for safety */
8650 func_var
= 0; /* for safety */
8651 ind
= 0; /* for safety */
8652 nocode_wanted
= 0x80000000;
8654 /* do this after funcend debug info */
8658 static void gen_inline_functions(TCCState
*s
)
8661 int inline_generated
, i
;
8662 struct InlineFunc
*fn
;
8664 tcc_open_bf(s
, ":inline:", 0);
8665 /* iterate while inline function are referenced */
8667 inline_generated
= 0;
8668 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8669 fn
= s
->inline_fns
[i
];
8671 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8672 /* the function was used or forced (and then not internal):
8673 generate its code and convert it to a normal function */
8675 tcc_debug_putfile(s
, fn
->filename
);
8676 begin_macro(fn
->func_str
, 1);
8678 cur_text_section
= text_section
;
8682 inline_generated
= 1;
8685 } while (inline_generated
);
8689 static void free_inline_functions(TCCState
*s
)
8692 /* free tokens of unused inline functions */
8693 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8694 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8696 tok_str_free(fn
->func_str
);
8698 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
/* NOTE(review): garbled partial extraction — control-flow scaffolding
   and many statements are missing below; surviving fragments preserved
   verbatim. From what is visible: the main declaration loop — handles
   _Static_assert, base-type parsing, K&R prototypes/old-style parameter
   lists, function definitions (including inline recording), typedefs,
   PE dllimport/dllexport attributes, alias targets, and dispatches
   variable definitions to decl_initializer_alloc(). */
8701 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8702 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8703 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
8705 int v
, has_init
, r
, oldint
;
8708 AttributeDef ad
, adbase
;
8711 if (tok
== TOK_STATIC_ASSERT
) {
8721 tcc_error("_Static_assert fail");
8723 goto static_assert_out
;
8727 parse_mult_str(&error_str
, "string constant");
8729 tcc_error("%s", (char *)error_str
.data
);
8730 cstr_free(&error_str
);
8738 if (!parse_btype(&btype
, &adbase
)) {
8739 if (is_for_loop_init
)
8741 /* skip redundant ';' if not in old parameter decl scope */
8742 if (tok
== ';' && l
!= VT_CMP
) {
8748 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8749 /* global asm block */
8753 if (tok
>= TOK_UIDENT
) {
8754 /* special test for old K&R protos without explicit int
8755 type. Only accepted when defining global data */
8760 expect("declaration");
8766 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8768 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8769 tcc_warning("unnamed struct/union that defines no instances");
8773 if (IS_ENUM(btype
.t
)) {
8779 while (1) { /* iterate thru each declaration */
8782 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8786 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8787 printf("type = '%s'\n", buf
);
8790 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8791 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
8792 tcc_error("function without file scope cannot be static");
8793 /* if old style function prototype, we accept a
8796 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
8797 decl0(VT_CMP
, 0, sym
);
8798 #ifdef TCC_TARGET_MACHO
8799 if (sym
->f
.func_alwinl
8800 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8801 == (VT_EXTERN
| VT_INLINE
))) {
8802 /* always_inline functions must be handled as if they
8803 don't generate multiple global defs, even if extern
8804 inline, i.e. GNU inline semantics for those. Rewrite
8805 them into static inline. */
8806 type
.t
&= ~VT_EXTERN
;
8807 type
.t
|= VT_STATIC
;
8810 /* always compile 'extern inline' */
8811 if (type
.t
& VT_EXTERN
)
8812 type
.t
&= ~VT_INLINE
;
8814 } else if (oldint
) {
8815 tcc_warning("type defaults to int");
8818 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8819 ad
.asm_label
= asm_label_instr();
8820 /* parse one last attribute list, after asm label */
8821 parse_attribute(&ad
);
8823 /* gcc does not allow __asm__("label") with function definition,
8830 #ifdef TCC_TARGET_PE
8831 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8832 if (type
.t
& VT_STATIC
)
8833 tcc_error("cannot have dll linkage with static");
8834 if (type
.t
& VT_TYPEDEF
) {
8835 tcc_warning("'%s' attribute ignored for typedef",
8836 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8837 (ad
.a
.dllexport
= 0, "dllexport"));
8838 } else if (ad
.a
.dllimport
) {
8839 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8842 type
.t
|= VT_EXTERN
;
/* function definition */
8848 tcc_error("cannot use local functions");
8849 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8850 expect("function definition");
8852 /* reject abstract declarators in function definition
8853 make old style params without decl have int type */
8855 while ((sym
= sym
->next
) != NULL
) {
8856 if (!(sym
->v
& ~SYM_FIELD
))
8857 expect("identifier");
8858 if (sym
->type
.t
== VT_VOID
)
8859 sym
->type
= int_type
;
8862 /* apply post-declaraton attributes */
8863 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8865 /* put function symbol */
8866 type
.t
&= ~VT_EXTERN
;
8867 sym
= external_sym(v
, &type
, 0, &ad
);
8869 /* static inline functions are just recorded as a kind
8870 of macro. Their code will be emitted at the end of
8871 the compilation unit only if they are used */
8872 if (sym
->type
.t
& VT_INLINE
) {
8873 struct InlineFunc
*fn
;
8874 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8875 strcpy(fn
->filename
, file
->filename
);
8877 skip_or_save_block(&fn
->func_str
);
8878 dynarray_add(&tcc_state
->inline_fns
,
8879 &tcc_state
->nb_inline_fns
, fn
);
8881 /* compute text section */
8882 cur_text_section
= ad
.section
;
8883 if (!cur_text_section
)
8884 cur_text_section
= text_section
;
/* old-style parameter declaration list (l == VT_CMP) */
8890 /* find parameter in function parameter list */
8891 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
8892 if ((sym
->v
& ~SYM_FIELD
) == v
)
8894 tcc_error("declaration for parameter '%s' but no such parameter",
8895 get_tok_str(v
, NULL
));
8897 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8898 tcc_error("storage class specified for '%s'",
8899 get_tok_str(v
, NULL
));
8900 if (sym
->type
.t
!= VT_VOID
)
8901 tcc_error("redefinition of parameter '%s'",
8902 get_tok_str(v
, NULL
));
8903 convert_parameter_type(&type
);
8905 } else if (type
.t
& VT_TYPEDEF
) {
8906 /* save typedefed type */
8907 /* XXX: test storage specifiers ? */
8909 if (sym
&& sym
->sym_scope
== local_scope
) {
8910 if (!is_compatible_types(&sym
->type
, &type
)
8911 || !(sym
->type
.t
& VT_TYPEDEF
))
8912 tcc_error("incompatible redefinition of '%s'",
8913 get_tok_str(v
, NULL
));
8916 sym
= sym_push(v
, &type
, 0, 0);
8921 tcc_debug_typedef (tcc_state
, sym
);
8922 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8923 && !(type
.t
& VT_EXTERN
)) {
8924 tcc_error("declaration of void object");
8927 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8928 /* external function definition */
8929 /* specific case for func_call attribute */
8931 } else if (!(type
.t
& VT_ARRAY
)) {
8932 /* not lvalue if array */
8935 has_init
= (tok
== '=');
8936 if (has_init
&& (type
.t
& VT_VLA
))
8937 tcc_error("variable length array cannot be initialized");
8938 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8939 || (type
.t
& VT_BTYPE
) == VT_FUNC
8940 /* as with GCC, uninitialized global arrays with no size
8941 are considered extern: */
8942 || ((type
.t
& VT_ARRAY
) && !has_init
8943 && l
== VT_CONST
&& type
.ref
->c
< 0)
8945 /* external variable or function */
8946 type
.t
|= VT_EXTERN
;
8947 sym
= external_sym(v
, &type
, r
, &ad
);
8948 if (ad
.alias_target
) {
8949 /* Aliases need to be emitted when their target
8950 symbol is emitted, even if perhaps unreferenced.
8951 We only support the case where the base is
8952 already defined, otherwise we would need
8953 deferring to emit the aliases until the end of
8954 the compile unit. */
8955 Sym
*alias_target
= sym_find(ad
.alias_target
);
8956 ElfSym
*esym
= elfsym(alias_target
);
8958 tcc_error("unsupported forward __alias__ attribute");
8959 put_extern_sym2(sym
, esym
->st_shndx
,
8960 esym
->st_value
, esym
->st_size
, 1);
8963 if (type
.t
& VT_STATIC
)
8969 else if (l
== VT_CONST
)
8970 /* uninitialized global variables may be overridden */
8971 type
.t
|= VT_EXTERN
;
8972 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
8976 if (is_for_loop_init
)
8988 static void decl(int l
)
8993 /* ------------------------------------------------------------------------- */
8996 /* ------------------------------------------------------------------------- */