2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
51 static SValue _vstack
[1 + VSTACK_SIZE
];
52 #define vstack (_vstack + 1)
54 ST_DATA
int const_wanted
; /* true if constant wanted */
55 ST_DATA
int nocode_wanted
; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
60 /* Automagical code suppression ----> */
61 #define CODE_OFF() (nocode_wanted |= 0x20000000)
62 #define CODE_ON() (nocode_wanted &= ~0x20000000)
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
66 static int gind(void) { CODE_ON(); return ind
; }
68 /* Set 'nocode_wanted' after unconditional jumps */
69 static void gjmp_addr_acs(int t
) { gjmp_addr(t
); CODE_OFF(); }
70 static int gjmp_acs(int t
) { t
= gjmp(t
); CODE_OFF(); return t
; }
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
77 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
78 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
79 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
81 static int last_line_num
, new_file
, func_ind
; /* debug info control */
82 ST_DATA
const char *funcname
;
83 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
84 static CString initstr
;
87 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
88 #define VT_PTRDIFF_T VT_INT
90 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_LLONG
93 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
97 ST_DATA
struct switch_t
{
101 } **p
; int n
; /* list of case ranges */
102 int def_sym
; /* default symbol */
105 struct switch_t
*prev
;
107 } *cur_switch
; /* current switch */
109 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
110 /*list of temporary local variables on the stack in current function. */
111 ST_DATA
struct temp_local_variable
{
112 int location
; //offset on stack. Svalue.c.i
115 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
116 short nb_temp_local_vars
;
118 static struct scope
{
120 struct { int loc
, num
; } vla
;
121 struct { Sym
*s
; int n
; } cl
;
124 } *cur_scope
, *loop_scope
, *root_scope
;
132 /********************************************************/
133 /* stab debug support */
135 static const struct {
138 } default_debug
[] = {
139 { VT_INT
, "int:t1=r1;-2147483648;2147483647;" },
140 { VT_BYTE
, "char:t2=r2;0;127;" },
142 { VT_LONG
| VT_INT
, "long int:t3=r3;-2147483648;2147483647;" },
144 { VT_LLONG
| VT_LONG
, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
146 { VT_INT
| VT_UNSIGNED
, "unsigned int:t4=r4;0;037777777777;" },
148 { VT_LONG
| VT_INT
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;037777777777;" },
150 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
151 { VT_LLONG
| VT_LONG
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;01777777777777777777777;" },
153 { VT_QLONG
, "__int128:t6=r6;0;-1;" },
154 { VT_QLONG
| VT_UNSIGNED
, "__int128 unsigned:t7=r7;0;-1;" },
155 { VT_LLONG
, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
156 { VT_LLONG
| VT_UNSIGNED
, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
157 { VT_SHORT
, "short int:t10=r10;-32768;32767;" },
158 { VT_SHORT
| VT_UNSIGNED
, "short unsigned int:t11=r11;0;65535;" },
159 { VT_BYTE
| VT_DEFSIGN
, "signed char:t12=r12;-128;127;" },
160 { VT_BYTE
| VT_DEFSIGN
| VT_UNSIGNED
, "unsigned char:t13=r13;0;255;" },
161 { VT_FLOAT
, "float:t14=r1;4;0;" },
162 { VT_DOUBLE
, "double:t15=r1;8;0;" },
163 { VT_LDOUBLE
, "long double:t16=r1;16;0;" },
164 { -1, "_Float32:t17=r1;4;0;" },
165 { -1, "_Float64:t18=r1;8;0;" },
166 { -1, "_Float128:t19=r1;16;0;" },
167 { -1, "_Float32x:t20=r1;8;0;" },
168 { -1, "_Float64x:t21=r1;16;0;" },
169 { -1, "_Decimal32:t22=r1;4;0;" },
170 { -1, "_Decimal64:t23=r1;8;0;" },
171 { -1, "_Decimal128:t24=r1;16;0;" },
172 /* if default char is unsigned */
173 { VT_BYTE
| VT_UNSIGNED
, "unsigned char:t25=r25;0;255;" },
175 { VT_BOOL
, "bool:t26=r26;0;255;" },
176 { VT_VOID
, "void:t27=27" },
179 static int debug_next_type
;
181 static struct debug_hash
{
186 static int n_debug_hash
;
188 static struct debug_info
{
199 struct debug_info
*child
, *next
, *last
, *parent
;
200 } *debug_info
, *debug_info_root
;
202 /********************************************************/
204 #define precedence_parser
205 static void init_prec(void);
207 /********************************************************/
208 #ifndef CONFIG_TCC_ASM
209 ST_FUNC
void asm_instr(void)
211 tcc_error("inline asm() not supported");
213 ST_FUNC
void asm_global_instr(void)
215 tcc_error("inline asm() not supported");
219 /* ------------------------------------------------------------------------- */
220 static void gen_cast(CType
*type
);
221 static void gen_cast_s(int t
);
222 static inline CType
*pointed_type(CType
*type
);
223 static int is_compatible_types(CType
*type1
, CType
*type2
);
224 static int parse_btype(CType
*type
, AttributeDef
*ad
);
225 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
226 static void parse_expr_type(CType
*type
);
227 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
228 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
229 static void block(int is_expr
);
230 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
231 static void decl(int l
);
232 static int decl0(int l
, int is_for_loop_init
, Sym
*);
233 static void expr_eq(void);
234 static void vla_runtime_type_size(CType
*type
, int *a
);
235 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
236 static inline int64_t expr_const64(void);
237 static void vpush64(int ty
, unsigned long long v
);
238 static void vpush(CType
*type
);
239 static int gvtst(int inv
, int t
);
240 static void gen_inline_functions(TCCState
*s
);
241 static void free_inline_functions(TCCState
*s
);
242 static void skip_or_save_block(TokenString
**str
);
243 static void gv_dup(void);
244 static int get_temp_local_var(int size
,int align
);
245 static void clear_temp_local_var_list();
246 static void cast_error(CType
*st
, CType
*dt
);
248 ST_INLN
int is_float(int t
)
250 int bt
= t
& VT_BTYPE
;
251 return bt
== VT_LDOUBLE
257 static inline int is_integer_btype(int bt
)
266 static int btype_size(int bt
)
268 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
272 bt
== VT_PTR
? PTR_SIZE
: 0;
275 /* returns function return register from type */
276 static int R_RET(int t
)
280 #ifdef TCC_TARGET_X86_64
281 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
283 #elif defined TCC_TARGET_RISCV64
284 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
290 /* returns 2nd function return register, if any */
291 static int R2_RET(int t
)
297 #elif defined TCC_TARGET_X86_64
302 #elif defined TCC_TARGET_RISCV64
309 /* returns true for two-word types */
310 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
312 /* put function return registers to stack value */
313 static void PUT_R_RET(SValue
*sv
, int t
)
315 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
318 /* returns function return register class for type t */
319 static int RC_RET(int t
)
321 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
324 /* returns generic register class for type t */
325 static int RC_TYPE(int t
)
329 #ifdef TCC_TARGET_X86_64
330 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
332 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
334 #elif defined TCC_TARGET_RISCV64
335 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
341 /* returns 2nd register class corresponding to t and rc */
342 static int RC2_TYPE(int t
, int rc
)
344 if (!USING_TWO_WORDS(t
))
359 /* we use our own 'finite' function to avoid potential problems with
360 non standard math libs */
361 /* XXX: endianness dependent */
362 ST_FUNC
int ieee_finite(double d
)
365 memcpy(p
, &d
, sizeof(double));
366 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
369 /* compiling intel long double natively */
370 #if (defined __i386__ || defined __x86_64__) \
371 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
372 # define TCC_IS_NATIVE_387
375 ST_FUNC
void test_lvalue(void)
377 if (!(vtop
->r
& VT_LVAL
))
381 ST_FUNC
void check_vstack(void)
383 if (vtop
!= vstack
- 1)
384 tcc_error("internal compiler error: vstack leak (%d)",
385 (int)(vtop
- vstack
+ 1));
388 /* ------------------------------------------------------------------------- */
389 /* vstack debugging aid */
392 void pv (const char *lbl
, int a
, int b
)
395 for (i
= a
; i
< a
+ b
; ++i
) {
396 SValue
*p
= &vtop
[-i
];
397 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
398 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
403 /* ------------------------------------------------------------------------- */
404 /* start of translation unit info */
405 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
411 /* file info: full path + filename */
412 section_sym
= put_elf_sym(symtab_section
, 0, 0,
413 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
414 text_section
->sh_num
, NULL
);
415 getcwd(buf
, sizeof(buf
));
417 normalize_slashes(buf
);
419 pstrcat(buf
, sizeof(buf
), "/");
420 put_stabs_r(s1
, buf
, N_SO
, 0, 0,
421 text_section
->data_offset
, text_section
, section_sym
);
422 put_stabs_r(s1
, file
->prev
? file
->prev
->filename
: file
->filename
,
424 text_section
->data_offset
, text_section
, section_sym
);
425 for (i
= 0; i
< sizeof (default_debug
) / sizeof (default_debug
[0]); i
++)
426 put_stabs(s1
, default_debug
[i
].name
, N_LSYM
, 0, 0, 0);
428 new_file
= last_line_num
= 0;
430 debug_next_type
= sizeof(default_debug
) / sizeof(default_debug
[0]);
434 /* we're currently 'including' the <command line> */
438 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
439 symbols can be safely used */
440 put_elf_sym(symtab_section
, 0, 0,
441 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
442 SHN_ABS
, file
->filename
);
445 static void tcc_debug_stabs (TCCState
*s1
, const char *str
, int type
, unsigned long value
,
446 Section
*sec
, int sym_index
)
452 (struct debug_sym
*)tcc_realloc (debug_info
->sym
,
453 sizeof(struct debug_sym
) *
454 (debug_info
->n_sym
+ 1));
455 s
= debug_info
->sym
+ debug_info
->n_sym
++;
458 s
->str
= tcc_strdup(str
);
460 s
->sym_index
= sym_index
;
463 put_stabs_r (s1
, str
, type
, 0, 0, value
, sec
, sym_index
);
465 put_stabs (s1
, str
, type
, 0, 0, value
);
468 static void tcc_debug_stabn(int type
, int value
)
470 if (type
== N_LBRAC
) {
471 struct debug_info
*info
=
472 (struct debug_info
*) tcc_mallocz(sizeof (*info
));
475 info
->parent
= debug_info
;
477 if (debug_info
->child
) {
478 if (debug_info
->child
->last
)
479 debug_info
->child
->last
->next
= info
;
481 debug_info
->child
->next
= info
;
482 debug_info
->child
->last
= info
;
485 debug_info
->child
= info
;
488 debug_info_root
= info
;
492 debug_info
->end
= value
;
493 debug_info
= debug_info
->parent
;
497 static void tcc_get_debug_info(TCCState
*s1
, Sym
*s
, CString
*result
)
506 type
= t
->type
.t
& ~(VT_STORAGE
| VT_CONSTANT
| VT_VOLATILE
);
507 if ((type
& VT_BTYPE
) != VT_BYTE
)
509 if (type
== VT_PTR
|| type
== (VT_PTR
| VT_ARRAY
))
510 n
++, t
= t
->type
.ref
;
514 if ((type
& VT_BTYPE
) == VT_STRUCT
) {
518 for (i
= 0; i
< n_debug_hash
; i
++) {
519 if (t
== debug_hash
[i
].type
) {
520 debug_type
= debug_hash
[i
].debug_type
;
524 if (debug_type
== -1) {
525 debug_type
= ++debug_next_type
;
526 debug_hash
= (struct debug_hash
*)
527 tcc_realloc (debug_hash
,
528 (n_debug_hash
+ 1) * sizeof(*debug_hash
));
529 debug_hash
[n_debug_hash
].debug_type
= debug_type
;
530 debug_hash
[n_debug_hash
++].type
= t
;
532 cstr_printf (&str
, "%s:T%d=%c%d",
533 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
534 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
536 IS_UNION (t
->type
.t
) ? 'u' : 's',
539 int pos
, size
, align
;
542 cstr_printf (&str
, "%s:",
543 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
544 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
545 tcc_get_debug_info (s1
, t
, &str
);
546 if (t
->type
.t
& VT_BITFIELD
) {
547 pos
= t
->c
* 8 + BIT_POS(t
->type
.t
);
548 size
= BIT_SIZE(t
->type
.t
);
552 size
= type_size(&t
->type
, &align
) * 8;
554 cstr_printf (&str
, ",%d,%d;", pos
, size
);
556 cstr_printf (&str
, ";");
557 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
561 else if (IS_ENUM(type
)) {
562 Sym
*e
= t
= t
->type
.ref
;
564 debug_type
= ++debug_next_type
;
566 cstr_printf (&str
, "%s:T%d=e",
567 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
568 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
572 cstr_printf (&str
, "%s:",
573 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
574 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
575 cstr_printf (&str
, e
->type
.t
& VT_UNSIGNED
? "%u," : "%d,",
578 cstr_printf (&str
, ";");
579 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
582 else if ((type
& VT_BTYPE
) != VT_FUNC
) {
583 type
&= ~VT_STRUCT_MASK
;
585 debug_type
<= sizeof(default_debug
) / sizeof(default_debug
[0]);
587 if (default_debug
[debug_type
- 1].type
== type
)
589 if (debug_type
> sizeof(default_debug
) / sizeof(default_debug
[0]))
593 cstr_printf (result
, "%d=", ++debug_next_type
);
596 type
= t
->type
.t
& ~(VT_STORAGE
| VT_CONSTANT
| VT_VOLATILE
);
597 if ((type
& VT_BTYPE
) != VT_BYTE
)
600 cstr_printf (result
, "%d=*", ++debug_next_type
);
601 else if (type
== (VT_PTR
| VT_ARRAY
))
602 cstr_printf (result
, "%d=ar1;0;%d;",
603 ++debug_next_type
, t
->type
.ref
->c
- 1);
604 else if (type
== VT_FUNC
) {
605 cstr_printf (result
, "%d=f", ++debug_next_type
);
606 tcc_get_debug_info (s1
, t
->type
.ref
, result
);
613 cstr_printf (result
, "%d", debug_type
);
616 static void tcc_debug_finish (TCCState
*s1
, struct debug_info
*cur
)
620 struct debug_info
*next
= cur
->next
;
622 for (i
= 0; i
< cur
->n_sym
; i
++) {
623 struct debug_sym
*s
= &cur
->sym
[i
];
626 put_stabs_r(s1
, s
->str
, s
->type
, 0, 0, s
->value
,
627 s
->sec
, s
->sym_index
);
629 put_stabs(s1
, s
->str
, s
->type
, 0, 0, s
->value
);
633 put_stabn(s1
, N_LBRAC
, 0, 0, cur
->start
);
634 tcc_debug_finish (s1
, cur
->child
);
635 put_stabn(s1
, N_RBRAC
, 0, 0, cur
->end
);
641 static void tcc_add_debug_info(TCCState
*s1
, int param
, Sym
*s
, Sym
*e
)
644 cstr_new (&debug_str
);
645 for (; s
!= e
; s
= s
->prev
) {
646 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
648 cstr_reset (&debug_str
);
649 cstr_printf (&debug_str
, "%s:%s", get_tok_str(s
->v
, NULL
), param
? "p" : "");
650 tcc_get_debug_info(s1
, s
, &debug_str
);
651 tcc_debug_stabs(s1
, debug_str
.data
, param
? N_PSYM
: N_LSYM
, s
->c
, NULL
, 0);
653 cstr_free (&debug_str
);
656 static void tcc_debug_extern_sym(TCCState
*s1
, Sym
*sym
, int sh_num
, int sym_bind
)
658 Section
*s
= s1
->sections
[sh_num
];
662 cstr_printf (&str
, "%s:%c",
663 get_tok_str(sym
->v
, NULL
),
664 sym_bind
== STB_GLOBAL
? 'G' : local_scope
? 'V' : 'S'
666 tcc_get_debug_info(s1
, sym
, &str
);
667 if (sym_bind
== STB_GLOBAL
)
668 tcc_debug_stabs(s1
, str
.data
, N_GSYM
, 0, NULL
, 0);
670 tcc_debug_stabs(s1
, str
.data
,
671 (sym
->type
.t
& VT_STATIC
) && data_section
== s
672 ? N_STSYM
: N_LCSYM
, 0, s
, sym
->c
);
676 static void tcc_debug_typedef(TCCState
*s1
, Sym
*sym
)
681 cstr_printf (&str
, "%s:t",
682 (sym
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
683 ? "" : get_tok_str(sym
->v
& ~SYM_FIELD
, NULL
));
684 tcc_get_debug_info(s1
, sym
, &str
);
685 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
689 /* put end of translation unit info */
690 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
694 put_stabs_r(s1
, NULL
, N_SO
, 0, 0,
695 text_section
->data_offset
, text_section
, section_sym
);
696 tcc_free(debug_hash
);
699 static BufferedFile
* put_new_file(TCCState
*s1
)
701 BufferedFile
*f
= file
;
702 /* use upper file if from inline ":asm:" */
703 if (f
->filename
[0] == ':')
706 put_stabs_r(s1
, f
->filename
, N_SOL
, 0, 0, ind
, text_section
, section_sym
);
707 new_file
= last_line_num
= 0;
712 /* generate line number info */
713 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
717 || cur_text_section
!= text_section
718 || !(f
= put_new_file(s1
))
719 || last_line_num
== f
->line_num
)
721 if (func_ind
!= -1) {
722 put_stabn(s1
, N_SLINE
, 0, f
->line_num
, ind
- func_ind
);
724 /* from tcc_assemble */
725 put_stabs_r(s1
, NULL
, N_SLINE
, 0, f
->line_num
, ind
, text_section
, section_sym
);
727 last_line_num
= f
->line_num
;
730 /* put function symbol */
731 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
737 debug_info_root
= NULL
;
739 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
740 if (!(f
= put_new_file(s1
)))
742 cstr_new (&debug_str
);
743 cstr_printf(&debug_str
, "%s:%c", funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
744 tcc_get_debug_info(s1
, sym
->type
.ref
, &debug_str
);
745 put_stabs_r(s1
, debug_str
.data
, N_FUN
, 0, f
->line_num
, 0, cur_text_section
, sym
->c
);
746 cstr_free (&debug_str
);
751 /* put function size */
752 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
756 tcc_debug_stabn(N_RBRAC
, size
);
757 tcc_debug_finish (s1
, debug_info_root
);
760 /* put alternative filename */
761 ST_FUNC
void tcc_debug_putfile(TCCState
*s1
, const char *filename
)
763 if (0 == strcmp(file
->filename
, filename
))
765 pstrcpy(file
->filename
, sizeof(file
->filename
), filename
);
769 /* begin of #include */
770 ST_FUNC
void tcc_debug_bincl(TCCState
*s1
)
774 put_stabs(s1
, file
->filename
, N_BINCL
, 0, 0, 0);
778 /* end of #include */
779 ST_FUNC
void tcc_debug_eincl(TCCState
*s1
)
783 put_stabn(s1
, N_EINCL
, 0, 0, 0);
787 /* ------------------------------------------------------------------------- */
788 /* initialize vstack and types. This must be done also for tcc -E */
789 ST_FUNC
void tccgen_init(TCCState
*s1
)
792 memset(vtop
, 0, sizeof *vtop
);
794 /* define some often used types */
797 char_type
.t
= VT_BYTE
;
798 if (s1
->char_is_unsigned
)
799 char_type
.t
|= VT_UNSIGNED
;
800 char_pointer_type
= char_type
;
801 mk_pointer(&char_pointer_type
);
803 func_old_type
.t
= VT_FUNC
;
804 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
805 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
806 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
807 #ifdef precedence_parser
813 ST_FUNC
int tccgen_compile(TCCState
*s1
)
815 cur_text_section
= NULL
;
817 anon_sym
= SYM_FIRST_ANOM
;
820 nocode_wanted
= 0x80000000;
824 #ifdef TCC_TARGET_ARM
828 printf("%s: **** new file\n", file
->filename
);
830 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
833 gen_inline_functions(s1
);
835 /* end of translation unit info */
840 ST_FUNC
void tccgen_finish(TCCState
*s1
)
843 free_inline_functions(s1
);
844 sym_pop(&global_stack
, NULL
, 0);
845 sym_pop(&local_stack
, NULL
, 0);
846 /* free preprocessor macros */
849 dynarray_reset(&sym_pools
, &nb_sym_pools
);
850 sym_free_first
= NULL
;
853 /* ------------------------------------------------------------------------- */
854 ST_FUNC ElfSym
*elfsym(Sym
*s
)
858 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
861 /* apply storage attributes to Elf symbol */
862 ST_FUNC
void update_storage(Sym
*sym
)
865 int sym_bind
, old_sym_bind
;
871 if (sym
->a
.visibility
)
872 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
875 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
876 sym_bind
= STB_LOCAL
;
877 else if (sym
->a
.weak
)
880 sym_bind
= STB_GLOBAL
;
881 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
882 if (sym_bind
!= old_sym_bind
) {
883 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
887 if (sym
->a
.dllimport
)
888 esym
->st_other
|= ST_PE_IMPORT
;
889 if (sym
->a
.dllexport
)
890 esym
->st_other
|= ST_PE_EXPORT
;
894 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
895 get_tok_str(sym
->v
, NULL
),
896 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
904 /* ------------------------------------------------------------------------- */
905 /* update sym->c so that it points to an external symbol in section
906 'section' with value 'value' */
908 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
909 addr_t value
, unsigned long size
,
910 int can_add_underscore
)
912 int sym_type
, sym_bind
, info
, other
, t
;
918 name
= get_tok_str(sym
->v
, NULL
);
920 if ((t
& VT_BTYPE
) == VT_FUNC
) {
922 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
923 sym_type
= STT_NOTYPE
;
925 sym_type
= STT_OBJECT
;
927 if (t
& (VT_STATIC
| VT_INLINE
))
928 sym_bind
= STB_LOCAL
;
930 sym_bind
= STB_GLOBAL
;
934 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
935 Sym
*ref
= sym
->type
.ref
;
936 if (ref
->a
.nodecorate
) {
937 can_add_underscore
= 0;
939 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
940 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
942 other
|= ST_PE_STDCALL
;
943 can_add_underscore
= 0;
948 if (sym
->asm_label
) {
949 name
= get_tok_str(sym
->asm_label
, NULL
);
950 can_add_underscore
= 0;
953 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
955 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
959 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
960 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
962 if (tcc_state
->do_debug
963 && sym_type
!= STT_FUNC
964 && sym
->v
< SYM_FIRST_ANOM
)
965 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
);
969 esym
->st_value
= value
;
970 esym
->st_size
= size
;
971 esym
->st_shndx
= sh_num
;
976 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
977 addr_t value
, unsigned long size
)
979 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
980 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
983 /* add a new relocation entry to symbol 'sym' in section 's' */
984 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
989 if (nocode_wanted
&& s
== cur_text_section
)
994 put_extern_sym(sym
, NULL
, 0, 0);
998 /* now we can add ELF relocation info */
999 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
1003 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
1005 greloca(s
, sym
, offset
, type
, 0);
1009 /* ------------------------------------------------------------------------- */
1010 /* symbol allocator */
1011 static Sym
*__sym_malloc(void)
1013 Sym
*sym_pool
, *sym
, *last_sym
;
1016 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
1017 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
1019 last_sym
= sym_free_first
;
1021 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
1022 sym
->next
= last_sym
;
1026 sym_free_first
= last_sym
;
1030 static inline Sym
*sym_malloc(void)
1034 sym
= sym_free_first
;
1036 sym
= __sym_malloc();
1037 sym_free_first
= sym
->next
;
1040 sym
= tcc_malloc(sizeof(Sym
));
1045 ST_INLN
void sym_free(Sym
*sym
)
1048 sym
->next
= sym_free_first
;
1049 sym_free_first
= sym
;
1055 /* push, without hashing */
1056 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
1061 memset(s
, 0, sizeof *s
);
1071 /* find a symbol and return its associated structure. 's' is the top
1072 of the symbol stack */
1073 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
1078 else if (s
->v
== -1)
1085 /* structure lookup */
1086 ST_INLN Sym
*struct_find(int v
)
1089 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1091 return table_ident
[v
]->sym_struct
;
1094 /* find an identifier */
1095 ST_INLN Sym
*sym_find(int v
)
1098 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1100 return table_ident
[v
]->sym_identifier
;
1103 static int sym_scope(Sym
*s
)
1105 if (IS_ENUM_VAL (s
->type
.t
))
1106 return s
->type
.ref
->sym_scope
;
1108 return s
->sym_scope
;
1111 /* push a given symbol on the symbol stack */
1112 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
1121 s
= sym_push2(ps
, v
, type
->t
, c
);
1122 s
->type
.ref
= type
->ref
;
1124 /* don't record fields or anonymous symbols */
1126 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1127 /* record symbol in token array */
1128 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1130 ps
= &ts
->sym_struct
;
1132 ps
= &ts
->sym_identifier
;
1135 s
->sym_scope
= local_scope
;
1136 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
1137 tcc_error("redeclaration of '%s'",
1138 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
1143 /* push a global identifier */
1144 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
1147 s
= sym_push2(&global_stack
, v
, t
, c
);
1148 s
->r
= VT_CONST
| VT_SYM
;
1149 /* don't record anonymous symbol */
1150 if (v
< SYM_FIRST_ANOM
) {
1151 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
1152 /* modify the top most local identifier, so that sym_identifier will
1153 point to 's' when popped; happens when called from inline asm */
1154 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
1155 ps
= &(*ps
)->prev_tok
;
1162 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1163 pop them yet from the list, but do remove them from the token array. */
1164 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
1174 /* remove symbol in token array */
1176 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1177 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1179 ps
= &ts
->sym_struct
;
1181 ps
= &ts
->sym_identifier
;
1192 /* ------------------------------------------------------------------------- */
1193 static void vcheck_cmp(void)
1195 /* cannot let cpu flags if other instruction are generated. Also
1196 avoid leaving VT_JMP anywhere except on the top of the stack
1197 because it would complicate the code generator.
1199 Don't do this when nocode_wanted. vtop might come from
1200 !nocode_wanted regions (see 88_codeopt.c) and transforming
1201 it to a register without actually generating code is wrong
1202 as their value might still be used for real. All values
1203 we push under nocode_wanted will eventually be popped
1204 again, so that the VT_CMP/VT_JMP value will be in vtop
1205 when code is unsuppressed again. */
1207 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
1211 static void vsetc(CType
*type
, int r
, CValue
*vc
)
1213 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1214 tcc_error("memory full (vstack)");
1219 vtop
->r2
= VT_CONST
;
1224 ST_FUNC
void vswap(void)
1234 /* pop stack value */
1235 ST_FUNC
void vpop(void)
1238 v
= vtop
->r
& VT_VALMASK
;
1239 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1240 /* for x86, we need to pop the FP stack */
1241 if (v
== TREG_ST0
) {
1242 o(0xd8dd); /* fstp %st(0) */
1246 /* need to put correct jump if && or || without test */
1253 /* push constant of type "type" with useless value */
1254 static void vpush(CType
*type
)
1256 vset(type
, VT_CONST
, 0);
1259 /* push arbitrary 64bit constant */
1260 static void vpush64(int ty
, unsigned long long v
)
1267 vsetc(&ctype
, VT_CONST
, &cval
);
1270 /* push integer constant */
1271 ST_FUNC
void vpushi(int v
)
1276 /* push a pointer sized constant */
1277 static void vpushs(addr_t v
)
1279 vpush64(VT_SIZE_T
, v
);
1282 /* push long long constant */
1283 static inline void vpushll(long long v
)
1285 vpush64(VT_LLONG
, v
);
1288 ST_FUNC
void vset(CType
*type
, int r
, int v
)
1292 vsetc(type
, r
, &cval
);
1295 static void vseti(int r
, int v
)
1303 ST_FUNC
void vpushv(SValue
*v
)
1305 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1306 tcc_error("memory full (vstack)");
1311 static void vdup(void)
1316 /* rotate n first stack elements to the bottom
1317 I1 ... In -> I2 ... In I1 [top is right]
1319 ST_FUNC
void vrotb(int n
)
1326 for(i
=-n
+1;i
!=0;i
++)
1327 vtop
[i
] = vtop
[i
+1];
1331 /* rotate the n elements before entry e towards the top
1332 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1334 ST_FUNC
void vrote(SValue
*e
, int n
)
1341 for(i
= 0;i
< n
- 1; i
++)
1346 /* rotate n first stack elements to the top
1347 I1 ... In -> In I1 ... I(n-1) [top is right]
1349 ST_FUNC
void vrott(int n
)
1354 /* ------------------------------------------------------------------------- */
1355 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1357 /* called from generators to set the result from relational ops */
1358 ST_FUNC
void vset_VT_CMP(int op
)
1366 /* called once before asking generators to load VT_CMP to a register */
1367 static void vset_VT_JMP(void)
1369 int op
= vtop
->cmp_op
;
1371 if (vtop
->jtrue
|| vtop
->jfalse
) {
1372 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1373 int inv
= op
& (op
< 2); /* small optimization */
1374 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1376 /* otherwise convert flags (rsp. 0/1) to register */
1378 if (op
< 2) /* doesn't seem to happen */
1383 /* Set CPU Flags, doesn't yet jump */
1384 static void gvtst_set(int inv
, int t
)
1388 if (vtop
->r
!= VT_CMP
) {
1391 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1392 vset_VT_CMP(vtop
->c
.i
!= 0);
1395 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1396 *p
= gjmp_append(*p
, t
);
1399 /* Generate value test
1401 * Generate a test for any value (jump, comparison and integers) */
1402 static int gvtst(int inv
, int t
)
1407 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1409 x
= u
, u
= t
, t
= x
;
1412 /* jump to the wanted target */
1414 t
= gjmp_cond(op
^ inv
, t
);
1417 /* resolve complementary jumps to here */
1424 /* generate a zero or nozero test */
1425 static void gen_test_zero(int op
)
1427 if (vtop
->r
== VT_CMP
) {
1431 vtop
->jfalse
= vtop
->jtrue
;
1441 /* ------------------------------------------------------------------------- */
1442 /* push a symbol value of TYPE */
1443 static inline void vpushsym(CType
*type
, Sym
*sym
)
1447 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1451 /* Return a static symbol pointing to a section */
1452 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1458 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1459 sym
->type
.t
|= VT_STATIC
;
1460 put_extern_sym(sym
, sec
, offset
, size
);
1464 /* push a reference to a section offset by adding a dummy symbol */
1465 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1467 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1470 /* define a new external reference to a symbol 'v' of type 'u' */
1471 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1477 /* push forward reference */
1478 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1479 s
->type
.ref
= type
->ref
;
1480 } else if (IS_ASM_SYM(s
)) {
1481 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1482 s
->type
.ref
= type
->ref
;
1488 /* Merge symbol attributes. */
1489 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1491 if (sa1
->aligned
&& !sa
->aligned
)
1492 sa
->aligned
= sa1
->aligned
;
1493 sa
->packed
|= sa1
->packed
;
1494 sa
->weak
|= sa1
->weak
;
1495 if (sa1
->visibility
!= STV_DEFAULT
) {
1496 int vis
= sa
->visibility
;
1497 if (vis
== STV_DEFAULT
1498 || vis
> sa1
->visibility
)
1499 vis
= sa1
->visibility
;
1500 sa
->visibility
= vis
;
1502 sa
->dllexport
|= sa1
->dllexport
;
1503 sa
->nodecorate
|= sa1
->nodecorate
;
1504 sa
->dllimport
|= sa1
->dllimport
;
1507 /* Merge function attributes. */
1508 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1510 if (fa1
->func_call
&& !fa
->func_call
)
1511 fa
->func_call
= fa1
->func_call
;
1512 if (fa1
->func_type
&& !fa
->func_type
)
1513 fa
->func_type
= fa1
->func_type
;
1514 if (fa1
->func_args
&& !fa
->func_args
)
1515 fa
->func_args
= fa1
->func_args
;
1516 if (fa1
->func_noreturn
)
1517 fa
->func_noreturn
= 1;
1524 /* Merge attributes. */
1525 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1527 merge_symattr(&ad
->a
, &ad1
->a
);
1528 merge_funcattr(&ad
->f
, &ad1
->f
);
1531 ad
->section
= ad1
->section
;
1532 if (ad1
->alias_target
)
1533 ad
->alias_target
= ad1
->alias_target
;
1535 ad
->asm_label
= ad1
->asm_label
;
1537 ad
->attr_mode
= ad1
->attr_mode
;
1540 /* Merge some type attributes. */
1541 static void patch_type(Sym
*sym
, CType
*type
)
1543 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1544 if (!(sym
->type
.t
& VT_EXTERN
))
1545 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1546 sym
->type
.t
&= ~VT_EXTERN
;
1549 if (IS_ASM_SYM(sym
)) {
1550 /* stay static if both are static */
1551 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1552 sym
->type
.ref
= type
->ref
;
1555 if (!is_compatible_types(&sym
->type
, type
)) {
1556 tcc_error("incompatible types for redefinition of '%s'",
1557 get_tok_str(sym
->v
, NULL
));
1559 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1560 int static_proto
= sym
->type
.t
& VT_STATIC
;
1561 /* warn if static follows non-static function declaration */
1562 if ((type
->t
& VT_STATIC
) && !static_proto
1563 /* XXX this test for inline shouldn't be here. Until we
1564 implement gnu-inline mode again it silences a warning for
1565 mingw caused by our workarounds. */
1566 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1567 tcc_warning("static storage ignored for redefinition of '%s'",
1568 get_tok_str(sym
->v
, NULL
));
1570 /* set 'inline' if both agree or if one has static */
1571 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1572 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1573 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1574 static_proto
|= VT_INLINE
;
1577 if (0 == (type
->t
& VT_EXTERN
)) {
1578 struct FuncAttr f
= sym
->type
.ref
->f
;
1579 /* put complete type, use static from prototype */
1580 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1581 sym
->type
.ref
= type
->ref
;
1582 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1584 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1587 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1588 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1589 sym
->type
.ref
= type
->ref
;
1593 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1594 /* set array size if it was omitted in extern declaration */
1595 sym
->type
.ref
->c
= type
->ref
->c
;
1597 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1598 tcc_warning("storage mismatch for redefinition of '%s'",
1599 get_tok_str(sym
->v
, NULL
));
1603 /* Merge some storage attributes. */
1604 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1607 patch_type(sym
, type
);
1609 #ifdef TCC_TARGET_PE
1610 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1611 tcc_error("incompatible dll linkage for redefinition of '%s'",
1612 get_tok_str(sym
->v
, NULL
));
1614 merge_symattr(&sym
->a
, &ad
->a
);
1616 sym
->asm_label
= ad
->asm_label
;
1617 update_storage(sym
);
1620 /* copy sym to other stack */
1621 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1624 s
= sym_malloc(), *s
= *s0
;
1625 s
->prev
= *ps
, *ps
= s
;
1626 if (s
->v
< SYM_FIRST_ANOM
) {
1627 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1628 s
->prev_tok
= *ps
, *ps
= s
;
1633 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1634 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1636 int bt
= s
->type
.t
& VT_BTYPE
;
1637 if (bt
== VT_FUNC
|| bt
== VT_PTR
) {
1638 Sym
**sp
= &s
->type
.ref
;
1639 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1640 Sym
*s2
= sym_copy(s
, ps
);
1641 sp
= &(*sp
= s2
)->next
;
1642 sym_copy_ref(s2
, ps
);
1647 /* define a new external reference to a symbol 'v' */
1648 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1652 /* look for global symbol */
1654 while (s
&& s
->sym_scope
)
1658 /* push forward reference */
1659 s
= global_identifier_push(v
, type
->t
, 0);
1662 s
->asm_label
= ad
->asm_label
;
1663 s
->type
.ref
= type
->ref
;
1664 /* copy type to the global stack */
1666 sym_copy_ref(s
, &global_stack
);
1668 patch_storage(s
, ad
, type
);
1670 /* push variables on local_stack if any */
1671 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1672 s
= sym_copy(s
, &local_stack
);
1676 /* push a reference to global symbol v */
1677 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1679 vpushsym(type
, external_global_sym(v
, type
));
1682 /* save registers up to (vtop - n) stack entry */
1683 ST_FUNC
void save_regs(int n
)
1686 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1690 /* save r to the memory stack, and mark it as being free */
1691 ST_FUNC
void save_reg(int r
)
1693 save_reg_upstack(r
, 0);
1696 /* save r to the memory stack, and mark it as being free,
1697 if seen up to (vtop - n) stack entry */
1698 ST_FUNC
void save_reg_upstack(int r
, int n
)
1700 int l
, size
, align
, bt
;
1703 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1708 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1709 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1710 /* must save value on stack if not already done */
1712 bt
= p
->type
.t
& VT_BTYPE
;
1715 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1718 size
= type_size(&sv
.type
, &align
);
1719 l
= get_temp_local_var(size
,align
);
1720 sv
.r
= VT_LOCAL
| VT_LVAL
;
1722 store(p
->r
& VT_VALMASK
, &sv
);
1723 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1724 /* x86 specific: need to pop fp register ST0 if saved */
1725 if (r
== TREG_ST0
) {
1726 o(0xd8dd); /* fstp %st(0) */
1729 /* special long long case */
1730 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1735 /* mark that stack entry as being saved on the stack */
1736 if (p
->r
& VT_LVAL
) {
1737 /* also clear the bounded flag because the
1738 relocation address of the function was stored in
1740 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1742 p
->r
= VT_LVAL
| VT_LOCAL
;
1750 #ifdef TCC_TARGET_ARM
1751 /* find a register of class 'rc2' with at most one reference on stack.
1752 * If none, call get_reg(rc) */
1753 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1758 for(r
=0;r
<NB_REGS
;r
++) {
1759 if (reg_classes
[r
] & rc2
) {
1762 for(p
= vstack
; p
<= vtop
; p
++) {
1763 if ((p
->r
& VT_VALMASK
) == r
||
1775 /* find a free register of class 'rc'. If none, save one register */
1776 ST_FUNC
int get_reg(int rc
)
1781 /* find a free register */
1782 for(r
=0;r
<NB_REGS
;r
++) {
1783 if (reg_classes
[r
] & rc
) {
1786 for(p
=vstack
;p
<=vtop
;p
++) {
1787 if ((p
->r
& VT_VALMASK
) == r
||
1796 /* no register left : free the first one on the stack (VERY
1797 IMPORTANT to start from the bottom to ensure that we don't
1798 spill registers used in gen_opi()) */
1799 for(p
=vstack
;p
<=vtop
;p
++) {
1800 /* look at second register (if long long) */
1802 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1804 r
= p
->r
& VT_VALMASK
;
1805 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1811 /* Should never comes here */
1815 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1816 static int get_temp_local_var(int size
,int align
){
1818 struct temp_local_variable
*temp_var
;
1825 for(i
=0;i
<nb_temp_local_vars
;i
++){
1826 temp_var
=&arr_temp_local_vars
[i
];
1827 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1830 /*check if temp_var is free*/
1832 for(p
=vstack
;p
<=vtop
;p
++) {
1834 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1835 if(p
->c
.i
==temp_var
->location
){
1842 found_var
=temp_var
->location
;
1848 loc
= (loc
- size
) & -align
;
1849 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1850 temp_var
=&arr_temp_local_vars
[i
];
1851 temp_var
->location
=loc
;
1852 temp_var
->size
=size
;
1853 temp_var
->align
=align
;
1854 nb_temp_local_vars
++;
1861 static void clear_temp_local_var_list(){
1862 nb_temp_local_vars
=0;
1865 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1867 static void move_reg(int r
, int s
, int t
)
1881 /* get address of vtop (vtop MUST BE an lvalue) */
1882 ST_FUNC
void gaddrof(void)
1884 vtop
->r
&= ~VT_LVAL
;
1885 /* tricky: if saved lvalue, then we can go back to lvalue */
1886 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1887 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1890 #ifdef CONFIG_TCC_BCHECK
1891 /* generate a bounded pointer addition */
1892 static void gen_bounded_ptr_add(void)
1894 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1899 vpush_global_sym(&func_old_type
, TOK___bound_ptr_add
);
1904 /* returned pointer is in REG_IRET */
1905 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1908 /* relocation offset of the bounding function call point */
1909 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1912 /* patch pointer addition in vtop so that pointer dereferencing is
1914 static void gen_bounded_ptr_deref(void)
1924 size
= type_size(&vtop
->type
, &align
);
1926 case 1: func
= TOK___bound_ptr_indir1
; break;
1927 case 2: func
= TOK___bound_ptr_indir2
; break;
1928 case 4: func
= TOK___bound_ptr_indir4
; break;
1929 case 8: func
= TOK___bound_ptr_indir8
; break;
1930 case 12: func
= TOK___bound_ptr_indir12
; break;
1931 case 16: func
= TOK___bound_ptr_indir16
; break;
1933 /* may happen with struct member access */
1936 sym
= external_global_sym(func
, &func_old_type
);
1938 put_extern_sym(sym
, NULL
, 0, 0);
1939 /* patch relocation */
1940 /* XXX: find a better solution ? */
1941 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1942 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1945 /* generate lvalue bound code */
1946 static void gbound(void)
1950 vtop
->r
&= ~VT_MUSTBOUND
;
1951 /* if lvalue, then use checking code before dereferencing */
1952 if (vtop
->r
& VT_LVAL
) {
1953 /* if not VT_BOUNDED value, then make one */
1954 if (!(vtop
->r
& VT_BOUNDED
)) {
1955 /* must save type because we must set it to int to get pointer */
1957 vtop
->type
.t
= VT_PTR
;
1960 gen_bounded_ptr_add();
1964 /* then check for dereferencing */
1965 gen_bounded_ptr_deref();
1969 /* we need to call __bound_ptr_add before we start to load function
1970 args into registers */
1971 ST_FUNC
void gbound_args(int nb_args
)
1976 for (i
= 1; i
<= nb_args
; ++i
)
1977 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1983 sv
= vtop
- nb_args
;
1984 if (sv
->r
& VT_SYM
) {
1988 #ifndef TCC_TARGET_PE
1989 || v
== TOK_sigsetjmp
1990 || v
== TOK___sigsetjmp
1993 vpush_global_sym(&func_old_type
, TOK___bound_setjmp
);
1996 func_bound_add_epilog
= 1;
1998 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1999 if (v
== TOK_alloca
)
2000 func_bound_add_epilog
= 1;
2005 /* Add bounds for local symbols from S to E (via ->prev) */
2006 static void add_local_bounds(Sym
*s
, Sym
*e
)
2008 for (; s
!= e
; s
= s
->prev
) {
2009 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
2011 /* Add arrays/structs/unions because we always take address */
2012 if ((s
->type
.t
& VT_ARRAY
)
2013 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
2014 || s
->a
.addrtaken
) {
2015 /* add local bound info */
2016 int align
, size
= type_size(&s
->type
, &align
);
2017 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
2018 2 * sizeof(addr_t
));
2019 bounds_ptr
[0] = s
->c
;
2020 bounds_ptr
[1] = size
;
2026 /* Wrapper around sym_pop, that potentially also registers local bounds. */
2027 static void pop_local_syms(Sym
**ptop
, Sym
*b
, int keep
, int ellipsis
)
2029 #ifdef CONFIG_TCC_BCHECK
2030 if (tcc_state
->do_bounds_check
&& !ellipsis
&& !keep
)
2031 add_local_bounds(*ptop
, b
);
2033 if (tcc_state
->do_debug
)
2034 tcc_add_debug_info (tcc_state
, !local_scope
, *ptop
, b
);
2035 sym_pop(ptop
, b
, keep
);
2038 static void incr_bf_adr(int o
)
2040 vtop
->type
= char_pointer_type
;
2044 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
2048 /* single-byte load mode for packed or otherwise unaligned bitfields */
2049 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
2052 save_reg_upstack(vtop
->r
, 1);
2053 vpush64(type
->t
& VT_BTYPE
, 0); // B X
2054 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2063 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
2065 vpushi((1 << n
) - 1), gen_op('&');
2068 vpushi(bits
), gen_op(TOK_SHL
);
2071 bits
+= n
, bit_size
-= n
, o
= 1;
2074 if (!(type
->t
& VT_UNSIGNED
)) {
2075 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
2076 vpushi(n
), gen_op(TOK_SHL
);
2077 vpushi(n
), gen_op(TOK_SAR
);
2081 /* single-byte store mode for packed or otherwise unaligned bitfields */
2082 static void store_packed_bf(int bit_pos
, int bit_size
)
2084 int bits
, n
, o
, m
, c
;
2086 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2088 save_reg_upstack(vtop
->r
, 1);
2089 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2091 incr_bf_adr(o
); // X B
2093 c
? vdup() : gv_dup(); // B V X
2096 vpushi(bits
), gen_op(TOK_SHR
);
2098 vpushi(bit_pos
), gen_op(TOK_SHL
);
2103 m
= ((1 << n
) - 1) << bit_pos
;
2104 vpushi(m
), gen_op('&'); // X B V1
2105 vpushv(vtop
-1); // X B V1 B
2106 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
2107 gen_op('&'); // X B V1 B1
2108 gen_op('|'); // X B V2
2110 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
2111 vstore(), vpop(); // X B
2112 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
2117 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
2120 if (0 == sv
->type
.ref
)
2122 t
= sv
->type
.ref
->auxtype
;
2123 if (t
!= -1 && t
!= VT_STRUCT
) {
2124 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
2130 /* store vtop a register belonging to class 'rc'. lvalues are
2131 converted to values. Cannot be used if cannot be converted to
2132 register value (such as structures). */
2133 ST_FUNC
int gv(int rc
)
2135 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
2136 int bit_pos
, bit_size
, size
, align
;
2138 /* NOTE: get_reg can modify vstack[] */
2139 if (vtop
->type
.t
& VT_BITFIELD
) {
2142 bit_pos
= BIT_POS(vtop
->type
.t
);
2143 bit_size
= BIT_SIZE(vtop
->type
.t
);
2144 /* remove bit field info to avoid loops */
2145 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
2148 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
2149 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
2150 type
.t
|= VT_UNSIGNED
;
2152 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
2154 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2159 if (r
== VT_STRUCT
) {
2160 load_packed_bf(&type
, bit_pos
, bit_size
);
2162 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
2163 /* cast to int to propagate signedness in following ops */
2165 /* generate shifts */
2166 vpushi(bits
- (bit_pos
+ bit_size
));
2168 vpushi(bits
- bit_size
);
2169 /* NOTE: transformed to SHR if unsigned */
2174 if (is_float(vtop
->type
.t
) &&
2175 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2176 /* CPUs usually cannot use float constants, so we store them
2177 generically in data segment */
2178 init_params p
= { data_section
};
2179 unsigned long offset
;
2180 size
= type_size(&vtop
->type
, &align
);
2182 size
= 0, align
= 1;
2183 offset
= section_add(p
.sec
, size
, align
);
2184 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
2186 init_putv(&p
, &vtop
->type
, offset
);
2189 #ifdef CONFIG_TCC_BCHECK
2190 if (vtop
->r
& VT_MUSTBOUND
)
2194 bt
= vtop
->type
.t
& VT_BTYPE
;
2196 #ifdef TCC_TARGET_RISCV64
2198 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
2201 rc2
= RC2_TYPE(bt
, rc
);
2203 /* need to reload if:
2205 - lvalue (need to dereference pointer)
2206 - already a register, but not in the right class */
2207 r
= vtop
->r
& VT_VALMASK
;
2208 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
2209 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
2211 if (!r_ok
|| !r2_ok
) {
2215 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
2216 int original_type
= vtop
->type
.t
;
2218 /* two register type load :
2219 expand to two words temporarily */
2220 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2222 unsigned long long ll
= vtop
->c
.i
;
2223 vtop
->c
.i
= ll
; /* first word */
2225 vtop
->r
= r
; /* save register value */
2226 vpushi(ll
>> 32); /* second word */
2227 } else if (vtop
->r
& VT_LVAL
) {
2228 /* We do not want to modifier the long long pointer here.
2229 So we save any other instances down the stack */
2230 save_reg_upstack(vtop
->r
, 1);
2231 /* load from memory */
2232 vtop
->type
.t
= load_type
;
2235 vtop
[-1].r
= r
; /* save register value */
2236 /* increment pointer to get second word */
2237 vtop
->type
.t
= VT_PTRDIFF_T
;
2242 vtop
->type
.t
= load_type
;
2244 /* move registers */
2247 if (r2_ok
&& vtop
->r2
< VT_CONST
)
2250 vtop
[-1].r
= r
; /* save register value */
2251 vtop
->r
= vtop
[-1].r2
;
2253 /* Allocate second register. Here we rely on the fact that
2254 get_reg() tries first to free r2 of an SValue. */
2258 /* write second register */
2261 vtop
->type
.t
= original_type
;
2263 if (vtop
->r
== VT_CMP
)
2265 /* one register type load */
2270 #ifdef TCC_TARGET_C67
2271 /* uses register pairs for doubles */
2272 if (bt
== VT_DOUBLE
)
2279 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2280 ST_FUNC
void gv2(int rc1
, int rc2
)
2282 /* generate more generic register first. But VT_JMP or VT_CMP
2283 values must be generated first in all cases to avoid possible
2285 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
2290 /* test if reload is needed for first register */
2291 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
2301 /* test if reload is needed for first register */
2302 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
2309 /* expand 64bit on stack in two ints */
2310 ST_FUNC
void lexpand(void)
2313 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
2314 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
2315 if (v
== VT_CONST
) {
2318 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
2324 vtop
[0].r
= vtop
[-1].r2
;
2325 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2327 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2332 /* build a long long from two ints */
2333 static void lbuild(int t
)
2335 gv2(RC_INT
, RC_INT
);
2336 vtop
[-1].r2
= vtop
[0].r
;
2337 vtop
[-1].type
.t
= t
;
2342 /* convert stack entry to register and duplicate its value in another
2344 static void gv_dup(void)
2350 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2351 if (t
& VT_BITFIELD
) {
2361 /* stack: H L L1 H1 */
2371 /* duplicate value */
2381 /* generate CPU independent (unsigned) long long operations */
2382 static void gen_opl(int op
)
2384 int t
, a
, b
, op1
, c
, i
;
2386 unsigned short reg_iret
= REG_IRET
;
2387 unsigned short reg_lret
= REG_IRE2
;
2393 func
= TOK___divdi3
;
2396 func
= TOK___udivdi3
;
2399 func
= TOK___moddi3
;
2402 func
= TOK___umoddi3
;
2409 /* call generic long long function */
2410 vpush_global_sym(&func_old_type
, func
);
2415 vtop
->r2
= reg_lret
;
2423 //pv("gen_opl A",0,2);
2429 /* stack: L1 H1 L2 H2 */
2434 vtop
[-2] = vtop
[-3];
2437 /* stack: H1 H2 L1 L2 */
2438 //pv("gen_opl B",0,4);
2444 /* stack: H1 H2 L1 L2 ML MH */
2447 /* stack: ML MH H1 H2 L1 L2 */
2451 /* stack: ML MH H1 L2 H2 L1 */
2456 /* stack: ML MH M1 M2 */
2459 } else if (op
== '+' || op
== '-') {
2460 /* XXX: add non carry method too (for MIPS or alpha) */
2466 /* stack: H1 H2 (L1 op L2) */
2469 gen_op(op1
+ 1); /* TOK_xxxC2 */
2472 /* stack: H1 H2 (L1 op L2) */
2475 /* stack: (L1 op L2) H1 H2 */
2477 /* stack: (L1 op L2) (H1 op H2) */
2485 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2486 t
= vtop
[-1].type
.t
;
2490 /* stack: L H shift */
2492 /* constant: simpler */
2493 /* NOTE: all comments are for SHL. the other cases are
2494 done by swapping words */
2505 if (op
!= TOK_SAR
) {
2538 /* XXX: should provide a faster fallback on x86 ? */
2541 func
= TOK___ashrdi3
;
2544 func
= TOK___lshrdi3
;
2547 func
= TOK___ashldi3
;
2553 /* compare operations */
2559 /* stack: L1 H1 L2 H2 */
2561 vtop
[-1] = vtop
[-2];
2563 /* stack: L1 L2 H1 H2 */
2567 /* when values are equal, we need to compare low words. since
2568 the jump is inverted, we invert the test too. */
2571 else if (op1
== TOK_GT
)
2573 else if (op1
== TOK_ULT
)
2575 else if (op1
== TOK_UGT
)
2585 /* generate non equal test */
2587 vset_VT_CMP(TOK_NE
);
2591 /* compare low. Always unsigned */
2595 else if (op1
== TOK_LE
)
2597 else if (op1
== TOK_GT
)
2599 else if (op1
== TOK_GE
)
2602 #if 0//def TCC_TARGET_I386
2603 if (op
== TOK_NE
) { gsym(b
); break; }
2604 if (op
== TOK_EQ
) { gsym(a
); break; }
2613 static uint64_t gen_opic_sdiv(uint64_t a
, uint64_t b
)
2615 uint64_t x
= (a
>> 63 ? -a
: a
) / (b
>> 63 ? -b
: b
);
2616 return (a
^ b
) >> 63 ? -x
: x
;
2619 static int gen_opic_lt(uint64_t a
, uint64_t b
)
2621 return (a
^ (uint64_t)1 << 63) < (b
^ (uint64_t)1 << 63);
/* handle integer constant optimizations and various machine
   independent optimizations */
static void gen_opic(int op)
    SValue *v1 = vtop - 1;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31;

    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));

        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

            /* if division by zero, generate explicit division */
                if (const_wanted && !(nocode_wanted & unevalmask))
                    tcc_error("division by zero in constant");
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;

        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;

        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                  (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));

        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        if (!const_wanted &&
              (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
             (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
        } else if (!const_wanted &&
                   c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                    (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                   (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                   ((op == '+' || op == '-' || op == '|' || op == '^' ||
                     op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                    (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
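                /* Illustrative note: (l2 & (l2 - 1)) == 0 holds exactly when
                   l2 has a single bit set, i.e. is a power of two (e.g.
                   8 & 7 == 0, while 6 & 5 == 4), so the multiplication or
                   division by l2 can be strength-reduced to a shift by
                   log2(l2). */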
                else if (op == TOK_PDIV)
        } else if (c2 && (op == '+' || op == '-') &&
                   (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
                    || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
            /* symbol + constant case */
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31.  Don't construct such. */
        /* call low level op generator */
        if (t1 == VT_LLONG || t2 == VT_LLONG ||
            (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
/* generate a floating point operation with constant propagation */
static void gen_opif(int op)
#if defined _MSC_VER && defined __x86_64__
    /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
    /* currently, we cannot do computations with forward symbols */
    c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
        if (v1->type.t == VT_FLOAT) {
        } else if (v1->type.t == VT_DOUBLE) {
        /* NOTE: we only do constant propagation if finite number (not
           NaN or infinity) (ANSI spec) */
        if (!ieee_finite(f1) || !ieee_finite(f2))
        case '+': f1 += f2; break;
        case '-': f1 -= f2; break;
        case '*': f1 *= f2; break;
            /* If not in initializer we need to potentially generate
               FP exceptions at runtime, otherwise we want to fold. */
        /* XXX: also handles tests ? */
        /* XXX: overflow test ? */
        if (v1->type.t == VT_FLOAT) {
        } else if (v1->type.t == VT_DOUBLE) {
/* print a type. If 'varstr' is not NULL, then the variable is also
   printed in the type */
/* XXX: add array and function pointers */
static void type_to_str(char *buf, int buf_size,
                        CType *type, const char *varstr)
        pstrcat(buf, buf_size, "extern ");
        pstrcat(buf, buf_size, "static ");
        pstrcat(buf, buf_size, "typedef ");
        pstrcat(buf, buf_size, "inline ");
    if (t & VT_VOLATILE)
        pstrcat(buf, buf_size, "volatile ");
    if (t & VT_CONSTANT)
        pstrcat(buf, buf_size, "const ");
    if (((t & VT_DEFSIGN) && bt == VT_BYTE)
        || ((t & VT_UNSIGNED)
            && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
        pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
    buf_size -= strlen(buf);
        tstr = "long double";
        pstrcat(buf, buf_size, tstr);
        pstrcat(buf, buf_size, tstr);
        v = type->ref->v & ~SYM_STRUCT;
        if (v >= SYM_FIRST_ANOM)
            pstrcat(buf, buf_size, "<anonymous>");
            pstrcat(buf, buf_size, get_tok_str(v, NULL));
        if (varstr && '*' == *varstr) {
            pstrcat(buf1, sizeof(buf1), "(");
            pstrcat(buf1, sizeof(buf1), varstr);
            pstrcat(buf1, sizeof(buf1), ")");
        pstrcat(buf1, buf_size, "(");
        while (sa != NULL) {
            type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
            pstrcat(buf1, sizeof(buf1), buf2);
                pstrcat(buf1, sizeof(buf1), ", ");
        if (s->f.func_type == FUNC_ELLIPSIS)
            pstrcat(buf1, sizeof(buf1), ", ...");
        pstrcat(buf1, sizeof(buf1), ")");
        type_to_str(buf, buf_size, &s->type, buf1);
        if (varstr && '*' == *varstr)
            snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
            snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
        type_to_str(buf, buf_size, &s->type, buf1);
        pstrcpy(buf1, sizeof(buf1), "*");
        if (t & VT_CONSTANT)
            pstrcat(buf1, buf_size, "const ");
        if (t & VT_VOLATILE)
            pstrcat(buf1, buf_size, "volatile ");
            pstrcat(buf1, sizeof(buf1), varstr);
        type_to_str(buf, buf_size, &s->type, buf1);
        pstrcat(buf, buf_size, " ");
        pstrcat(buf, buf_size, varstr);
static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
{
    char buf1[256], buf2[256];
    type_to_str(buf1, sizeof(buf1), st, NULL);
    type_to_str(buf2, sizeof(buf2), dt, NULL);
    tcc_error(fmt, buf1, buf2);
}

static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
{
    char buf1[256], buf2[256];
    type_to_str(buf1, sizeof(buf1), st, NULL);
    type_to_str(buf2, sizeof(buf2), dt, NULL);
    tcc_warning(fmt, buf1, buf2);
}

static int pointed_size(CType *type)
{
    int align;
    return type_size(pointed_type(type), &align);
}

static void vla_runtime_pointed_size(CType *type)
{
    int align;
    vla_runtime_type_size(pointed_type(type), &align);
}
static inline int is_null_pointer(SValue *p)
{
    if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
        return 0;
    return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
        ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
        ((p->type.t & VT_BTYPE) == VT_PTR &&
         (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
         ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
         0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
        );
}
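/* Illustrative examples of values the test above accepts as null pointer
   constants: a plain integer constant 0, a 0 of type long long, and a
   constant (void *)0 whose pointed-to type carries no const/volatile
   qualifier. */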
/* compare function types. OLD functions match any new functions */
static int is_compatible_func(CType *type1, CType *type2)
    if (s1->f.func_call != s2->f.func_call)
    if (s1->f.func_type != s2->f.func_type
        && s1->f.func_type != FUNC_OLD
        && s2->f.func_type != FUNC_OLD)
    /* we should check the function return type for FUNC_OLD too
       but that causes problems with the internally used support
       functions such as TOK_memmove */
    if (s1->f.func_type == FUNC_OLD && !s1->next)
    if (s2->f.func_type == FUNC_OLD && !s2->next)
        if (!is_compatible_unqualified_types(&s1->type, &s2->type))
/* return true if type1 and type2 are the same. If unqualified is
   true, qualifiers on the types are ignored. */
static int compare_types(CType *type1, CType *type2, int unqualified)
    t1 = type1->t & VT_TYPE;
    t2 = type2->t & VT_TYPE;
        /* strip qualifiers before comparing */
        t1 &= ~(VT_CONSTANT | VT_VOLATILE);
        t2 &= ~(VT_CONSTANT | VT_VOLATILE);

    /* Default Vs explicit signedness only matters for char */
    if ((t1 & VT_BTYPE) != VT_BYTE) {
    /* XXX: bitfields ? */
        && !(type1->ref->c < 0
             || type2->ref->c < 0
             || type1->ref->c == type2->ref->c))

    /* test more complicated cases */
    bt1 = t1 & VT_BTYPE;
    if (bt1 == VT_PTR) {
        type1 = pointed_type(type1);
        type2 = pointed_type(type2);
        return is_compatible_types(type1, type2);
    } else if (bt1 == VT_STRUCT) {
        return (type1->ref == type2->ref);
    } else if (bt1 == VT_FUNC) {
        return is_compatible_func(type1, type2);
    } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
        /* If both are enums then they must be the same, if only one is then
           t1 and t2 must be equal, which was checked above already. */
        return type1->ref == type2->ref;
/* Check if OP1 and OP2 can be "combined" with operation OP, the combined
   type is stored in DEST if non-null (except for pointer plus/minus) . */
static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
    CType *type1 = &op1->type, *type2 = &op2->type, type;
    int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_VOID || bt2 == VT_VOID) {
        ret = op == '?' ? 1 : 0;
        /* NOTE: as an extension, we accept void on only one side */
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        if (op == '+') ; /* Handled in caller */
        /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
        /* If one is a null ptr constant the result type is the other. */
        else if (is_null_pointer (op2)) type = *type1;
        else if (is_null_pointer (op1)) type = *type2;
        else if (bt1 != bt2) {
            /* accept comparison or cond-expr between pointer and integer
               with a warning */
            if ((op == '?' || TOK_ISCOND(op))
                && (is_integer_btype(bt1) || is_integer_btype(bt2)))
                tcc_warning("pointer/integer mismatch in %s",
                            op == '?' ? "conditional expression" : "comparison");
            else if (op != '-' || !is_integer_btype(bt2))
            type = *(bt1 == VT_PTR ? type1 : type2);
            CType *pt1 = pointed_type(type1);
            CType *pt2 = pointed_type(type2);
            int pbt1 = pt1->t & VT_BTYPE;
            int pbt2 = pt2->t & VT_BTYPE;
            int newquals, copied = 0;
            if (pbt1 != VT_VOID && pbt2 != VT_VOID
                && !compare_types(pt1, pt2, 1/*unqualif*/)) {
                if (op != '?' && !TOK_ISCOND(op))
                    type_incompatibility_warning(type1, type2,
                        ? "pointer type mismatch in conditional expression ('%s' and '%s')"
                        : "pointer type mismatch in comparison('%s' and '%s')");
            /* pointers to void get preferred, otherwise the
               pointed to types minus qualifs should be compatible */
            type = *((pbt1 == VT_VOID) ? type1 : type2);
            /* combine qualifs */
            newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
            if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
                    /* copy the pointer target symbol */
                    type.ref = sym_push(SYM_FIELD, &type.ref->type,
                pointed_type(&type)->t |= newquals;
            /* pointers to incomplete arrays get converted to
               pointers to completed ones if possible */
            if (pt1->t & VT_ARRAY
                && pt2->t & VT_ARRAY
                && pointed_type(&type)->ref->c < 0
                && (pt1->ref->c > 0 || pt2->ref->c > 0))
                    type.ref = sym_push(SYM_FIELD, &type.ref->type,
                pointed_type(&type)->ref =
                    sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
                             0, pointed_type(&type)->ref->c);
                pointed_type(&type)->ref->c =
                    0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
    } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        if (op != '?' || !compare_types(type1, type2, 1))
    } else if (is_float(bt1) || is_float(bt2)) {
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            type.t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        type.t = VT_LLONG | VT_LONG;
        if (bt1 == VT_LLONG)
        if (bt2 == VT_LLONG)
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
        /* integer operations */
        type.t = VT_INT | (VT_LONG & (t1 | t2));
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
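/* Worked example of the integer rules above: for 'unsigned int u' and
   'int i', the combined type of u + i is VT_INT|VT_UNSIGNED, and combining
   an unsigned long long with any long long operand yields
   VT_LLONG|VT_UNSIGNED, matching the usual arithmetic conversions. */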
/* generic gen_op: handles types problems */
ST_FUNC void gen_op(int op)
    int u, t1, t2, bt1, bt2, t;
    CType type1, combtype;

    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        if (bt2 == VT_FUNC) {
            mk_pointer(&vtop->type);
        if (bt1 == VT_FUNC) {
            mk_pointer(&vtop->type);
    } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
        tcc_error_noabort("invalid operand types for binary operation");
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
                tcc_error("cannot use pointers here");
            if (vtop[-1].type.t & VT_VLA) {
                vla_runtime_pointed_size(&vtop[-1].type);
                vpushi(pointed_size(&vtop[-1].type));
            vtop->type.t = VT_PTRDIFF_T;
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                tcc_error("cannot use pointers here");
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                t = t1, t1 = t2, t2 = t;
            if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
                /* XXX: truncate here because gen_opl can't handle ptr + long long */
            type1 = vtop[-1].type;
            if (vtop[-1].type.t & VT_VLA)
                vla_runtime_pointed_size(&vtop[-1].type);
                u = pointed_size(&vtop[-1].type);
                tcc_error("unknown array element size");
                /* XXX: cast to int ? (long long case) */
#ifdef CONFIG_TCC_BCHECK
            if (tcc_state->do_bounds_check && !const_wanted) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                gen_bounded_ptr_add();
            type1.t &= ~VT_ARRAY;
            /* put again type if gen_opic() swapped operands */
        /* floats can only be used for a few operations */
        if (is_float(combtype.t)
            && op != '+' && op != '-' && op != '*' && op != '/'
            tcc_error("invalid operands for binary operation");
        else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
            t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
            if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
            t |= (VT_LONG & t1);
            t = t2 = combtype.t;
            /* XXX: currently, some unsigned operations are explicit, so
               we modify them here */
            if (t & VT_UNSIGNED) {
                else if (op == TOK_LT)
                else if (op == TOK_GT)
                else if (op == TOK_LE)
                else if (op == TOK_GE)
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
        if (TOK_ISCOND(op)) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
#define gen_cvt_itof1 gen_cvt_itof
/* generic itof for unsigned long long case */
static void gen_cvt_itof1(int t)
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {
            vpush_global_sym(&func_old_type, TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_global_sym(&func_old_type, TOK___floatundixf);
            vpush_global_sym(&func_old_type, TOK___floatundidf);

#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
#define gen_cvt_ftoi1 gen_cvt_ftoi
/* generic ftoi for unsigned long long case */
static void gen_cvt_ftoi1(int t)
    if (t == (VT_LLONG | VT_UNSIGNED)) {
        /* not handled natively */
        st = vtop->type.t & VT_BTYPE;
            vpush_global_sym(&func_old_type, TOK___fixunssfdi);
#if LDOUBLE_SIZE != 8
        else if (st == VT_LDOUBLE)
            vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
            vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
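/* The symbols pushed above are presumably the usual soft conversion helpers
   from the runtime support library: a float source goes through
   __fixunssfdi and the default (double) case through __fixunsdfdi,
   mirroring __floatundisf/__floatundidf for the int-to-float direction. */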
/* special delayed cast for char/short */
static void force_charshort_cast(void)
    int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
    int dbt = vtop->type.t;
    vtop->r &= ~VT_MUSTCAST;
    gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);

static void gen_cast_s(int t)
/* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
static void gen_cast(CType *type)
    int sbt, dbt, sf, df, c;
    int dbt_bt, sbt_bt, ds, ss, bits, trunc;

    /* special delayed cast for char/short */
    if (vtop->r & VT_MUSTCAST)
        force_charshort_cast();

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD)

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);

    dbt_bt = dbt & VT_BTYPE;
    sbt_bt = sbt & VT_BTYPE;

    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
    c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;

        /* constant case: we can do it now */
        /* XXX: in ISOC, cannot do it if error in convert */
        if (sbt == VT_FLOAT)
            vtop->c.ld = vtop->c.f;
        else if (sbt == VT_DOUBLE)
            vtop->c.ld = vtop->c.d;
            if (sbt_bt == VT_LLONG) {
                if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                    vtop->c.ld = vtop->c.i;
                    vtop->c.ld = -(long double)-vtop->c.i;
                if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                    vtop->c.ld = (uint32_t)vtop->c.i;
                    vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
            if (dbt == VT_FLOAT)
                vtop->c.f = (float)vtop->c.ld;
            else if (dbt == VT_DOUBLE)
                vtop->c.d = (double)vtop->c.ld;
        } else if (sf && dbt == VT_BOOL) {
            vtop->c.i = (vtop->c.ld != 0);
                vtop->c.i = vtop->c.ld;
            else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
            else if (sbt & VT_UNSIGNED)
                vtop->c.i = (uint32_t)vtop->c.i;
                vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));

            if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
            else if (dbt == VT_BOOL)
                vtop->c.i = (vtop->c.i != 0);
                uint32_t m = dbt_bt == VT_BYTE ? 0xff :
                             dbt_bt == VT_SHORT ? 0xffff :
                if (!(dbt & VT_UNSIGNED))
                    vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
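                /* Worked example of the sign extension above: for a cast to
                   signed char, m = 0xff and (m >> 1) + 1 == 0x80, so a value
                   whose low byte is 0xff gets OR-ed with -0x80... and ends up
                   as -1; e.g. (signed char)0x1ff == -1. */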
    } else if (dbt == VT_BOOL
               && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
                   == (VT_CONST | VT_SYM)) {
        /* addresses are considered non-zero (see tcctest.c:sinit23) */

    /* cannot generate code for global or static initializers */
    if (STATIC_DATA_WANTED)

    /* non constant case: generate code */
    if (dbt == VT_BOOL) {
        gen_test_zero(TOK_NE);
        /* convert from fp to fp */
        /* convert int to fp */
        /* convert fp to int */
        if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
            goto again; /* may need char/short cast */

    ds = btype_size(dbt_bt);
    ss = btype_size(sbt_bt);
    if (ds == 0 || ss == 0) {
        if (dbt_bt == VT_VOID)
        cast_error(&vtop->type, type);
    if (IS_ENUM(type->t) && type->ref->c < 0)
        tcc_error("cast to incomplete type");

    /* same size and no sign conversion needed */
    if (ds == ss && ds >= 4)
    if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
        tcc_warning("cast between pointer and integer of different size");
        if (sbt_bt == VT_PTR) {
            /* put integer type to allow logical operations below */
            vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);

    /* processor allows { int a = 0, b = *(char*)&a; }
       That means that if we cast to less width, we can just
       change the type and read it still later. */
#define ALLOW_SUBTYPE_ACCESS 1

    if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
        /* value still in memory */
        if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
            goto done; /* no 64bit involved */
        /* generate high word */
        if (sbt & VT_UNSIGNED) {
    } else if (ss == 8) {
        /* from long long: just take low order word */
        /* need to convert from 32bit to 64bit */
        if (sbt & VT_UNSIGNED) {
#if defined(TCC_TARGET_RISCV64)
            /* RISC-V keeps 32bit vals in registers sign-extended.
               So here we need a zero-extension. */
        ss = ds, ds = 4, dbt = sbt;
    } else if (ss == 8) {
        /* RISC-V keeps 32bit vals in registers sign-extended.
           So here we need a sign-extension for signed types and
           zero-extension for unsigned types. */
#if !defined(TCC_TARGET_RISCV64)
        trunc = 32; /* zero upper 32 bits for non RISC-V targets */
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
        bits = (ss - ds) * 8;
        /* for unsigned, gen_op will convert SAR to SHR */
        vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
        vpushi(bits - trunc);

    vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
/* return type size as known at compile time. Put alignment at 'a' */
ST_FUNC int type_size(CType *type, int *a)
    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            ts = type_size(&s->type, a);
            if (ts < 0 && s->c < 0)
    } else if (IS_ENUM(type->t) && type->ref->c < 0) {
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
#elif defined(TCC_TARGET_ARM)
    } else if (bt == VT_INT || bt == VT_FLOAT) {
    } else if (bt == VT_SHORT) {
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        /* char, void, function, _Bool */
/* push type size as known at runtime on top of value stack. Put
   alignment at 'a' */
ST_FUNC void vla_runtime_type_size(CType *type, int *a)
    if (type->t & VT_VLA) {
        type_size(&type->ref->type, a);
        vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
        vpushi(type_size(type, a));
/* return the pointed type of t */
static inline CType *pointed_type(CType *type)
{
    return &type->ref->type;
}

/* modify type so that it is a pointer to type. */
ST_FUNC void mk_pointer(CType *type)
    s = sym_push(SYM_FIELD, type, 0, -1);
    type->t = VT_PTR | (type->t & VT_STORAGE);
/* return true if type1 and type2 are exactly the same (including
   qualifiers). */
static int is_compatible_types(CType *type1, CType *type2)
{
    return compare_types(type1,type2,0);
}

/* return true if type1 and type2 are the same (ignoring qualifiers). */
static int is_compatible_unqualified_types(CType *type1, CType *type2)
{
    return compare_types(type1,type2,1);
}

static void cast_error(CType *st, CType *dt)
{
    type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
}
/* verify type compatibility to store vtop in 'dt' type */
static void verify_assign_cast(CType *dt)
    CType *st, *type1, *type2;
    int dbt, sbt, qualwarn, lvl;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
        tcc_error("assignment to void expression");
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
        type1 = pointed_type(dt);
            type2 = pointed_type(st);
        else if (sbt == VT_FUNC)
            type2 = st; /* a function is implicitly a function pointer */
        if (is_compatible_types(type1, type2))
        for (qualwarn = lvl = 0;; ++lvl) {
            if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
                ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
            dbt = type1->t & (VT_BTYPE|VT_LONG);
            sbt = type2->t & (VT_BTYPE|VT_LONG);
            if (dbt != VT_PTR || sbt != VT_PTR)
            type1 = pointed_type(type1);
            type2 = pointed_type(type2);
        if (!is_compatible_unqualified_types(type1, type2)) {
            if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
                /* void * can match anything */
            } else if (dbt == sbt
                       && is_integer_btype(sbt & VT_BTYPE)
                       && IS_ENUM(type1->t) + IS_ENUM(type2->t)
                          + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
                /* Like GCC don't warn by default for mere changes
                   in pointer target signedness. Do warn for different
                   base types, though, in particular for unsigned enums
                   and signed int targets. */
                tcc_warning("assignment from incompatible pointer type");
            tcc_warning("assignment discards qualifiers from pointer target type");
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        /* XXX: more tests */
        if (!is_compatible_unqualified_types(dt, st)) {

static void gen_assign_cast(CType *dt)
    verify_assign_cast(dt);
/* store vtop in lvalue pushed on stack */
ST_FUNC void vstore(void)
    int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;

    verify_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        /* XXX: optimize if small size */
            size = type_size(&vtop->type, &align);
#ifdef CONFIG_TCC_BCHECK
            if (vtop->r & VT_MUSTBOUND)
                gbound(); /* check would be wrong after gaddrof() */
            vtop->type.t = VT_PTR;

            /* address of memcpy() */
                vpush_global_sym(&func_old_type, TOK_memmove8);
            else if(!(align & 3))
                vpush_global_sym(&func_old_type, TOK_memmove4);
                /* Use memmove, rather than memcpy, as dest and src may be same: */
                vpush_global_sym(&func_old_type, TOK_memmove);

#ifdef CONFIG_TCC_BCHECK
            if (vtop->r & VT_MUSTBOUND)
            vtop->type.t = VT_PTR;

        /* leave source on stack */

    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if (dbt == VT_BOOL) {
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (dbt != VT_BOOL) {
            gen_cast(&vtop[-1].type);
            dbt = vtop[-1].type.t & VT_BTYPE;
        if (r == VT_STRUCT) {
            store_packed_bf(bit_pos, bit_size);
            unsigned long long mask = (1ULL << bit_size) - 1;
            if (dbt != VT_BOOL) {
                if (dbt == VT_LLONG)
                    vpushi((unsigned)mask);

            /* duplicate destination */
            /* load destination, mask and or with source */
            if (dbt == VT_LLONG)
                vpushll(~(mask << bit_pos));
                vpushi(~((unsigned)mask << bit_pos));

            /* ... and discard */
    } else if (dbt == VT_VOID) {
        /* optimize char/short casts */
        if ((dbt == VT_BYTE || dbt == VT_SHORT)
            && is_integer_btype(sbt)
            if ((vtop->r & VT_MUSTCAST)
                && btype_size(dbt) > btype_size(sbt)
                force_charshort_cast();
            gen_cast(&vtop[-1].type);

#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            gv(RC_TYPE(dbt)); /* generate value */
            vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
            //tcc_warning("delay cast %x -> %x", sbt, dbt);
            vtop->type.t = ft & VT_TYPE;

        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            r = get_reg(RC_INT);
            sv.type.t = VT_PTRDIFF_T;
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            vtop[-1].r = r | VT_LVAL;

        r = vtop->r & VT_VALMASK;
        /* two word case handling :
           store second register at word + 4 (or +8 for x86-64) */
        if (USING_TWO_WORDS(dbt)) {
            int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            /* convert to int to increment easily */
            vtop->type.t = VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);

        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
/* post defines POST/PRE add. c is the token ++ or -- */
ST_FUNC void inc(int post, int c)
    vdup(); /* save lvalue */
        gv_dup(); /* duplicate value */
    vpushi(c - TOK_MID);
    vstore(); /* store value */
        vpop(); /* if post op, return saved value */
ST_FUNC void parse_mult_str (CString *astr, const char *msg)
    /* read the string */
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(astr, tokc.str.data, -1);
    cstr_ccat(astr, '\0');

/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0. */
ST_FUNC int exact_log2p1(int i)
    for (ret = 1; i >= 1 << 8; ret += 8)
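/* Examples of the contract documented above: exact_log2p1(1) == 1,
   exact_log2p1(8) == 4 (log2(8) + 1) and exact_log2p1(0) == 0. */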
/* Parse __attribute__((...)) GNUC extension. */
static void parse_attribute(AttributeDef *ad)
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
                tcc_warning("implicit declaration of function '%s'",
                            get_tok_str(tok, &tokc));
                s = external_global_sym(tok, &func_old_type);
            } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
                tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
            ad->cleanup_func = s;
        case TOK_CONSTRUCTOR1:
        case TOK_CONSTRUCTOR2:
            ad->f.func_ctor = 1;
        case TOK_DESTRUCTOR1:
        case TOK_DESTRUCTOR2:
            ad->f.func_dtor = 1;
        case TOK_ALWAYS_INLINE1:
        case TOK_ALWAYS_INLINE2:
            ad->f.func_alwinl = 1;
            parse_mult_str(&astr, "section name");
            ad->section = find_section(tcc_state, (char *)astr.data);
            parse_mult_str(&astr, "alias(\"target\")");
            ad->alias_target = /* save string as token, for later */
                tok_alloc((char*)astr.data, astr.size-1)->tok;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            parse_mult_str(&astr,
                           "visibility(\"default|hidden|internal|protected\")");
            if (!strcmp (astr.data, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr.data, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr.data, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr.data, "protected"))
                ad->a.visibility = STV_PROTECTED;
                expect("visibility(\"default|hidden|internal|protected\")");
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            /* currently, no need to handle it because tcc does not
               track unused objects */
            ad->f.func_noreturn = 1;
            ad->f.func_call = FUNC_CDECL;
            ad->f.func_call = FUNC_STDCALL;
#ifdef TCC_TARGET_I386
                ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            ad->f.func_call = FUNC_FASTCALLW;
                ad->attr_mode = VT_LLONG + 1;
                ad->attr_mode = VT_BYTE + 1;
                ad->attr_mode = VT_SHORT + 1;
                ad->attr_mode = VT_INT + 1;
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
            ad->a.dllexport = 1;
        case TOK_NODECORATE:
            ad->a.nodecorate = 1;
            ad->a.dllimport = 1;
            if (tcc_state->warn_unsupported)
                tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
                int parenthesis = 0;
                    else if (tok == ')')
                } while (parenthesis && tok != -1);
static Sym * find_field (CType *type, int v, int *cumofs)
    while ((s = s->next) != NULL) {
        if ((s->v & SYM_FIELD) &&
            (s->type.t & VT_BTYPE) == VT_STRUCT &&
            (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
            Sym *ret = find_field (&s->type, v, cumofs);
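/* The recursion above handles anonymous struct/union members: for
   'struct { struct { int x; }; } s;' the inner member carries an anonymous
   symbol (>= SYM_FIRST_ANOM), so looking up 'x' descends into it,
   presumably accumulating the nested offset via *cumofs (hence the name). */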
static void struct_layout(CType *type, AttributeDef *ad)
    int size, align, maxalign, offset, c, bit_pos, bit_size;
    int packed, a, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    int pragma_pack = *tcc_state->pack_stack_ptr;

    prevbt = VT_STRUCT; /* make it never match */

    for (f = type->ref->next; f; f = f->next) {
        if (f->type.t & VT_BITFIELD)
            bit_size = BIT_SIZE(f->type.t);
        size = type_size(&f->type, &align);
        a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;

        if (pcc && bit_size == 0) {
            /* in pcc mode, packing does not affect zero-width bitfields */
            /* in pcc mode, attribute packed overrides if set. */
            if (pcc && (f->a.packed || ad->a.packed))

            /* pragma pack overrides align if lesser and packs bitfields always */
                if (pragma_pack < align)
                    align = pragma_pack;
                /* in pcc mode pragma pack also overrides individual align */
                if (pcc && pragma_pack < a)
        /* some individual align was specified */

        if (type->ref->type.t == VT_UNION) {
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
        } else if (bit_size < 0) {
                c += (bit_pos + 7) >> 3;
            c = (c + align - 1) & -align;
            /* A bit-field. Layout is more complicated. There are two
               options: PCC (GCC) compatible and MS compatible */
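            /* Concrete example of the difference (per the usual GCC/MSVC
               behaviour): for 'struct { char a:4; int b:4; }' the PCC/GCC
               rules pack both fields into one int-sized container
               (sizeof == 4), while the MS rules start a new run when the
               base type changes, typically giving sizeof == 8. */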
            /* In PCC layout a bit-field is placed adjacent to the
               preceding bit-fields, except if:
               - an individual alignment was given
               - it would overflow its base type container and
                 there is no packing */
            if (bit_size == 0) {
                c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
            } else if (f->a.aligned) {
            } else if (!packed) {
                int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
                if (ofs > size / align)

            /* in pcc mode, long long bitfields have type int if they fit */
            if (size == 8 && bit_size <= 32)
                f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
            while (bit_pos >= align * 8)
                c += align, bit_pos -= align * 8;

            /* In PCC layout named bit-fields influence the alignment
               of the containing struct using the base types alignment,
               except for packed fields (which here have correct align). */
            if (f->v & SYM_FIRST_ANOM
                // && bit_size // ??? gcc on ARM/rpi does that
            bt = f->type.t & VT_BTYPE;
            if ((bit_pos + bit_size > size * 8)
                || (bit_size > 0) == (bt != prevbt)
                c = (c + align - 1) & -align;
                /* In MS bitfield mode a bit-field run always uses
                   at least as many bits as the underlying type.
                   To start a new run it's also required that this
                   or the last bit-field had non-zero width. */
                if (bit_size || prev_bit_size)
            /* In MS layout the records alignment is normally
               influenced by the field, except for a zero-width
               field at the start of a run (but by further zero-width
               fields it is again). */
            if (bit_size == 0 && prevbt != bt)
            prev_bit_size = bit_size;

        f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                    | (bit_pos << VT_STRUCT_SHIFT);
        bit_pos += bit_size;
        if (align > maxalign)
        printf("set field %s offset %-2d size %-2d align %-2d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos %-2d bits %-2d",
        c += (bit_pos + 7) >> 3;

    /* store size and alignment */
    a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
    if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
        /* can happen if individual align for some member was given.  In
           this case MSVC ignores maxalign when aligning the size */
    c = (c + a - 1) & -a;
    printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);

    /* check whether we can access bitfields by their type */
    for (f = type->ref->next; f; f = f->next) {
        if (0 == (f->type.t & VT_BITFIELD))
        bit_size = BIT_SIZE(f->type.t);
        bit_pos = BIT_POS(f->type.t);
        size = type_size(&f->type, &align);
        if (bit_pos + bit_size <= size * 8 && f->c + size <= c)

        /* try to access the field using a different type */
        c0 = -1, s = align = 1;
            px = f->c * 8 + bit_pos;
            cx = (px >> 3) & -align;
            px = px - (cx << 3);
                s = (px + bit_size + 7) >> 3;
            s = type_size(&t, &align);
            if (px + bit_size <= s * 8 && cx + s <= c) {
                /* update offset and bit position */
                f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                            | (bit_pos << VT_STRUCT_SHIFT);
                printf("FIX field %s offset %-2d size %-2d align %-2d "
                       "pos %-2d bits %-2d\n",
                       get_tok_str(f->v & ~SYM_FIELD, NULL),
                       cx, s, align, px, bit_size);
                /* fall back to load/store single-byte wise */
                f->auxtype = VT_STRUCT;
                printf("FIX field %s : load byte-wise\n",
                       get_tok_str(f->v & ~SYM_FIELD, NULL));
/* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
static void struct_decl(CType *type, int u)
    int v, c, size, align, flexible;
    int bit_size, bsize, bt;
    AttributeDef ad, ad1;

    memset(&ad, 0, sizeof ad);
        parse_attribute(&ad);
        /* struct already defined ? return it */
            expect("struct/union/enum name");
        if (s && (s->sym_scope == local_scope || tok != '{')) {
            if (u == VT_ENUM && IS_ENUM(s->type.t))
            tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
    /* Record the original enum/struct/union token. */
    type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
    /* we put an undefined size for struct/union */
    s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
    s->r = 0; /* default alignment is zero as gcc */
    type->t = s->type.t;
            tcc_error("struct/union/enum already defined");
        /* cannot be empty */
        /* non empty enums are not allowed */
            long long ll = 0, pl = 0, nl = 0;
            /* enum symbols have static storage */
            t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
                    expect("identifier");
                if (ss && !local_stack)
                    tcc_error("redefinition of enumerator '%s'",
                              get_tok_str(v, NULL));
                    ll = expr_const64();
                ss = sym_push(v, &t, VT_CONST, 0);
                *ps = ss, ps = &ss->next;
                /* NOTE: we accept a trailing comma */
            /* set integral type of the enum */
                if (pl != (unsigned)pl)
                    t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            } else if (pl != (int)pl || nl != (int)nl)
                t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            s->type.t = type->t = t.t | VT_ENUM;
            /* set type for enum members */
            for (ss = s->next; ss; ss = ss->next) {
                if (ll == (int)ll) /* default is int if it fits */
                if (t.t & VT_UNSIGNED) {
                    ss->type.t |= VT_UNSIGNED;
                    if (ll == (unsigned)ll)
                ss->type.t = (ss->type.t & ~VT_BTYPE)
                    | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
            while (tok != '}') {
                if (!parse_btype(&btype, &ad1)) {
                    tcc_error("flexible array member '%s' not at the end of struct",
                              get_tok_str(v, NULL));
                        type_decl(&type1, &ad1, &v, TYPE_DIRECT);
                        if ((type1.t & VT_BTYPE) != VT_STRUCT)
                            expect("identifier");
                            int v = btype.ref->v;
                            if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
                                if (tcc_state->ms_extensions == 0)
                                    expect("identifier");
                    if (type_size(&type1, &align) < 0) {
                        if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
                            tcc_error("field '%s' has incomplete type",
                                      get_tok_str(v, NULL));
                    if ((type1.t & VT_BTYPE) == VT_FUNC ||
                        (type1.t & VT_BTYPE) == VT_VOID ||
                        (type1.t & VT_STORAGE))
                        tcc_error("invalid type for '%s'",
                                  get_tok_str(v, NULL));
                        bit_size = expr_const();
                        /* XXX: handle v = 0 case for messages */
                            tcc_error("negative width in bit-field '%s'",
                                      get_tok_str(v, NULL));
                        if (v && bit_size == 0)
                            tcc_error("zero width for bit-field '%s'",
                                      get_tok_str(v, NULL));
                        parse_attribute(&ad1);
                    size = type_size(&type1, &align);
                    if (bit_size >= 0) {
                        bt = type1.t & VT_BTYPE;
                            tcc_error("bitfields must have scalar type");
                        if (bit_size > bsize) {
                            tcc_error("width of '%s' exceeds its type",
                                      get_tok_str(v, NULL));
                        } else if (bit_size == bsize
                                   && !ad.a.packed && !ad1.a.packed) {
                            /* no need for bit fields */
                        } else if (bit_size == 64) {
                            tcc_error("field width 64 not implemented");
                            type1.t = (type1.t & ~VT_STRUCT_MASK)
                                | (bit_size << (VT_STRUCT_SHIFT + 6));
                    if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
                        /* Remember we've seen a real field to check
                           for placement of flexible array member. */
                    /* If member is a struct or bit-field, enforce
                       placing into the struct (as anonymous). */
                        ((type1.t & VT_BTYPE) == VT_STRUCT ||
                        ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
                if (tok == ';' || tok == TOK_EOF)
        parse_attribute(&ad);
        if (ad.cleanup_func) {
            tcc_warning("attribute '__cleanup__' ignored on type");
        struct_layout(type, &ad);
static void sym_to_attr(AttributeDef *ad, Sym *s)
{
    merge_symattr(&ad->a, &s->a);
    merge_funcattr(&ad->f, &s->f);
}

/* Add type qualifiers to a type. If the type is an array then the qualifiers
   are added to the element type, copied because it could be a typedef. */
static void parse_btype_qualify(CType *type, int qualifiers)
{
    while (type->t & VT_ARRAY) {
        type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
        type = &type->ref->type;
    }
    type->t |= qualifiers;
}
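/* Example for the comment above: given 'typedef int T[3];', a declaration
   'const T x;' pushes the const down to the element type, so x behaves like
   'const int x[3]'; the array Sym is copied first so the typedef itself is
   left unmodified. */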
/* return 0 if no type declaration. otherwise, return the basic type
   and skip it. */
static int parse_btype(CType *type, AttributeDef *ad)
    int t, u, bt, st, type_found, typespec_found, g, n;

    memset(ad, 0, sizeof(AttributeDef));
            /* currently, we really ignore extension */
            if (u == VT_SHORT || u == VT_LONG) {
                if (st != -1 || (bt != -1 && bt != VT_INT))
        tmbt:       tcc_error("too many basic types");
                if (bt != -1 || (st != -1 && u != VT_INT))
            t = (t & ~(VT_BTYPE|VT_LONG)) | u;
                memset(&ad1, 0, sizeof(AttributeDef));
                if (parse_btype(&type1, &ad1)) {
                    type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
                        n = 1 << (ad1.a.aligned - 1);
                        type_size(&type1, &n);
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
                ad->a.aligned = exact_log2p1(n);
            if ((t & VT_BTYPE) == VT_DOUBLE) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
#ifdef TCC_TARGET_ARM64
            /* GCC's __uint128_t appears in some Linux header files. Make it a
               synonym for long double to get the size and alignment right. */
            if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
                t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
            struct_decl(&type1, VT_ENUM);
            type->ref = type1.ref;
            struct_decl(&type1, VT_STRUCT);
            struct_decl(&type1, VT_UNION);

        /* type modifiers */
            parse_btype_qualify(type, VT_CONSTANT);
            parse_btype_qualify(type, VT_VOLATILE);
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
                tcc_error("signed and unsigned modifier");
            if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
                tcc_error("signed and unsigned modifier");
            t |= VT_DEFSIGN | VT_UNSIGNED;
            if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
                tcc_error("multiple storage classes");
            ad->f.func_noreturn = 1;
        /* GNUC attribute */
        case TOK_ATTRIBUTE1:
        case TOK_ATTRIBUTE2:
            parse_attribute(ad);
            if (ad->attr_mode) {
                u = ad->attr_mode -1;
                t = (t & ~(VT_BTYPE|VT_LONG)) | u;
            parse_expr_type(&type1);
            /* remove all storage modifiers except typedef */
            type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
                sym_to_attr(ad, type1.ref);
            if (!s || !(s->type.t & VT_TYPEDEF))
            if (tok == ':' && !in_generic) {
                /* ignore if it's a label */
            t &= ~(VT_BTYPE|VT_LONG);
            u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
            type->t = (s->type.t & ~VT_TYPEDEF) | u;
            type->ref = s->type.ref;
                parse_btype_qualify(type, t);
            /* get attributes from typedef */
    if (tcc_state->char_is_unsigned) {
        if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
    /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
    bt = t & (VT_BTYPE|VT_LONG);
        t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
#if defined TCC_TARGET_PE || (defined _WIN32 && defined _MSC_VER)
    if (bt == VT_LDOUBLE)
        t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
/* convert a function parameter type (array to pointer and function to
   function pointer) */
static inline void convert_parameter_type(CType *pt)
    /* remove const and volatile qualifiers (XXX: const could be used
       to indicate a const function parameter) */
    pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
    /* array must be transformed to pointer according to ANSI C */
    if ((pt->t & VT_BTYPE) == VT_FUNC) {

ST_FUNC void parse_asm_str(CString *astr)
    parse_mult_str(astr, "string constant");

/* Parse an asm label and return the token */
static int asm_label_instr(void)
    parse_asm_str(&astr);
    printf("asm_alias: \"%s\"\n", (char *)astr.data);
    v = tok_alloc(astr.data, astr.size - 1)->tok;
static int post_type(CType *type, AttributeDef *ad, int storage, int td)
    int n, l, t1, arg_size, align, unused_align;
    Sym **plast, *s, *first;

        /* function type, or recursive declarator (return if so) */
        if (td && !(td & TYPE_ABSTRACT))
        else if (parse_btype(&pt, &ad1))
            merge_attr (ad, &ad1);
                /* read param name and compute offset */
                if (l != FUNC_OLD) {
                    if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
                    type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
                    if ((pt.t & VT_BTYPE) == VT_VOID)
                        tcc_error("parameter declared as void");
                        expect("identifier");
                    pt.t = VT_VOID; /* invalid type */
                convert_parameter_type(&pt);
                arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
                s = sym_push(n | SYM_FIELD, &pt, 0, 0);
                if (l == FUNC_NEW && tok == TOK_DOTS) {
                if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
                    tcc_error("invalid type");
        /* if no parameters, then old type prototype */
        /* NOTE: const is ignored in returned type as it has a special
           meaning in gcc / C++ */
        type->t &= ~VT_CONSTANT;
        /* some ancient pre-K&R C allows a function to return an array
           and the array brackets to be put after the arguments, such
           that "int c()[]" means something like "int[] c()" */
            skip(']'); /* only handle simple "[]" */
        /* we push an anonymous symbol which will contain the function prototype */
        ad->f.func_args = arg_size;
        ad->f.func_type = l;
        s = sym_push(SYM_FIELD, type, 0, 0);
    } else if (tok == '[') {
        int saved_nocode_wanted = nocode_wanted;
        /* array definition */
            /* XXX The optional type-quals and static should only be accepted
               in parameter decls.  The '*' as well, and then even only
               in prototypes (not function defs). */
            case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
            if (!local_stack || (storage & VT_STATIC))
                vpushi(expr_const());
                /* VLAs (which can only happen with local_stack && !VT_STATIC)
                   length must always be evaluated, even under nocode_wanted,
                   so that its size slot is initialized (e.g. under sizeof
                   or typeof). */
            if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
                    tcc_error("invalid array size");
                if (!is_integer_btype(vtop->type.t & VT_BTYPE))
                    tcc_error("size of variable length array should be an integer");
        /* parse next post type */
        post_type(type, ad, storage, 0);

        if ((type->t & VT_BTYPE) == VT_FUNC)
            tcc_error("declaration of an array of functions");
        if ((type->t & VT_BTYPE) == VT_VOID
            || type_size(type, &unused_align) < 0)
            tcc_error("declaration of an array of incomplete type elements");

        t1 |= type->t & VT_VLA;
                tcc_error("need explicit inner array size in VLAs");
            loc -= type_size(&int_type, &align);
            vla_runtime_type_size(type, &align);
            vset(&int_type, VT_LOCAL|VT_LVAL, n);
        nocode_wanted = saved_nocode_wanted;

        /* we push an anonymous symbol which will contain the array
           element type */
        s = sym_push(SYM_FIELD, type, 0, n);
        type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
/* Parse a type declarator (except basic type), and return the type
   in 'type'. 'td' is a bitmask indicating which kind of type decl is
   expected. 'type' should contain the basic type. 'ad' is the
   attribute definition of the basic type. It can be modified by
   type_decl(). If this (possibly abstract) declarator is a pointer chain
   it returns the innermost pointed to type (equals *type, but is a different
   pointer), otherwise returns type itself, that's used for recursive calls. */
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
    int qualifiers, storage;

    /* recursive type, remove storage bits first, apply them later again */
    storage = type->t & VT_STORAGE;
    type->t &= ~VT_STORAGE;

    while (tok == '*') {
                qualifiers |= VT_CONSTANT;
                qualifiers |= VT_VOLATILE;
            /* XXX: clarify attribute handling */
            case TOK_ATTRIBUTE1:
            case TOK_ATTRIBUTE2:
                parse_attribute(ad);
        type->t |= qualifiers;
        /* innermost pointed to type is the one for the first derivation */
        ret = pointed_type(type);

        /* This is possibly a parameter type list for abstract declarators
           ('int ()'), use post_type for testing this. */
        if (!post_type(type, ad, 0, td)) {
            /* It's not, so it's a nested declarator, and the post operations
               apply to the innermost pointed to type (if any). */
            /* XXX: this is not correct to modify 'ad' at this point, but
               the syntax is not clear */
            parse_attribute(ad);
            post = type_decl(type, ad, v, td);
    } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
        /* type identifier */
        if (!(td & TYPE_ABSTRACT))
            expect("identifier");
    post_type(post, ad, storage, 0);
    parse_attribute(ad);
/* indirection with full error checking and bound check */
ST_FUNC void indir(void)
    if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
    if (vtop->r & VT_LVAL)
    vtop->type = *pointed_type(&vtop->type);
    /* Arrays and functions are never lvalues */
    if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
        && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
        /* if bound checking, the referenced pointer must be checked */
#ifdef CONFIG_TCC_BCHECK
        if (tcc_state->do_bounds_check)
            vtop->r |= VT_MUSTBOUND;
/* pass a parameter to a function and do type checking and casting */
static void gfunc_param_typed(Sym *func, Sym *arg)
    func_type = func->f.func_type;
    if (func_type == FUNC_OLD ||
        (func_type == FUNC_ELLIPSIS && arg == NULL)) {
        /* default casting : only need to convert float to double */
        if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
            gen_cast_s(VT_DOUBLE);
        } else if (vtop->type.t & VT_BITFIELD) {
            type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
            type.ref = vtop->type.ref;
        } else if (vtop->r & VT_MUSTCAST) {
            force_charshort_cast();
    } else if (arg == NULL) {
        tcc_error("too many arguments to function");
        type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
        gen_assign_cast(&type);
/* parse an expression and return its type without any side effect. */
static void expr_type(CType *type, void (*expr_fn)(void))

/* parse an expression of the form '(type)' or '(expr)' and return its
   type */
static void parse_expr_type(CType *type)
    if (parse_btype(type, &ad)) {
        type_decl(type, &ad, &n, TYPE_ABSTRACT);
        expr_type(type, gexpr);

static void parse_type(CType *type)
    if (!parse_btype(type, &ad)) {
    type_decl(type, &ad, &n, TYPE_ABSTRACT);

static void parse_builtin_params(int nc, const char *args)
    while ((c = *args++)) {
            type.t = VT_CONSTANT;
            type.t = VT_CONSTANT;
            type.t |= char_type.t;
            gen_assign_cast(&type);
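/* Note on the format string: judging from the call sites below, each
   character of 'args' selects how one builtin argument is parsed ('e'
   appears to mean an expression, 't' a type name); e.g. __builtin_va_arg
   uses "et" for an expression followed by a type. */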
5463 ST_FUNC
void unary(void)
5465 int n
, t
, align
, size
, r
, sizeof_caller
;
5470 /* generate line number info */
5471 if (tcc_state
->do_debug
)
5472 tcc_debug_line(tcc_state
);
5474 sizeof_caller
= in_sizeof
;
5477 /* XXX: GCC 2.95.3 does not generate a table although it should be
5485 #ifdef TCC_TARGET_PE
5486 t
= VT_SHORT
|VT_UNSIGNED
;
5494 vsetc(&type
, VT_CONST
, &tokc
);
5498 t
= VT_INT
| VT_UNSIGNED
;
5504 t
= VT_LLONG
| VT_UNSIGNED
;
5516 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5519 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5521 case TOK___FUNCTION__
:
5523 goto tok_identifier
;
5529 /* special function name identifier */
5530 len
= strlen(funcname
) + 1;
5531 /* generate char[len] type */
5536 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
5537 if (!NODATA_WANTED
) {
5538 ptr
= section_ptr_add(data_section
, len
);
5539 memcpy(ptr
, funcname
, len
);
5545 #ifdef TCC_TARGET_PE
5546 t
= VT_SHORT
| VT_UNSIGNED
;
5552 /* string parsing */
5554 if (tcc_state
->char_is_unsigned
)
5555 t
= VT_BYTE
| VT_UNSIGNED
;
5557 if (tcc_state
->warn_write_strings
)
5562 memset(&ad
, 0, sizeof(AttributeDef
));
5563 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5568 if (parse_btype(&type
, &ad
)) {
5569 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5571 /* check ISOC99 compound literal */
5573 /* data is allocated locally by default */
5578 /* all except arrays are lvalues */
5579 if (!(type
.t
& VT_ARRAY
))
5581 memset(&ad
, 0, sizeof(AttributeDef
));
5582 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5584 if (sizeof_caller
) {
5591 } else if (tok
== '{') {
5592 int saved_nocode_wanted
= nocode_wanted
;
5593 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5595 if (0 == local_scope
)
5596 tcc_error("statement expression outside of function");
5597 /* save all registers */
5599 /* statement expression : we do not accept break/continue
5600 inside as GCC does. We do retain the nocode_wanted state,
5601 as statement expressions can't ever be entered from the
5602 outside, so any reactivation of code emission (from labels
5603 or loop heads) can be disabled again after the end of it. */
5605 nocode_wanted
= saved_nocode_wanted
;
5620 /* functions names must be treated as function pointers,
5621 except for unary '&' and sizeof. Since we consider that
5622 functions are not lvalues, we only have to handle it
5623 there and in function calls. */
5624 /* arrays can also be used although they are not lvalues */
5625 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5626 !(vtop
->type
.t
& VT_ARRAY
))
5629 vtop
->sym
->a
.addrtaken
= 1;
5630 mk_pointer(&vtop
->type
);
5636 gen_test_zero(TOK_EQ
);
5647 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5648 tcc_error("pointer not accepted for unary plus");
5649 /* In order to force cast, we add zero, except for floating point
5650 where we really need an noop (otherwise -0.0 will be transformed
5652 if (!is_float(vtop
->type
.t
)) {
5664 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5666 if (vtop
[1].r
& VT_SYM
)
5667 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5668 size
= type_size(&type
, &align
);
5669 if (s
&& s
->a
.aligned
)
5670 align
= 1 << (s
->a
.aligned
- 1);
5671 if (t
== TOK_SIZEOF
) {
5672 if (!(type
.t
& VT_VLA
)) {
5674 tcc_error("sizeof applied to an incomplete type");
5677 vla_runtime_type_size(&type
, &align
);
5682 vtop
->type
.t
|= VT_UNSIGNED
;
    case TOK_builtin_expect:
        /* __builtin_expect is a no-op for now */
        parse_builtin_params(0, "ee");
    case TOK_builtin_types_compatible_p:
        parse_builtin_params(0, "tt");
        vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
        vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
        n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
    case TOK_builtin_choose_expr:
    case TOK_builtin_constant_p:
        parse_builtin_params(1, "e");
        n = (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
            !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
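        /* For illustration (not part of the original sources): with the rule
           above, __builtin_constant_p(e) folds to 1 only when 'e' is a plain
           constant whose address was never taken, e.g.

               __builtin_constant_p(42)    // 1
               int v; __builtin_constant_p(v)   // 0

           and __builtin_types_compatible_p(int, const int) yields 1 because
           top-level qualifiers are stripped before comparing the types. */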
5732 case TOK_builtin_frame_address
:
5733 case TOK_builtin_return_address
:
5739 if (tok
!= TOK_CINT
) {
5740 tcc_error("%s only takes positive integers",
5741 tok1
== TOK_builtin_return_address
?
5742 "__builtin_return_address" :
5743 "__builtin_frame_address");
5745 level
= (uint32_t)tokc
.i
;
5750 vset(&type
, VT_LOCAL
, 0); /* local frame */
5752 #ifdef TCC_TARGET_RISCV64
5756 mk_pointer(&vtop
->type
);
5757 indir(); /* -> parent frame */
5759 if (tok1
== TOK_builtin_return_address
) {
5760 // assume return address is just above frame pointer on stack
5761 #ifdef TCC_TARGET_ARM
5764 #elif defined TCC_TARGET_RISCV64
5771 mk_pointer(&vtop
->type
);
5776 #ifdef TCC_TARGET_RISCV64
5777 case TOK_builtin_va_start
:
5778 parse_builtin_params(0, "ee");
5779 r
= vtop
->r
& VT_VALMASK
;
5783 tcc_error("__builtin_va_start expects a local variable");
5788 #ifdef TCC_TARGET_X86_64
5789 #ifdef TCC_TARGET_PE
5790 case TOK_builtin_va_start
:
5791 parse_builtin_params(0, "ee");
5792 r
= vtop
->r
& VT_VALMASK
;
5796 tcc_error("__builtin_va_start expects a local variable");
5798 vtop
->type
= char_pointer_type
;
5803 case TOK_builtin_va_arg_types
:
5804 parse_builtin_params(0, "t");
5805 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5812 #ifdef TCC_TARGET_ARM64
5813 case TOK_builtin_va_start
: {
5814 parse_builtin_params(0, "ee");
5818 vtop
->type
.t
= VT_VOID
;
5821 case TOK_builtin_va_arg
: {
5822 parse_builtin_params(0, "et");
5830 case TOK___arm64_clear_cache
: {
5831 parse_builtin_params(0, "ee");
5834 vtop
->type
.t
= VT_VOID
;
    /* pre operations */
        t = vtop->type.t & VT_BTYPE;
        /* In IEEE negate(x) isn't subtract(0,x), but rather ... */
            vtop->c.f = -1.0 * 0.0;
        else if (t == VT_DOUBLE)
            vtop->c.d = -1.0 * 0.0;
            vtop->c.ld = -1.0 * 0.0;
        goto tok_identifier;
        /* allow taking the address of a label */
        if (tok < TOK_UIDENT)
            expect("label identifier");
        s = label_find(tok);
            s = label_push(&global_label_stack, tok, LABEL_FORWARD);
            if (s->r == LABEL_DECLARED)
                s->r = LABEL_FORWARD;
            s->type.t = VT_VOID;
            mk_pointer(&s->type);
            s->type.t |= VT_STATIC;
        vpushsym(&s->type, s);
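        /* For illustration (not part of the original sources): this is the GNU
           "labels as values" extension, e.g.

               void *p = &&done;   // pointer to a label, type void *
               goto *p;            // computed goto
             done: ;

           which is why the label symbol is given a static 'void *' type. */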
5891 CType controlling_type
;
5892 int has_default
= 0;
5895 TokenString
*str
= NULL
;
5896 int saved_const_wanted
= const_wanted
;
5901 expr_type(&controlling_type
, expr_eq
);
5902 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5903 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5904 mk_pointer(&controlling_type
);
5905 const_wanted
= saved_const_wanted
;
5909 if (tok
== TOK_DEFAULT
) {
5911 tcc_error("too many 'default'");
5917 AttributeDef ad_tmp
;
5922 parse_btype(&cur_type
, &ad_tmp
);
5925 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5926 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5928 tcc_error("type match twice");
5938 skip_or_save_block(&str
);
5940 skip_or_save_block(NULL
);
5947 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5948 tcc_error("type '%s' does not match any association", buf
);
5950 begin_macro(str
, 1);
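        /* For illustration (not part of the original sources): _Generic picks
           one association by the unqualified, decayed type of the controlling
           expression, e.g.

               #define type_name(x) _Generic((x), \
                       int: "int", float: "float", default: "other")

               type_name(1)      // "int"
               type_name(1.0f)   // "float"
               type_name(1.0)    // "other" (double hits default)

           Only the selected branch is compiled; the other branches are skipped
           or saved with skip_or_save_block() above. */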
        // special qnan, snan and infinity values
            vtop->type.t = VT_FLOAT;
        goto special_math_val;
        goto special_math_val;
5979 expect("identifier");
5981 if (!s
|| IS_ASM_SYM(s
)) {
5982 const char *name
= get_tok_str(t
, NULL
);
5984 tcc_error("'%s' undeclared", name
);
5985 /* for simple function calls, we tolerate undeclared
5986 external reference to int() function */
5987 if (tcc_state
->warn_implicit_function_declaration
5988 #ifdef TCC_TARGET_PE
5989 /* people must be warned about using undeclared WINAPI functions
5990 (which usually start with uppercase letter) */
5991 || (name
[0] >= 'A' && name
[0] <= 'Z')
5994 tcc_warning("implicit declaration of function '%s'", name
);
5995 s
= external_global_sym(t
, &func_old_type
);
5999 /* A symbol that has a register is a local register variable,
6000 which starts out as VT_LOCAL value. */
6001 if ((r
& VT_VALMASK
) < VT_CONST
)
6002 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
6004 vset(&s
->type
, r
, s
->c
);
6005 /* Point to s as backpointer (even without r&VT_SYM).
6006 Will be used by at least the x86 inline asm parser for
6012 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
6013 vtop
->c
.i
= s
->enum_val
;
6018 /* post operations */
6020 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
6023 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
6024 int qualifiers
, cumofs
= 0;
6026 if (tok
== TOK_ARROW
)
6028 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
6031 /* expect pointer on structure */
6032 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
6033 expect("struct or union");
6034 if (tok
== TOK_CDOUBLE
)
6035 expect("field name");
6037 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
6038 expect("field name");
6039 s
= find_field(&vtop
->type
, tok
, &cumofs
);
6041 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
6042 /* add field offset to pointer */
6043 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
6044 vpushi(cumofs
+ s
->c
);
6046 /* change type to field type, and set to lvalue */
6047 vtop
->type
= s
->type
;
6048 vtop
->type
.t
|= qualifiers
;
6049 /* an array is never an lvalue */
6050 if (!(vtop
->type
.t
& VT_ARRAY
)) {
6052 #ifdef CONFIG_TCC_BCHECK
6053 /* if bound checking, the referenced pointer must be checked */
6054 if (tcc_state
->do_bounds_check
)
6055 vtop
->r
|= VT_MUSTBOUND
;
6059 } else if (tok
== '[') {
6065 } else if (tok
== '(') {
6068 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
6071 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
6072 /* pointer test (no array accepted) */
6073 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
6074 vtop
->type
= *pointed_type(&vtop
->type
);
6075 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6079 expect("function pointer");
6082 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
6084 /* get return type */
6087 sa
= s
->next
; /* first parameter */
6088 nb_args
= regsize
= 0;
            /* compute first implicit argument if a structure is returned */
            if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
                variadic = (s->f.func_type == FUNC_ELLIPSIS);
                ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
                                       &ret_align, &regsize);
                if (ret_nregs <= 0) {
                    /* get some space for the returned structure */
                    size = type_size(&s->type, &align);
#ifdef TCC_TARGET_ARM64
                    /* On arm64, a small struct is returned in registers.
                       It is much easier to write it to memory if we know
                       that we are allowed to write some extra bytes, so
                       round the allocated space up to a power of 2: */
                    while (size & (size - 1))
                        size = (size | (size - 1)) + 1;
                    loc = (loc - size) & -align;
                    ret.r = VT_LOCAL | VT_LVAL;
                    /* pass it as 'int' to avoid structure arg passing */
                    vseti(VT_LOCAL, loc);
#ifdef CONFIG_TCC_BCHECK
                    if (tcc_state->do_bounds_check)
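                    /* For illustration (not part of the original sources): the
                       rounding loop above sets every bit below the highest set
                       bit and adds 1, e.g. 12 (0b1100) -> (0b1111) + 1 = 16,
                       while a value that is already a power of 2 is left
                       unchanged because size & (size - 1) is then 0. */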
6128 if (ret_nregs
> 0) {
6129 /* return in register */
6131 PUT_R_RET(&ret
, ret
.type
.t
);
6136 gfunc_param_typed(s
, sa
);
6146 tcc_error("too few arguments to function");
6148 gfunc_call(nb_args
);
6150 if (ret_nregs
< 0) {
6151 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6152 #ifdef TCC_TARGET_RISCV64
6153 arch_transfer_ret_regs(1);
6157 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6158 vsetc(&ret
.type
, r
, &ret
.c
);
6159 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6162 /* handle packed struct return */
6163 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6166 size
= type_size(&s
->type
, &align
);
6167 /* We're writing whole regs often, make sure there's enough
6168 space. Assume register size is power of 2. */
6169 if (regsize
> align
)
6171 loc
= (loc
- size
) & -align
;
6175 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6179 if (--ret_nregs
== 0)
6183 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
            /* Promote char/short return values. This matters only
               for calling functions that were not compiled by TCC and
               only on some architectures. For those where it doesn't
               matter we expect things to be already promoted to int. */
            t = s->type.t & VT_BTYPE;
            if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
                vtop->r |= BFVAL(VT_MUSTCAST, 1);
                vtop->type.t = VT_INT;
6200 if (s
->f
.func_noreturn
)
#ifndef precedence_parser /* original top-down parser */

static void expr_prod(void)
    while ((t = tok) == '*' || t == '/' || t == '%') {

static void expr_sum(void)
    while ((t = tok) == '+' || t == '-') {

static void expr_shift(void)
    while ((t = tok) == TOK_SHL || t == TOK_SAR) {

static void expr_cmp(void)
    while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
           t == TOK_ULT || t == TOK_UGE) {

static void expr_cmpeq(void)
    while ((t = tok) == TOK_EQ || t == TOK_NE) {

static void expr_and(void)
    while (tok == '&') {

static void expr_xor(void)
    while (tok == '^') {

static void expr_or(void)
    while (tok == '|') {

static void expr_landor(int op);

static void expr_land(void)
    if (tok == TOK_LAND)

static void expr_lor(void)

# define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
#else /* defined precedence_parser */
# define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
# define expr_lor() unary(), expr_infix(1)

static int precedence(int tok)
    case TOK_LOR: return 1;
    case TOK_LAND: return 2;
    case TOK_EQ: case TOK_NE: return 6;
    relat: case TOK_ULT: case TOK_UGE: return 7;
    case TOK_SHL: case TOK_SAR: return 8;
    case '+': case '-': return 9;
    case '*': case '/': case '%': return 10;
    if (tok >= TOK_ULE && tok <= TOK_GT)

static unsigned char prec[256];
static void init_prec(void)
    for (i = 0; i < 256; i++)
        prec[i] = precedence(i);
#define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)

static void expr_landor(int op);

static void expr_infix(int p)
    while ((p2 = precedence(t)) >= p) {
        if (t == TOK_LOR || t == TOK_LAND) {
            if (precedence(tok) > p2)
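/* For illustration (not part of the original sources): expr_infix() is a
   precedence-climbing parser. For "a + b * c" it parses 'a', sees '+'
   (level 9), and recurses with a higher minimum level for the right operand,
   so 'b * c' (level 10) is consumed first and the result is a + (b * c),
   without needing one function per precedence level. */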
/* Assuming vtop is a value used in a conditional context
   (i.e. compared with zero) return 0 if it's false, 1 if
   true and -1 if it can't be statically determined. */
static int condition_3way(void)
    if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
        (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
        gen_cast_s(VT_BOOL);

static void expr_landor(int op)
    int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
        c = f ? i : condition_3way();
            save_regs(1), cc = 0;
            nocode_wanted++, f = 1;
        expr_landor_next(op);
static int is_cond_bool(SValue *sv)
    if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
        && (sv->type.t & VT_BTYPE) == VT_INT)
        return (unsigned)sv->c.i < 2;
    if (sv->r == VT_CMP)
6424 static void expr_cond(void)
6426 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6434 c
= condition_3way();
6435 g
= (tok
== ':' && gnu_ext
);
6445 /* needed to avoid having different registers saved in
6452 ncw_prev
= nocode_wanted
;
6458 if (c
< 0 && vtop
->r
== VT_CMP
) {
6465 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6466 mk_pointer(&vtop
->type
);
6467 sv
= *vtop
; /* save value to handle it later */
6468 vtop
--; /* no vpop so that FP stack is not flushed */
6478 nocode_wanted
= ncw_prev
;
6484 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6485 if (sv
.r
== VT_CMP
) {
6496 nocode_wanted
= ncw_prev
;
6497 // tcc_warning("two conditions expr_cond");
6501 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6502 mk_pointer(&vtop
->type
);
6504 /* cast operands to correct type according to ISOC rules */
6505 if (!combine_types(&type
, &sv
, vtop
, '?'))
6506 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6507 "type mismatch in conditional expression (have '%s' and '%s')");
6508 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6509 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6510 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6512 /* now we convert second operand */
6516 mk_pointer(&vtop
->type
);
6518 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6522 rc
= RC_TYPE(type
.t
);
6523 /* for long longs, we use fixed registers to avoid having
6524 to handle a complicated move */
6525 if (USING_TWO_WORDS(type
.t
))
6526 rc
= RC_RET(type
.t
);
6534 nocode_wanted
= ncw_prev
;
6536 /* this is horrible, but we must also convert first
6542 mk_pointer(&vtop
->type
);
6544 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6550 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6560 static void expr_eq(void)
6565 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6573 gen_op(TOK_ASSIGN_OP(t
));
6579 ST_FUNC
void gexpr(void)
/* parse a constant expression and return value in vtop. */
static void expr_const1(void)
    nocode_wanted += unevalmask + 1;
    nocode_wanted -= unevalmask + 1;

/* parse an integer constant and return its value. */
static inline int64_t expr_const64(void)
    if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
        expect("constant expression");

/* parse an integer constant and return its value.
   Complain if it doesn't fit 32bit (signed or unsigned). */
ST_FUNC int expr_const(void)
    int64_t wc = expr_const64();
    if (c != wc && (unsigned)c != wc)
        tcc_error("constant exceeds 32 bit");
6624 /* ------------------------------------------------------------------------- */
6625 /* return from function */
6627 #ifndef TCC_TARGET_ARM64
6628 static void gfunc_return(CType
*func_type
)
6630 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6631 CType type
, ret_type
;
6632 int ret_align
, ret_nregs
, regsize
;
6633 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6634 &ret_align
, ®size
);
6635 if (ret_nregs
< 0) {
6636 #ifdef TCC_TARGET_RISCV64
6637 arch_transfer_ret_regs(0);
6639 } else if (0 == ret_nregs
) {
6640 /* if returning structure, must copy it to implicit
6641 first pointer arg location */
6644 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6647 /* copy structure value to pointer */
6650 /* returning structure packed into registers */
6651 int size
, addr
, align
, rc
;
6652 size
= type_size(func_type
,&align
);
6653 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6654 (vtop
->c
.i
& (ret_align
-1)))
6655 && (align
& (ret_align
-1))) {
6656 loc
= (loc
- size
) & -ret_align
;
6659 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6663 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6665 vtop
->type
= ret_type
;
6666 rc
= RC_RET(ret_type
.t
);
6674 if (--ret_nregs
== 0)
6676 /* We assume that when a structure is returned in multiple
6677 registers, their classes are consecutive values of the
6680 vtop
->c
.i
+= regsize
;
6685 gv(RC_RET(func_type
->t
));
6687 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
static void check_func_return(void)
    if ((func_vt.t & VT_BTYPE) == VT_VOID)
    if (!strcmp (funcname, "main")
        && (func_vt.t & VT_BTYPE) == VT_INT) {
        /* main returns 0 by default */
        gen_assign_cast(&func_vt);
        gfunc_return(&func_vt);
        tcc_warning("function might return no value: '%s'", funcname);
/* ------------------------------------------------------------------------- */

static int case_cmpi(const void *pa, const void *pb)
    int64_t a = (*(struct case_t **) pa)->v1;
    int64_t b = (*(struct case_t **) pb)->v1;
    return a < b ? -1 : a > b;

static int case_cmpu(const void *pa, const void *pb)
    uint64_t a = (uint64_t)(*(struct case_t **) pa)->v1;
    uint64_t b = (uint64_t)(*(struct case_t **) pb)->v1;
    return a < b ? -1 : a > b;

static void gtst_addr(int t, int a)
    gsym_addr(gvtst(0, t), a);
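/* For illustration (not part of the original sources): "a < b ? -1 : a > b"
   is the usual three-way comparator idiom for qsort(): it yields -1, 0 or 1
   without the overflow risk that "return a - b;" would have for 64-bit case
   values. */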
static void gcase(struct case_t **base, int len, int *bsym)
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
            gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        gcase(base, len/2, bsym);
        base += e; len -= e;
        if (p->v1 == p->v2) {
            gtst_addr(0, p->sym);
        gtst_addr(0, p->sym);
    *bsym = gjmp(*bsym);
/* ------------------------------------------------------------------------- */
/* __attribute__((cleanup(fn))) */

static void try_call_scope_cleanup(Sym *stop)
    Sym *cls = cur_scope->cl.s;

    for (; cls != stop; cls = cls->ncl) {
        Sym *fs = cls->next;
        Sym *vs = cls->prev_tok;

        vpushsym(&fs->type, fs);
        vset(&vs->type, vs->r, vs->c);
        mk_pointer(&vtop->type);
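/* Illustrative usage (not part of the original sources, helper names are
   hypothetical): the cleanup chain above supports declarations such as

       void unlock(int **p);
       {
           int *q __attribute__((cleanup(unlock))) = acquire();
           ...
       }   // unlock(&q) is called automatically when the scope is left

   where 'fs' is the cleanup function symbol and 'vs' the guarded variable. */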
static void try_call_cleanup_goto(Sym *cleanupstate)
    if (!cur_scope->cl.s)

    /* search NCA of both cleanup chains given parents and initial depth */
    ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
    for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
    for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
    for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)

    try_call_scope_cleanup(cc);
/* call 'func' for each __attribute__((cleanup(func))) */
static void block_cleanup(struct scope *o)
    for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
        if (g->prev_tok->r & LABEL_FORWARD) {
            try_call_scope_cleanup(o->cl.s);
            pcl->jnext = gjmp(0);
            goto remove_pending;
    try_call_scope_cleanup(o->cl.s);
/* ------------------------------------------------------------------------- */

static void vla_restore(int loc)
    gen_vla_sp_restore(loc);

static void vla_leave(struct scope *o)
    if (o->vla.num < cur_scope->vla.num)
        vla_restore(o->vla.loc);
/* ------------------------------------------------------------------------- */

void new_scope(struct scope *o)
    /* copy and link previous scope */
    o->prev = cur_scope;

    /* record local declaration stack position */
    o->lstk = local_stack;
    o->llstk = local_label_stack;

    if (tcc_state->do_debug)
        tcc_debug_stabn(N_LBRAC, ind - func_ind);
void prev_scope(struct scope *o, int is_expr)
    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack. Either via the
       type or via vtop->sym. We can't pop those nor any that in turn
       might be referred to. To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that. We do have to remove such symbols from the lookup
       tables, though. sym_pop will do that. */

    /* pop locally defined symbols */
    pop_local_syms(&local_stack, o->lstk, is_expr, 0);
    cur_scope = o->prev;

    if (tcc_state->do_debug)
        tcc_debug_stabn(N_RBRAC, ind - func_ind);
/* leave a scope via break/continue(/goto) */
void leave_scope(struct scope *o)
    try_call_scope_cleanup(o->cl.s);

/* ------------------------------------------------------------------------- */
/* call block from 'for do while' loops */

static void lblock(int *bsym, int *csym)
    struct scope *lo = loop_scope, *co = cur_scope;
    int *b = co->bsym, *c = co->csym;
6942 static void block(int is_expr
)
6944 int a
, b
, c
, d
, e
, t
;
6949 /* default return value is (void) */
6951 vtop
->type
.t
= VT_VOID
;
6956 /* If the token carries a value, next() might destroy it. Only with
6957 invalid code such as f(){"123"4;} */
6958 if (TOK_HAS_VALUE(t
))
6968 if (tok
== TOK_ELSE
) {
6973 gsym(d
); /* patch else jmp */
6978 } else if (t
== TOK_WHILE
) {
6990 } else if (t
== '{') {
6993 /* handle local labels declarations */
6994 while (tok
== TOK_LABEL
) {
6997 if (tok
< TOK_UIDENT
)
6998 expect("label identifier");
6999 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
7001 } while (tok
== ',');
7005 while (tok
!= '}') {
7014 prev_scope(&o
, is_expr
);
7017 else if (!nocode_wanted
)
7018 check_func_return();
7020 } else if (t
== TOK_RETURN
) {
7021 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
7025 gen_assign_cast(&func_vt
);
7027 if (vtop
->type
.t
!= VT_VOID
)
7028 tcc_warning("void function returns a value");
7032 tcc_warning("'return' with no value");
7035 leave_scope(root_scope
);
7037 gfunc_return(&func_vt
);
7039 /* jump unless last stmt in top-level block */
7040 if (tok
!= '}' || local_scope
!= 1)
7044 } else if (t
== TOK_BREAK
) {
7046 if (!cur_scope
->bsym
)
7047 tcc_error("cannot break");
7048 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
7049 leave_scope(cur_switch
->scope
);
7051 leave_scope(loop_scope
);
7052 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
7055 } else if (t
== TOK_CONTINUE
) {
7057 if (!cur_scope
->csym
)
7058 tcc_error("cannot continue");
7059 leave_scope(loop_scope
);
7060 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
7063 } else if (t
== TOK_FOR
) {
7068 /* c99 for-loop init decl? */
7069 if (!decl0(VT_LOCAL
, 1, NULL
)) {
7070 /* no, regular for-loop init expr */
7098 } else if (t
== TOK_DO
) {
7112 } else if (t
== TOK_SWITCH
) {
7113 struct switch_t
*sw
;
7115 sw
= tcc_mallocz(sizeof *sw
);
7117 sw
->scope
= cur_scope
;
7118 sw
->prev
= cur_switch
;
7124 sw
->sv
= *vtop
--; /* save switch value */
7127 b
= gjmp(0); /* jump to first case */
7129 a
= gjmp(a
); /* add implicit break */
7133 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7134 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7136 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7138 for (b
= 1; b
< sw
->n
; b
++)
7139 if (sw
->sv
.type
.t
& VT_UNSIGNED
7140 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7141 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7142 tcc_error("duplicate case value");
7146 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7149 gsym_addr(d
, sw
->def_sym
);
7155 dynarray_reset(&sw
->p
, &sw
->n
);
7156 cur_switch
= sw
->prev
;
7159 } else if (t
== TOK_CASE
) {
7160 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7163 cr
->v1
= cr
->v2
= expr_const64();
7164 if (gnu_ext
&& tok
== TOK_DOTS
) {
7166 cr
->v2
= expr_const64();
7167 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7168 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7169 tcc_warning("empty case range");
7172 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7175 goto block_after_label
;
7177 } else if (t
== TOK_DEFAULT
) {
7180 if (cur_switch
->def_sym
)
7181 tcc_error("too many 'default'");
7182 cur_switch
->def_sym
= gind();
7185 goto block_after_label
;
7187 } else if (t
== TOK_GOTO
) {
7188 vla_restore(root_scope
->vla
.loc
);
7189 if (tok
== '*' && gnu_ext
) {
7193 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7197 } else if (tok
>= TOK_UIDENT
) {
7198 s
= label_find(tok
);
7199 /* put forward definition if needed */
7201 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7202 else if (s
->r
== LABEL_DECLARED
)
7203 s
->r
= LABEL_FORWARD
;
7205 if (s
->r
& LABEL_FORWARD
) {
7206 /* start new goto chain for cleanups, linked via label->next */
7207 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7208 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7209 pending_gotos
->prev_tok
= s
;
7210 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7211 pending_gotos
->next
= s
;
7213 s
->jnext
= gjmp(s
->jnext
);
7215 try_call_cleanup_goto(s
->cleanupstate
);
7216 gjmp_addr(s
->jnext
);
7221 expect("label identifier");
7225 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7229 if (tok
== ':' && t
>= TOK_UIDENT
) {
7234 if (s
->r
== LABEL_DEFINED
)
7235 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7236 s
->r
= LABEL_DEFINED
;
7238 Sym
*pcl
; /* pending cleanup goto */
7239 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7241 sym_pop(&s
->next
, NULL
, 0);
7245 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7248 s
->cleanupstate
= cur_scope
->cl
.s
;
7251 vla_restore(cur_scope
->vla
.loc
);
7252 /* we accept this, but it is a mistake */
7254 tcc_warning("deprecated use of label at end of compound statement");
7260 /* expression case */
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
   with a '{'). If STR then allocates and stores the skipped tokens
   in *STR. This doesn't check if () and {} are nested correctly,
   i.e. "({)}" is accepted. */
static void skip_or_save_block(TokenString **str)
    int braces = tok == '{';
        *str = tok_str_alloc();

    while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
        if (tok == TOK_EOF) {
            if (str || level > 0)
                tcc_error("unexpected end of file");
            tok_str_add_tok(*str);
        if (t == '{' || t == '(') {
        } else if (t == '}' || t == ')') {
            if (level == 0 && braces && t == '}')
        tok_str_add(*str, -1);
        tok_str_add(*str, 0);
7315 #define EXPR_CONST 1
7318 static void parse_init_elem(int expr_type
)
7320 int saved_global_expr
;
7323 /* compound literals must be allocated globally in this case */
7324 saved_global_expr
= global_expr
;
7327 global_expr
= saved_global_expr
;
7328 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7329 (compound literals). */
7330 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7331 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7332 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7333 #ifdef TCC_TARGET_PE
7334 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7337 tcc_error("initializer element is not constant");
static void init_assert(init_params *p, int offset)
    if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
               : !nocode_wanted && offset > p->local_offset)
        tcc_internal_error("initializer overflow");

#define init_assert(sec, offset)

/* put zeros for variable based init */
static void init_putz(init_params *p, unsigned long c, int size)
    init_assert(p, c + size);
        /* nothing to do because globals are already set to zero */
        vpush_global_sym(&func_old_type, TOK_memset);
#ifdef TCC_TARGET_ARM

#define DIF_SIZE_ONLY 2
#define DIF_HAVE_ELEM 4
/* delete relocations for specified range c ... c + size. Unfortunately,
   in very special cases, relocations may occur unordered */
static void decl_design_delrels(Section *sec, int c, int size)
    ElfW_Rel *rel, *rel2, *rel_end;
    if (!sec || !sec->reloc)
    rel = rel2 = (ElfW_Rel *)sec->reloc->data;
    rel_end = (ElfW_Rel *)(sec->reloc->data + sec->reloc->data_offset);
    while (rel < rel_end) {
        if (rel->r_offset >= c && rel->r_offset < c + size) {
            sec->reloc->data_offset -= sizeof *rel;
            memcpy(rel2, rel, sizeof *rel);

static void decl_design_flex(init_params *p, Sym *ref, int index)
    if (ref == p->flex_array_ref) {
        if (index >= ref->c)
    } else if (ref->c < 0)
        tcc_error("flexible array has zero size in this context");
7411 /* t is the array or struct type. c is the array or struct
7412 address. cur_field is the pointer to the current
7413 field, for arrays the 'c' member contains the current start
7414 index. 'flags' is as in decl_initializer.
7415 'al' contains the already initialized length of the
7416 current container (starting at c). This returns the new length of that. */
7417 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7418 Sym
**cur_field
, int flags
, int al
)
7421 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7422 unsigned long corig
= c
;
7427 if (flags
& DIF_HAVE_ELEM
)
7430 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7437 /* NOTE: we only support ranges for last designator */
7438 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7440 if (!(type
->t
& VT_ARRAY
))
7441 expect("array type");
7443 index
= index_last
= expr_const();
7444 if (tok
== TOK_DOTS
&& gnu_ext
) {
7446 index_last
= expr_const();
7450 decl_design_flex(p
, s
, index_last
);
7451 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7452 tcc_error("index exceeds array bounds or range is empty");
7454 (*cur_field
)->c
= index_last
;
7455 type
= pointed_type(type
);
7456 elem_size
= type_size(type
, &align
);
7457 c
+= index
* elem_size
;
7458 nb_elems
= index_last
- index
+ 1;
7465 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7466 expect("struct/union type");
7468 f
= find_field(type
, l
, &cumofs
);
7481 } else if (!gnu_ext
) {
7486 if (type
->t
& VT_ARRAY
) {
7487 index
= (*cur_field
)->c
;
7489 decl_design_flex(p
, s
, index
);
7491 tcc_error("too many initializers");
7492 type
= pointed_type(type
);
7493 elem_size
= type_size(type
, &align
);
7494 c
+= index
* elem_size
;
7497 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7498 *cur_field
= f
= f
->next
;
7500 tcc_error("too many initializers");
7506 if (!elem_size
) /* for structs */
7507 elem_size
= type_size(type
, &align
);
7509 /* Using designators the same element can be initialized more
7510 than once. In that case we need to delete possibly already
7511 existing relocations. */
7512 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7513 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7514 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7517 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7519 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7523 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7524 /* make init_putv/vstore believe it were a struct */
7526 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7530 vpush_ref(type
, p
->sec
, c
, elem_size
);
7532 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7533 for (i
= 1; i
< nb_elems
; i
++) {
7535 init_putv(p
, type
, c
+ elem_size
* i
);
7540 c
+= nb_elems
* elem_size
;
7546 /* store a value or an expression directly in global data or in local array */
7547 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7553 Section
*sec
= p
->sec
;
7556 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7558 size
= type_size(type
, &align
);
7559 if (type
->t
& VT_BITFIELD
)
7560 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7561 init_assert(p
, c
+ size
);
7564 /* XXX: not portable */
7565 /* XXX: generate error if incorrect relocation */
7566 gen_assign_cast(&dtype
);
7567 bt
= type
->t
& VT_BTYPE
;
7569 if ((vtop
->r
& VT_SYM
)
7572 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7573 || (type
->t
& VT_BITFIELD
))
7574 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7576 tcc_error("initializer element is not computable at load time");
7578 if (NODATA_WANTED
) {
7583 ptr
= sec
->data
+ c
;
7585 /* XXX: make code faster ? */
7586 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7587 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7588 /* XXX This rejects compound literals like
7589 '(void *){ptr}'. The problem is that '&sym' is
7590 represented the same way, which would be ruled out
7591 by the SYM_FIRST_ANOM check above, but also '"string"'
7592 in 'char *p = "string"' is represented the same
7593 with the type being VT_PTR and the symbol being an
7594 anonymous one. That is, there's no difference in vtop
7595 between '(void *){x}' and '&(void *){x}'. Ignore
7596 pointer typed entities here. Hopefully no real code
7597 will ever use compound literals with scalar type. */
7598 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7599 /* These come from compound literals, memcpy stuff over. */
7603 esym
= elfsym(vtop
->sym
);
7604 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7605 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
            /* We need to copy over all memory contents, and that
               includes relocations. Use the fact that relocs are
               created in order, so look from the end of relocs
               until we hit one before the copied region. */
7611 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
7612 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
7613 while (num_relocs
--) {
7615 if (rel
->r_offset
>= esym
->st_value
+ size
)
7617 if (rel
->r_offset
< esym
->st_value
)
7619 put_elf_reloca(symtab_section
, sec
,
7620 c
+ rel
->r_offset
- esym
->st_value
,
7621 ELFW(R_TYPE
)(rel
->r_info
),
7622 ELFW(R_SYM
)(rel
->r_info
),
7632 if (type
->t
& VT_BITFIELD
) {
7633 int bit_pos
, bit_size
, bits
, n
;
7634 unsigned char *p
, v
, m
;
7635 bit_pos
= BIT_POS(vtop
->type
.t
);
7636 bit_size
= BIT_SIZE(vtop
->type
.t
);
7637 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7638 bit_pos
&= 7, bits
= 0;
7643 v
= vtop
->c
.i
>> bits
<< bit_pos
;
7644 m
= ((1 << n
) - 1) << bit_pos
;
7645 *p
= (*p
& ~m
) | (v
& m
);
7646 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7650 /* XXX: when cross-compiling we assume that each type has the
7651 same representation on host and target, which is likely to
7652 be wrong in the case of long double */
7654 vtop
->c
.i
= vtop
->c
.i
!= 0;
7656 *(char *)ptr
= vtop
->c
.i
;
7659 *(short *)ptr
= vtop
->c
.i
;
7662 *(float*)ptr
= vtop
->c
.f
;
7665 *(double *)ptr
= vtop
->c
.d
;
7668 #if defined TCC_IS_NATIVE_387
7669 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7670 memcpy(ptr
, &vtop
->c
.ld
, 10);
7672 else if (sizeof (long double) == sizeof (double))
7673 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7675 else if (vtop
->c
.ld
== 0.0)
7679 if (sizeof(long double) == LDOUBLE_SIZE
)
7680 *(long double*)ptr
= vtop
->c
.ld
;
7681 else if (sizeof(double) == LDOUBLE_SIZE
)
7682 *(double *)ptr
= (double)vtop
->c
.ld
;
7684 tcc_error("can't cross compile long double constants");
7688 *(long long *)ptr
= vtop
->c
.i
;
7695 addr_t val
= vtop
->c
.i
;
7697 if (vtop
->r
& VT_SYM
)
7698 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7700 *(addr_t
*)ptr
= val
;
7702 if (vtop
->r
& VT_SYM
)
7703 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7704 *(addr_t
*)ptr
= val
;
7710 int val
= vtop
->c
.i
;
7712 if (vtop
->r
& VT_SYM
)
7713 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7717 if (vtop
->r
& VT_SYM
)
7718 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7727 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7734 /* 't' contains the type and storage info. 'c' is the offset of the
7735 object in section 'sec'. If 'sec' is NULL, it means stack based
7736 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7737 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7738 size only evaluation is wanted (only for arrays). */
7739 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
7741 int len
, n
, no_oblock
, i
;
7747 /* generate line number info */
7748 if (!p
->sec
&& tcc_state
->do_debug
)
7749 tcc_debug_line(tcc_state
);
7751 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7752 /* In case of strings we have special handling for arrays, so
7753 don't consume them as initializer value (which would commit them
7754 to some anonymous symbol). */
7755 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7756 !(flags
& DIF_SIZE_ONLY
)) {
7757 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7758 flags
|= DIF_HAVE_ELEM
;
7761 if ((flags
& DIF_HAVE_ELEM
) &&
7762 !(type
->t
& VT_ARRAY
) &&
7763 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7764 The source type might have VT_CONSTANT set, which is
7765 of course assignable to non-const elements. */
7766 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7769 } else if (type
->t
& VT_ARRAY
) {
7771 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7779 t1
= pointed_type(type
);
7780 size1
= type_size(t1
, &align1
);
7782 /* only parse strings here if correct type (otherwise: handle
7783 them as ((w)char *) expressions */
7784 if ((tok
== TOK_LSTR
&&
7785 #ifdef TCC_TARGET_PE
7786 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7788 (t1
->t
& VT_BTYPE
) == VT_INT
7790 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7792 cstr_reset(&initstr
);
7793 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7794 tcc_error("unhandled string literal merging");
7795 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7797 initstr
.size
-= size1
;
7799 len
+= tokc
.str
.size
;
7801 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7803 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7806 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7807 && tok
!= TOK_EOF
) {
7808 /* Not a lone literal but part of a bigger expression. */
7809 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7810 tokc
.str
.size
= initstr
.size
;
7811 tokc
.str
.data
= initstr
.data
;
7815 if (!(flags
& DIF_SIZE_ONLY
)) {
7820 tcc_warning("initializer-string for array is too long");
7821 /* in order to go faster for common case (char
7822 string in global variable, we handle it
7824 if (p
->sec
&& size1
== 1) {
7825 init_assert(p
, c
+ nb
);
7827 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
7831 /* only add trailing zero if enough storage (no
7832 warning in this case since it is standard) */
7833 if (flags
& DIF_CLEAR
)
7836 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
7840 } else if (size1
== 1)
7841 ch
= ((unsigned char *)initstr
.data
)[i
];
7843 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7845 init_putv(p
, t1
, c
+ i
* size1
);
7849 decl_design_flex(p
, s
, len
);
7858 /* zero memory once in advance */
7859 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
7860 init_putz(p
, c
, n
*size1
);
7865 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7866 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
7867 flags
&= ~DIF_HAVE_ELEM
;
7868 if (type
->t
& VT_ARRAY
) {
7870 /* special test for multi dimensional arrays (may not
7871 be strictly correct if designators are used at the
7873 if (no_oblock
&& len
>= n
*size1
)
7876 if (s
->type
.t
== VT_UNION
)
7880 if (no_oblock
&& f
== NULL
)
7891 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7893 if ((flags
& DIF_FIRST
) || tok
== '{') {
7902 } else if (tok
== '{') {
7903 if (flags
& DIF_HAVE_ELEM
)
7906 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
7908 } else if ((flags
& DIF_SIZE_ONLY
)) {
        /* If we supported only ISO C we wouldn't have to accept calling
           this on anything other than an array if DIF_SIZE_ONLY (and even
           then only on the outermost level, so no recursion would be needed),
           because initializing a flexible array member isn't supported.
           But GNU C supports it, so we need to recurse even into
           subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7915 /* just skip expression */
7916 skip_or_save_block(NULL
);
7918 if (!(flags
& DIF_HAVE_ELEM
)) {
7919 /* This should happen only when we haven't parsed
7920 the init element above for fear of committing a
7921 string constant to memory too early. */
7922 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7923 expect("string constant");
7924 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7927 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
7928 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
7930 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
7934 init_putv(p
, type
, c
);
7938 /* parse an initializer for type 't' if 'has_init' is non zero, and
7939 allocate space in local or global data space ('r' is either
7940 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7941 variable 'v' of scope 'scope' is declared before initializers
7942 are parsed. If 'v' is zero, then a reference to the new object
7943 is put in the value stack. If 'has_init' is 2, a special parsing
7944 is done to handle string constants. */
7945 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7946 int has_init
, int v
, int scope
)
7948 int size
, align
, addr
;
7949 TokenString
*init_str
= NULL
;
7952 Sym
*flexible_array
;
7954 int saved_nocode_wanted
= nocode_wanted
;
7955 #ifdef CONFIG_TCC_BCHECK
7956 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7958 init_params p
= {0};
7960 /* Always allocate static or global variables */
7961 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7962 nocode_wanted
|= 0x80000000;
7964 flexible_array
= NULL
;
7965 size
= type_size(type
, &align
);
7967 /* exactly one flexible array may be initialized, either the
7968 toplevel array or the last member of the toplevel struct */
7971 /* If the base type itself was an array type of unspecified size
7972 (like in 'typedef int arr[]; arr x = {1};') then we will
7973 overwrite the unknown size by the real one for this decl.
7974 We need to unshare the ref symbol holding that size. */
7975 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
7976 p
.flex_array_ref
= type
->ref
;
7978 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7979 Sym
*field
= type
->ref
->next
;
7982 field
= field
->next
;
7983 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
7984 flexible_array
= field
;
7985 p
.flex_array_ref
= field
->type
.ref
;
7992 /* If unknown size, do a dry-run 1st pass */
7994 tcc_error("unknown type size");
7995 if (has_init
== 2) {
7996 /* only get strings */
7997 init_str
= tok_str_alloc();
7998 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7999 tok_str_add_tok(init_str
);
8002 tok_str_add(init_str
, -1);
8003 tok_str_add(init_str
, 0);
8005 skip_or_save_block(&init_str
);
8009 begin_macro(init_str
, 1);
8011 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
8012 /* prepare second initializer parsing */
8013 macro_ptr
= init_str
->str
;
8016 /* if still unknown size, error */
8017 size
= type_size(type
, &align
);
8019 tcc_error("unknown type size");
8021 /* If there's a flex member and it was used in the initializer
8023 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
8024 size
+= flexible_array
->type
.ref
->c
8025 * pointed_size(&flexible_array
->type
);
8028 /* take into account specified alignment if bigger */
8029 if (ad
->a
.aligned
) {
8030 int speca
= 1 << (ad
->a
.aligned
- 1);
8033 } else if (ad
->a
.packed
) {
8037 if (!v
&& NODATA_WANTED
)
8038 size
= 0, align
= 1;
8040 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
8042 #ifdef CONFIG_TCC_BCHECK
8044 /* add padding between stack variables for bound checking */
8048 loc
= (loc
- size
) & -align
;
8050 p
.local_offset
= addr
+ size
;
8051 #ifdef CONFIG_TCC_BCHECK
8053 /* add padding between stack variables for bound checking */
8058 /* local variable */
8059 #ifdef CONFIG_TCC_ASM
8060 if (ad
->asm_label
) {
8061 int reg
= asm_parse_regvar(ad
->asm_label
);
8063 r
= (r
& ~VT_VALMASK
) | reg
;
8066 sym
= sym_push(v
, type
, r
, addr
);
8067 if (ad
->cleanup_func
) {
8068 Sym
*cls
= sym_push2(&all_cleanups
,
8069 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
8070 cls
->prev_tok
= sym
;
8071 cls
->next
= ad
->cleanup_func
;
8072 cls
->ncl
= cur_scope
->cl
.s
;
8073 cur_scope
->cl
.s
= cls
;
8078 /* push local reference */
8079 vset(type
, r
, addr
);
8082 if (v
&& scope
== VT_CONST
) {
8083 /* see if the symbol was already defined */
8086 patch_storage(sym
, ad
, type
);
8087 /* we accept several definitions of the same global variable. */
8088 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8093 /* allocate symbol in corresponding section */
8098 else if (tcc_state
->nocommon
)
8103 addr
= section_add(sec
, size
, align
);
8104 #ifdef CONFIG_TCC_BCHECK
8105 /* add padding if bound check */
8107 section_add(sec
, 1, 1);
8110 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8111 sec
= common_section
;
8116 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8117 patch_storage(sym
, ad
, NULL
);
8119 /* update symbol definition */
8120 put_extern_sym(sym
, sec
, addr
, size
);
8122 /* push global reference */
8123 vpush_ref(type
, sec
, addr
, size
);
8128 #ifdef CONFIG_TCC_BCHECK
8129 /* handles bounds now because the symbol must be defined
8130 before for the relocation */
8134 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8135 /* then add global bound info */
8136 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8137 bounds_ptr
[0] = 0; /* relocated */
8138 bounds_ptr
[1] = size
;
8143 if (type
->t
& VT_VLA
) {
8149 /* save current stack pointer */
8150 if (root_scope
->vla
.loc
== 0) {
8151 struct scope
*v
= cur_scope
;
8152 gen_vla_sp_save(loc
-= PTR_SIZE
);
8153 do v
->vla
.loc
= loc
; while ((v
= v
->prev
));
8156 vla_runtime_type_size(type
, &a
);
8157 gen_vla_alloc(type
, a
);
8158 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8159 /* on _WIN64, because of the function args scratch area, the
8160 result of alloca differs from RSP and is returned in RAX. */
8161 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8163 gen_vla_sp_save(addr
);
8164 cur_scope
->vla
.loc
= addr
;
8165 cur_scope
->vla
.num
++;
8166 } else if (has_init
) {
8168 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8169 /* patch flexible array member size back to -1, */
8170 /* for possible subsequent similar declarations */
8172 flexible_array
->type
.ref
->c
= -1;
8176 /* restore parse state if needed */
8182 nocode_wanted
= saved_nocode_wanted
;
8185 /* parse a function defined by symbol 'sym' and generate its code in
8186 'cur_text_section' */
8187 static void gen_function(Sym
*sym
)
8189 struct scope f
= { 0 };
8190 cur_scope
= root_scope
= &f
;
8192 ind
= cur_text_section
->data_offset
;
8193 if (sym
->a
.aligned
) {
8194 size_t newoff
= section_add(cur_text_section
, 0,
8195 1 << (sym
->a
.aligned
- 1));
8196 gen_fill_nops(newoff
- ind
);
8198 /* NOTE: we patch the symbol size later */
8199 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8200 if (sym
->type
.ref
->f
.func_ctor
)
8201 add_array (tcc_state
, ".init_array", sym
->c
);
8202 if (sym
->type
.ref
->f
.func_dtor
)
8203 add_array (tcc_state
, ".fini_array", sym
->c
);
8205 funcname
= get_tok_str(sym
->v
, NULL
);
8207 func_vt
= sym
->type
.ref
->type
;
8208 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8210 /* put debug symbol */
8211 tcc_debug_funcstart(tcc_state
, sym
);
8212 /* push a dummy symbol to enable local sym storage */
8213 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8214 local_scope
= 1; /* for function parameters */
8218 clear_temp_local_var_list();
8222 /* reset local stack */
8223 pop_local_syms(&local_stack
, NULL
, 0, func_var
);
8225 cur_text_section
->data_offset
= ind
;
8227 label_pop(&global_label_stack
, NULL
, 0);
8228 sym_pop(&all_cleanups
, NULL
, 0);
8229 /* patch symbol size */
8230 elfsym(sym
)->st_size
= ind
- func_ind
;
8231 /* end of function */
8232 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8233 /* It's better to crash than to generate wrong code */
8234 cur_text_section
= NULL
;
8235 funcname
= ""; /* for safety */
8236 func_vt
.t
= VT_VOID
; /* for safety */
8237 func_var
= 0; /* for safety */
8238 ind
= 0; /* for safety */
8239 nocode_wanted
= 0x80000000;
8241 /* do this after funcend debug info */
8245 static void gen_inline_functions(TCCState
*s
)
8248 int inline_generated
, i
;
8249 struct InlineFunc
*fn
;
8251 tcc_open_bf(s
, ":inline:", 0);
    /* iterate while inline functions are referenced */
8254 inline_generated
= 0;
8255 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8256 fn
= s
->inline_fns
[i
];
8258 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8259 /* the function was used or forced (and then not internal):
8260 generate its code and convert it to a normal function */
8262 tcc_debug_putfile(s
, fn
->filename
);
8263 begin_macro(fn
->func_str
, 1);
8265 cur_text_section
= text_section
;
8269 inline_generated
= 1;
8272 } while (inline_generated
);
8276 static void free_inline_functions(TCCState
*s
)
8279 /* free tokens of unused inline functions */
8280 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8281 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8283 tok_str_free(fn
->func_str
);
8285 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
8288 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8289 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8290 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
8292 int v
, has_init
, r
, oldint
;
8295 AttributeDef ad
, adbase
;
8298 if (tok
== TOK_STATIC_ASSERT
) {
8308 tcc_error("_Static_assert fail");
8310 goto static_assert_out
;
8314 parse_mult_str(&error_str
, "string constant");
8316 tcc_error("%s", (char *)error_str
.data
);
8317 cstr_free(&error_str
);
8325 if (!parse_btype(&btype
, &adbase
)) {
8326 if (is_for_loop_init
)
8328 /* skip redundant ';' if not in old parameter decl scope */
8329 if (tok
== ';' && l
!= VT_CMP
) {
8335 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8336 /* global asm block */
8340 if (tok
>= TOK_UIDENT
) {
8341 /* special test for old K&R protos without explicit int
8342 type. Only accepted when defining global data */
8347 expect("declaration");
8353 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8355 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8356 tcc_warning("unnamed struct/union that defines no instances");
8360 if (IS_ENUM(btype
.t
)) {
8366 while (1) { /* iterate thru each declaration */
8369 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8373 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8374 printf("type = '%s'\n", buf
);
8377 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8378 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
8379 tcc_error("function without file scope cannot be static");
8380 /* if old style function prototype, we accept a
8383 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
8384 decl0(VT_CMP
, 0, sym
);
8385 #ifdef TCC_TARGET_MACHO
8386 if (sym
->f
.func_alwinl
8387 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8388 == (VT_EXTERN
| VT_INLINE
))) {
8389 /* always_inline functions must be handled as if they
8390 don't generate multiple global defs, even if extern
8391 inline, i.e. GNU inline semantics for those. Rewrite
8392 them into static inline. */
8393 type
.t
&= ~VT_EXTERN
;
8394 type
.t
|= VT_STATIC
;
8397 /* always compile 'extern inline' */
8398 if (type
.t
& VT_EXTERN
)
8399 type
.t
&= ~VT_INLINE
;
8401 } else if (oldint
) {
8402 tcc_warning("type defaults to int");
8405 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8406 ad
.asm_label
= asm_label_instr();
8407 /* parse one last attribute list, after asm label */
8408 parse_attribute(&ad
);
8410 /* gcc does not allow __asm__("label") with function definition,
8417 #ifdef TCC_TARGET_PE
8418 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8419 if (type
.t
& VT_STATIC
)
8420 tcc_error("cannot have dll linkage with static");
8421 if (type
.t
& VT_TYPEDEF
) {
8422 tcc_warning("'%s' attribute ignored for typedef",
8423 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8424 (ad
.a
.dllexport
= 0, "dllexport"));
8425 } else if (ad
.a
.dllimport
) {
8426 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8429 type
.t
|= VT_EXTERN
;
8435 tcc_error("cannot use local functions");
8436 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8437 expect("function definition");
                /* reject abstract declarators in function definition;
                   make old style params without decl have int type */
8442 while ((sym
= sym
->next
) != NULL
) {
8443 if (!(sym
->v
& ~SYM_FIELD
))
8444 expect("identifier");
8445 if (sym
->type
.t
== VT_VOID
)
8446 sym
->type
= int_type
;
                /* apply post-declaration attributes */
8450 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8452 /* put function symbol */
8453 type
.t
&= ~VT_EXTERN
;
8454 sym
= external_sym(v
, &type
, 0, &ad
);
8456 /* static inline functions are just recorded as a kind
8457 of macro. Their code will be emitted at the end of
8458 the compilation unit only if they are used */
8459 if (sym
->type
.t
& VT_INLINE
) {
8460 struct InlineFunc
*fn
;
8461 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8462 strcpy(fn
->filename
, file
->filename
);
8464 skip_or_save_block(&fn
->func_str
);
8465 dynarray_add(&tcc_state
->inline_fns
,
8466 &tcc_state
->nb_inline_fns
, fn
);
8468 /* compute text section */
8469 cur_text_section
= ad
.section
;
8470 if (!cur_text_section
)
8471 cur_text_section
= text_section
;
8477 /* find parameter in function parameter list */
8478 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
8479 if ((sym
->v
& ~SYM_FIELD
) == v
)
8481 tcc_error("declaration for parameter '%s' but no such parameter",
8482 get_tok_str(v
, NULL
));
8484 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8485 tcc_error("storage class specified for '%s'",
8486 get_tok_str(v
, NULL
));
8487 if (sym
->type
.t
!= VT_VOID
)
8488 tcc_error("redefinition of parameter '%s'",
8489 get_tok_str(v
, NULL
));
8490 convert_parameter_type(&type
);
8492 } else if (type
.t
& VT_TYPEDEF
) {
8493 /* save typedefed type */
8494 /* XXX: test storage specifiers ? */
8496 if (sym
&& sym
->sym_scope
== local_scope
) {
8497 if (!is_compatible_types(&sym
->type
, &type
)
8498 || !(sym
->type
.t
& VT_TYPEDEF
))
8499 tcc_error("incompatible redefinition of '%s'",
8500 get_tok_str(v
, NULL
));
8503 sym
= sym_push(v
, &type
, 0, 0);
8507 if (tcc_state
->do_debug
)
8508 tcc_debug_typedef (tcc_state
, sym
);
8509 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8510 && !(type
.t
& VT_EXTERN
)) {
8511 tcc_error("declaration of void object");
8514 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8515 /* external function definition */
8516 /* specific case for func_call attribute */
8518 } else if (!(type
.t
& VT_ARRAY
)) {
8519 /* not lvalue if array */
8522 has_init
= (tok
== '=');
8523 if (has_init
&& (type
.t
& VT_VLA
))
8524 tcc_error("variable length array cannot be initialized");
8525 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8526 || (type
.t
& VT_BTYPE
) == VT_FUNC
8527 /* as with GCC, uninitialized global arrays with no size
8528 are considered extern: */
8529 || ((type
.t
& VT_ARRAY
) && !has_init
8530 && l
== VT_CONST
&& type
.ref
->c
< 0)
8532 /* external variable or function */
8533 type
.t
|= VT_EXTERN
;
8534 sym
= external_sym(v
, &type
, r
, &ad
);
8535 if (ad
.alias_target
) {
8536 /* Aliases need to be emitted when their target
8537 symbol is emitted, even if perhaps unreferenced.
8538 We only support the case where the base is
8539 already defined, otherwise we would need
8540 deferring to emit the aliases until the end of
8541 the compile unit. */
8542 Sym
*alias_target
= sym_find(ad
.alias_target
);
8543 ElfSym
*esym
= elfsym(alias_target
);
8545 tcc_error("unsupported forward __alias__ attribute");
8546 put_extern_sym2(sym
, esym
->st_shndx
,
8547 esym
->st_value
, esym
->st_size
, 1);
8550 if (type
.t
& VT_STATIC
)
8556 else if (l
== VT_CONST
)
8557 /* uninitialized global variables may be overridden */
8558 type
.t
|= VT_EXTERN
;
8559 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
8563 if (is_for_loop_init
)
8575 static void decl(int l
)
8580 /* ------------------------------------------------------------------------- */
8583 /* ------------------------------------------------------------------------- */