2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
/* see also the comment block above: ind is the output code index,
   loc the local variable index, anon_sym the anonymous symbol index;
   rsym is presumably the chain of return jumps -- TODO confirm */
ST_DATA int rsym, anon_sym, ind, loc;

/* symbol stacks, managed by sym_push()/sym_pop() below */
ST_DATA Sym *global_stack;
ST_DATA Sym *local_stack;
ST_DATA Sym *define_stack;
ST_DATA Sym *global_label_stack;
ST_DATA Sym *local_label_stack;

/* Sym allocator state: Syms are carved out of pools and recycled
   through a free list (see __sym_malloc/sym_malloc/sym_free) */
static Sym *sym_free_first;  /* head of the Sym free list */
static void **sym_pools;     /* dynarray of allocated pools */
static int nb_sym_pools;     /* number of entries in sym_pools */

static Sym *all_cleanups, *pending_gotos;
static int local_scope; /* current local scope id, 0 at file scope
                           (recorded into Sym.sym_scope by sym_push) */
static int in_generic;
static int section_sym; /* ELF symbol index of the text section symbol,
                           used as relocation base for stabs entries */

/* value stack; one slot below vstack is reserved so that an empty
   stack is vtop == vstack - 1 (see check_vstack) */
static SValue _vstack[1 + VSTACK_SIZE];
#define vstack (_vstack + 1)

ST_DATA int const_wanted; /* true if constant wanted */
ST_DATA int nocode_wanted; /* no code generation wanted */
#define unevalmask 0xffff /* unevaluated subexpression */
#define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
#define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */

/* Automagical code suppression ----> */
#define CODE_OFF() (nocode_wanted |= 0x20000000)
#define CODE_ON() (nocode_wanted &= ~0x20000000)
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
66 static int gind(void) { CODE_ON(); return ind
; }
68 /* Set 'nocode_wanted' after unconditional jumps */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    /* whatever follows an unconditional jump is dead until a label */
    CODE_OFF();
}
static int gjmp_acs(int t)
{
    int jchain = gjmp(t);
    /* whatever follows an unconditional jump is dead until a label */
    CODE_OFF();
    return jchain;
}
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
77 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
78 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
79 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
81 static int last_line_num
, new_file
, func_ind
; /* debug info control */
82 ST_DATA
const char *funcname
;
83 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
84 static CString initstr
;
87 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
88 #define VT_PTRDIFF_T VT_INT
90 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_LLONG
93 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
97 ST_DATA
struct switch_t
{
101 } **p
; int n
; /* list of case ranges */
102 int def_sym
; /* default symbol */
105 struct switch_t
*prev
;
107 } *cur_switch
; /* current switch */
109 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
110 /*list of temporary local variables on the stack in current function. */
111 ST_DATA
struct temp_local_variable
{
112 int location
; //offset on stack. Svalue.c.i
115 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
116 short nb_temp_local_vars
;
118 static struct scope
{
120 struct { int loc
, num
; } vla
;
121 struct { Sym
*s
; int n
; } cl
;
124 } *cur_scope
, *loop_scope
, *root_scope
;
132 /********************************************************/
133 /* stab debug support */
135 static const struct {
138 } default_debug
[] = {
139 { VT_INT
, "int:t1=r1;-2147483648;2147483647;" },
140 { VT_BYTE
, "char:t2=r2;0;127;" },
142 { VT_LONG
| VT_INT
, "long int:t3=r3;-2147483648;2147483647;" },
144 { VT_LLONG
| VT_LONG
, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
146 { VT_INT
| VT_UNSIGNED
, "unsigned int:t4=r4;0;037777777777;" },
148 { VT_LONG
| VT_INT
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;037777777777;" },
150 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
151 { VT_LLONG
| VT_LONG
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;01777777777777777777777;" },
153 { VT_QLONG
, "__int128:t6=r6;0;-1;" },
154 { VT_QLONG
| VT_UNSIGNED
, "__int128 unsigned:t7=r7;0;-1;" },
155 { VT_LLONG
, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
156 { VT_LLONG
| VT_UNSIGNED
, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
157 { VT_SHORT
, "short int:t10=r10;-32768;32767;" },
158 { VT_SHORT
| VT_UNSIGNED
, "short unsigned int:t11=r11;0;65535;" },
159 { VT_BYTE
| VT_DEFSIGN
, "signed char:t12=r12;-128;127;" },
160 { VT_BYTE
| VT_DEFSIGN
| VT_UNSIGNED
, "unsigned char:t13=r13;0;255;" },
161 { VT_FLOAT
, "float:t14=r1;4;0;" },
162 { VT_DOUBLE
, "double:t15=r1;8;0;" },
163 { VT_LDOUBLE
, "long double:t16=r1;16;0;" },
164 { -1, "_Float32:t17=r1;4;0;" },
165 { -1, "_Float64:t18=r1;8;0;" },
166 { -1, "_Float128:t19=r1;16;0;" },
167 { -1, "_Float32x:t20=r1;8;0;" },
168 { -1, "_Float64x:t21=r1;16;0;" },
169 { -1, "_Decimal32:t22=r1;4;0;" },
170 { -1, "_Decimal64:t23=r1;8;0;" },
171 { -1, "_Decimal128:t24=r1;16;0;" },
172 /* if default char is unsigned */
173 { VT_BYTE
| VT_UNSIGNED
, "unsigned char:t25=r25;0;255;" },
175 { VT_BOOL
, "bool:t26=r26;0;255;" },
176 { VT_VOID
, "void:t27=27" },
179 static int debug_next_type
;
181 static struct debug_hash
{
186 static int n_debug_hash
;
188 static struct debug_info
{
199 struct debug_info
*child
, *next
, *last
, *parent
;
200 } *debug_info
, *debug_info_root
;
202 /********************************************************/
204 #define precedence_parser
205 static void init_prec(void);
207 /********************************************************/
208 #ifndef CONFIG_TCC_ASM
209 ST_FUNC
void asm_instr(void)
211 tcc_error("inline asm() not supported");
213 ST_FUNC
void asm_global_instr(void)
215 tcc_error("inline asm() not supported");
219 /* ------------------------------------------------------------------------- */
220 static void gen_cast(CType
*type
);
221 static void gen_cast_s(int t
);
222 static inline CType
*pointed_type(CType
*type
);
223 static int is_compatible_types(CType
*type1
, CType
*type2
);
224 static int parse_btype(CType
*type
, AttributeDef
*ad
);
225 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
226 static void parse_expr_type(CType
*type
);
227 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
);
228 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
);
229 static void block(int is_expr
);
230 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
231 static void decl(int l
);
232 static int decl0(int l
, int is_for_loop_init
, Sym
*);
233 static void expr_eq(void);
234 static void vla_runtime_type_size(CType
*type
, int *a
);
235 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
236 static inline int64_t expr_const64(void);
237 static void vpush64(int ty
, unsigned long long v
);
238 static void vpush(CType
*type
);
239 static int gvtst(int inv
, int t
);
240 static void gen_inline_functions(TCCState
*s
);
241 static void free_inline_functions(TCCState
*s
);
242 static void skip_or_save_block(TokenString
**str
);
243 static void gv_dup(void);
244 static int get_temp_local_var(int size
,int align
);
245 static void clear_temp_local_var_list();
246 static void cast_error(CType
*st
, CType
*dt
);
248 ST_INLN
int is_float(int t
)
250 int bt
= t
& VT_BTYPE
;
251 return bt
== VT_LDOUBLE
257 static inline int is_integer_btype(int bt
)
266 static int btype_size(int bt
)
268 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
272 bt
== VT_PTR
? PTR_SIZE
: 0;
275 /* returns function return register from type */
276 static int R_RET(int t
)
280 #ifdef TCC_TARGET_X86_64
281 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
283 #elif defined TCC_TARGET_RISCV64
284 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
290 /* returns 2nd function return register, if any */
291 static int R2_RET(int t
)
297 #elif defined TCC_TARGET_X86_64
302 #elif defined TCC_TARGET_RISCV64
309 /* returns true for two-word types */
310 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
312 /* put function return registers to stack value */
313 static void PUT_R_RET(SValue
*sv
, int t
)
315 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
318 /* returns function return register class for type t */
319 static int RC_RET(int t
)
321 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
324 /* returns generic register class for type t */
325 static int RC_TYPE(int t
)
329 #ifdef TCC_TARGET_X86_64
330 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
332 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
334 #elif defined TCC_TARGET_RISCV64
335 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
341 /* returns 2nd register class corresponding to t and rc */
342 static int RC2_TYPE(int t
, int rc
)
344 if (!USING_TWO_WORDS(t
))
359 /* we use our own 'finite' function to avoid potential problems with
360 non standard math libs */
361 /* XXX: endianness dependent */
362 ST_FUNC
int ieee_finite(double d
)
365 memcpy(p
, &d
, sizeof(double));
366 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
369 /* compiling intel long double natively */
370 #if (defined __i386__ || defined __x86_64__) \
371 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
372 # define TCC_IS_NATIVE_387
375 ST_FUNC
void test_lvalue(void)
377 if (!(vtop
->r
& VT_LVAL
))
381 ST_FUNC
void check_vstack(void)
383 if (vtop
!= vstack
- 1)
384 tcc_error("internal compiler error: vstack leak (%d)",
385 (int)(vtop
- vstack
+ 1));
388 /* ------------------------------------------------------------------------- */
389 /* vstack debugging aid */
392 void pv (const char *lbl
, int a
, int b
)
395 for (i
= a
; i
< a
+ b
; ++i
) {
396 SValue
*p
= &vtop
[-i
];
397 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
398 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
403 /* ------------------------------------------------------------------------- */
404 /* start of translation unit info */
405 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
411 /* file info: full path + filename */
412 section_sym
= put_elf_sym(symtab_section
, 0, 0,
413 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
414 text_section
->sh_num
, NULL
);
415 getcwd(buf
, sizeof(buf
));
417 normalize_slashes(buf
);
419 pstrcat(buf
, sizeof(buf
), "/");
420 put_stabs_r(s1
, buf
, N_SO
, 0, 0,
421 text_section
->data_offset
, text_section
, section_sym
);
422 put_stabs_r(s1
, file
->prev
? file
->prev
->filename
: file
->filename
,
424 text_section
->data_offset
, text_section
, section_sym
);
425 for (i
= 0; i
< sizeof (default_debug
) / sizeof (default_debug
[0]); i
++)
426 put_stabs(s1
, default_debug
[i
].name
, N_LSYM
, 0, 0, 0);
428 new_file
= last_line_num
= 0;
430 debug_next_type
= sizeof(default_debug
) / sizeof(default_debug
[0]);
434 /* we're currently 'including' the <command line> */
438 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
439 symbols can be safely used */
440 put_elf_sym(symtab_section
, 0, 0,
441 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
442 SHN_ABS
, file
->filename
);
445 static void tcc_debug_stabs (TCCState
*s1
, const char *str
, int type
, unsigned long value
,
446 Section
*sec
, int sym_index
)
452 (struct debug_sym
*)tcc_realloc (debug_info
->sym
,
453 sizeof(struct debug_sym
) *
454 (debug_info
->n_sym
+ 1));
455 s
= debug_info
->sym
+ debug_info
->n_sym
++;
458 s
->str
= tcc_strdup(str
);
460 s
->sym_index
= sym_index
;
463 put_stabs_r (s1
, str
, type
, 0, 0, value
, sec
, sym_index
);
465 put_stabs (s1
, str
, type
, 0, 0, value
);
468 static void tcc_debug_stabn(int type
, int value
)
470 if (type
== N_LBRAC
) {
471 struct debug_info
*info
=
472 (struct debug_info
*) tcc_mallocz(sizeof (*info
));
475 info
->parent
= debug_info
;
477 if (debug_info
->child
) {
478 if (debug_info
->child
->last
)
479 debug_info
->child
->last
->next
= info
;
481 debug_info
->child
->next
= info
;
482 debug_info
->child
->last
= info
;
485 debug_info
->child
= info
;
488 debug_info_root
= info
;
492 debug_info
->end
= value
;
493 debug_info
= debug_info
->parent
;
497 static void tcc_get_debug_info(TCCState
*s1
, Sym
*s
, CString
*result
)
506 type
= t
->type
.t
& ~(VT_STORAGE
| VT_CONSTANT
| VT_VOLATILE
);
507 if ((type
& VT_BTYPE
) != VT_BYTE
)
509 if (type
== VT_PTR
|| type
== (VT_PTR
| VT_ARRAY
))
510 n
++, t
= t
->type
.ref
;
514 if ((type
& VT_BTYPE
) == VT_STRUCT
) {
518 for (i
= 0; i
< n_debug_hash
; i
++) {
519 if (t
== debug_hash
[i
].type
) {
520 debug_type
= debug_hash
[i
].debug_type
;
524 if (debug_type
== -1) {
525 debug_type
= ++debug_next_type
;
526 debug_hash
= (struct debug_hash
*)
527 tcc_realloc (debug_hash
,
528 (n_debug_hash
+ 1) * sizeof(*debug_hash
));
529 debug_hash
[n_debug_hash
].debug_type
= debug_type
;
530 debug_hash
[n_debug_hash
++].type
= t
;
532 cstr_printf (&str
, "%s:T%d=%c%d",
533 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
534 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
536 IS_UNION (t
->type
.t
) ? 'u' : 's',
539 int pos
, size
, align
;
542 cstr_printf (&str
, "%s:",
543 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
544 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
545 tcc_get_debug_info (s1
, t
, &str
);
546 if (t
->type
.t
& VT_BITFIELD
) {
547 pos
= t
->c
* 8 + BIT_POS(t
->type
.t
);
548 size
= BIT_SIZE(t
->type
.t
);
552 size
= type_size(&t
->type
, &align
) * 8;
554 cstr_printf (&str
, ",%d,%d;", pos
, size
);
556 cstr_printf (&str
, ";");
557 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
561 else if (IS_ENUM(type
)) {
562 Sym
*e
= t
= t
->type
.ref
;
564 debug_type
= ++debug_next_type
;
566 cstr_printf (&str
, "%s:T%d=e",
567 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
568 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
572 cstr_printf (&str
, "%s:",
573 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
574 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
575 cstr_printf (&str
, e
->type
.t
& VT_UNSIGNED
? "%u," : "%d,",
578 cstr_printf (&str
, ";");
579 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
582 else if ((type
& VT_BTYPE
) != VT_FUNC
) {
583 type
&= ~VT_STRUCT_MASK
;
585 debug_type
<= sizeof(default_debug
) / sizeof(default_debug
[0]);
587 if (default_debug
[debug_type
- 1].type
== type
)
589 if (debug_type
> sizeof(default_debug
) / sizeof(default_debug
[0]))
593 cstr_printf (result
, "%d=", ++debug_next_type
);
596 type
= t
->type
.t
& ~(VT_STORAGE
| VT_CONSTANT
| VT_VOLATILE
);
597 if ((type
& VT_BTYPE
) != VT_BYTE
)
600 cstr_printf (result
, "%d=*", ++debug_next_type
);
601 else if (type
== (VT_PTR
| VT_ARRAY
))
602 cstr_printf (result
, "%d=ar1;0;%d;",
603 ++debug_next_type
, t
->type
.ref
->c
- 1);
604 else if (type
== VT_FUNC
) {
605 cstr_printf (result
, "%d=f", ++debug_next_type
);
606 tcc_get_debug_info (s1
, t
->type
.ref
, result
);
613 cstr_printf (result
, "%d", debug_type
);
616 static void tcc_debug_finish (TCCState
*s1
, struct debug_info
*cur
)
620 struct debug_info
*next
= cur
->next
;
622 for (i
= 0; i
< cur
->n_sym
; i
++) {
623 struct debug_sym
*s
= &cur
->sym
[i
];
626 put_stabs_r(s1
, s
->str
, s
->type
, 0, 0, s
->value
,
627 s
->sec
, s
->sym_index
);
629 put_stabs(s1
, s
->str
, s
->type
, 0, 0, s
->value
);
633 put_stabn(s1
, N_LBRAC
, 0, 0, cur
->start
);
634 tcc_debug_finish (s1
, cur
->child
);
635 put_stabn(s1
, N_RBRAC
, 0, 0, cur
->end
);
641 static void tcc_add_debug_info(TCCState
*s1
, int param
, Sym
*s
, Sym
*e
)
644 cstr_new (&debug_str
);
645 for (; s
!= e
; s
= s
->prev
) {
646 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
648 cstr_reset (&debug_str
);
649 cstr_printf (&debug_str
, "%s:%s", get_tok_str(s
->v
, NULL
), param
? "p" : "");
650 tcc_get_debug_info(s1
, s
, &debug_str
);
651 tcc_debug_stabs(s1
, debug_str
.data
, param
? N_PSYM
: N_LSYM
, s
->c
, NULL
, 0);
653 cstr_free (&debug_str
);
656 static void tcc_debug_extern_sym(TCCState
*s1
, Sym
*sym
, int sh_num
, int sym_bind
)
658 Section
*s
= s1
->sections
[sh_num
];
662 cstr_printf (&str
, "%s:%c",
663 get_tok_str(sym
->v
, NULL
),
664 sym_bind
== STB_GLOBAL
? 'G' : local_scope
? 'V' : 'S'
666 tcc_get_debug_info(s1
, sym
, &str
);
667 if (sym_bind
== STB_GLOBAL
)
668 tcc_debug_stabs(s1
, str
.data
, N_GSYM
, 0, NULL
, 0);
670 tcc_debug_stabs(s1
, str
.data
,
671 (sym
->type
.t
& VT_STATIC
) && data_section
== s
672 ? N_STSYM
: N_LCSYM
, 0, s
, sym
->c
);
676 static void tcc_debug_typedef(TCCState
*s1
, Sym
*sym
)
681 cstr_printf (&str
, "%s:t",
682 (sym
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
683 ? "" : get_tok_str(sym
->v
& ~SYM_FIELD
, NULL
));
684 tcc_get_debug_info(s1
, sym
, &str
);
685 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
689 /* put end of translation unit info */
690 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
694 put_stabs_r(s1
, NULL
, N_SO
, 0, 0,
695 text_section
->data_offset
, text_section
, section_sym
);
696 tcc_free(debug_hash
);
699 static BufferedFile
* put_new_file(TCCState
*s1
)
701 BufferedFile
*f
= file
;
702 /* use upper file if from inline ":asm:" */
703 if (f
->filename
[0] == ':')
706 put_stabs_r(s1
, f
->filename
, N_SOL
, 0, 0, ind
, text_section
, section_sym
);
707 new_file
= last_line_num
= 0;
712 /* generate line number info */
713 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
717 || cur_text_section
!= text_section
718 || !(f
= put_new_file(s1
))
719 || last_line_num
== f
->line_num
)
721 if (func_ind
!= -1) {
722 put_stabn(s1
, N_SLINE
, 0, f
->line_num
, ind
- func_ind
);
724 /* from tcc_assemble */
725 put_stabs_r(s1
, NULL
, N_SLINE
, 0, f
->line_num
, ind
, text_section
, section_sym
);
727 last_line_num
= f
->line_num
;
730 /* put function symbol */
731 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
737 debug_info_root
= NULL
;
739 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
740 if (!(f
= put_new_file(s1
)))
742 cstr_new (&debug_str
);
743 cstr_printf(&debug_str
, "%s:%c", funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
744 tcc_get_debug_info(s1
, sym
->type
.ref
, &debug_str
);
745 put_stabs_r(s1
, debug_str
.data
, N_FUN
, 0, f
->line_num
, 0, cur_text_section
, sym
->c
);
746 cstr_free (&debug_str
);
751 /* put function size */
752 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
756 tcc_debug_stabn(N_RBRAC
, size
);
757 tcc_debug_finish (s1
, debug_info_root
);
760 /* put alternative filename */
761 ST_FUNC
void tcc_debug_putfile(TCCState
*s1
, const char *filename
)
763 if (0 == strcmp(file
->filename
, filename
))
765 pstrcpy(file
->filename
, sizeof(file
->filename
), filename
);
769 /* begin of #include */
770 ST_FUNC
void tcc_debug_bincl(TCCState
*s1
)
774 put_stabs(s1
, file
->filename
, N_BINCL
, 0, 0, 0);
778 /* end of #include */
779 ST_FUNC
void tcc_debug_eincl(TCCState
*s1
)
783 put_stabn(s1
, N_EINCL
, 0, 0, 0);
787 /* ------------------------------------------------------------------------- */
788 /* initialize vstack and types. This must be done also for tcc -E */
789 ST_FUNC
void tccgen_init(TCCState
*s1
)
792 memset(vtop
, 0, sizeof *vtop
);
794 /* define some often used types */
797 char_type
.t
= VT_BYTE
;
798 if (s1
->char_is_unsigned
)
799 char_type
.t
|= VT_UNSIGNED
;
800 char_pointer_type
= char_type
;
801 mk_pointer(&char_pointer_type
);
803 func_old_type
.t
= VT_FUNC
;
804 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
805 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
806 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
807 #ifdef precedence_parser
813 ST_FUNC
int tccgen_compile(TCCState
*s1
)
815 cur_text_section
= NULL
;
817 anon_sym
= SYM_FIRST_ANOM
;
820 nocode_wanted
= 0x80000000;
824 #ifdef TCC_TARGET_ARM
828 printf("%s: **** new file\n", file
->filename
);
830 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
833 gen_inline_functions(s1
);
835 /* end of translation unit info */
840 ST_FUNC
void tccgen_finish(TCCState
*s1
)
843 free_inline_functions(s1
);
844 sym_pop(&global_stack
, NULL
, 0);
845 sym_pop(&local_stack
, NULL
, 0);
846 /* free preprocessor macros */
849 dynarray_reset(&sym_pools
, &nb_sym_pools
);
850 sym_free_first
= NULL
;
853 /* ------------------------------------------------------------------------- */
854 ST_FUNC ElfSym
*elfsym(Sym
*s
)
858 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
861 /* apply storage attributes to Elf symbol */
862 ST_FUNC
void update_storage(Sym
*sym
)
865 int sym_bind
, old_sym_bind
;
871 if (sym
->a
.visibility
)
872 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
875 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
876 sym_bind
= STB_LOCAL
;
877 else if (sym
->a
.weak
)
880 sym_bind
= STB_GLOBAL
;
881 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
882 if (sym_bind
!= old_sym_bind
) {
883 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
887 if (sym
->a
.dllimport
)
888 esym
->st_other
|= ST_PE_IMPORT
;
889 if (sym
->a
.dllexport
)
890 esym
->st_other
|= ST_PE_EXPORT
;
894 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
895 get_tok_str(sym
->v
, NULL
),
896 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
904 /* ------------------------------------------------------------------------- */
905 /* update sym->c so that it points to an external symbol in section
906 'section' with value 'value' */
908 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
909 addr_t value
, unsigned long size
,
910 int can_add_underscore
)
912 int sym_type
, sym_bind
, info
, other
, t
;
918 name
= get_tok_str(sym
->v
, NULL
);
920 if ((t
& VT_BTYPE
) == VT_FUNC
) {
922 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
923 sym_type
= STT_NOTYPE
;
924 if ((t
& (VT_BTYPE
|VT_ASM_FUNC
)) == VT_ASM_FUNC
)
927 sym_type
= STT_OBJECT
;
929 if (t
& (VT_STATIC
| VT_INLINE
))
930 sym_bind
= STB_LOCAL
;
932 sym_bind
= STB_GLOBAL
;
936 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
937 Sym
*ref
= sym
->type
.ref
;
938 if (ref
->a
.nodecorate
) {
939 can_add_underscore
= 0;
941 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
942 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
944 other
|= ST_PE_STDCALL
;
945 can_add_underscore
= 0;
950 if (sym
->asm_label
) {
951 name
= get_tok_str(sym
->asm_label
, NULL
);
952 can_add_underscore
= 0;
955 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
957 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
961 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
962 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
964 if (tcc_state
->do_debug
965 && sym_type
!= STT_FUNC
966 && sym
->v
< SYM_FIRST_ANOM
)
967 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
);
971 esym
->st_value
= value
;
972 esym
->st_size
= size
;
973 esym
->st_shndx
= sh_num
;
978 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
979 addr_t value
, unsigned long size
)
981 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
982 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
985 /* add a new relocation entry to symbol 'sym' in section 's' */
986 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
991 if (nocode_wanted
&& s
== cur_text_section
)
996 put_extern_sym(sym
, NULL
, 0, 0);
1000 /* now we can add ELF relocation info */
1001 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
1005 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
1007 greloca(s
, sym
, offset
, type
, 0);
1011 /* ------------------------------------------------------------------------- */
1012 /* symbol allocator */
1013 static Sym
*__sym_malloc(void)
1015 Sym
*sym_pool
, *sym
, *last_sym
;
1018 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
1019 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
1021 last_sym
= sym_free_first
;
1023 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
1024 sym
->next
= last_sym
;
1028 sym_free_first
= last_sym
;
1032 static inline Sym
*sym_malloc(void)
1036 sym
= sym_free_first
;
1038 sym
= __sym_malloc();
1039 sym_free_first
= sym
->next
;
1042 sym
= tcc_malloc(sizeof(Sym
));
1047 ST_INLN
void sym_free(Sym
*sym
)
1050 sym
->next
= sym_free_first
;
1051 sym_free_first
= sym
;
1057 /* push, without hashing */
1058 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
1063 memset(s
, 0, sizeof *s
);
1073 /* find a symbol and return its associated structure. 's' is the top
1074 of the symbol stack */
1075 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
1080 else if (s
->v
== -1)
1087 /* structure lookup */
1088 ST_INLN Sym
*struct_find(int v
)
1091 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1093 return table_ident
[v
]->sym_struct
;
1096 /* find an identifier */
1097 ST_INLN Sym
*sym_find(int v
)
1100 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1102 return table_ident
[v
]->sym_identifier
;
1105 static int sym_scope(Sym
*s
)
1107 if (IS_ENUM_VAL (s
->type
.t
))
1108 return s
->type
.ref
->sym_scope
;
1110 return s
->sym_scope
;
1113 /* push a given symbol on the symbol stack */
1114 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
1123 s
= sym_push2(ps
, v
, type
->t
, c
);
1124 s
->type
.ref
= type
->ref
;
1126 /* don't record fields or anonymous symbols */
1128 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1129 /* record symbol in token array */
1130 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1132 ps
= &ts
->sym_struct
;
1134 ps
= &ts
->sym_identifier
;
1137 s
->sym_scope
= local_scope
;
1138 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
1139 tcc_error("redeclaration of '%s'",
1140 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
1145 /* push a global identifier */
1146 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
1149 s
= sym_push2(&global_stack
, v
, t
, c
);
1150 s
->r
= VT_CONST
| VT_SYM
;
1151 /* don't record anonymous symbol */
1152 if (v
< SYM_FIRST_ANOM
) {
1153 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
1154 /* modify the top most local identifier, so that sym_identifier will
1155 point to 's' when popped; happens when called from inline asm */
1156 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
1157 ps
= &(*ps
)->prev_tok
;
1164 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1165 pop them yet from the list, but do remove them from the token array. */
1166 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
1176 /* remove symbol in token array */
1178 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1179 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1181 ps
= &ts
->sym_struct
;
1183 ps
= &ts
->sym_identifier
;
1194 /* ------------------------------------------------------------------------- */
1195 static void vcheck_cmp(void)
1197 /* cannot let cpu flags if other instruction are generated. Also
1198 avoid leaving VT_JMP anywhere except on the top of the stack
1199 because it would complicate the code generator.
1201 Don't do this when nocode_wanted. vtop might come from
1202 !nocode_wanted regions (see 88_codeopt.c) and transforming
1203 it to a register without actually generating code is wrong
1204 as their value might still be used for real. All values
1205 we push under nocode_wanted will eventually be popped
1206 again, so that the VT_CMP/VT_JMP value will be in vtop
1207 when code is unsuppressed again. */
1209 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
1213 static void vsetc(CType
*type
, int r
, CValue
*vc
)
1215 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1216 tcc_error("memory full (vstack)");
1221 vtop
->r2
= VT_CONST
;
1226 ST_FUNC
void vswap(void)
1236 /* pop stack value */
1237 ST_FUNC
void vpop(void)
1240 v
= vtop
->r
& VT_VALMASK
;
1241 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1242 /* for x86, we need to pop the FP stack */
1243 if (v
== TREG_ST0
) {
1244 o(0xd8dd); /* fstp %st(0) */
1248 /* need to put correct jump if && or || without test */
1255 /* push constant of type "type" with useless value */
1256 static void vpush(CType
*type
)
1258 vset(type
, VT_CONST
, 0);
1261 /* push arbitrary 64bit constant */
1262 static void vpush64(int ty
, unsigned long long v
)
1269 vsetc(&ctype
, VT_CONST
, &cval
);
1272 /* push integer constant */
1273 ST_FUNC
void vpushi(int v
)
1278 /* push a pointer sized constant */
1279 static void vpushs(addr_t v
)
1281 vpush64(VT_SIZE_T
, v
);
1284 /* push long long constant */
1285 static inline void vpushll(long long v
)
1287 vpush64(VT_LLONG
, v
);
1290 ST_FUNC
void vset(CType
*type
, int r
, int v
)
1294 vsetc(type
, r
, &cval
);
1297 static void vseti(int r
, int v
)
1305 ST_FUNC
void vpushv(SValue
*v
)
1307 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1308 tcc_error("memory full (vstack)");
1313 static void vdup(void)
1318 /* rotate n first stack elements to the bottom
1319 I1 ... In -> I2 ... In I1 [top is right]
1321 ST_FUNC
void vrotb(int n
)
1328 for(i
=-n
+1;i
!=0;i
++)
1329 vtop
[i
] = vtop
[i
+1];
1333 /* rotate the n elements before entry e towards the top
1334 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1336 ST_FUNC
void vrote(SValue
*e
, int n
)
1343 for(i
= 0;i
< n
- 1; i
++)
1348 /* rotate n first stack elements to the top
1349 I1 ... In -> In I1 ... I(n-1) [top is right]
1351 ST_FUNC
void vrott(int n
)
1356 /* ------------------------------------------------------------------------- */
1357 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1359 /* called from generators to set the result from relational ops */
1360 ST_FUNC
void vset_VT_CMP(int op
)
1368 /* called once before asking generators to load VT_CMP to a register */
1369 static void vset_VT_JMP(void)
1371 int op
= vtop
->cmp_op
;
1373 if (vtop
->jtrue
|| vtop
->jfalse
) {
1374 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1375 int inv
= op
& (op
< 2); /* small optimization */
1376 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1378 /* otherwise convert flags (rsp. 0/1) to register */
1380 if (op
< 2) /* doesn't seem to happen */
1385 /* Set CPU Flags, doesn't yet jump */
1386 static void gvtst_set(int inv
, int t
)
1390 if (vtop
->r
!= VT_CMP
) {
1393 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1394 vset_VT_CMP(vtop
->c
.i
!= 0);
1397 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1398 *p
= gjmp_append(*p
, t
);
1401 /* Generate value test
1403 * Generate a test for any value (jump, comparison and integers) */
1404 static int gvtst(int inv
, int t
)
1409 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1411 x
= u
, u
= t
, t
= x
;
1414 /* jump to the wanted target */
1416 t
= gjmp_cond(op
^ inv
, t
);
1419 /* resolve complementary jumps to here */
1426 /* generate a zero or nozero test */
1427 static void gen_test_zero(int op
)
1429 if (vtop
->r
== VT_CMP
) {
1433 vtop
->jfalse
= vtop
->jtrue
;
1443 /* ------------------------------------------------------------------------- */
1444 /* push a symbol value of TYPE */
1445 ST_FUNC
void vpushsym(CType
*type
, Sym
*sym
)
1449 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1453 /* Return a static symbol pointing to a section */
1454 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1460 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1461 sym
->type
.t
|= VT_STATIC
;
1462 put_extern_sym(sym
, sec
, offset
, size
);
1466 /* push a reference to a section offset by adding a dummy symbol */
1467 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1469 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1472 /* define a new external reference to a symbol 'v' of type 'u' */
1473 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1479 /* push forward reference */
1480 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1481 s
->type
.ref
= type
->ref
;
1482 } else if (IS_ASM_SYM(s
)) {
1483 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1484 s
->type
.ref
= type
->ref
;
1490 /* create an external reference with no specific type similar to asm labels.
1491 This avoids type conflicts if the symbol is used from C too */
1492 ST_FUNC Sym
*external_helper_sym(int v
)
1494 CType ct
= { VT_ASM_FUNC
, NULL
};
1495 return external_global_sym(v
, &ct
);
1498 /* push a reference to an helper function (such as memmove) */
1499 ST_FUNC
void vpush_helper_func(int v
)
1501 vpushsym(&func_old_type
, external_helper_sym(v
));
1504 /* Merge symbol attributes. */
1505 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1507 if (sa1
->aligned
&& !sa
->aligned
)
1508 sa
->aligned
= sa1
->aligned
;
1509 sa
->packed
|= sa1
->packed
;
1510 sa
->weak
|= sa1
->weak
;
1511 if (sa1
->visibility
!= STV_DEFAULT
) {
1512 int vis
= sa
->visibility
;
1513 if (vis
== STV_DEFAULT
1514 || vis
> sa1
->visibility
)
1515 vis
= sa1
->visibility
;
1516 sa
->visibility
= vis
;
1518 sa
->dllexport
|= sa1
->dllexport
;
1519 sa
->nodecorate
|= sa1
->nodecorate
;
1520 sa
->dllimport
|= sa1
->dllimport
;
1523 /* Merge function attributes. */
1524 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1526 if (fa1
->func_call
&& !fa
->func_call
)
1527 fa
->func_call
= fa1
->func_call
;
1528 if (fa1
->func_type
&& !fa
->func_type
)
1529 fa
->func_type
= fa1
->func_type
;
1530 if (fa1
->func_args
&& !fa
->func_args
)
1531 fa
->func_args
= fa1
->func_args
;
1532 if (fa1
->func_noreturn
)
1533 fa
->func_noreturn
= 1;
1540 /* Merge attributes. */
1541 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1543 merge_symattr(&ad
->a
, &ad1
->a
);
1544 merge_funcattr(&ad
->f
, &ad1
->f
);
1547 ad
->section
= ad1
->section
;
1548 if (ad1
->alias_target
)
1549 ad
->alias_target
= ad1
->alias_target
;
1551 ad
->asm_label
= ad1
->asm_label
;
1553 ad
->attr_mode
= ad1
->attr_mode
;
1556 /* Merge some type attributes. */
1557 static void patch_type(Sym
*sym
, CType
*type
)
1559 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1560 if (!(sym
->type
.t
& VT_EXTERN
))
1561 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1562 sym
->type
.t
&= ~VT_EXTERN
;
1565 if (IS_ASM_SYM(sym
)) {
1566 /* stay static if both are static */
1567 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1568 sym
->type
.ref
= type
->ref
;
1571 if (!is_compatible_types(&sym
->type
, type
)) {
1572 tcc_error("incompatible types for redefinition of '%s'",
1573 get_tok_str(sym
->v
, NULL
));
1575 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1576 int static_proto
= sym
->type
.t
& VT_STATIC
;
1577 /* warn if static follows non-static function declaration */
1578 if ((type
->t
& VT_STATIC
) && !static_proto
1579 /* XXX this test for inline shouldn't be here. Until we
1580 implement gnu-inline mode again it silences a warning for
1581 mingw caused by our workarounds. */
1582 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1583 tcc_warning("static storage ignored for redefinition of '%s'",
1584 get_tok_str(sym
->v
, NULL
));
1586 /* set 'inline' if both agree or if one has static */
1587 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1588 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1589 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1590 static_proto
|= VT_INLINE
;
1593 if (0 == (type
->t
& VT_EXTERN
)) {
1594 struct FuncAttr f
= sym
->type
.ref
->f
;
1595 /* put complete type, use static from prototype */
1596 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1597 sym
->type
.ref
= type
->ref
;
1598 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1600 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1603 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1604 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1605 sym
->type
.ref
= type
->ref
;
1609 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1610 /* set array size if it was omitted in extern declaration */
1611 sym
->type
.ref
->c
= type
->ref
->c
;
1613 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1614 tcc_warning("storage mismatch for redefinition of '%s'",
1615 get_tok_str(sym
->v
, NULL
));
1619 /* Merge some storage attributes. */
1620 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1623 patch_type(sym
, type
);
1625 #ifdef TCC_TARGET_PE
1626 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1627 tcc_error("incompatible dll linkage for redefinition of '%s'",
1628 get_tok_str(sym
->v
, NULL
));
1630 merge_symattr(&sym
->a
, &ad
->a
);
1632 sym
->asm_label
= ad
->asm_label
;
1633 update_storage(sym
);
1636 /* copy sym to other stack */
1637 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1640 s
= sym_malloc(), *s
= *s0
;
1641 s
->prev
= *ps
, *ps
= s
;
1642 if (s
->v
< SYM_FIRST_ANOM
) {
1643 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1644 s
->prev_tok
= *ps
, *ps
= s
;
1649 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1650 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1652 int bt
= s
->type
.t
& VT_BTYPE
;
1653 if (bt
== VT_FUNC
|| bt
== VT_PTR
) {
1654 Sym
**sp
= &s
->type
.ref
;
1655 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1656 Sym
*s2
= sym_copy(s
, ps
);
1657 sp
= &(*sp
= s2
)->next
;
1658 sym_copy_ref(s2
, ps
);
1663 /* define a new external reference to a symbol 'v' */
1664 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1668 /* look for global symbol */
1670 while (s
&& s
->sym_scope
)
1674 /* push forward reference */
1675 s
= global_identifier_push(v
, type
->t
, 0);
1678 s
->asm_label
= ad
->asm_label
;
1679 s
->type
.ref
= type
->ref
;
1680 /* copy type to the global stack */
1682 sym_copy_ref(s
, &global_stack
);
1684 patch_storage(s
, ad
, type
);
1686 /* push variables on local_stack if any */
1687 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1688 s
= sym_copy(s
, &local_stack
);
1692 /* save registers up to (vtop - n) stack entry */
1693 ST_FUNC
void save_regs(int n
)
1696 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1700 /* save r to the memory stack, and mark it as being free */
1701 ST_FUNC
void save_reg(int r
)
1703 save_reg_upstack(r
, 0);
1706 /* save r to the memory stack, and mark it as being free,
1707 if seen up to (vtop - n) stack entry */
1708 ST_FUNC
void save_reg_upstack(int r
, int n
)
1710 int l
, size
, align
, bt
;
1713 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1718 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1719 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1720 /* must save value on stack if not already done */
1722 bt
= p
->type
.t
& VT_BTYPE
;
1725 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1728 size
= type_size(&sv
.type
, &align
);
1729 l
= get_temp_local_var(size
,align
);
1730 sv
.r
= VT_LOCAL
| VT_LVAL
;
1732 store(p
->r
& VT_VALMASK
, &sv
);
1733 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1734 /* x86 specific: need to pop fp register ST0 if saved */
1735 if (r
== TREG_ST0
) {
1736 o(0xd8dd); /* fstp %st(0) */
1739 /* special long long case */
1740 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1745 /* mark that stack entry as being saved on the stack */
1746 if (p
->r
& VT_LVAL
) {
1747 /* also clear the bounded flag because the
1748 relocation address of the function was stored in
1750 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1752 p
->r
= VT_LVAL
| VT_LOCAL
;
1760 #ifdef TCC_TARGET_ARM
1761 /* find a register of class 'rc2' with at most one reference on stack.
1762 * If none, call get_reg(rc) */
1763 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1768 for(r
=0;r
<NB_REGS
;r
++) {
1769 if (reg_classes
[r
] & rc2
) {
1772 for(p
= vstack
; p
<= vtop
; p
++) {
1773 if ((p
->r
& VT_VALMASK
) == r
||
1785 /* find a free register of class 'rc'. If none, save one register */
1786 ST_FUNC
int get_reg(int rc
)
1791 /* find a free register */
1792 for(r
=0;r
<NB_REGS
;r
++) {
1793 if (reg_classes
[r
] & rc
) {
1796 for(p
=vstack
;p
<=vtop
;p
++) {
1797 if ((p
->r
& VT_VALMASK
) == r
||
1806 /* no register left : free the first one on the stack (VERY
1807 IMPORTANT to start from the bottom to ensure that we don't
1808 spill registers used in gen_opi()) */
1809 for(p
=vstack
;p
<=vtop
;p
++) {
1810 /* look at second register (if long long) */
1812 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1814 r
= p
->r
& VT_VALMASK
;
1815 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1821 /* Should never comes here */
1825 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1826 static int get_temp_local_var(int size
,int align
){
1828 struct temp_local_variable
*temp_var
;
1835 for(i
=0;i
<nb_temp_local_vars
;i
++){
1836 temp_var
=&arr_temp_local_vars
[i
];
1837 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1840 /*check if temp_var is free*/
1842 for(p
=vstack
;p
<=vtop
;p
++) {
1844 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1845 if(p
->c
.i
==temp_var
->location
){
1852 found_var
=temp_var
->location
;
1858 loc
= (loc
- size
) & -align
;
1859 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1860 temp_var
=&arr_temp_local_vars
[i
];
1861 temp_var
->location
=loc
;
1862 temp_var
->size
=size
;
1863 temp_var
->align
=align
;
1864 nb_temp_local_vars
++;
1871 static void clear_temp_local_var_list(){
1872 nb_temp_local_vars
=0;
1875 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1877 static void move_reg(int r
, int s
, int t
)
1891 /* get address of vtop (vtop MUST BE an lvalue) */
1892 ST_FUNC
void gaddrof(void)
1894 vtop
->r
&= ~VT_LVAL
;
1895 /* tricky: if saved lvalue, then we can go back to lvalue */
1896 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1897 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1900 #ifdef CONFIG_TCC_BCHECK
1901 /* generate a bounded pointer addition */
1902 static void gen_bounded_ptr_add(void)
1904 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1909 vpush_helper_func(TOK___bound_ptr_add
);
1914 /* returned pointer is in REG_IRET */
1915 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1918 /* relocation offset of the bounding function call point */
1919 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1922 /* patch pointer addition in vtop so that pointer dereferencing is
1924 static void gen_bounded_ptr_deref(void)
1934 size
= type_size(&vtop
->type
, &align
);
1936 case 1: func
= TOK___bound_ptr_indir1
; break;
1937 case 2: func
= TOK___bound_ptr_indir2
; break;
1938 case 4: func
= TOK___bound_ptr_indir4
; break;
1939 case 8: func
= TOK___bound_ptr_indir8
; break;
1940 case 12: func
= TOK___bound_ptr_indir12
; break;
1941 case 16: func
= TOK___bound_ptr_indir16
; break;
1943 /* may happen with struct member access */
1946 sym
= external_helper_sym(func
);
1948 put_extern_sym(sym
, NULL
, 0, 0);
1949 /* patch relocation */
1950 /* XXX: find a better solution ? */
1951 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1952 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1955 /* generate lvalue bound code */
1956 static void gbound(void)
1960 vtop
->r
&= ~VT_MUSTBOUND
;
1961 /* if lvalue, then use checking code before dereferencing */
1962 if (vtop
->r
& VT_LVAL
) {
1963 /* if not VT_BOUNDED value, then make one */
1964 if (!(vtop
->r
& VT_BOUNDED
)) {
1965 /* must save type because we must set it to int to get pointer */
1967 vtop
->type
.t
= VT_PTR
;
1970 gen_bounded_ptr_add();
1974 /* then check for dereferencing */
1975 gen_bounded_ptr_deref();
1979 /* we need to call __bound_ptr_add before we start to load function
1980 args into registers */
1981 ST_FUNC
void gbound_args(int nb_args
)
1986 for (i
= 1; i
<= nb_args
; ++i
)
1987 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1993 sv
= vtop
- nb_args
;
1994 if (sv
->r
& VT_SYM
) {
1998 #ifndef TCC_TARGET_PE
1999 || v
== TOK_sigsetjmp
2000 || v
== TOK___sigsetjmp
2003 vpush_helper_func(TOK___bound_setjmp
);
2006 func_bound_add_epilog
= 1;
2008 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
2009 if (v
== TOK_alloca
)
2010 func_bound_add_epilog
= 1;
2015 /* Add bounds for local symbols from S to E (via ->prev) */
2016 static void add_local_bounds(Sym
*s
, Sym
*e
)
2018 for (; s
!= e
; s
= s
->prev
) {
2019 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
2021 /* Add arrays/structs/unions because we always take address */
2022 if ((s
->type
.t
& VT_ARRAY
)
2023 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
2024 || s
->a
.addrtaken
) {
2025 /* add local bound info */
2026 int align
, size
= type_size(&s
->type
, &align
);
2027 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
2028 2 * sizeof(addr_t
));
2029 bounds_ptr
[0] = s
->c
;
2030 bounds_ptr
[1] = size
;
2036 /* Wrapper around sym_pop, that potentially also registers local bounds. */
2037 static void pop_local_syms(Sym
**ptop
, Sym
*b
, int keep
, int ellipsis
)
2039 #ifdef CONFIG_TCC_BCHECK
2040 if (tcc_state
->do_bounds_check
&& !ellipsis
&& !keep
)
2041 add_local_bounds(*ptop
, b
);
2043 if (tcc_state
->do_debug
)
2044 tcc_add_debug_info (tcc_state
, !local_scope
, *ptop
, b
);
2045 sym_pop(ptop
, b
, keep
);
2048 static void incr_bf_adr(int o
)
2050 vtop
->type
= char_pointer_type
;
2054 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
2058 /* single-byte load mode for packed or otherwise unaligned bitfields */
2059 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
2062 save_reg_upstack(vtop
->r
, 1);
2063 vpush64(type
->t
& VT_BTYPE
, 0); // B X
2064 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2073 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
2075 vpushi((1 << n
) - 1), gen_op('&');
2078 vpushi(bits
), gen_op(TOK_SHL
);
2081 bits
+= n
, bit_size
-= n
, o
= 1;
2084 if (!(type
->t
& VT_UNSIGNED
)) {
2085 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
2086 vpushi(n
), gen_op(TOK_SHL
);
2087 vpushi(n
), gen_op(TOK_SAR
);
2091 /* single-byte store mode for packed or otherwise unaligned bitfields */
2092 static void store_packed_bf(int bit_pos
, int bit_size
)
2094 int bits
, n
, o
, m
, c
;
2096 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2098 save_reg_upstack(vtop
->r
, 1);
2099 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2101 incr_bf_adr(o
); // X B
2103 c
? vdup() : gv_dup(); // B V X
2106 vpushi(bits
), gen_op(TOK_SHR
);
2108 vpushi(bit_pos
), gen_op(TOK_SHL
);
2113 m
= ((1 << n
) - 1) << bit_pos
;
2114 vpushi(m
), gen_op('&'); // X B V1
2115 vpushv(vtop
-1); // X B V1 B
2116 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
2117 gen_op('&'); // X B V1 B1
2118 gen_op('|'); // X B V2
2120 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
2121 vstore(), vpop(); // X B
2122 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
2127 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
2130 if (0 == sv
->type
.ref
)
2132 t
= sv
->type
.ref
->auxtype
;
2133 if (t
!= -1 && t
!= VT_STRUCT
) {
2134 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
2140 /* store vtop a register belonging to class 'rc'. lvalues are
2141 converted to values. Cannot be used if cannot be converted to
2142 register value (such as structures). */
2143 ST_FUNC
int gv(int rc
)
2145 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
2146 int bit_pos
, bit_size
, size
, align
;
2148 /* NOTE: get_reg can modify vstack[] */
2149 if (vtop
->type
.t
& VT_BITFIELD
) {
2152 bit_pos
= BIT_POS(vtop
->type
.t
);
2153 bit_size
= BIT_SIZE(vtop
->type
.t
);
2154 /* remove bit field info to avoid loops */
2155 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
2158 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
2159 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
2160 type
.t
|= VT_UNSIGNED
;
2162 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
2164 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2169 if (r
== VT_STRUCT
) {
2170 load_packed_bf(&type
, bit_pos
, bit_size
);
2172 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
2173 /* cast to int to propagate signedness in following ops */
2175 /* generate shifts */
2176 vpushi(bits
- (bit_pos
+ bit_size
));
2178 vpushi(bits
- bit_size
);
2179 /* NOTE: transformed to SHR if unsigned */
2184 if (is_float(vtop
->type
.t
) &&
2185 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2186 /* CPUs usually cannot use float constants, so we store them
2187 generically in data segment */
2188 init_params p
= { data_section
};
2189 unsigned long offset
;
2190 size
= type_size(&vtop
->type
, &align
);
2192 size
= 0, align
= 1;
2193 offset
= section_add(p
.sec
, size
, align
);
2194 vpush_ref(&vtop
->type
, p
.sec
, offset
, size
);
2196 init_putv(&p
, &vtop
->type
, offset
);
2199 #ifdef CONFIG_TCC_BCHECK
2200 if (vtop
->r
& VT_MUSTBOUND
)
2204 bt
= vtop
->type
.t
& VT_BTYPE
;
2206 #ifdef TCC_TARGET_RISCV64
2208 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
2211 rc2
= RC2_TYPE(bt
, rc
);
2213 /* need to reload if:
2215 - lvalue (need to dereference pointer)
2216 - already a register, but not in the right class */
2217 r
= vtop
->r
& VT_VALMASK
;
2218 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
2219 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
2221 if (!r_ok
|| !r2_ok
) {
2225 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
2226 int original_type
= vtop
->type
.t
;
2228 /* two register type load :
2229 expand to two words temporarily */
2230 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2232 unsigned long long ll
= vtop
->c
.i
;
2233 vtop
->c
.i
= ll
; /* first word */
2235 vtop
->r
= r
; /* save register value */
2236 vpushi(ll
>> 32); /* second word */
2237 } else if (vtop
->r
& VT_LVAL
) {
2238 /* We do not want to modifier the long long pointer here.
2239 So we save any other instances down the stack */
2240 save_reg_upstack(vtop
->r
, 1);
2241 /* load from memory */
2242 vtop
->type
.t
= load_type
;
2245 vtop
[-1].r
= r
; /* save register value */
2246 /* increment pointer to get second word */
2247 vtop
->type
.t
= VT_PTRDIFF_T
;
2252 vtop
->type
.t
= load_type
;
2254 /* move registers */
2257 if (r2_ok
&& vtop
->r2
< VT_CONST
)
2260 vtop
[-1].r
= r
; /* save register value */
2261 vtop
->r
= vtop
[-1].r2
;
2263 /* Allocate second register. Here we rely on the fact that
2264 get_reg() tries first to free r2 of an SValue. */
2268 /* write second register */
2271 vtop
->type
.t
= original_type
;
2273 if (vtop
->r
== VT_CMP
)
2275 /* one register type load */
2280 #ifdef TCC_TARGET_C67
2281 /* uses register pairs for doubles */
2282 if (bt
== VT_DOUBLE
)
2289 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2290 ST_FUNC
void gv2(int rc1
, int rc2
)
2292 /* generate more generic register first. But VT_JMP or VT_CMP
2293 values must be generated first in all cases to avoid possible
2295 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
2300 /* test if reload is needed for first register */
2301 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
2311 /* test if reload is needed for first register */
2312 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
2319 /* expand 64bit on stack in two ints */
2320 ST_FUNC
void lexpand(void)
2323 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
2324 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
2325 if (v
== VT_CONST
) {
2328 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
2334 vtop
[0].r
= vtop
[-1].r2
;
2335 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2337 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2342 /* build a long long from two ints */
2343 static void lbuild(int t
)
2345 gv2(RC_INT
, RC_INT
);
2346 vtop
[-1].r2
= vtop
[0].r
;
2347 vtop
[-1].type
.t
= t
;
2352 /* convert stack entry to register and duplicate its value in another
2354 static void gv_dup(void)
2360 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2361 if (t
& VT_BITFIELD
) {
2371 /* stack: H L L1 H1 */
2381 /* duplicate value */
2391 /* generate CPU independent (unsigned) long long operations */
2392 static void gen_opl(int op
)
2394 int t
, a
, b
, op1
, c
, i
;
2396 unsigned short reg_iret
= REG_IRET
;
2397 unsigned short reg_lret
= REG_IRE2
;
2403 func
= TOK___divdi3
;
2406 func
= TOK___udivdi3
;
2409 func
= TOK___moddi3
;
2412 func
= TOK___umoddi3
;
2419 /* call generic long long function */
2420 vpush_helper_func(func
);
2425 vtop
->r2
= reg_lret
;
2433 //pv("gen_opl A",0,2);
2439 /* stack: L1 H1 L2 H2 */
2444 vtop
[-2] = vtop
[-3];
2447 /* stack: H1 H2 L1 L2 */
2448 //pv("gen_opl B",0,4);
2454 /* stack: H1 H2 L1 L2 ML MH */
2457 /* stack: ML MH H1 H2 L1 L2 */
2461 /* stack: ML MH H1 L2 H2 L1 */
2466 /* stack: ML MH M1 M2 */
2469 } else if (op
== '+' || op
== '-') {
2470 /* XXX: add non carry method too (for MIPS or alpha) */
2476 /* stack: H1 H2 (L1 op L2) */
2479 gen_op(op1
+ 1); /* TOK_xxxC2 */
2482 /* stack: H1 H2 (L1 op L2) */
2485 /* stack: (L1 op L2) H1 H2 */
2487 /* stack: (L1 op L2) (H1 op H2) */
2495 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2496 t
= vtop
[-1].type
.t
;
2500 /* stack: L H shift */
2502 /* constant: simpler */
2503 /* NOTE: all comments are for SHL. the other cases are
2504 done by swapping words */
2515 if (op
!= TOK_SAR
) {
2548 /* XXX: should provide a faster fallback on x86 ? */
2551 func
= TOK___ashrdi3
;
2554 func
= TOK___lshrdi3
;
2557 func
= TOK___ashldi3
;
2563 /* compare operations */
2569 /* stack: L1 H1 L2 H2 */
2571 vtop
[-1] = vtop
[-2];
2573 /* stack: L1 L2 H1 H2 */
2577 /* when values are equal, we need to compare low words. since
2578 the jump is inverted, we invert the test too. */
2581 else if (op1
== TOK_GT
)
2583 else if (op1
== TOK_ULT
)
2585 else if (op1
== TOK_UGT
)
2595 /* generate non equal test */
2597 vset_VT_CMP(TOK_NE
);
2601 /* compare low. Always unsigned */
2605 else if (op1
== TOK_LE
)
2607 else if (op1
== TOK_GT
)
2609 else if (op1
== TOK_GE
)
2612 #if 0//def TCC_TARGET_I386
2613 if (op
== TOK_NE
) { gsym(b
); break; }
2614 if (op
== TOK_EQ
) { gsym(a
); break; }
/* signed 64-bit division emulated on unsigned operands: divide the
   magnitudes, then restore the sign of the quotient (truncates toward
   zero, matching C signed division) */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? -a : a;
    uint64_t ub = (b >> 63) ? -b : b;
    uint64_t q = ua / ub;
    /* result is negative exactly when the operand signs differ */
    return ((a ^ b) >> 63) ? -q : q;
}
/* signed 64-bit '<' on unsigned operands: flipping the sign bit maps
   the signed range onto the unsigned range order-preservingly */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
2634 /* handle integer constant optimizations and various machine
2636 static void gen_opic(int op
)
2638 SValue
*v1
= vtop
- 1;
2640 int t1
= v1
->type
.t
& VT_BTYPE
;
2641 int t2
= v2
->type
.t
& VT_BTYPE
;
2642 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2643 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2644 uint64_t l1
= c1
? v1
->c
.i
: 0;
2645 uint64_t l2
= c2
? v2
->c
.i
: 0;
2646 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2648 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2649 l1
= ((uint32_t)l1
|
2650 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2651 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2652 l2
= ((uint32_t)l2
|
2653 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2657 case '+': l1
+= l2
; break;
2658 case '-': l1
-= l2
; break;
2659 case '&': l1
&= l2
; break;
2660 case '^': l1
^= l2
; break;
2661 case '|': l1
|= l2
; break;
2662 case '*': l1
*= l2
; break;
2669 /* if division by zero, generate explicit division */
2671 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2672 tcc_error("division by zero in constant");
2676 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2677 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2678 case TOK_UDIV
: l1
= l1
/ l2
; break;
2679 case TOK_UMOD
: l1
= l1
% l2
; break;
2682 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2683 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2685 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2688 case TOK_ULT
: l1
= l1
< l2
; break;
2689 case TOK_UGE
: l1
= l1
>= l2
; break;
2690 case TOK_EQ
: l1
= l1
== l2
; break;
2691 case TOK_NE
: l1
= l1
!= l2
; break;
2692 case TOK_ULE
: l1
= l1
<= l2
; break;
2693 case TOK_UGT
: l1
= l1
> l2
; break;
2694 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2695 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2696 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2697 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2699 case TOK_LAND
: l1
= l1
&& l2
; break;
2700 case TOK_LOR
: l1
= l1
|| l2
; break;
2704 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2705 l1
= ((uint32_t)l1
|
2706 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2710 /* if commutative ops, put c2 as constant */
2711 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2712 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2714 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2715 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2717 if (!const_wanted
&&
2719 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2720 (l1
== -1 && op
== TOK_SAR
))) {
2721 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2723 } else if (!const_wanted
&&
2724 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2726 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2727 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2728 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2733 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2736 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2737 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2740 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2741 /* filter out NOP operations like x*1, x-0, x&-1... */
2743 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2744 /* try to use shifts instead of muls or divs */
2745 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2754 else if (op
== TOK_PDIV
)
2760 } else if (c2
&& (op
== '+' || op
== '-') &&
2761 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2762 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2763 /* symbol + constant case */
2767 /* The backends can't always deal with addends to symbols
2768 larger than +-1<<31. Don't construct such. */
2775 /* call low level op generator */
2776 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2777 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2785 /* generate a floating point operation with constant propagation */
2786 static void gen_opif(int op
)
2790 #if defined _MSC_VER && defined __x86_64__
2791 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2798 /* currently, we cannot do computations with forward symbols */
2799 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2800 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2802 if (v1
->type
.t
== VT_FLOAT
) {
2805 } else if (v1
->type
.t
== VT_DOUBLE
) {
2813 /* NOTE: we only do constant propagation if finite number (not
2814 NaN or infinity) (ANSI spec) */
2815 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2819 case '+': f1
+= f2
; break;
2820 case '-': f1
-= f2
; break;
2821 case '*': f1
*= f2
; break;
2824 /* If not in initializer we need to potentially generate
2825 FP exceptions at runtime, otherwise we want to fold. */
2831 /* XXX: also handles tests ? */
2835 /* XXX: overflow test ? */
2836 if (v1
->type
.t
== VT_FLOAT
) {
2838 } else if (v1
->type
.t
== VT_DOUBLE
) {
2850 /* print a type. If 'varstr' is not NULL, then the variable is also
2851 printed in the type */
2853 /* XXX: add array and function pointers */
2854 static void type_to_str(char *buf
, int buf_size
,
2855 CType
*type
, const char *varstr
)
2867 pstrcat(buf
, buf_size
, "extern ");
2869 pstrcat(buf
, buf_size
, "static ");
2871 pstrcat(buf
, buf_size
, "typedef ");
2873 pstrcat(buf
, buf_size
, "inline ");
2874 if (t
& VT_VOLATILE
)
2875 pstrcat(buf
, buf_size
, "volatile ");
2876 if (t
& VT_CONSTANT
)
2877 pstrcat(buf
, buf_size
, "const ");
2879 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2880 || ((t
& VT_UNSIGNED
)
2881 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2884 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2886 buf_size
-= strlen(buf
);
2922 tstr
= "long double";
2924 pstrcat(buf
, buf_size
, tstr
);
2931 pstrcat(buf
, buf_size
, tstr
);
2932 v
= type
->ref
->v
& ~SYM_STRUCT
;
2933 if (v
>= SYM_FIRST_ANOM
)
2934 pstrcat(buf
, buf_size
, "<anonymous>");
2936 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2941 if (varstr
&& '*' == *varstr
) {
2942 pstrcat(buf1
, sizeof(buf1
), "(");
2943 pstrcat(buf1
, sizeof(buf1
), varstr
);
2944 pstrcat(buf1
, sizeof(buf1
), ")");
2946 pstrcat(buf1
, buf_size
, "(");
2948 while (sa
!= NULL
) {
2950 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2951 pstrcat(buf1
, sizeof(buf1
), buf2
);
2954 pstrcat(buf1
, sizeof(buf1
), ", ");
2956 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2957 pstrcat(buf1
, sizeof(buf1
), ", ...");
2958 pstrcat(buf1
, sizeof(buf1
), ")");
2959 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2964 if (varstr
&& '*' == *varstr
)
2965 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2967 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2968 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2971 pstrcpy(buf1
, sizeof(buf1
), "*");
2972 if (t
& VT_CONSTANT
)
2973 pstrcat(buf1
, buf_size
, "const ");
2974 if (t
& VT_VOLATILE
)
2975 pstrcat(buf1
, buf_size
, "volatile ");
2977 pstrcat(buf1
, sizeof(buf1
), varstr
);
2978 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2982 pstrcat(buf
, buf_size
, " ");
2983 pstrcat(buf
, buf_size
, varstr
);
2988 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2990 char buf1
[256], buf2
[256];
2991 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2992 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2993 tcc_error(fmt
, buf1
, buf2
);
2996 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2998 char buf1
[256], buf2
[256];
2999 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
3000 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
3001 tcc_warning(fmt
, buf1
, buf2
);
3004 static int pointed_size(CType
*type
)
3007 return type_size(pointed_type(type
), &align
);
3010 static void vla_runtime_pointed_size(CType
*type
)
3013 vla_runtime_type_size(pointed_type(type
), &align
);
3016 static inline int is_null_pointer(SValue
*p
)
3018 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
3020 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
3021 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
3022 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
3023 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
3024 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
3025 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3029 /* compare function types. OLD functions match any new functions */
3030 static int is_compatible_func(CType
*type1
, CType
*type2
)
3036 if (s1
->f
.func_call
!= s2
->f
.func_call
)
3038 if (s1
->f
.func_type
!= s2
->f
.func_type
3039 && s1
->f
.func_type
!= FUNC_OLD
3040 && s2
->f
.func_type
!= FUNC_OLD
)
3043 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
3045 if (s1
->f
.func_type
== FUNC_OLD
|| s2
->f
.func_type
== FUNC_OLD
)
3056 /* return true if type1 and type2 are the same. If unqualified is
3057 true, qualifiers on the types are ignored.
3059 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3063 t1
= type1
->t
& VT_TYPE
;
3064 t2
= type2
->t
& VT_TYPE
;
3066 /* strip qualifiers before comparing */
3067 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3068 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3071 /* Default Vs explicit signedness only matters for char */
3072 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3076 /* XXX: bitfields ? */
3081 && !(type1
->ref
->c
< 0
3082 || type2
->ref
->c
< 0
3083 || type1
->ref
->c
== type2
->ref
->c
))
3086 /* test more complicated cases */
3087 bt1
= t1
& VT_BTYPE
;
3088 if (bt1
== VT_PTR
) {
3089 type1
= pointed_type(type1
);
3090 type2
= pointed_type(type2
);
3091 return is_compatible_types(type1
, type2
);
3092 } else if (bt1
== VT_STRUCT
) {
3093 return (type1
->ref
== type2
->ref
);
3094 } else if (bt1
== VT_FUNC
) {
3095 return is_compatible_func(type1
, type2
);
3096 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
3097 /* If both are enums then they must be the same, if only one is then
3098 t1 and t2 must be equal, which was checked above already. */
3099 return type1
->ref
== type2
->ref
;
3105 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3106 type is stored in DEST if non-null (except for pointer plus/minus) . */
3107 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
3109 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
3110 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
3116 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
3117 ret
= op
== '?' ? 1 : 0;
3118 /* NOTE: as an extension, we accept void on only one side */
3120 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3121 if (op
== '+') ; /* Handled in caller */
3122 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3123 /* If one is a null ptr constant the result type is the other. */
3124 else if (is_null_pointer (op2
)) type
= *type1
;
3125 else if (is_null_pointer (op1
)) type
= *type2
;
3126 else if (bt1
!= bt2
) {
3127 /* accept comparison or cond-expr between pointer and integer
3129 if ((op
== '?' || TOK_ISCOND(op
))
3130 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
3131 tcc_warning("pointer/integer mismatch in %s",
3132 op
== '?' ? "conditional expression" : "comparison");
3133 else if (op
!= '-' || !is_integer_btype(bt2
))
3135 type
= *(bt1
== VT_PTR
? type1
: type2
);
3137 CType
*pt1
= pointed_type(type1
);
3138 CType
*pt2
= pointed_type(type2
);
3139 int pbt1
= pt1
->t
& VT_BTYPE
;
3140 int pbt2
= pt2
->t
& VT_BTYPE
;
3141 int newquals
, copied
= 0;
3142 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
3143 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
3144 if (op
!= '?' && !TOK_ISCOND(op
))
3147 type_incompatibility_warning(type1
, type2
,
3149 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3150 : "pointer type mismatch in comparison('%s' and '%s')");
3153 /* pointers to void get preferred, otherwise the
3154 pointed to types minus qualifs should be compatible */
3155 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
3156 /* combine qualifs */
3157 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
3158 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3161 /* copy the pointer target symbol */
3162 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3165 pointed_type(&type
)->t
|= newquals
;
3167 /* pointers to incomplete arrays get converted to
3168 pointers to completed ones if possible */
3169 if (pt1
->t
& VT_ARRAY
3170 && pt2
->t
& VT_ARRAY
3171 && pointed_type(&type
)->ref
->c
< 0
3172 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
3175 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3177 pointed_type(&type
)->ref
=
3178 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
3179 0, pointed_type(&type
)->ref
->c
);
3180 pointed_type(&type
)->ref
->c
=
3181 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
3187 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
3188 if (op
!= '?' || !compare_types(type1
, type2
, 1))
3191 } else if (is_float(bt1
) || is_float(bt2
)) {
3192 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
3193 type
.t
= VT_LDOUBLE
;
3194 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
3199 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
3200 /* cast to biggest op */
3201 type
.t
= VT_LLONG
| VT_LONG
;
3202 if (bt1
== VT_LLONG
)
3204 if (bt2
== VT_LLONG
)
3206 /* convert to unsigned if it does not fit in a long long */
3207 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
3208 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
3209 type
.t
|= VT_UNSIGNED
;
3211 /* integer operations */
3212 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
3213 /* convert to unsigned if it does not fit in an integer */
3214 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
3215 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
3216 type
.t
|= VT_UNSIGNED
;
3223 /* generic gen_op: handles types problems */
3224 ST_FUNC
void gen_op(int op
)
3226 int u
, t1
, t2
, bt1
, bt2
, t
;
3227 CType type1
, combtype
;
3230 t1
= vtop
[-1].type
.t
;
3231 t2
= vtop
[0].type
.t
;
3232 bt1
= t1
& VT_BTYPE
;
3233 bt2
= t2
& VT_BTYPE
;
3235 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
3236 if (bt2
== VT_FUNC
) {
3237 mk_pointer(&vtop
->type
);
3240 if (bt1
== VT_FUNC
) {
3242 mk_pointer(&vtop
->type
);
3247 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
3248 tcc_error_noabort("invalid operand types for binary operation");
3250 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3251 /* at least one operand is a pointer */
3252 /* relational op: must be both pointers */
3255 /* if both pointers, then it must be the '-' op */
3256 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
3258 tcc_error("cannot use pointers here");
3259 if (vtop
[-1].type
.t
& VT_VLA
) {
3260 vla_runtime_pointed_size(&vtop
[-1].type
);
3262 vpushi(pointed_size(&vtop
[-1].type
));
3266 vtop
->type
.t
= VT_PTRDIFF_T
;
3270 /* exactly one pointer : must be '+' or '-'. */
3271 if (op
!= '-' && op
!= '+')
3272 tcc_error("cannot use pointers here");
3273 /* Put pointer as first operand */
3274 if (bt2
== VT_PTR
) {
3276 t
= t1
, t1
= t2
, t2
= t
;
3279 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
3280 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3283 type1
= vtop
[-1].type
;
3284 if (vtop
[-1].type
.t
& VT_VLA
)
3285 vla_runtime_pointed_size(&vtop
[-1].type
);
3287 u
= pointed_size(&vtop
[-1].type
);
3289 tcc_error("unknown array element size");
3293 /* XXX: cast to int ? (long long case) */
3298 #ifdef CONFIG_TCC_BCHECK
3299 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
3300 /* if bounded pointers, we generate a special code to
3307 gen_bounded_ptr_add();
3313 type1
.t
&= ~VT_ARRAY
;
3314 /* put again type if gen_opic() swaped operands */
3318 /* floats can only be used for a few operations */
3319 if (is_float(combtype
.t
)
3320 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3322 tcc_error("invalid operands for binary operation");
3323 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
3324 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
3325 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
3327 t
|= (VT_LONG
& t1
);
3331 t
= t2
= combtype
.t
;
3332 /* XXX: currently, some unsigned operations are explicit, so
3333 we modify them here */
3334 if (t
& VT_UNSIGNED
) {
3341 else if (op
== TOK_LT
)
3343 else if (op
== TOK_GT
)
3345 else if (op
== TOK_LE
)
3347 else if (op
== TOK_GE
)
3353 /* special case for shifts and long long: we keep the shift as
3355 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
3362 if (TOK_ISCOND(op
)) {
3363 /* relational op: the result is an int */
3364 vtop
->type
.t
= VT_INT
;
3369 // Make sure that we have converted to an rvalue:
3370 if (vtop
->r
& VT_LVAL
)
3371 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3374 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3375 #define gen_cvt_itof1 gen_cvt_itof
3377 /* generic itof for unsigned long long case */
3378 static void gen_cvt_itof1(int t
)
3380 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3381 (VT_LLONG
| VT_UNSIGNED
)) {
3384 vpush_helper_func(TOK___floatundisf
);
3385 #if LDOUBLE_SIZE != 8
3386 else if (t
== VT_LDOUBLE
)
3387 vpush_helper_func(TOK___floatundixf
);
3390 vpush_helper_func(TOK___floatundidf
);
3401 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3402 #define gen_cvt_ftoi1 gen_cvt_ftoi
3404 /* generic ftoi for unsigned long long case */
3405 static void gen_cvt_ftoi1(int t
)
3408 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3409 /* not handled natively */
3410 st
= vtop
->type
.t
& VT_BTYPE
;
3412 vpush_helper_func(TOK___fixunssfdi
);
3413 #if LDOUBLE_SIZE != 8
3414 else if (st
== VT_LDOUBLE
)
3415 vpush_helper_func(TOK___fixunsxfdi
);
3418 vpush_helper_func(TOK___fixunsdfdi
);
3429 /* special delayed cast for char/short */
3430 static void force_charshort_cast(void)
3432 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3433 int dbt
= vtop
->type
.t
;
3434 vtop
->r
&= ~VT_MUSTCAST
;
3436 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3440 static void gen_cast_s(int t
)
3448 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3449 static void gen_cast(CType
*type
)
3451 int sbt
, dbt
, sf
, df
, c
;
3452 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3454 /* special delayed cast for char/short */
3455 if (vtop
->r
& VT_MUSTCAST
)
3456 force_charshort_cast();
3458 /* bitfields first get cast to ints */
3459 if (vtop
->type
.t
& VT_BITFIELD
)
3462 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3463 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3471 dbt_bt
= dbt
& VT_BTYPE
;
3472 sbt_bt
= sbt
& VT_BTYPE
;
3474 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3475 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3476 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3479 /* constant case: we can do it now */
3480 /* XXX: in ISOC, cannot do it if error in convert */
3481 if (sbt
== VT_FLOAT
)
3482 vtop
->c
.ld
= vtop
->c
.f
;
3483 else if (sbt
== VT_DOUBLE
)
3484 vtop
->c
.ld
= vtop
->c
.d
;
3487 if (sbt_bt
== VT_LLONG
) {
3488 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3489 vtop
->c
.ld
= vtop
->c
.i
;
3491 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3493 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3494 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3496 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3499 if (dbt
== VT_FLOAT
)
3500 vtop
->c
.f
= (float)vtop
->c
.ld
;
3501 else if (dbt
== VT_DOUBLE
)
3502 vtop
->c
.d
= (double)vtop
->c
.ld
;
3503 } else if (sf
&& dbt
== VT_BOOL
) {
3504 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3507 vtop
->c
.i
= vtop
->c
.ld
;
3508 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3510 else if (sbt
& VT_UNSIGNED
)
3511 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3513 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3515 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3517 else if (dbt
== VT_BOOL
)
3518 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3520 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3521 dbt_bt
== VT_SHORT
? 0xffff :
3524 if (!(dbt
& VT_UNSIGNED
))
3525 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3530 } else if (dbt
== VT_BOOL
3531 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3532 == (VT_CONST
| VT_SYM
)) {
3533 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3539 /* cannot generate code for global or static initializers */
3540 if (STATIC_DATA_WANTED
)
3543 /* non constant case: generate code */
3544 if (dbt
== VT_BOOL
) {
3545 gen_test_zero(TOK_NE
);
3551 /* convert from fp to fp */
3554 /* convert int to fp */
3557 /* convert fp to int */
3559 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3562 goto again
; /* may need char/short cast */
3567 ds
= btype_size(dbt_bt
);
3568 ss
= btype_size(sbt_bt
);
3569 if (ds
== 0 || ss
== 0) {
3570 if (dbt_bt
== VT_VOID
)
3572 cast_error(&vtop
->type
, type
);
3574 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3575 tcc_error("cast to incomplete type");
3577 /* same size and no sign conversion needed */
3578 if (ds
== ss
&& ds
>= 4)
3580 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3581 tcc_warning("cast between pointer and integer of different size");
3582 if (sbt_bt
== VT_PTR
) {
3583 /* put integer type to allow logical operations below */
3584 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3588 /* processor allows { int a = 0, b = *(char*)&a; }
3589 That means that if we cast to less width, we can just
3590 change the type and read it still later. */
3591 #define ALLOW_SUBTYPE_ACCESS 1
3593 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3594 /* value still in memory */
3598 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3600 goto done
; /* no 64bit envolved */
3608 /* generate high word */
3609 if (sbt
& VT_UNSIGNED
) {
3618 } else if (ss
== 8) {
3619 /* from long long: just take low order word */
3627 /* need to convert from 32bit to 64bit */
3628 if (sbt
& VT_UNSIGNED
) {
3629 #if defined(TCC_TARGET_RISCV64)
3630 /* RISC-V keeps 32bit vals in registers sign-extended.
3631 So here we need a zero-extension. */
3640 ss
= ds
, ds
= 4, dbt
= sbt
;
3641 } else if (ss
== 8) {
3642 /* RISC-V keeps 32bit vals in registers sign-extended.
3643 So here we need a sign-extension for signed types and
3644 zero-extension. for unsigned types. */
3645 #if !defined(TCC_TARGET_RISCV64)
3646 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3655 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3661 bits
= (ss
- ds
) * 8;
3662 /* for unsigned, gen_op will convert SAR to SHR */
3663 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3666 vpushi(bits
- trunc
);
3673 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3676 /* return type size as known at compile time. Put alignment at 'a' */
3677 ST_FUNC
int type_size(CType
*type
, int *a
)
3682 bt
= type
->t
& VT_BTYPE
;
3683 if (bt
== VT_STRUCT
) {
3688 } else if (bt
== VT_PTR
) {
3689 if (type
->t
& VT_ARRAY
) {
3693 ts
= type_size(&s
->type
, a
);
3695 if (ts
< 0 && s
->c
< 0)
3703 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3704 return -1; /* incomplete enum */
3705 } else if (bt
== VT_LDOUBLE
) {
3707 return LDOUBLE_SIZE
;
3708 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3709 #ifdef TCC_TARGET_I386
3710 #ifdef TCC_TARGET_PE
3715 #elif defined(TCC_TARGET_ARM)
3725 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3728 } else if (bt
== VT_SHORT
) {
3731 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3735 /* char, void, function, _Bool */
3741 /* push type size as known at runtime time on top of value stack. Put
3743 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
3745 if (type
->t
& VT_VLA
) {
3746 type_size(&type
->ref
->type
, a
);
3747 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3749 vpushi(type_size(type
, a
));
3753 /* return the pointed type of t */
3754 static inline CType
*pointed_type(CType
*type
)
3756 return &type
->ref
->type
;
3759 /* modify type so that its it is a pointer to type. */
3760 ST_FUNC
void mk_pointer(CType
*type
)
3763 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3764 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3768 /* return true if type1 and type2 are exactly the same (including
3771 static int is_compatible_types(CType
*type1
, CType
*type2
)
3773 return compare_types(type1
,type2
,0);
3776 /* return true if type1 and type2 are the same (ignoring qualifiers).
3778 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3780 return compare_types(type1
,type2
,1);
3783 static void cast_error(CType
*st
, CType
*dt
)
3785 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3788 /* verify type compatibility to store vtop in 'dt' type */
3789 static void verify_assign_cast(CType
*dt
)
3791 CType
*st
, *type1
, *type2
;
3792 int dbt
, sbt
, qualwarn
, lvl
;
3794 st
= &vtop
->type
; /* source type */
3795 dbt
= dt
->t
& VT_BTYPE
;
3796 sbt
= st
->t
& VT_BTYPE
;
3797 if (dt
->t
& VT_CONSTANT
)
3798 tcc_warning("assignment of read-only location");
3802 tcc_error("assignment to void expression");
3805 /* special cases for pointers */
3806 /* '0' can also be a pointer */
3807 if (is_null_pointer(vtop
))
3809 /* accept implicit pointer to integer cast with warning */
3810 if (is_integer_btype(sbt
)) {
3811 tcc_warning("assignment makes pointer from integer without a cast");
3814 type1
= pointed_type(dt
);
3816 type2
= pointed_type(st
);
3817 else if (sbt
== VT_FUNC
)
3818 type2
= st
; /* a function is implicitly a function pointer */
3821 if (is_compatible_types(type1
, type2
))
3823 for (qualwarn
= lvl
= 0;; ++lvl
) {
3824 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3825 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3827 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3828 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3829 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3831 type1
= pointed_type(type1
);
3832 type2
= pointed_type(type2
);
3834 if (!is_compatible_unqualified_types(type1
, type2
)) {
3835 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3836 /* void * can match anything */
3837 } else if (dbt
== sbt
3838 && is_integer_btype(sbt
& VT_BTYPE
)
3839 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3840 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3841 /* Like GCC don't warn by default for merely changes
3842 in pointer target signedness. Do warn for different
3843 base types, though, in particular for unsigned enums
3844 and signed int targets. */
3846 tcc_warning("assignment from incompatible pointer type");
3851 tcc_warning("assignment discards qualifiers from pointer target type");
3857 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3858 tcc_warning("assignment makes integer from pointer without a cast");
3859 } else if (sbt
== VT_STRUCT
) {
3860 goto case_VT_STRUCT
;
3862 /* XXX: more tests */
3866 if (!is_compatible_unqualified_types(dt
, st
)) {
3874 static void gen_assign_cast(CType
*dt
)
3876 verify_assign_cast(dt
);
3880 /* store vtop in lvalue pushed on stack */
3881 ST_FUNC
void vstore(void)
3883 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3885 ft
= vtop
[-1].type
.t
;
3886 sbt
= vtop
->type
.t
& VT_BTYPE
;
3887 dbt
= ft
& VT_BTYPE
;
3889 verify_assign_cast(&vtop
[-1].type
);
3891 if (sbt
== VT_STRUCT
) {
3892 /* if structure, only generate pointer */
3893 /* structure assignment : generate memcpy */
3894 /* XXX: optimize if small size */
3895 size
= type_size(&vtop
->type
, &align
);
3899 #ifdef CONFIG_TCC_BCHECK
3900 if (vtop
->r
& VT_MUSTBOUND
)
3901 gbound(); /* check would be wrong after gaddrof() */
3903 vtop
->type
.t
= VT_PTR
;
3906 /* address of memcpy() */
3909 vpush_helper_func(TOK_memmove8
);
3910 else if(!(align
& 3))
3911 vpush_helper_func(TOK_memmove4
);
3914 /* Use memmove, rather than memcpy, as dest and src may be same: */
3915 vpush_helper_func(TOK_memmove
);
3920 #ifdef CONFIG_TCC_BCHECK
3921 if (vtop
->r
& VT_MUSTBOUND
)
3924 vtop
->type
.t
= VT_PTR
;
3929 /* leave source on stack */
3931 } else if (ft
& VT_BITFIELD
) {
3932 /* bitfield store handling */
3934 /* save lvalue as expression result (example: s.b = s.a = n;) */
3935 vdup(), vtop
[-1] = vtop
[-2];
3937 bit_pos
= BIT_POS(ft
);
3938 bit_size
= BIT_SIZE(ft
);
3939 /* remove bit field info to avoid loops */
3940 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3942 if (dbt
== VT_BOOL
) {
3943 gen_cast(&vtop
[-1].type
);
3944 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3946 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3947 if (dbt
!= VT_BOOL
) {
3948 gen_cast(&vtop
[-1].type
);
3949 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3951 if (r
== VT_STRUCT
) {
3952 store_packed_bf(bit_pos
, bit_size
);
3954 unsigned long long mask
= (1ULL << bit_size
) - 1;
3955 if (dbt
!= VT_BOOL
) {
3957 if (dbt
== VT_LLONG
)
3960 vpushi((unsigned)mask
);
3967 /* duplicate destination */
3970 /* load destination, mask and or with source */
3971 if (dbt
== VT_LLONG
)
3972 vpushll(~(mask
<< bit_pos
));
3974 vpushi(~((unsigned)mask
<< bit_pos
));
3979 /* ... and discard */
3982 } else if (dbt
== VT_VOID
) {
3985 /* optimize char/short casts */
3987 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3988 && is_integer_btype(sbt
)
3990 if ((vtop
->r
& VT_MUSTCAST
)
3991 && btype_size(dbt
) > btype_size(sbt
)
3993 force_charshort_cast();
3996 gen_cast(&vtop
[-1].type
);
3999 #ifdef CONFIG_TCC_BCHECK
4000 /* bound check case */
4001 if (vtop
[-1].r
& VT_MUSTBOUND
) {
4007 gv(RC_TYPE(dbt
)); /* generate value */
4010 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
4011 //tcc_warning("deley cast %x -> %x", sbt, dbt);
4012 vtop
->type
.t
= ft
& VT_TYPE
;
4015 /* if lvalue was saved on stack, must read it */
4016 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
4018 r
= get_reg(RC_INT
);
4019 sv
.type
.t
= VT_PTRDIFF_T
;
4020 sv
.r
= VT_LOCAL
| VT_LVAL
;
4021 sv
.c
.i
= vtop
[-1].c
.i
;
4023 vtop
[-1].r
= r
| VT_LVAL
;
4026 r
= vtop
->r
& VT_VALMASK
;
4027 /* two word case handling :
4028 store second register at word + 4 (or +8 for x86-64) */
4029 if (USING_TWO_WORDS(dbt
)) {
4030 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
4031 vtop
[-1].type
.t
= load_type
;
4034 /* convert to int to increment easily */
4035 vtop
->type
.t
= VT_PTRDIFF_T
;
4041 vtop
[-1].type
.t
= load_type
;
4042 /* XXX: it works because r2 is spilled last ! */
4043 store(vtop
->r2
, vtop
- 1);
4049 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
4053 /* post defines POST/PRE add. c is the token ++ or -- */
4054 ST_FUNC
void inc(int post
, int c
)
4057 vdup(); /* save lvalue */
4059 gv_dup(); /* duplicate value */
4064 vpushi(c
- TOK_MID
);
4066 vstore(); /* store value */
4068 vpop(); /* if post op, return saved value */
4071 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
4073 /* read the string */
4077 while (tok
== TOK_STR
) {
4078 /* XXX: add \0 handling too ? */
4079 cstr_cat(astr
, tokc
.str
.data
, -1);
4082 cstr_ccat(astr
, '\0');
/* If I is >= 1 and a power of two, returns log2(i)+1.
   If I is 0 returns 0.  */
ST_FUNC int exact_log2p1(int i)
{
    int ret;

    if (!i)
        return 0;
    /* reduce in chunks of 8, 4, 2, 1 bits */
    for (ret = 1; i >= 1 << 8; ret += 8)
        i >>= 8;
    if (i >= 1 << 4)
        ret += 4, i >>= 4;
    if (i >= 1 << 2)
        ret += 2, i >>= 2;
    if (i >= 1 << 1)
        ret++;
    return ret;
}
4103 /* Parse __attribute__((...)) GNUC extension. */
4104 static void parse_attribute(AttributeDef
*ad
)
4110 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
4115 while (tok
!= ')') {
4116 if (tok
< TOK_IDENT
)
4117 expect("attribute name");
4129 tcc_warning("implicit declaration of function '%s'",
4130 get_tok_str(tok
, &tokc
));
4131 s
= external_global_sym(tok
, &func_old_type
);
4132 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4133 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
4134 ad
->cleanup_func
= s
;
4139 case TOK_CONSTRUCTOR1
:
4140 case TOK_CONSTRUCTOR2
:
4141 ad
->f
.func_ctor
= 1;
4143 case TOK_DESTRUCTOR1
:
4144 case TOK_DESTRUCTOR2
:
4145 ad
->f
.func_dtor
= 1;
4147 case TOK_ALWAYS_INLINE1
:
4148 case TOK_ALWAYS_INLINE2
:
4149 ad
->f
.func_alwinl
= 1;
4154 parse_mult_str(&astr
, "section name");
4155 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
4162 parse_mult_str(&astr
, "alias(\"target\")");
4163 ad
->alias_target
= /* save string as token, for later */
4164 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
4168 case TOK_VISIBILITY1
:
4169 case TOK_VISIBILITY2
:
4171 parse_mult_str(&astr
,
4172 "visibility(\"default|hidden|internal|protected\")");
4173 if (!strcmp (astr
.data
, "default"))
4174 ad
->a
.visibility
= STV_DEFAULT
;
4175 else if (!strcmp (astr
.data
, "hidden"))
4176 ad
->a
.visibility
= STV_HIDDEN
;
4177 else if (!strcmp (astr
.data
, "internal"))
4178 ad
->a
.visibility
= STV_INTERNAL
;
4179 else if (!strcmp (astr
.data
, "protected"))
4180 ad
->a
.visibility
= STV_PROTECTED
;
4182 expect("visibility(\"default|hidden|internal|protected\")");
4191 if (n
<= 0 || (n
& (n
- 1)) != 0)
4192 tcc_error("alignment must be a positive power of two");
4197 ad
->a
.aligned
= exact_log2p1(n
);
4198 if (n
!= 1 << (ad
->a
.aligned
- 1))
4199 tcc_error("alignment of %d is larger than implemented", n
);
4211 /* currently, no need to handle it because tcc does not
4212 track unused objects */
4216 ad
->f
.func_noreturn
= 1;
4221 ad
->f
.func_call
= FUNC_CDECL
;
4226 ad
->f
.func_call
= FUNC_STDCALL
;
4228 #ifdef TCC_TARGET_I386
4238 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
4244 ad
->f
.func_call
= FUNC_FASTCALLW
;
4251 ad
->attr_mode
= VT_LLONG
+ 1;
4254 ad
->attr_mode
= VT_BYTE
+ 1;
4257 ad
->attr_mode
= VT_SHORT
+ 1;
4261 ad
->attr_mode
= VT_INT
+ 1;
4264 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4271 ad
->a
.dllexport
= 1;
4273 case TOK_NODECORATE
:
4274 ad
->a
.nodecorate
= 1;
4277 ad
->a
.dllimport
= 1;
4280 if (tcc_state
->warn_unsupported
)
4281 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
4282 /* skip parameters */
4284 int parenthesis
= 0;
4288 else if (tok
== ')')
4291 } while (parenthesis
&& tok
!= -1);
4304 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
4308 while ((s
= s
->next
) != NULL
) {
4309 if ((s
->v
& SYM_FIELD
) &&
4310 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
4311 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
4312 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
4324 static void check_fields (CType
*type
, int check
)
4328 while ((s
= s
->next
) != NULL
) {
4329 int v
= s
->v
& ~SYM_FIELD
;
4330 if (v
< SYM_FIRST_ANOM
) {
4331 TokenSym
*ts
= table_ident
[v
- TOK_IDENT
];
4332 if (check
&& (ts
->tok
& SYM_FIELD
))
4333 tcc_error("duplicate member '%s'", get_tok_str(v
, NULL
));
4334 ts
->tok
^= SYM_FIELD
;
4335 } else if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
)
4336 check_fields (&s
->type
, check
);
4340 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4342 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4343 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4344 int pcc
= !tcc_state
->ms_bitfields
;
4345 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4352 prevbt
= VT_STRUCT
; /* make it never match */
4357 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4358 if (f
->type
.t
& VT_BITFIELD
)
4359 bit_size
= BIT_SIZE(f
->type
.t
);
4362 size
= type_size(&f
->type
, &align
);
4363 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4366 if (pcc
&& bit_size
== 0) {
4367 /* in pcc mode, packing does not affect zero-width bitfields */
4370 /* in pcc mode, attribute packed overrides if set. */
4371 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4374 /* pragma pack overrides align if lesser and packs bitfields always */
4377 if (pragma_pack
< align
)
4378 align
= pragma_pack
;
4379 /* in pcc mode pragma pack also overrides individual align */
4380 if (pcc
&& pragma_pack
< a
)
4384 /* some individual align was specified */
4388 if (type
->ref
->type
.t
== VT_UNION
) {
4389 if (pcc
&& bit_size
>= 0)
4390 size
= (bit_size
+ 7) >> 3;
4395 } else if (bit_size
< 0) {
4397 c
+= (bit_pos
+ 7) >> 3;
4398 c
= (c
+ align
- 1) & -align
;
4407 /* A bit-field. Layout is more complicated. There are two
4408 options: PCC (GCC) compatible and MS compatible */
4410 /* In PCC layout a bit-field is placed adjacent to the
4411 preceding bit-fields, except if:
4413 - an individual alignment was given
4414 - it would overflow its base type container and
4415 there is no packing */
4416 if (bit_size
== 0) {
4418 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4420 } else if (f
->a
.aligned
) {
4422 } else if (!packed
) {
4424 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4425 if (ofs
> size
/ align
)
4429 /* in pcc mode, long long bitfields have type int if they fit */
4430 if (size
== 8 && bit_size
<= 32)
4431 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4433 while (bit_pos
>= align
* 8)
4434 c
+= align
, bit_pos
-= align
* 8;
4437 /* In PCC layout named bit-fields influence the alignment
4438 of the containing struct using the base types alignment,
4439 except for packed fields (which here have correct align). */
4440 if (f
->v
& SYM_FIRST_ANOM
4441 // && bit_size // ??? gcc on ARM/rpi does that
4446 bt
= f
->type
.t
& VT_BTYPE
;
4447 if ((bit_pos
+ bit_size
> size
* 8)
4448 || (bit_size
> 0) == (bt
!= prevbt
)
4450 c
= (c
+ align
- 1) & -align
;
4453 /* In MS bitfield mode a bit-field run always uses
4454 at least as many bits as the underlying type.
4455 To start a new run it's also required that this
4456 or the last bit-field had non-zero width. */
4457 if (bit_size
|| prev_bit_size
)
4460 /* In MS layout the records alignment is normally
4461 influenced by the field, except for a zero-width
4462 field at the start of a run (but by further zero-width
4463 fields it is again). */
4464 if (bit_size
== 0 && prevbt
!= bt
)
4467 prev_bit_size
= bit_size
;
4470 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4471 | (bit_pos
<< VT_STRUCT_SHIFT
);
4472 bit_pos
+= bit_size
;
4474 if (align
> maxalign
)
4478 printf("set field %s offset %-2d size %-2d align %-2d",
4479 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4480 if (f
->type
.t
& VT_BITFIELD
) {
4481 printf(" pos %-2d bits %-2d",
4494 c
+= (bit_pos
+ 7) >> 3;
4496 /* store size and alignment */
4497 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4501 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4502 /* can happen if individual align for some member was given. In
4503 this case MSVC ignores maxalign when aligning the size */
4508 c
= (c
+ a
- 1) & -a
;
4512 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4515 /* check whether we can access bitfields by their type */
4516 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4520 if (0 == (f
->type
.t
& VT_BITFIELD
))
4524 bit_size
= BIT_SIZE(f
->type
.t
);
4527 bit_pos
= BIT_POS(f
->type
.t
);
4528 size
= type_size(&f
->type
, &align
);
4529 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
4532 /* try to access the field using a different type */
4533 c0
= -1, s
= align
= 1;
4536 px
= f
->c
* 8 + bit_pos
;
4537 cx
= (px
>> 3) & -align
;
4538 px
= px
- (cx
<< 3);
4541 s
= (px
+ bit_size
+ 7) >> 3;
4551 s
= type_size(&t
, &align
);
4555 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
4556 /* update offset and bit position */
4559 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4560 | (bit_pos
<< VT_STRUCT_SHIFT
);
4564 printf("FIX field %s offset %-2d size %-2d align %-2d "
4565 "pos %-2d bits %-2d\n",
4566 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4567 cx
, s
, align
, px
, bit_size
);
4570 /* fall back to load/store single-byte wise */
4571 f
->auxtype
= VT_STRUCT
;
4573 printf("FIX field %s : load byte-wise\n",
4574 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4580 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4581 static void struct_decl(CType
*type
, int u
)
4583 int v
, c
, size
, align
, flexible
;
4584 int bit_size
, bsize
, bt
;
4586 AttributeDef ad
, ad1
;
4589 memset(&ad
, 0, sizeof ad
);
4591 parse_attribute(&ad
);
4595 /* struct already defined ? return it */
4597 expect("struct/union/enum name");
4599 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4602 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4604 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4609 /* Record the original enum/struct/union token. */
4610 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4612 /* we put an undefined size for struct/union */
4613 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4614 s
->r
= 0; /* default alignment is zero as gcc */
4616 type
->t
= s
->type
.t
;
4622 tcc_error("struct/union/enum already defined");
4624 /* cannot be empty */
4625 /* non empty enums are not allowed */
4628 long long ll
= 0, pl
= 0, nl
= 0;
4631 /* enum symbols have static storage */
4632 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4636 expect("identifier");
4638 if (ss
&& !local_stack
)
4639 tcc_error("redefinition of enumerator '%s'",
4640 get_tok_str(v
, NULL
));
4644 ll
= expr_const64();
4646 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4648 *ps
= ss
, ps
= &ss
->next
;
4657 /* NOTE: we accept a trailing comma */
4662 /* set integral type of the enum */
4665 if (pl
!= (unsigned)pl
)
4666 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4668 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4669 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4670 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4672 /* set type for enum members */
4673 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4675 if (ll
== (int)ll
) /* default is int if it fits */
4677 if (t
.t
& VT_UNSIGNED
) {
4678 ss
->type
.t
|= VT_UNSIGNED
;
4679 if (ll
== (unsigned)ll
)
4682 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4683 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4688 while (tok
!= '}') {
4689 if (!parse_btype(&btype
, &ad1
)) {
4695 tcc_error("flexible array member '%s' not at the end of struct",
4696 get_tok_str(v
, NULL
));
4702 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4704 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4705 expect("identifier");
4707 int v
= btype
.ref
->v
;
4708 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4709 if (tcc_state
->ms_extensions
== 0)
4710 expect("identifier");
4714 if (type_size(&type1
, &align
) < 0) {
4715 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4718 tcc_error("field '%s' has incomplete type",
4719 get_tok_str(v
, NULL
));
4721 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4722 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4723 (type1
.t
& VT_STORAGE
))
4724 tcc_error("invalid type for '%s'",
4725 get_tok_str(v
, NULL
));
4729 bit_size
= expr_const();
4730 /* XXX: handle v = 0 case for messages */
4732 tcc_error("negative width in bit-field '%s'",
4733 get_tok_str(v
, NULL
));
4734 if (v
&& bit_size
== 0)
4735 tcc_error("zero width for bit-field '%s'",
4736 get_tok_str(v
, NULL
));
4737 parse_attribute(&ad1
);
4739 size
= type_size(&type1
, &align
);
4740 if (bit_size
>= 0) {
4741 bt
= type1
.t
& VT_BTYPE
;
4747 tcc_error("bitfields must have scalar type");
4749 if (bit_size
> bsize
) {
4750 tcc_error("width of '%s' exceeds its type",
4751 get_tok_str(v
, NULL
));
4752 } else if (bit_size
== bsize
4753 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4754 /* no need for bit fields */
4756 } else if (bit_size
== 64) {
4757 tcc_error("field width 64 not implemented");
4759 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4761 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4764 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4765 /* Remember we've seen a real field to check
4766 for placement of flexible array member. */
4769 /* If member is a struct or bit-field, enforce
4770 placing into the struct (as anonymous). */
4772 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4777 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4782 if (tok
== ';' || tok
== TOK_EOF
)
4789 parse_attribute(&ad
);
4790 if (ad
.cleanup_func
) {
4791 tcc_warning("attribute '__cleanup__' ignored on type");
4793 check_fields(type
, 1);
4794 check_fields(type
, 0);
4795 struct_layout(type
, &ad
);
4800 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4802 merge_symattr(&ad
->a
, &s
->a
);
4803 merge_funcattr(&ad
->f
, &s
->f
);
4806 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4807 are added to the element type, copied because it could be a typedef. */
4808 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4810 while (type
->t
& VT_ARRAY
) {
4811 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4812 type
= &type
->ref
->type
;
4814 type
->t
|= qualifiers
;
4817 /* return 0 if no type declaration. otherwise, return the basic type
4820 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4822 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4826 memset(ad
, 0, sizeof(AttributeDef
));
4836 /* currently, we really ignore extension */
4846 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4847 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4848 tmbt
: tcc_error("too many basic types");
4851 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4856 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4873 memset(&ad1
, 0, sizeof(AttributeDef
));
4874 if (parse_btype(&type1
, &ad1
)) {
4875 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4877 n
= 1 << (ad1
.a
.aligned
- 1);
4879 type_size(&type1
, &n
);
4882 if (n
<= 0 || (n
& (n
- 1)) != 0)
4883 tcc_error("alignment must be a positive power of two");
4886 ad
->a
.aligned
= exact_log2p1(n
);
4890 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4891 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4892 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4893 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4900 #ifdef TCC_TARGET_ARM64
4902 /* GCC's __uint128_t appears in some Linux header files. Make it a
4903 synonym for long double to get the size and alignment right. */
4914 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4915 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4923 struct_decl(&type1
, VT_ENUM
);
4926 type
->ref
= type1
.ref
;
4929 struct_decl(&type1
, VT_STRUCT
);
4932 struct_decl(&type1
, VT_UNION
);
4935 /* type modifiers */
4940 parse_btype_qualify(type
, VT_CONSTANT
);
4948 parse_btype_qualify(type
, VT_VOLATILE
);
4955 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4956 tcc_error("signed and unsigned modifier");
4969 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4970 tcc_error("signed and unsigned modifier");
4971 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4987 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4988 tcc_error("multiple storage classes");
5000 ad
->f
.func_noreturn
= 1;
5002 /* GNUC attribute */
5003 case TOK_ATTRIBUTE1
:
5004 case TOK_ATTRIBUTE2
:
5005 parse_attribute(ad
);
5006 if (ad
->attr_mode
) {
5007 u
= ad
->attr_mode
-1;
5008 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
5016 parse_expr_type(&type1
);
5017 /* remove all storage modifiers except typedef */
5018 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
5020 sym_to_attr(ad
, type1
.ref
);
5026 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
5030 if (tok
== ':' && !in_generic
) {
5031 /* ignore if it's a label */
5036 t
&= ~(VT_BTYPE
|VT_LONG
);
5037 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
5038 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
5039 type
->ref
= s
->type
.ref
;
5041 parse_btype_qualify(type
, t
);
5043 /* get attributes from typedef */
5052 if (tcc_state
->char_is_unsigned
) {
5053 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
5056 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
5057 bt
= t
& (VT_BTYPE
|VT_LONG
);
5059 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
5060 #if defined TCC_TARGET_PE || (defined _WIN32 && defined _MSC_VER)
5061 if (bt
== VT_LDOUBLE
)
5062 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
5068 /* convert a function parameter type (array to pointer and function to
5069 function pointer) */
5070 static inline void convert_parameter_type(CType
*pt
)
5072 /* remove const and volatile qualifiers (XXX: const could be used
5073 to indicate a const function parameter */
5074 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5075 /* array must be transformed to pointer according to ANSI C */
5077 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
5082 ST_FUNC
void parse_asm_str(CString
*astr
)
5085 parse_mult_str(astr
, "string constant");
5088 /* Parse an asm label and return the token */
5089 static int asm_label_instr(void)
5095 parse_asm_str(&astr
);
5098 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
5100 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
5105 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
5107 int n
, l
, t1
, arg_size
, align
, unused_align
;
5108 Sym
**plast
, *s
, *first
;
5113 /* function type, or recursive declarator (return if so) */
5115 if (td
&& !(td
& TYPE_ABSTRACT
))
5119 else if (parse_btype(&pt
, &ad1
))
5122 merge_attr (ad
, &ad1
);
5131 /* read param name and compute offset */
5132 if (l
!= FUNC_OLD
) {
5133 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
5135 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
5136 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
5137 tcc_error("parameter declared as void");
5141 expect("identifier");
5142 pt
.t
= VT_VOID
; /* invalid type */
5146 convert_parameter_type(&pt
);
5147 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
5148 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
5154 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
5159 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
5160 tcc_error("invalid type");
5163 /* if no parameters, then old type prototype */
5166 /* NOTE: const is ignored in returned type as it has a special
5167 meaning in gcc / C++ */
5168 type
->t
&= ~VT_CONSTANT
;
5169 /* some ancient pre-K&R C allows a function to return an array
5170 and the array brackets to be put after the arguments, such
5171 that "int c()[]" means something like "int[] c()" */
5174 skip(']'); /* only handle simple "[]" */
5177 /* we push a anonymous symbol which will contain the function prototype */
5178 ad
->f
.func_args
= arg_size
;
5179 ad
->f
.func_type
= l
;
5180 s
= sym_push(SYM_FIELD
, type
, 0, 0);
5186 } else if (tok
== '[') {
5187 int saved_nocode_wanted
= nocode_wanted
;
5188 /* array definition */
5191 /* XXX The optional type-quals and static should only be accepted
5192 in parameter decls. The '*' as well, and then even only
5193 in prototypes (not function defs). */
5195 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5210 if (!local_stack
|| (storage
& VT_STATIC
))
5211 vpushi(expr_const());
5213 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5214 length must always be evaluated, even under nocode_wanted,
5215 so that its size slot is initialized (e.g. under sizeof
5220 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5223 tcc_error("invalid array size");
5225 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5226 tcc_error("size of variable length array should be an integer");
5232 /* parse next post type */
5233 post_type(type
, ad
, storage
, 0);
5235 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5236 tcc_error("declaration of an array of functions");
5237 if ((type
->t
& VT_BTYPE
) == VT_VOID
5238 || type_size(type
, &unused_align
) < 0)
5239 tcc_error("declaration of an array of incomplete type elements");
5241 t1
|= type
->t
& VT_VLA
;
5245 tcc_error("need explicit inner array size in VLAs");
5246 loc
-= type_size(&int_type
, &align
);
5250 vla_runtime_type_size(type
, &align
);
5252 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5258 nocode_wanted
= saved_nocode_wanted
;
5260 /* we push an anonymous symbol which will contain the array
5262 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5263 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5269 /* Parse a type declarator (except basic type), and return the type
5270 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5271 expected. 'type' should contain the basic type. 'ad' is the
5272 attribute definition of the basic type. It can be modified by
5273 type_decl(). If this (possibly abstract) declarator is a pointer chain
5274 it returns the innermost pointed to type (equals *type, but is a different
5275 pointer), otherwise returns type itself, that's used for recursive calls. */
5276 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5279 int qualifiers
, storage
;
5281 /* recursive type, remove storage bits first, apply them later again */
5282 storage
= type
->t
& VT_STORAGE
;
5283 type
->t
&= ~VT_STORAGE
;
5286 while (tok
== '*') {
5294 qualifiers
|= VT_CONSTANT
;
5299 qualifiers
|= VT_VOLATILE
;
5305 /* XXX: clarify attribute handling */
5306 case TOK_ATTRIBUTE1
:
5307 case TOK_ATTRIBUTE2
:
5308 parse_attribute(ad
);
5312 type
->t
|= qualifiers
;
5314 /* innermost pointed to type is the one for the first derivation */
5315 ret
= pointed_type(type
);
5319 /* This is possibly a parameter type list for abstract declarators
5320 ('int ()'), use post_type for testing this. */
5321 if (!post_type(type
, ad
, 0, td
)) {
5322 /* It's not, so it's a nested declarator, and the post operations
5323 apply to the innermost pointed to type (if any). */
5324 /* XXX: this is not correct to modify 'ad' at this point, but
5325 the syntax is not clear */
5326 parse_attribute(ad
);
5327 post
= type_decl(type
, ad
, v
, td
);
5331 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5332 /* type identifier */
5337 if (!(td
& TYPE_ABSTRACT
))
5338 expect("identifier");
5341 post_type(post
, ad
, storage
, 0);
5342 parse_attribute(ad
);
5347 /* indirection with full error checking and bound check */
5348 ST_FUNC
void indir(void)
5350 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5351 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5355 if (vtop
->r
& VT_LVAL
)
5357 vtop
->type
= *pointed_type(&vtop
->type
);
5358 /* Arrays and functions are never lvalues */
5359 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5360 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5362 /* if bound checking, the referenced pointer must be checked */
5363 #ifdef CONFIG_TCC_BCHECK
5364 if (tcc_state
->do_bounds_check
)
5365 vtop
->r
|= VT_MUSTBOUND
;
5370 /* pass a parameter to a function and do type checking and casting */
5371 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5376 func_type
= func
->f
.func_type
;
5377 if (func_type
== FUNC_OLD
||
5378 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5379 /* default casting : only need to convert float to double */
5380 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5381 gen_cast_s(VT_DOUBLE
);
5382 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5383 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5384 type
.ref
= vtop
->type
.ref
;
5386 } else if (vtop
->r
& VT_MUSTCAST
) {
5387 force_charshort_cast();
5389 } else if (arg
== NULL
) {
5390 tcc_error("too many arguments to function");
5393 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5394 gen_assign_cast(&type
);
5398 /* parse an expression and return its type without any side effect. */
5399 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5408 /* parse an expression of the form '(type)' or '(expr)' and return its
5410 static void parse_expr_type(CType
*type
)
5416 if (parse_btype(type
, &ad
)) {
5417 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5419 expr_type(type
, gexpr
);
5424 static void parse_type(CType
*type
)
5429 if (!parse_btype(type
, &ad
)) {
5432 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5435 static void parse_builtin_params(int nc
, const char *args
)
5444 while ((c
= *args
++)) {
5459 type
.t
= VT_CONSTANT
;
5465 type
.t
= VT_CONSTANT
;
5467 type
.t
|= char_type
.t
;
5479 gen_assign_cast(&type
);
5486 ST_FUNC
void unary(void)
5488 int n
, t
, align
, size
, r
, sizeof_caller
;
5493 /* generate line number info */
5494 if (tcc_state
->do_debug
)
5495 tcc_debug_line(tcc_state
);
5497 sizeof_caller
= in_sizeof
;
5500 /* XXX: GCC 2.95.3 does not generate a table although it should be
5508 #ifdef TCC_TARGET_PE
5509 t
= VT_SHORT
|VT_UNSIGNED
;
5517 vsetc(&type
, VT_CONST
, &tokc
);
5521 t
= VT_INT
| VT_UNSIGNED
;
5527 t
= VT_LLONG
| VT_UNSIGNED
;
5539 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5542 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5544 case TOK___FUNCTION__
:
5546 goto tok_identifier
;
5552 /* special function name identifier */
5553 len
= strlen(funcname
) + 1;
5554 /* generate char[len] type */
5559 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
5560 if (!NODATA_WANTED
) {
5561 ptr
= section_ptr_add(data_section
, len
);
5562 memcpy(ptr
, funcname
, len
);
5568 #ifdef TCC_TARGET_PE
5569 t
= VT_SHORT
| VT_UNSIGNED
;
5575 /* string parsing */
5577 if (tcc_state
->char_is_unsigned
)
5578 t
= VT_BYTE
| VT_UNSIGNED
;
5580 if (tcc_state
->warn_write_strings
)
5585 memset(&ad
, 0, sizeof(AttributeDef
));
5586 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5591 if (parse_btype(&type
, &ad
)) {
5592 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5594 /* check ISOC99 compound literal */
5596 /* data is allocated locally by default */
5601 /* all except arrays are lvalues */
5602 if (!(type
.t
& VT_ARRAY
))
5604 memset(&ad
, 0, sizeof(AttributeDef
));
5605 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5607 if (sizeof_caller
) {
5614 } else if (tok
== '{') {
5615 int saved_nocode_wanted
= nocode_wanted
;
5616 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5618 if (0 == local_scope
)
5619 tcc_error("statement expression outside of function");
5620 /* save all registers */
5622 /* statement expression : we do not accept break/continue
5623 inside as GCC does. We do retain the nocode_wanted state,
5624 as statement expressions can't ever be entered from the
5625 outside, so any reactivation of code emission (from labels
5626 or loop heads) can be disabled again after the end of it. */
5628 nocode_wanted
= saved_nocode_wanted
;
5643 /* functions names must be treated as function pointers,
5644 except for unary '&' and sizeof. Since we consider that
5645 functions are not lvalues, we only have to handle it
5646 there and in function calls. */
5647 /* arrays can also be used although they are not lvalues */
5648 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5649 !(vtop
->type
.t
& VT_ARRAY
))
5652 vtop
->sym
->a
.addrtaken
= 1;
5653 mk_pointer(&vtop
->type
);
5659 gen_test_zero(TOK_EQ
);
5670 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5671 tcc_error("pointer not accepted for unary plus");
5672 /* In order to force cast, we add zero, except for floating point
5673 where we really need an noop (otherwise -0.0 will be transformed
5675 if (!is_float(vtop
->type
.t
)) {
5687 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5689 if (vtop
[1].r
& VT_SYM
)
5690 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5691 size
= type_size(&type
, &align
);
5692 if (s
&& s
->a
.aligned
)
5693 align
= 1 << (s
->a
.aligned
- 1);
5694 if (t
== TOK_SIZEOF
) {
5695 if (!(type
.t
& VT_VLA
)) {
5697 tcc_error("sizeof applied to an incomplete type");
5700 vla_runtime_type_size(&type
, &align
);
5705 vtop
->type
.t
|= VT_UNSIGNED
;
5708 case TOK_builtin_expect
:
5709 /* __builtin_expect is a no-op for now */
5710 parse_builtin_params(0, "ee");
5713 case TOK_builtin_types_compatible_p
:
5714 parse_builtin_params(0, "tt");
5715 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5716 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5717 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5721 case TOK_builtin_choose_expr
:
5748 case TOK_builtin_constant_p
:
5749 parse_builtin_params(1, "e");
5750 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5751 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
5755 case TOK_builtin_frame_address
:
5756 case TOK_builtin_return_address
:
5762 if (tok
!= TOK_CINT
) {
5763 tcc_error("%s only takes positive integers",
5764 tok1
== TOK_builtin_return_address
?
5765 "__builtin_return_address" :
5766 "__builtin_frame_address");
5768 level
= (uint32_t)tokc
.i
;
5773 vset(&type
, VT_LOCAL
, 0); /* local frame */
5775 #ifdef TCC_TARGET_RISCV64
5779 mk_pointer(&vtop
->type
);
5780 indir(); /* -> parent frame */
5782 if (tok1
== TOK_builtin_return_address
) {
5783 // assume return address is just above frame pointer on stack
5784 #ifdef TCC_TARGET_ARM
5787 #elif defined TCC_TARGET_RISCV64
5794 mk_pointer(&vtop
->type
);
5799 #ifdef TCC_TARGET_RISCV64
5800 case TOK_builtin_va_start
:
5801 parse_builtin_params(0, "ee");
5802 r
= vtop
->r
& VT_VALMASK
;
5806 tcc_error("__builtin_va_start expects a local variable");
5811 #ifdef TCC_TARGET_X86_64
5812 #ifdef TCC_TARGET_PE
5813 case TOK_builtin_va_start
:
5814 parse_builtin_params(0, "ee");
5815 r
= vtop
->r
& VT_VALMASK
;
5819 tcc_error("__builtin_va_start expects a local variable");
5821 vtop
->type
= char_pointer_type
;
5826 case TOK_builtin_va_arg_types
:
5827 parse_builtin_params(0, "t");
5828 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5835 #ifdef TCC_TARGET_ARM64
5836 case TOK_builtin_va_start
: {
5837 parse_builtin_params(0, "ee");
5841 vtop
->type
.t
= VT_VOID
;
5844 case TOK_builtin_va_arg
: {
5845 parse_builtin_params(0, "et");
5853 case TOK___arm64_clear_cache
: {
5854 parse_builtin_params(0, "ee");
5857 vtop
->type
.t
= VT_VOID
;
5862 /* pre operations */
5873 t
= vtop
->type
.t
& VT_BTYPE
;
5875 /* In IEEE negate(x) isn't subtract(0,x), but rather
5879 vtop
->c
.f
= -1.0 * 0.0;
5880 else if (t
== VT_DOUBLE
)
5881 vtop
->c
.d
= -1.0 * 0.0;
5883 vtop
->c
.ld
= -1.0 * 0.0;
5891 goto tok_identifier
;
5893 /* allow to take the address of a label */
5894 if (tok
< TOK_UIDENT
)
5895 expect("label identifier");
5896 s
= label_find(tok
);
5898 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5900 if (s
->r
== LABEL_DECLARED
)
5901 s
->r
= LABEL_FORWARD
;
5904 s
->type
.t
= VT_VOID
;
5905 mk_pointer(&s
->type
);
5906 s
->type
.t
|= VT_STATIC
;
5908 vpushsym(&s
->type
, s
);
5914 CType controlling_type
;
5915 int has_default
= 0;
5918 TokenString
*str
= NULL
;
5919 int saved_const_wanted
= const_wanted
;
5924 expr_type(&controlling_type
, expr_eq
);
5925 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5926 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5927 mk_pointer(&controlling_type
);
5928 const_wanted
= saved_const_wanted
;
5932 if (tok
== TOK_DEFAULT
) {
5934 tcc_error("too many 'default'");
5940 AttributeDef ad_tmp
;
5945 parse_btype(&cur_type
, &ad_tmp
);
5948 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5949 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5951 tcc_error("type match twice");
5961 skip_or_save_block(&str
);
5963 skip_or_save_block(NULL
);
5970 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5971 tcc_error("type '%s' does not match any association", buf
);
5973 begin_macro(str
, 1);
5982 // special qnan , snan and infinity values
5987 vtop
->type
.t
= VT_FLOAT
;
5992 goto special_math_val
;
5995 goto special_math_val
;
6002 expect("identifier");
6004 if (!s
|| IS_ASM_SYM(s
)) {
6005 const char *name
= get_tok_str(t
, NULL
);
6007 tcc_error("'%s' undeclared", name
);
6008 /* for simple function calls, we tolerate undeclared
6009 external reference to int() function */
6010 if (tcc_state
->warn_implicit_function_declaration
6011 #ifdef TCC_TARGET_PE
6012 /* people must be warned about using undeclared WINAPI functions
6013 (which usually start with uppercase letter) */
6014 || (name
[0] >= 'A' && name
[0] <= 'Z')
6017 tcc_warning("implicit declaration of function '%s'", name
);
6018 s
= external_global_sym(t
, &func_old_type
);
6022 /* A symbol that has a register is a local register variable,
6023 which starts out as VT_LOCAL value. */
6024 if ((r
& VT_VALMASK
) < VT_CONST
)
6025 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
6027 vset(&s
->type
, r
, s
->c
);
6028 /* Point to s as backpointer (even without r&VT_SYM).
6029 Will be used by at least the x86 inline asm parser for
6035 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
6036 vtop
->c
.i
= s
->enum_val
;
6041 /* post operations */
6043 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
6046 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
6047 int qualifiers
, cumofs
= 0;
6049 if (tok
== TOK_ARROW
)
6051 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
6054 /* expect pointer on structure */
6055 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
6056 expect("struct or union");
6057 if (tok
== TOK_CDOUBLE
)
6058 expect("field name");
6060 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
6061 expect("field name");
6062 s
= find_field(&vtop
->type
, tok
, &cumofs
);
6064 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
6065 /* add field offset to pointer */
6066 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
6067 vpushi(cumofs
+ s
->c
);
6069 /* change type to field type, and set to lvalue */
6070 vtop
->type
= s
->type
;
6071 vtop
->type
.t
|= qualifiers
;
6072 /* an array is never an lvalue */
6073 if (!(vtop
->type
.t
& VT_ARRAY
)) {
6075 #ifdef CONFIG_TCC_BCHECK
6076 /* if bound checking, the referenced pointer must be checked */
6077 if (tcc_state
->do_bounds_check
)
6078 vtop
->r
|= VT_MUSTBOUND
;
6082 } else if (tok
== '[') {
6088 } else if (tok
== '(') {
6091 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
6094 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
6095 /* pointer test (no array accepted) */
6096 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
6097 vtop
->type
= *pointed_type(&vtop
->type
);
6098 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6102 expect("function pointer");
6105 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
6107 /* get return type */
6110 sa
= s
->next
; /* first parameter */
6111 nb_args
= regsize
= 0;
6113 /* compute first implicit argument if a structure is returned */
6114 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
6115 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
6116 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6117 &ret_align
, ®size
);
6118 if (ret_nregs
<= 0) {
6119 /* get some space for the returned structure */
6120 size
= type_size(&s
->type
, &align
);
6121 #ifdef TCC_TARGET_ARM64
6122 /* On arm64, a small struct is return in registers.
6123 It is much easier to write it to memory if we know
6124 that we are allowed to write some extra bytes, so
6125 round the allocated space up to a power of 2: */
6127 while (size
& (size
- 1))
6128 size
= (size
| (size
- 1)) + 1;
6130 loc
= (loc
- size
) & -align
;
6132 ret
.r
= VT_LOCAL
| VT_LVAL
;
6133 /* pass it as 'int' to avoid structure arg passing
6135 vseti(VT_LOCAL
, loc
);
6136 #ifdef CONFIG_TCC_BCHECK
6137 if (tcc_state
->do_bounds_check
)
6151 if (ret_nregs
> 0) {
6152 /* return in register */
6154 PUT_R_RET(&ret
, ret
.type
.t
);
6159 gfunc_param_typed(s
, sa
);
6169 tcc_error("too few arguments to function");
6171 gfunc_call(nb_args
);
6173 if (ret_nregs
< 0) {
6174 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6175 #ifdef TCC_TARGET_RISCV64
6176 arch_transfer_ret_regs(1);
6180 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6181 vsetc(&ret
.type
, r
, &ret
.c
);
6182 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6185 /* handle packed struct return */
6186 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6189 size
= type_size(&s
->type
, &align
);
6190 /* We're writing whole regs often, make sure there's enough
6191 space. Assume register size is power of 2. */
6192 if (regsize
> align
)
6194 loc
= (loc
- size
) & -align
;
6198 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6202 if (--ret_nregs
== 0)
6206 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6209 /* Promote char/short return values. This is matters only
6210 for calling function that were not compiled by TCC and
6211 only on some architectures. For those where it doesn't
6212 matter we expect things to be already promoted to int,
6214 t
= s
->type
.t
& VT_BTYPE
;
6215 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6217 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6219 vtop
->type
.t
= VT_INT
;
6223 if (s
->f
.func_noreturn
)
6231 #ifndef precedence_parser /* original top-down parser */
6233 static void expr_prod(void)
6238 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6245 static void expr_sum(void)
6250 while ((t
= tok
) == '+' || t
== '-') {
6257 static void expr_shift(void)
6262 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6269 static void expr_cmp(void)
6274 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6275 t
== TOK_ULT
|| t
== TOK_UGE
) {
6282 static void expr_cmpeq(void)
6287 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6294 static void expr_and(void)
6297 while (tok
== '&') {
6304 static void expr_xor(void)
6307 while (tok
== '^') {
6314 static void expr_or(void)
6317 while (tok
== '|') {
6324 static void expr_landor(int op
);
6326 static void expr_land(void)
6329 if (tok
== TOK_LAND
)
6333 static void expr_lor(void)
6340 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6341 #else /* defined precedence_parser */
6342 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6343 # define expr_lor() unary(), expr_infix(1)
6345 static int precedence(int tok
)
6348 case TOK_LOR
: return 1;
6349 case TOK_LAND
: return 2;
6353 case TOK_EQ
: case TOK_NE
: return 6;
6354 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6355 case TOK_SHL
: case TOK_SAR
: return 8;
6356 case '+': case '-': return 9;
6357 case '*': case '/': case '%': return 10;
6359 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
/* cached binary-operator precedence for every byte-sized token value */
static unsigned char prec[256];

static void init_prec(void)
{
    int tk;
    /* precompute the precedence() function result once per token < 256;
       tokens >= 256 fall back to precedence 0 via the macro below */
    for (tk = 0; tk < 256; tk++)
        prec[tk] = precedence(tk);
}
#define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6373 static void expr_landor(int op
);
6375 static void expr_infix(int p
)
6378 while ((p2
= precedence(t
)) >= p
) {
6379 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6384 if (precedence(tok
) > p2
)
6393 /* Assuming vtop is a value used in a conditional context
6394 (i.e. compared with zero) return 0 if it's false, 1 if
6395 true and -1 if it can't be statically determined. */
/* NOTE(review): extraction gaps — the body lines between the 'if'
   condition and the gen_cast_s() call, and the tail that returns the
   computed value (presumably the constant 0/1 path and the -1 fallback),
   are not visible in this chunk; confirm against the full file. */
static int condition_3way(void)
/* the value is a compile-time constant only if it is a plain VT_CONST
   with no lvalue residue, and any attached symbol is not weak (a weak
   symbol's address may still be null at link time) */
6399 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6400 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6402 gen_cast_s(VT_BOOL
);
6409 static void expr_landor(int op
)
6411 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6413 c
= f
? i
: condition_3way();
6415 save_regs(1), cc
= 0;
6417 nocode_wanted
++, f
= 1;
6425 expr_landor_next(op
);
6437 static int is_cond_bool(SValue
*sv
)
6439 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6440 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6441 return (unsigned)sv
->c
.i
< 2;
6442 if (sv
->r
== VT_CMP
)
6447 static void expr_cond(void)
6449 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6457 c
= condition_3way();
6458 g
= (tok
== ':' && gnu_ext
);
6468 /* needed to avoid having different registers saved in
6475 ncw_prev
= nocode_wanted
;
6481 if (c
< 0 && vtop
->r
== VT_CMP
) {
6488 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6489 mk_pointer(&vtop
->type
);
6490 sv
= *vtop
; /* save value to handle it later */
6491 vtop
--; /* no vpop so that FP stack is not flushed */
6501 nocode_wanted
= ncw_prev
;
6507 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6508 if (sv
.r
== VT_CMP
) {
6519 nocode_wanted
= ncw_prev
;
6520 // tcc_warning("two conditions expr_cond");
6524 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6525 mk_pointer(&vtop
->type
);
6527 /* cast operands to correct type according to ISOC rules */
6528 if (!combine_types(&type
, &sv
, vtop
, '?'))
6529 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6530 "type mismatch in conditional expression (have '%s' and '%s')");
6531 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6532 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6533 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6535 /* now we convert second operand */
6539 mk_pointer(&vtop
->type
);
6541 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6545 rc
= RC_TYPE(type
.t
);
6546 /* for long longs, we use fixed registers to avoid having
6547 to handle a complicated move */
6548 if (USING_TWO_WORDS(type
.t
))
6549 rc
= RC_RET(type
.t
);
6557 nocode_wanted
= ncw_prev
;
6559 /* this is horrible, but we must also convert first
6565 mk_pointer(&vtop
->type
);
6567 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6573 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6583 static void expr_eq(void)
6588 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6596 gen_op(TOK_ASSIGN_OP(t
));
6602 ST_FUNC
void gexpr(void)
6613 /* parse a constant expression and return value in vtop. */
6614 static void expr_const1(void)
6617 nocode_wanted
+= unevalmask
+ 1;
6619 nocode_wanted
-= unevalmask
+ 1;
6623 /* parse an integer constant and return its value. */
6624 static inline int64_t expr_const64(void)
6628 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6629 expect("constant expression");
6635 /* parse an integer constant and return its value.
6636 Complain if it doesn't fit 32bit (signed or unsigned). */
6637 ST_FUNC
int expr_const(void)
6640 int64_t wc
= expr_const64();
6642 if (c
!= wc
&& (unsigned)c
!= wc
)
6643 tcc_error("constant exceeds 32 bit");
6647 /* ------------------------------------------------------------------------- */
6648 /* return from function */
6650 #ifndef TCC_TARGET_ARM64
6651 static void gfunc_return(CType
*func_type
)
6653 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6654 CType type
, ret_type
;
6655 int ret_align
, ret_nregs
, regsize
;
6656 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6657 &ret_align
, ®size
);
6658 if (ret_nregs
< 0) {
6659 #ifdef TCC_TARGET_RISCV64
6660 arch_transfer_ret_regs(0);
6662 } else if (0 == ret_nregs
) {
6663 /* if returning structure, must copy it to implicit
6664 first pointer arg location */
6667 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6670 /* copy structure value to pointer */
6673 /* returning structure packed into registers */
6674 int size
, addr
, align
, rc
;
6675 size
= type_size(func_type
,&align
);
6676 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6677 (vtop
->c
.i
& (ret_align
-1)))
6678 && (align
& (ret_align
-1))) {
6679 loc
= (loc
- size
) & -ret_align
;
6682 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6686 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6688 vtop
->type
= ret_type
;
6689 rc
= RC_RET(ret_type
.t
);
6697 if (--ret_nregs
== 0)
6699 /* We assume that when a structure is returned in multiple
6700 registers, their classes are consecutive values of the
6703 vtop
->c
.i
+= regsize
;
6708 gv(RC_RET(func_type
->t
));
6710 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6714 static void check_func_return(void)
6716 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6718 if (!strcmp (funcname
, "main")
6719 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6720 /* main returns 0 by default */
6722 gen_assign_cast(&func_vt
);
6723 gfunc_return(&func_vt
);
6725 tcc_warning("function might return no value: '%s'", funcname
);
6729 /* ------------------------------------------------------------------------- */
6732 static int case_cmpi(const void *pa
, const void *pb
)
6734 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6735 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6736 return a
< b
? -1 : a
> b
;
6739 static int case_cmpu(const void *pa
, const void *pb
)
6741 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
6742 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
6743 return a
< b
? -1 : a
> b
;
/* Emit a conditional test of vtop (gvtst with inv=0, chained to 't')
   and immediately resolve the resulting forward-jump chain so it
   targets code address 'a'. */
static void gtst_addr(int t, int a)
{
    int jchain = gvtst(0, t);
    gsym_addr(jchain, a);
}
6751 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6755 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6772 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6774 gcase(base
, len
/2, bsym
);
6778 base
+= e
; len
-= e
;
6788 if (p
->v1
== p
->v2
) {
6790 gtst_addr(0, p
->sym
);
6800 gtst_addr(0, p
->sym
);
6804 *bsym
= gjmp(*bsym
);
6807 /* ------------------------------------------------------------------------- */
6808 /* __attribute__((cleanup(fn))) */
6810 static void try_call_scope_cleanup(Sym
*stop
)
6812 Sym
*cls
= cur_scope
->cl
.s
;
6814 for (; cls
!= stop
; cls
= cls
->ncl
) {
6815 Sym
*fs
= cls
->next
;
6816 Sym
*vs
= cls
->prev_tok
;
6818 vpushsym(&fs
->type
, fs
);
6819 vset(&vs
->type
, vs
->r
, vs
->c
);
6821 mk_pointer(&vtop
->type
);
6827 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6832 if (!cur_scope
->cl
.s
)
6835 /* search NCA of both cleanup chains given parents and initial depth */
6836 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6837 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6839 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6841 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6844 try_call_scope_cleanup(cc
);
6847 /* call 'func' for each __attribute__((cleanup(func))) */
6848 static void block_cleanup(struct scope
*o
)
6852 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6853 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6858 try_call_scope_cleanup(o
->cl
.s
);
6859 pcl
->jnext
= gjmp(0);
6861 goto remove_pending
;
6871 try_call_scope_cleanup(o
->cl
.s
);
6874 /* ------------------------------------------------------------------------- */
/* Restore the stack pointer from the stack slot 'loc' where it was
   saved before VLA allocation.
   NOTE(review): upstream guards this with `if (loc)` (no saved slot ==
   0); that guard line falls in an extraction gap here — confirm against
   the full source. */
static void vla_restore(int loc)
{
    if (loc)
        gen_vla_sp_restore(loc);
}
6883 static void vla_leave(struct scope
*o
)
6885 if (o
->vla
.num
< cur_scope
->vla
.num
)
6886 vla_restore(o
->vla
.loc
);
6889 /* ------------------------------------------------------------------------- */
6892 void new_scope(struct scope
*o
)
6894 /* copy and link previous scope */
6896 o
->prev
= cur_scope
;
6899 /* record local declaration stack position */
6900 o
->lstk
= local_stack
;
6901 o
->llstk
= local_label_stack
;
6905 if (tcc_state
->do_debug
)
6906 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
6909 void prev_scope(struct scope
*o
, int is_expr
)
6913 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6914 block_cleanup(o
->prev
);
6916 /* pop locally defined labels */
6917 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6919 /* In the is_expr case (a statement expression is finished here),
6920 vtop might refer to symbols on the local_stack. Either via the
6921 type or via vtop->sym. We can't pop those nor any that in turn
6922 might be referred to. To make it easier we don't roll back
6923 any symbols in that case; some upper level call to block() will
6924 do that. We do have to remove such symbols from the lookup
6925 tables, though. sym_pop will do that. */
6927 /* pop locally defined symbols */
6928 pop_local_syms(&local_stack
, o
->lstk
, is_expr
, 0);
6929 cur_scope
= o
->prev
;
6932 if (tcc_state
->do_debug
)
6933 tcc_debug_stabn(N_RBRAC
, ind
- func_ind
);
6936 /* leave a scope via break/continue(/goto) */
6937 void leave_scope(struct scope
*o
)
6941 try_call_scope_cleanup(o
->cl
.s
);
6945 /* ------------------------------------------------------------------------- */
6946 /* call block from 'for do while' loops */
6948 static void lblock(int *bsym
, int *csym
)
6950 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6951 int *b
= co
->bsym
, *c
= co
->csym
;
6965 static void block(int is_expr
)
6967 int a
, b
, c
, d
, e
, t
;
6972 /* default return value is (void) */
6974 vtop
->type
.t
= VT_VOID
;
6979 /* If the token carries a value, next() might destroy it. Only with
6980 invalid code such as f(){"123"4;} */
6981 if (TOK_HAS_VALUE(t
))
6991 if (tok
== TOK_ELSE
) {
6996 gsym(d
); /* patch else jmp */
7001 } else if (t
== TOK_WHILE
) {
7013 } else if (t
== '{') {
7016 /* handle local labels declarations */
7017 while (tok
== TOK_LABEL
) {
7020 if (tok
< TOK_UIDENT
)
7021 expect("label identifier");
7022 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
7024 } while (tok
== ',');
7028 while (tok
!= '}') {
7037 prev_scope(&o
, is_expr
);
7040 else if (!nocode_wanted
)
7041 check_func_return();
7043 } else if (t
== TOK_RETURN
) {
7044 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
7048 gen_assign_cast(&func_vt
);
7050 if (vtop
->type
.t
!= VT_VOID
)
7051 tcc_warning("void function returns a value");
7055 tcc_warning("'return' with no value");
7058 leave_scope(root_scope
);
7060 gfunc_return(&func_vt
);
7062 /* jump unless last stmt in top-level block */
7063 if (tok
!= '}' || local_scope
!= 1)
7067 } else if (t
== TOK_BREAK
) {
7069 if (!cur_scope
->bsym
)
7070 tcc_error("cannot break");
7071 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
7072 leave_scope(cur_switch
->scope
);
7074 leave_scope(loop_scope
);
7075 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
7078 } else if (t
== TOK_CONTINUE
) {
7080 if (!cur_scope
->csym
)
7081 tcc_error("cannot continue");
7082 leave_scope(loop_scope
);
7083 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
7086 } else if (t
== TOK_FOR
) {
7091 /* c99 for-loop init decl? */
7092 if (!decl0(VT_LOCAL
, 1, NULL
)) {
7093 /* no, regular for-loop init expr */
7121 } else if (t
== TOK_DO
) {
7135 } else if (t
== TOK_SWITCH
) {
7136 struct switch_t
*sw
;
7138 sw
= tcc_mallocz(sizeof *sw
);
7140 sw
->scope
= cur_scope
;
7141 sw
->prev
= cur_switch
;
7147 sw
->sv
= *vtop
--; /* save switch value */
7150 b
= gjmp(0); /* jump to first case */
7152 a
= gjmp(a
); /* add implicit break */
7156 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7157 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7159 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7161 for (b
= 1; b
< sw
->n
; b
++)
7162 if (sw
->sv
.type
.t
& VT_UNSIGNED
7163 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7164 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7165 tcc_error("duplicate case value");
7169 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7172 gsym_addr(d
, sw
->def_sym
);
7178 dynarray_reset(&sw
->p
, &sw
->n
);
7179 cur_switch
= sw
->prev
;
7182 } else if (t
== TOK_CASE
) {
7183 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7186 cr
->v1
= cr
->v2
= expr_const64();
7187 if (gnu_ext
&& tok
== TOK_DOTS
) {
7189 cr
->v2
= expr_const64();
7190 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7191 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7192 tcc_warning("empty case range");
7195 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7198 goto block_after_label
;
7200 } else if (t
== TOK_DEFAULT
) {
7203 if (cur_switch
->def_sym
)
7204 tcc_error("too many 'default'");
7205 cur_switch
->def_sym
= gind();
7208 goto block_after_label
;
7210 } else if (t
== TOK_GOTO
) {
7211 vla_restore(root_scope
->vla
.loc
);
7212 if (tok
== '*' && gnu_ext
) {
7216 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7220 } else if (tok
>= TOK_UIDENT
) {
7221 s
= label_find(tok
);
7222 /* put forward definition if needed */
7224 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7225 else if (s
->r
== LABEL_DECLARED
)
7226 s
->r
= LABEL_FORWARD
;
7228 if (s
->r
& LABEL_FORWARD
) {
7229 /* start new goto chain for cleanups, linked via label->next */
7230 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7231 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7232 pending_gotos
->prev_tok
= s
;
7233 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7234 pending_gotos
->next
= s
;
7236 s
->jnext
= gjmp(s
->jnext
);
7238 try_call_cleanup_goto(s
->cleanupstate
);
7239 gjmp_addr(s
->jnext
);
7244 expect("label identifier");
7248 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7252 if (tok
== ':' && t
>= TOK_UIDENT
) {
7257 if (s
->r
== LABEL_DEFINED
)
7258 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7259 s
->r
= LABEL_DEFINED
;
7261 Sym
*pcl
; /* pending cleanup goto */
7262 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7264 sym_pop(&s
->next
, NULL
, 0);
7268 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7271 s
->cleanupstate
= cur_scope
->cl
.s
;
7274 vla_restore(cur_scope
->vla
.loc
);
7275 /* we accept this, but it is a mistake */
7277 tcc_warning("deprecated use of label at end of compound statement");
7283 /* expression case */
7300 /* This skips over a stream of tokens containing balanced {} and ()
7301 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7302 with a '{'). If STR then allocates and stores the skipped tokens
7303 in *STR. This doesn't check if () and {} are nested correctly,
7304 i.e. "({)}" is accepted. */
7305 static void skip_or_save_block(TokenString
**str
)
7307 int braces
= tok
== '{';
7310 *str
= tok_str_alloc();
7312 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
7314 if (tok
== TOK_EOF
) {
7315 if (str
|| level
> 0)
7316 tcc_error("unexpected end of file");
7321 tok_str_add_tok(*str
);
7324 if (t
== '{' || t
== '(') {
7326 } else if (t
== '}' || t
== ')') {
7328 if (level
== 0 && braces
&& t
== '}')
7333 tok_str_add(*str
, -1);
7334 tok_str_add(*str
, 0);
7338 #define EXPR_CONST 1
7341 static void parse_init_elem(int expr_type
)
7343 int saved_global_expr
;
7346 /* compound literals must be allocated globally in this case */
7347 saved_global_expr
= global_expr
;
7350 global_expr
= saved_global_expr
;
7351 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7352 (compound literals). */
7353 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7354 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7355 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7356 #ifdef TCC_TARGET_PE
7357 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7360 tcc_error("initializer element is not constant");
7369 static void init_assert(init_params
*p
, int offset
)
7371 if (p
->sec
? !NODATA_WANTED
&& offset
> p
->sec
->data_offset
7372 : !nocode_wanted
&& offset
> p
->local_offset
)
7373 tcc_internal_error("initializer overflow");
7376 #define init_assert(sec, offset)
7379 /* put zeros for variable based init */
7380 static void init_putz(init_params
*p
, unsigned long c
, int size
)
7382 init_assert(p
, c
+ size
);
7384 /* nothing to do because globals are already set to zero */
7386 vpush_helper_func(TOK_memset
);
7388 #ifdef TCC_TARGET_ARM
7400 #define DIF_SIZE_ONLY 2
7401 #define DIF_HAVE_ELEM 4
7404 /* delete relocations for specified range c ... c + size. Unfortunatly
7405 in very special cases, relocations may occur unordered */
7406 static void decl_design_delrels(Section
*sec
, int c
, int size
)
7408 ElfW_Rel
*rel
, *rel2
, *rel_end
;
7409 if (!sec
|| !sec
->reloc
)
7411 rel
= rel2
= (ElfW_Rel
*)sec
->reloc
->data
;
7412 rel_end
= (ElfW_Rel
*)(sec
->reloc
->data
+ sec
->reloc
->data_offset
);
7413 while (rel
< rel_end
) {
7414 if (rel
->r_offset
>= c
&& rel
->r_offset
< c
+ size
) {
7415 sec
->reloc
->data_offset
-= sizeof *rel
;
7418 memcpy(rel2
, rel
, sizeof *rel
);
7425 static void decl_design_flex(init_params
*p
, Sym
*ref
, int index
)
7427 if (ref
== p
->flex_array_ref
) {
7428 if (index
>= ref
->c
)
7430 } else if (ref
->c
< 0)
7431 tcc_error("flexible array has zero size in this context");
7434 /* t is the array or struct type. c is the array or struct
7435 address. cur_field is the pointer to the current
7436 field, for arrays the 'c' member contains the current start
7437 index. 'flags' is as in decl_initializer.
7438 'al' contains the already initialized length of the
7439 current container (starting at c). This returns the new length of that. */
7440 static int decl_designator(init_params
*p
, CType
*type
, unsigned long c
,
7441 Sym
**cur_field
, int flags
, int al
)
7444 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7445 unsigned long corig
= c
;
7450 if (flags
& DIF_HAVE_ELEM
)
7453 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7460 /* NOTE: we only support ranges for last designator */
7461 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7463 if (!(type
->t
& VT_ARRAY
))
7464 expect("array type");
7466 index
= index_last
= expr_const();
7467 if (tok
== TOK_DOTS
&& gnu_ext
) {
7469 index_last
= expr_const();
7473 decl_design_flex(p
, s
, index_last
);
7474 if (index
< 0 || index_last
>= s
->c
|| index_last
< index
)
7475 tcc_error("index exceeds array bounds or range is empty");
7477 (*cur_field
)->c
= index_last
;
7478 type
= pointed_type(type
);
7479 elem_size
= type_size(type
, &align
);
7480 c
+= index
* elem_size
;
7481 nb_elems
= index_last
- index
+ 1;
7488 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7489 expect("struct/union type");
7491 f
= find_field(type
, l
, &cumofs
);
7504 } else if (!gnu_ext
) {
7509 if (type
->t
& VT_ARRAY
) {
7510 index
= (*cur_field
)->c
;
7512 decl_design_flex(p
, s
, index
);
7514 tcc_error("too many initializers");
7515 type
= pointed_type(type
);
7516 elem_size
= type_size(type
, &align
);
7517 c
+= index
* elem_size
;
7520 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7521 *cur_field
= f
= f
->next
;
7523 tcc_error("too many initializers");
7529 if (!elem_size
) /* for structs */
7530 elem_size
= type_size(type
, &align
);
7532 /* Using designators the same element can be initialized more
7533 than once. In that case we need to delete possibly already
7534 existing relocations. */
7535 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
< al
) {
7536 decl_design_delrels(p
->sec
, c
, elem_size
* nb_elems
);
7537 flags
&= ~DIF_CLEAR
; /* mark stack dirty too */
7540 decl_initializer(p
, type
, c
, flags
& ~DIF_FIRST
);
7542 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7546 if (p
->sec
|| (type
->t
& VT_ARRAY
)) {
7547 /* make init_putv/vstore believe it were a struct */
7549 t1
.t
= VT_STRUCT
, t1
.ref
= &aref
;
7553 vpush_ref(type
, p
->sec
, c
, elem_size
);
7555 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7556 for (i
= 1; i
< nb_elems
; i
++) {
7558 init_putv(p
, type
, c
+ elem_size
* i
);
7563 c
+= nb_elems
* elem_size
;
7569 /* store a value or an expression directly in global data or in local array */
7570 static void init_putv(init_params
*p
, CType
*type
, unsigned long c
)
7576 Section
*sec
= p
->sec
;
7579 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7581 size
= type_size(type
, &align
);
7582 if (type
->t
& VT_BITFIELD
)
7583 size
= (BIT_POS(type
->t
) + BIT_SIZE(type
->t
) + 7) / 8;
7584 init_assert(p
, c
+ size
);
7587 /* XXX: not portable */
7588 /* XXX: generate error if incorrect relocation */
7589 gen_assign_cast(&dtype
);
7590 bt
= type
->t
& VT_BTYPE
;
7592 if ((vtop
->r
& VT_SYM
)
7595 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7596 || (type
->t
& VT_BITFIELD
))
7597 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7599 tcc_error("initializer element is not computable at load time");
7601 if (NODATA_WANTED
) {
7606 ptr
= sec
->data
+ c
;
7608 /* XXX: make code faster ? */
7609 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7610 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7611 /* XXX This rejects compound literals like
7612 '(void *){ptr}'. The problem is that '&sym' is
7613 represented the same way, which would be ruled out
7614 by the SYM_FIRST_ANOM check above, but also '"string"'
7615 in 'char *p = "string"' is represented the same
7616 with the type being VT_PTR and the symbol being an
7617 anonymous one. That is, there's no difference in vtop
7618 between '(void *){x}' and '&(void *){x}'. Ignore
7619 pointer typed entities here. Hopefully no real code
7620 will ever use compound literals with scalar type. */
7621 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7622 /* These come from compound literals, memcpy stuff over. */
7626 esym
= elfsym(vtop
->sym
);
7627 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7628 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7630 /* We need to copy over all memory contents, and that
7631 includes relocations. Use the fact that relocs are
7632 created it order, so look from the end of relocs
7633 until we hit one before the copied region. */
7634 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
7635 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
7636 while (num_relocs
--) {
7638 if (rel
->r_offset
>= esym
->st_value
+ size
)
7640 if (rel
->r_offset
< esym
->st_value
)
7642 put_elf_reloca(symtab_section
, sec
,
7643 c
+ rel
->r_offset
- esym
->st_value
,
7644 ELFW(R_TYPE
)(rel
->r_info
),
7645 ELFW(R_SYM
)(rel
->r_info
),
7655 if (type
->t
& VT_BITFIELD
) {
7656 int bit_pos
, bit_size
, bits
, n
;
7657 unsigned char *p
, v
, m
;
7658 bit_pos
= BIT_POS(vtop
->type
.t
);
7659 bit_size
= BIT_SIZE(vtop
->type
.t
);
7660 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7661 bit_pos
&= 7, bits
= 0;
7666 v
= vtop
->c
.i
>> bits
<< bit_pos
;
7667 m
= ((1 << n
) - 1) << bit_pos
;
7668 *p
= (*p
& ~m
) | (v
& m
);
7669 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7673 /* XXX: when cross-compiling we assume that each type has the
7674 same representation on host and target, which is likely to
7675 be wrong in the case of long double */
7677 vtop
->c
.i
= vtop
->c
.i
!= 0;
7679 *(char *)ptr
= vtop
->c
.i
;
7682 *(short *)ptr
= vtop
->c
.i
;
7685 *(float*)ptr
= vtop
->c
.f
;
7688 *(double *)ptr
= vtop
->c
.d
;
7691 #if defined TCC_IS_NATIVE_387
7692 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7693 memcpy(ptr
, &vtop
->c
.ld
, 10);
7695 else if (sizeof (long double) == sizeof (double))
7696 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7698 else if (vtop
->c
.ld
== 0.0)
7702 if (sizeof(long double) == LDOUBLE_SIZE
)
7703 *(long double*)ptr
= vtop
->c
.ld
;
7704 else if (sizeof(double) == LDOUBLE_SIZE
)
7705 *(double *)ptr
= (double)vtop
->c
.ld
;
7707 tcc_error("can't cross compile long double constants");
7711 *(long long *)ptr
= vtop
->c
.i
;
7718 addr_t val
= vtop
->c
.i
;
7720 if (vtop
->r
& VT_SYM
)
7721 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7723 *(addr_t
*)ptr
= val
;
7725 if (vtop
->r
& VT_SYM
)
7726 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7727 *(addr_t
*)ptr
= val
;
7733 int val
= vtop
->c
.i
;
7735 if (vtop
->r
& VT_SYM
)
7736 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7740 if (vtop
->r
& VT_SYM
)
7741 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7750 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7757 /* 't' contains the type and storage info. 'c' is the offset of the
7758 object in section 'sec'. If 'sec' is NULL, it means stack based
7759 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7760 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7761 size only evaluation is wanted (only for arrays). */
7762 static void decl_initializer(init_params
*p
, CType
*type
, unsigned long c
, int flags
)
7764 int len
, n
, no_oblock
, i
;
7770 /* generate line number info */
7771 if (!p
->sec
&& tcc_state
->do_debug
)
7772 tcc_debug_line(tcc_state
);
7774 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7775 /* In case of strings we have special handling for arrays, so
7776 don't consume them as initializer value (which would commit them
7777 to some anonymous symbol). */
7778 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7779 !(flags
& DIF_SIZE_ONLY
)) {
7780 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7781 flags
|= DIF_HAVE_ELEM
;
7784 if ((flags
& DIF_HAVE_ELEM
) &&
7785 !(type
->t
& VT_ARRAY
) &&
7786 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7787 The source type might have VT_CONSTANT set, which is
7788 of course assignable to non-const elements. */
7789 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7792 } else if (type
->t
& VT_ARRAY
) {
7794 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7802 t1
= pointed_type(type
);
7803 size1
= type_size(t1
, &align1
);
7805 /* only parse strings here if correct type (otherwise: handle
7806 them as ((w)char *) expressions */
7807 if ((tok
== TOK_LSTR
&&
7808 #ifdef TCC_TARGET_PE
7809 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7811 (t1
->t
& VT_BTYPE
) == VT_INT
7813 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7815 cstr_reset(&initstr
);
7816 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7817 tcc_error("unhandled string literal merging");
7818 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7820 initstr
.size
-= size1
;
7822 len
+= tokc
.str
.size
;
7824 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7826 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7829 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7830 && tok
!= TOK_EOF
) {
7831 /* Not a lone literal but part of a bigger expression. */
7832 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7833 tokc
.str
.size
= initstr
.size
;
7834 tokc
.str
.data
= initstr
.data
;
7838 if (!(flags
& DIF_SIZE_ONLY
)) {
7843 tcc_warning("initializer-string for array is too long");
7844 /* in order to go faster for common case (char
7845 string in global variable, we handle it
7847 if (p
->sec
&& size1
== 1) {
7848 init_assert(p
, c
+ nb
);
7850 memcpy(p
->sec
->data
+ c
, initstr
.data
, nb
);
7854 /* only add trailing zero if enough storage (no
7855 warning in this case since it is standard) */
7856 if (flags
& DIF_CLEAR
)
7859 init_putz(p
, c
+ i
* size1
, (n
- i
) * size1
);
7863 } else if (size1
== 1)
7864 ch
= ((unsigned char *)initstr
.data
)[i
];
7866 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7868 init_putv(p
, t1
, c
+ i
* size1
);
7872 decl_design_flex(p
, s
, len
);
7881 /* zero memory once in advance */
7882 if (!(flags
& (DIF_CLEAR
| DIF_SIZE_ONLY
))) {
7883 init_putz(p
, c
, n
*size1
);
7888 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7889 len
= decl_designator(p
, type
, c
, &f
, flags
, len
);
7890 flags
&= ~DIF_HAVE_ELEM
;
7891 if (type
->t
& VT_ARRAY
) {
7893 /* special test for multi dimensional arrays (may not
7894 be strictly correct if designators are used at the
7896 if (no_oblock
&& len
>= n
*size1
)
7899 if (s
->type
.t
== VT_UNION
)
7903 if (no_oblock
&& f
== NULL
)
7914 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7916 if ((flags
& DIF_FIRST
) || tok
== '{') {
7925 } else if (tok
== '{') {
7926 if (flags
& DIF_HAVE_ELEM
)
7929 decl_initializer(p
, type
, c
, flags
& ~DIF_HAVE_ELEM
);
7931 } else if ((flags
& DIF_SIZE_ONLY
)) {
7932 /* If we supported only ISO C we wouldn't have to accept calling
7933 this on anything than an array if DIF_SIZE_ONLY (and even then
7934 only on the outermost level, so no recursion would be needed),
7935 because initializing a flex array member isn't supported.
7936 But GNU C supports it, so we need to recurse even into
7937 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7938 /* just skip expression */
7939 skip_or_save_block(NULL
);
7941 if (!(flags
& DIF_HAVE_ELEM
)) {
7942 /* This should happen only when we haven't parsed
7943 the init element above for fear of committing a
7944 string constant to memory too early. */
7945 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7946 expect("string constant");
7947 parse_init_elem(!p
->sec
? EXPR_ANY
: EXPR_CONST
);
7950 if (!p
->sec
&& (flags
& DIF_CLEAR
) /* container was already zero'd */
7951 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
7953 && btype_size(type
->t
& VT_BTYPE
) /* not for fp constants */
7957 init_putv(p
, type
, c
);
7961 /* parse an initializer for type 't' if 'has_init' is non zero, and
7962 allocate space in local or global data space ('r' is either
7963 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7964 variable 'v' of scope 'scope' is declared before initializers
7965 are parsed. If 'v' is zero, then a reference to the new object
7966 is put in the value stack. If 'has_init' is 2, a special parsing
7967 is done to handle string constants. */
7968 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7969 int has_init
, int v
, int scope
)
7971 int size
, align
, addr
;
7972 TokenString
*init_str
= NULL
;
7975 Sym
*flexible_array
;
7977 int saved_nocode_wanted
= nocode_wanted
;
7978 #ifdef CONFIG_TCC_BCHECK
7979 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7981 init_params p
= {0};
7983 /* Always allocate static or global variables */
7984 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7985 nocode_wanted
|= 0x80000000;
7987 flexible_array
= NULL
;
7988 size
= type_size(type
, &align
);
7990 /* exactly one flexible array may be initialized, either the
7991 toplevel array or the last member of the toplevel struct */
7994 /* If the base type itself was an array type of unspecified size
7995 (like in 'typedef int arr[]; arr x = {1};') then we will
7996 overwrite the unknown size by the real one for this decl.
7997 We need to unshare the ref symbol holding that size. */
7998 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
7999 p
.flex_array_ref
= type
->ref
;
8001 } else if (has_init
&& (type
->t
& VT_BTYPE
) == VT_STRUCT
) {
8002 Sym
*field
= type
->ref
->next
;
8005 field
= field
->next
;
8006 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0) {
8007 flexible_array
= field
;
8008 p
.flex_array_ref
= field
->type
.ref
;
8015 /* If unknown size, do a dry-run 1st pass */
8017 tcc_error("unknown type size");
8018 if (has_init
== 2) {
8019 /* only get strings */
8020 init_str
= tok_str_alloc();
8021 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
8022 tok_str_add_tok(init_str
);
8025 tok_str_add(init_str
, -1);
8026 tok_str_add(init_str
, 0);
8028 skip_or_save_block(&init_str
);
8032 begin_macro(init_str
, 1);
8034 decl_initializer(&p
, type
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
8035 /* prepare second initializer parsing */
8036 macro_ptr
= init_str
->str
;
8039 /* if still unknown size, error */
8040 size
= type_size(type
, &align
);
8042 tcc_error("unknown type size");
8044 /* If there's a flex member and it was used in the initializer
8046 if (flexible_array
&& flexible_array
->type
.ref
->c
> 0)
8047 size
+= flexible_array
->type
.ref
->c
8048 * pointed_size(&flexible_array
->type
);
8051 /* take into account specified alignment if bigger */
8052 if (ad
->a
.aligned
) {
8053 int speca
= 1 << (ad
->a
.aligned
- 1);
8056 } else if (ad
->a
.packed
) {
8060 if (!v
&& NODATA_WANTED
)
8061 size
= 0, align
= 1;
8063 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
8065 #ifdef CONFIG_TCC_BCHECK
8067 /* add padding between stack variables for bound checking */
8071 loc
= (loc
- size
) & -align
;
8073 p
.local_offset
= addr
+ size
;
8074 #ifdef CONFIG_TCC_BCHECK
8076 /* add padding between stack variables for bound checking */
8081 /* local variable */
8082 #ifdef CONFIG_TCC_ASM
8083 if (ad
->asm_label
) {
8084 int reg
= asm_parse_regvar(ad
->asm_label
);
8086 r
= (r
& ~VT_VALMASK
) | reg
;
8089 sym
= sym_push(v
, type
, r
, addr
);
8090 if (ad
->cleanup_func
) {
8091 Sym
*cls
= sym_push2(&all_cleanups
,
8092 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
8093 cls
->prev_tok
= sym
;
8094 cls
->next
= ad
->cleanup_func
;
8095 cls
->ncl
= cur_scope
->cl
.s
;
8096 cur_scope
->cl
.s
= cls
;
8101 /* push local reference */
8102 vset(type
, r
, addr
);
8105 if (v
&& scope
== VT_CONST
) {
8106 /* see if the symbol was already defined */
8109 patch_storage(sym
, ad
, type
);
8110 /* we accept several definitions of the same global variable. */
8111 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
8116 /* allocate symbol in corresponding section */
8121 else if (tcc_state
->nocommon
)
8126 addr
= section_add(sec
, size
, align
);
8127 #ifdef CONFIG_TCC_BCHECK
8128 /* add padding if bound check */
8130 section_add(sec
, 1, 1);
8133 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8134 sec
= common_section
;
8139 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8140 patch_storage(sym
, ad
, NULL
);
8142 /* update symbol definition */
8143 put_extern_sym(sym
, sec
, addr
, size
);
8145 /* push global reference */
8146 vpush_ref(type
, sec
, addr
, size
);
8151 #ifdef CONFIG_TCC_BCHECK
8152 /* handles bounds now because the symbol must be defined
8153 before for the relocation */
8157 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8158 /* then add global bound info */
8159 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8160 bounds_ptr
[0] = 0; /* relocated */
8161 bounds_ptr
[1] = size
;
8166 if (type
->t
& VT_VLA
) {
8172 /* save current stack pointer */
8173 if (root_scope
->vla
.loc
== 0) {
8174 struct scope
*v
= cur_scope
;
8175 gen_vla_sp_save(loc
-= PTR_SIZE
);
8176 do v
->vla
.loc
= loc
; while ((v
= v
->prev
));
8179 vla_runtime_type_size(type
, &a
);
8180 gen_vla_alloc(type
, a
);
8181 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8182 /* on _WIN64, because of the function args scratch area, the
8183 result of alloca differs from RSP and is returned in RAX. */
8184 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8186 gen_vla_sp_save(addr
);
8187 cur_scope
->vla
.loc
= addr
;
8188 cur_scope
->vla
.num
++;
8189 } else if (has_init
) {
8191 decl_initializer(&p
, type
, addr
, DIF_FIRST
);
8192 /* patch flexible array member size back to -1, */
8193 /* for possible subsequent similar declarations */
8195 flexible_array
->type
.ref
->c
= -1;
8199 /* restore parse state if needed */
8205 nocode_wanted
= saved_nocode_wanted
;
8208 /* parse a function defined by symbol 'sym' and generate its code in
8209 'cur_text_section' */
8210 static void gen_function(Sym
*sym
)
8212 struct scope f
= { 0 };
8213 cur_scope
= root_scope
= &f
;
8215 ind
= cur_text_section
->data_offset
;
8216 if (sym
->a
.aligned
) {
8217 size_t newoff
= section_add(cur_text_section
, 0,
8218 1 << (sym
->a
.aligned
- 1));
8219 gen_fill_nops(newoff
- ind
);
8221 /* NOTE: we patch the symbol size later */
8222 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8223 if (sym
->type
.ref
->f
.func_ctor
)
8224 add_array (tcc_state
, ".init_array", sym
->c
);
8225 if (sym
->type
.ref
->f
.func_dtor
)
8226 add_array (tcc_state
, ".fini_array", sym
->c
);
8228 funcname
= get_tok_str(sym
->v
, NULL
);
8230 func_vt
= sym
->type
.ref
->type
;
8231 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8233 /* put debug symbol */
8234 tcc_debug_funcstart(tcc_state
, sym
);
8235 /* push a dummy symbol to enable local sym storage */
8236 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8237 local_scope
= 1; /* for function parameters */
8241 clear_temp_local_var_list();
8245 /* reset local stack */
8246 pop_local_syms(&local_stack
, NULL
, 0, func_var
);
8248 cur_text_section
->data_offset
= ind
;
8250 label_pop(&global_label_stack
, NULL
, 0);
8251 sym_pop(&all_cleanups
, NULL
, 0);
8252 /* patch symbol size */
8253 elfsym(sym
)->st_size
= ind
- func_ind
;
8254 /* end of function */
8255 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8256 /* It's better to crash than to generate wrong code */
8257 cur_text_section
= NULL
;
8258 funcname
= ""; /* for safety */
8259 func_vt
.t
= VT_VOID
; /* for safety */
8260 func_var
= 0; /* for safety */
8261 ind
= 0; /* for safety */
8262 nocode_wanted
= 0x80000000;
8264 /* do this after funcend debug info */
8268 static void gen_inline_functions(TCCState
*s
)
8271 int inline_generated
, i
;
8272 struct InlineFunc
*fn
;
8274 tcc_open_bf(s
, ":inline:", 0);
8275 /* iterate while inline function are referenced */
8277 inline_generated
= 0;
8278 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8279 fn
= s
->inline_fns
[i
];
8281 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8282 /* the function was used or forced (and then not internal):
8283 generate its code and convert it to a normal function */
8285 tcc_debug_putfile(s
, fn
->filename
);
8286 begin_macro(fn
->func_str
, 1);
8288 cur_text_section
= text_section
;
8292 inline_generated
= 1;
8295 } while (inline_generated
);
8299 static void free_inline_functions(TCCState
*s
)
8302 /* free tokens of unused inline functions */
8303 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8304 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8306 tok_str_free(fn
->func_str
);
8308 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
8311 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8312 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8313 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
8315 int v
, has_init
, r
, oldint
;
8318 AttributeDef ad
, adbase
;
8321 if (tok
== TOK_STATIC_ASSERT
) {
8331 tcc_error("_Static_assert fail");
8333 goto static_assert_out
;
8337 parse_mult_str(&error_str
, "string constant");
8339 tcc_error("%s", (char *)error_str
.data
);
8340 cstr_free(&error_str
);
8348 if (!parse_btype(&btype
, &adbase
)) {
8349 if (is_for_loop_init
)
8351 /* skip redundant ';' if not in old parameter decl scope */
8352 if (tok
== ';' && l
!= VT_CMP
) {
8358 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8359 /* global asm block */
8363 if (tok
>= TOK_UIDENT
) {
8364 /* special test for old K&R protos without explicit int
8365 type. Only accepted when defining global data */
8370 expect("declaration");
8376 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8378 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8379 tcc_warning("unnamed struct/union that defines no instances");
8383 if (IS_ENUM(btype
.t
)) {
8389 while (1) { /* iterate thru each declaration */
8392 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
8396 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8397 printf("type = '%s'\n", buf
);
8400 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8401 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
8402 tcc_error("function without file scope cannot be static");
8403 /* if old style function prototype, we accept a
8406 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
8407 decl0(VT_CMP
, 0, sym
);
8408 #ifdef TCC_TARGET_MACHO
8409 if (sym
->f
.func_alwinl
8410 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8411 == (VT_EXTERN
| VT_INLINE
))) {
8412 /* always_inline functions must be handled as if they
8413 don't generate multiple global defs, even if extern
8414 inline, i.e. GNU inline semantics for those. Rewrite
8415 them into static inline. */
8416 type
.t
&= ~VT_EXTERN
;
8417 type
.t
|= VT_STATIC
;
8420 /* always compile 'extern inline' */
8421 if (type
.t
& VT_EXTERN
)
8422 type
.t
&= ~VT_INLINE
;
8424 } else if (oldint
) {
8425 tcc_warning("type defaults to int");
8428 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8429 ad
.asm_label
= asm_label_instr();
8430 /* parse one last attribute list, after asm label */
8431 parse_attribute(&ad
);
8433 /* gcc does not allow __asm__("label") with function definition,
8440 #ifdef TCC_TARGET_PE
8441 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8442 if (type
.t
& VT_STATIC
)
8443 tcc_error("cannot have dll linkage with static");
8444 if (type
.t
& VT_TYPEDEF
) {
8445 tcc_warning("'%s' attribute ignored for typedef",
8446 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8447 (ad
.a
.dllexport
= 0, "dllexport"));
8448 } else if (ad
.a
.dllimport
) {
8449 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8452 type
.t
|= VT_EXTERN
;
8458 tcc_error("cannot use local functions");
8459 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8460 expect("function definition");
8462 /* reject abstract declarators in function definition
8463 make old style params without decl have int type */
8465 while ((sym
= sym
->next
) != NULL
) {
8466 if (!(sym
->v
& ~SYM_FIELD
))
8467 expect("identifier");
8468 if (sym
->type
.t
== VT_VOID
)
8469 sym
->type
= int_type
;
8472 /* apply post-declaraton attributes */
8473 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8475 /* put function symbol */
8476 type
.t
&= ~VT_EXTERN
;
8477 sym
= external_sym(v
, &type
, 0, &ad
);
8479 /* static inline functions are just recorded as a kind
8480 of macro. Their code will be emitted at the end of
8481 the compilation unit only if they are used */
8482 if (sym
->type
.t
& VT_INLINE
) {
8483 struct InlineFunc
*fn
;
8484 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8485 strcpy(fn
->filename
, file
->filename
);
8487 skip_or_save_block(&fn
->func_str
);
8488 dynarray_add(&tcc_state
->inline_fns
,
8489 &tcc_state
->nb_inline_fns
, fn
);
8491 /* compute text section */
8492 cur_text_section
= ad
.section
;
8493 if (!cur_text_section
)
8494 cur_text_section
= text_section
;
8500 /* find parameter in function parameter list */
8501 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
8502 if ((sym
->v
& ~SYM_FIELD
) == v
)
8504 tcc_error("declaration for parameter '%s' but no such parameter",
8505 get_tok_str(v
, NULL
));
8507 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8508 tcc_error("storage class specified for '%s'",
8509 get_tok_str(v
, NULL
));
8510 if (sym
->type
.t
!= VT_VOID
)
8511 tcc_error("redefinition of parameter '%s'",
8512 get_tok_str(v
, NULL
));
8513 convert_parameter_type(&type
);
8515 } else if (type
.t
& VT_TYPEDEF
) {
8516 /* save typedefed type */
8517 /* XXX: test storage specifiers ? */
8519 if (sym
&& sym
->sym_scope
== local_scope
) {
8520 if (!is_compatible_types(&sym
->type
, &type
)
8521 || !(sym
->type
.t
& VT_TYPEDEF
))
8522 tcc_error("incompatible redefinition of '%s'",
8523 get_tok_str(v
, NULL
));
8526 sym
= sym_push(v
, &type
, 0, 0);
8530 if (tcc_state
->do_debug
)
8531 tcc_debug_typedef (tcc_state
, sym
);
8532 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8533 && !(type
.t
& VT_EXTERN
)) {
8534 tcc_error("declaration of void object");
8537 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8538 /* external function definition */
8539 /* specific case for func_call attribute */
8541 } else if (!(type
.t
& VT_ARRAY
)) {
8542 /* not lvalue if array */
8545 has_init
= (tok
== '=');
8546 if (has_init
&& (type
.t
& VT_VLA
))
8547 tcc_error("variable length array cannot be initialized");
8548 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8549 || (type
.t
& VT_BTYPE
) == VT_FUNC
8550 /* as with GCC, uninitialized global arrays with no size
8551 are considered extern: */
8552 || ((type
.t
& VT_ARRAY
) && !has_init
8553 && l
== VT_CONST
&& type
.ref
->c
< 0)
8555 /* external variable or function */
8556 type
.t
|= VT_EXTERN
;
8557 sym
= external_sym(v
, &type
, r
, &ad
);
8558 if (ad
.alias_target
) {
8559 /* Aliases need to be emitted when their target
8560 symbol is emitted, even if perhaps unreferenced.
8561 We only support the case where the base is
8562 already defined, otherwise we would need
8563 deferring to emit the aliases until the end of
8564 the compile unit. */
8565 Sym
*alias_target
= sym_find(ad
.alias_target
);
8566 ElfSym
*esym
= elfsym(alias_target
);
8568 tcc_error("unsupported forward __alias__ attribute");
8569 put_extern_sym2(sym
, esym
->st_shndx
,
8570 esym
->st_value
, esym
->st_size
, 1);
8573 if (type
.t
& VT_STATIC
)
8579 else if (l
== VT_CONST
)
8580 /* uninitialized global variables may be overridden */
8581 type
.t
|= VT_EXTERN
;
8582 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
8586 if (is_for_loop_init
)
8598 static void decl(int l
)
8603 /* ------------------------------------------------------------------------- */
8606 /* ------------------------------------------------------------------------- */