2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
51 static SValue _vstack
[1 + VSTACK_SIZE
];
52 #define vstack (_vstack + 1)
54 ST_DATA
int const_wanted
; /* true if constant wanted */
55 ST_DATA
int nocode_wanted
; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
60 /* Automagical code suppression ----> */
61 #define CODE_OFF() (nocode_wanted |= 0x20000000)
62 #define CODE_ON() (nocode_wanted &= ~0x20000000)
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
66 static int gind(void) { CODE_ON(); return ind
; }
68 /* Set 'nocode_wanted' after unconditional jumps */
/* Emit an unconditional jump to address 't', then suppress further code
   generation: everything on the fall-through path is unreachable. */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit an unconditional forward jump chained onto 't', suppress code
   generation for the unreachable fall-through, and return the new chain. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
77 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
78 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
79 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
81 static int last_line_num
, new_file
, func_ind
; /* debug info control */
82 ST_DATA
const char *funcname
;
83 ST_DATA CType int_type
, func_old_type
, char_pointer_type
;
84 static CString initstr
;
87 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
88 #define VT_PTRDIFF_T VT_INT
90 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_LLONG
93 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
97 ST_DATA
struct switch_t
{
101 } **p
; int n
; /* list of case ranges */
102 int def_sym
; /* default symbol */
105 struct switch_t
*prev
;
107 } *cur_switch
; /* current switch */
109 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
110 /*list of temporary local variables on the stack in current function. */
111 ST_DATA
struct temp_local_variable
{
112 int location
; //offset on stack. Svalue.c.i
115 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
116 short nb_temp_local_vars
;
118 static struct scope
{
120 struct { int loc
, num
; } vla
;
121 struct { Sym
*s
; int n
; } cl
;
124 } *cur_scope
, *loop_scope
, *root_scope
;
126 /********************************************************/
127 /* stab debug support */
129 static const struct {
132 } default_debug
[] = {
133 { VT_INT
, "int:t1=r1;-2147483648;2147483647;" },
134 { VT_BYTE
, "char:t2=r2;0;127;" },
136 { VT_LONG
| VT_INT
, "long int:t3=r3;-2147483648;2147483647;" },
138 { VT_LLONG
| VT_LONG
, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
140 { VT_INT
| VT_UNSIGNED
, "unsigned int:t4=r4;0;037777777777;" },
142 { VT_LONG
| VT_INT
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;037777777777;" },
144 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
145 { VT_LLONG
| VT_LONG
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;01777777777777777777777;" },
147 { VT_QLONG
, "__int128:t6=r6;0;-1;" },
148 { VT_QLONG
| VT_UNSIGNED
, "__int128 unsigned:t7=r7;0;-1;" },
149 { VT_LLONG
, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
150 { VT_LLONG
| VT_UNSIGNED
, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
151 { VT_SHORT
, "short int:t10=r10;-32768;32767;" },
152 { VT_SHORT
| VT_UNSIGNED
, "short unsigned int:t11=r11;0;65535;" },
153 { VT_BYTE
| VT_DEFSIGN
, "signed char:t12=r12;-128;127;" },
154 { VT_BYTE
| VT_DEFSIGN
| VT_UNSIGNED
, "unsigned char:t13=r13;0;255;" },
155 { VT_FLOAT
, "float:t14=r1;4;0;" },
156 { VT_DOUBLE
, "double:t15=r1;8;0;" },
157 { VT_LDOUBLE
, "long double:t16=r1;16;0;" },
158 { -1, "_Float32:t17=r1;4;0;" },
159 { -1, "_Float64:t18=r1;8;0;" },
160 { -1, "_Float128:t19=r1;16;0;" },
161 { -1, "_Float32x:t20=r1;8;0;" },
162 { -1, "_Float64x:t21=r1;16;0;" },
163 { -1, "_Decimal32:t22=r1;4;0;" },
164 { -1, "_Decimal64:t23=r1;8;0;" },
165 { -1, "_Decimal128:t24=r1;16;0;" },
166 /* if default char is unsigned */
167 { VT_BYTE
| VT_UNSIGNED
, "unsigned char:t25=r25;0;255;" },
168 { VT_VOID
, "void:t26=26" },
171 static int debug_next_type
;
173 static struct debug_hash
{
178 static int n_debug_hash
;
180 static struct debug_info
{
191 struct debug_info
*child
, *next
, *last
, *parent
;
192 } *debug_info
, *debug_info_root
;
194 /********************************************************/
196 #define precedence_parser
197 static void init_prec(void);
199 /********************************************************/
200 #ifndef CONFIG_TCC_ASM
201 ST_FUNC
void asm_instr(void)
203 tcc_error("inline asm() not supported");
205 ST_FUNC
void asm_global_instr(void)
207 tcc_error("inline asm() not supported");
211 /* ------------------------------------------------------------------------- */
212 static void gen_cast(CType
*type
);
213 static void gen_cast_s(int t
);
214 static inline CType
*pointed_type(CType
*type
);
215 static int is_compatible_types(CType
*type1
, CType
*type2
);
216 static int parse_btype(CType
*type
, AttributeDef
*ad
);
217 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
218 static void parse_expr_type(CType
*type
);
219 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
220 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
221 static void block(int is_expr
);
222 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
223 static void decl(int l
);
224 static int decl0(int l
, int is_for_loop_init
, Sym
*);
225 static void expr_eq(void);
226 static void vla_runtime_type_size(CType
*type
, int *a
);
227 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
228 static inline int64_t expr_const64(void);
229 static void vpush64(int ty
, unsigned long long v
);
230 static void vpush(CType
*type
);
231 static int gvtst(int inv
, int t
);
232 static void gen_inline_functions(TCCState
*s
);
233 static void free_inline_functions(TCCState
*s
);
234 static void skip_or_save_block(TokenString
**str
);
235 static void gv_dup(void);
236 static int get_temp_local_var(int size
,int align
);
237 static void clear_temp_local_var_list();
238 static void cast_error(CType
*st
, CType
*dt
);
240 ST_INLN
int is_float(int t
)
242 int bt
= t
& VT_BTYPE
;
243 return bt
== VT_LDOUBLE
249 static inline int is_integer_btype(int bt
)
258 static int btype_size(int bt
)
260 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
264 bt
== VT_PTR
? PTR_SIZE
: 0;
267 /* returns function return register from type */
268 static int R_RET(int t
)
272 #ifdef TCC_TARGET_X86_64
273 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
275 #elif defined TCC_TARGET_RISCV64
276 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
282 /* returns 2nd function return register, if any */
283 static int R2_RET(int t
)
289 #elif defined TCC_TARGET_X86_64
294 #elif defined TCC_TARGET_RISCV64
301 /* returns true for two-word types */
302 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
304 /* put function return registers to stack value */
305 static void PUT_R_RET(SValue
*sv
, int t
)
307 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
310 /* returns function return register class for type t */
311 static int RC_RET(int t
)
313 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
316 /* returns generic register class for type t */
317 static int RC_TYPE(int t
)
321 #ifdef TCC_TARGET_X86_64
322 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
324 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
326 #elif defined TCC_TARGET_RISCV64
327 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
333 /* returns 2nd register class corresponding to t and rc */
334 static int RC2_TYPE(int t
, int rc
)
336 if (!USING_TWO_WORDS(t
))
351 /* we use our own 'finite' function to avoid potential problems with
352 non standard math libs */
353 /* XXX: endianness dependent */
354 ST_FUNC
int ieee_finite(double d
)
357 memcpy(p
, &d
, sizeof(double));
358 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
361 /* compiling intel long double natively */
362 #if (defined __i386__ || defined __x86_64__) \
363 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
364 # define TCC_IS_NATIVE_387
367 ST_FUNC
void test_lvalue(void)
369 if (!(vtop
->r
& VT_LVAL
))
373 ST_FUNC
void check_vstack(void)
375 if (vtop
!= vstack
- 1)
376 tcc_error("internal compiler error: vstack leak (%d)",
377 (int)(vtop
- vstack
+ 1));
380 /* ------------------------------------------------------------------------- */
381 /* vstack debugging aid */
384 void pv (const char *lbl
, int a
, int b
)
387 for (i
= a
; i
< a
+ b
; ++i
) {
388 SValue
*p
= &vtop
[-i
];
389 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
390 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
395 /* ------------------------------------------------------------------------- */
396 /* start of translation unit info */
397 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
403 /* file info: full path + filename */
404 section_sym
= put_elf_sym(symtab_section
, 0, 0,
405 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
406 text_section
->sh_num
, NULL
);
407 getcwd(buf
, sizeof(buf
));
409 normalize_slashes(buf
);
411 pstrcat(buf
, sizeof(buf
), "/");
412 put_stabs_r(s1
, buf
, N_SO
, 0, 0,
413 text_section
->data_offset
, text_section
, section_sym
);
414 put_stabs_r(s1
, file
->prev
->filename
, N_SO
, 0, 0,
415 text_section
->data_offset
, text_section
, section_sym
);
416 for (i
= 0; i
< sizeof (default_debug
) / sizeof (default_debug
[0]); i
++)
417 put_stabs(s1
, default_debug
[i
].name
, N_LSYM
, 0, 0, 0);
419 new_file
= last_line_num
= 0;
421 debug_next_type
= sizeof(default_debug
) / sizeof(default_debug
[0]);
425 /* we're currently 'including' the <command line> */
429 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
430 symbols can be safely used */
431 put_elf_sym(symtab_section
, 0, 0,
432 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
433 SHN_ABS
, file
->filename
);
436 static void tcc_debug_stabs (TCCState
*s1
, const char *str
, int type
, unsigned long value
,
437 Section
*sec
, int sym_index
)
443 (struct debug_sym
*)tcc_realloc (debug_info
->sym
,
444 sizeof(struct debug_sym
) *
445 (debug_info
->n_sym
+ 1));
446 s
= debug_info
->sym
+ debug_info
->n_sym
++;
449 s
->str
= tcc_strdup(str
);
451 s
->sym_index
= sym_index
;
454 put_stabs_r (s1
, str
, type
, 0, 0, value
, sec
, sym_index
);
456 put_stabs (s1
, str
, type
, 0, 0, value
);
459 static void tcc_debug_stabn(int type
, int value
)
461 if (type
== N_LBRAC
) {
462 struct debug_info
*info
=
463 (struct debug_info
*) tcc_mallocz(sizeof (*info
));
466 info
->parent
= debug_info
;
468 if (debug_info
->child
) {
469 if (debug_info
->child
->last
)
470 debug_info
->child
->last
->next
= info
;
472 debug_info
->child
->next
= info
;
473 debug_info
->child
->last
= info
;
476 debug_info
->child
= info
;
479 debug_info_root
= info
;
483 debug_info
->end
= value
;
484 debug_info
= debug_info
->parent
;
488 static void tcc_get_debug_info(TCCState
*s1
, Sym
*s
, CString
*result
)
497 type
= t
->type
.t
& ~(VT_EXTERN
| VT_STATIC
| VT_CONSTANT
| VT_VOLATILE
);
498 if ((type
& VT_BTYPE
) != VT_BYTE
)
500 if (type
== VT_PTR
|| type
== (VT_PTR
| VT_ARRAY
))
501 n
++, t
= t
->type
.ref
;
505 if ((type
& VT_BTYPE
) == VT_STRUCT
) {
509 for (i
= 0; i
< n_debug_hash
; i
++) {
510 if (t
== debug_hash
[i
].type
) {
511 debug_type
= debug_hash
[i
].debug_type
;
515 if (debug_type
== -1) {
516 debug_type
= ++debug_next_type
;
517 debug_hash
= (struct debug_hash
*)
518 tcc_realloc (debug_hash
,
519 (n_debug_hash
+ 1) * sizeof(*debug_hash
));
520 debug_hash
[n_debug_hash
].debug_type
= debug_type
;
521 debug_hash
[n_debug_hash
++].type
= t
;
523 cstr_printf (&str
, "%s:T%d=%c%d",
524 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
525 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
527 IS_UNION (t
->type
.t
) ? 'u' : 's',
530 int pos
, size
, align
;
533 cstr_printf (&str
, "%s:",
534 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
535 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
536 tcc_get_debug_info (s1
, t
, &str
);
537 if (t
->type
.t
& VT_BITFIELD
) {
538 pos
= t
->c
* 8 + BIT_POS(t
->type
.t
);
539 size
= BIT_SIZE(t
->type
.t
);
543 size
= type_size(&t
->type
, &align
) * 8;
545 cstr_printf (&str
, ",%d,%d;", pos
, size
);
547 cstr_printf (&str
, ";");
548 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
552 else if (IS_ENUM(type
)) {
553 Sym
*e
= t
= t
->type
.ref
;
555 debug_type
= ++debug_next_type
;
557 cstr_printf (&str
, "%s:T%d=e",
558 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
559 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
563 cstr_printf (&str
, "%s:",
564 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
565 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
566 cstr_printf (&str
, e
->type
.t
& VT_UNSIGNED
? "%u," : "%d,",
569 cstr_printf (&str
, ";");
570 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
573 else if ((type
& VT_BTYPE
) != VT_FUNC
) {
574 type
&= ~VT_STRUCT_MASK
;
576 debug_type
<= sizeof(default_debug
) / sizeof(default_debug
[0]);
578 if (default_debug
[debug_type
- 1].type
== type
)
580 if (debug_type
> sizeof(default_debug
) / sizeof(default_debug
[0]))
584 cstr_printf (result
, "%d=", ++debug_next_type
);
587 type
= t
->type
.t
& ~(VT_EXTERN
| VT_STATIC
| VT_CONSTANT
| VT_VOLATILE
);
588 if ((type
& VT_BTYPE
) != VT_BYTE
)
591 cstr_printf (result
, "%d=*", ++debug_next_type
);
592 else if (type
== (VT_PTR
| VT_ARRAY
))
593 cstr_printf (result
, "%d=ar1;0;%d;",
594 ++debug_next_type
, t
->type
.ref
->c
- 1);
595 else if (type
== VT_FUNC
) {
596 cstr_printf (result
, "%d=f", ++debug_next_type
);
597 tcc_get_debug_info (s1
, t
->type
.ref
, result
);
604 cstr_printf (result
, "%d", debug_type
);
607 static void tcc_debug_finish (TCCState
*s1
, struct debug_info
*cur
)
611 struct debug_info
*next
= cur
->next
;
613 for (i
= 0; i
< cur
->n_sym
; i
++) {
614 struct debug_sym
*s
= &cur
->sym
[i
];
617 put_stabs_r(s1
, s
->str
, s
->type
, 0, 0, s
->value
,
618 s
->sec
, s
->sym_index
);
620 put_stabs(s1
, s
->str
, s
->type
, 0, 0, s
->value
);
624 put_stabn(s1
, N_LBRAC
, 0, 0, cur
->start
);
625 tcc_debug_finish (s1
, cur
->child
);
626 put_stabn(s1
, N_RBRAC
, 0, 0, cur
->end
);
632 static void tcc_add_debug_info(TCCState
*s1
, int param
, Sym
*s
, Sym
*e
)
635 cstr_new (&debug_str
);
636 for (; s
!= e
; s
= s
->prev
) {
637 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
639 cstr_reset (&debug_str
);
640 cstr_printf (&debug_str
, "%s:%s", get_tok_str(s
->v
, NULL
), param
? "p" : "");
641 tcc_get_debug_info(s1
, s
, &debug_str
);
642 tcc_debug_stabs(s1
, debug_str
.data
, param
? N_PSYM
: N_LSYM
, s
->c
, NULL
, 0);
644 cstr_free (&debug_str
);
647 static void tcc_debug_extern_sym(TCCState
*s1
, Sym
*sym
, int sh_num
, int sym_bind
)
649 Section
*s
= s1
->sections
[sh_num
];
653 cstr_printf (&str
, "%s:%c",
654 get_tok_str(sym
->v
, NULL
),
655 sym_bind
== STB_GLOBAL
? 'G' : local_scope
? 'V' : 'S'
657 tcc_get_debug_info(s1
, sym
, &str
);
658 if (sym_bind
== STB_GLOBAL
)
659 tcc_debug_stabs(s1
, str
.data
, N_GSYM
, 0, NULL
, 0);
661 tcc_debug_stabs(s1
, str
.data
,
662 (sym
->type
.t
& VT_STATIC
) && data_section
== s
663 ? N_STSYM
: N_LCSYM
, 0, s
, sym
->c
);
667 /* put end of translation unit info */
668 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
672 put_stabs_r(s1
, NULL
, N_SO
, 0, 0,
673 text_section
->data_offset
, text_section
, section_sym
);
674 tcc_free(debug_hash
);
677 static BufferedFile
* put_new_file(TCCState
*s1
)
679 BufferedFile
*f
= file
;
680 /* use upper file if from inline ":asm:" */
681 if (f
->filename
[0] == ':')
684 put_stabs_r(s1
, f
->filename
, N_SOL
, 0, 0, ind
, text_section
, section_sym
);
685 new_file
= last_line_num
= 0;
690 /* generate line number info */
691 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
695 || cur_text_section
!= text_section
696 || !(f
= put_new_file(s1
))
697 || last_line_num
== f
->line_num
)
699 if (func_ind
!= -1) {
700 put_stabn(s1
, N_SLINE
, 0, f
->line_num
, ind
- func_ind
);
702 /* from tcc_assemble */
703 put_stabs_r(s1
, NULL
, N_SLINE
, 0, f
->line_num
, ind
, text_section
, section_sym
);
705 last_line_num
= f
->line_num
;
708 /* put function symbol */
709 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
715 debug_info_root
= NULL
;
717 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
718 if (!(f
= put_new_file(s1
)))
720 cstr_new (&debug_str
);
721 cstr_printf(&debug_str
, "%s:%c", funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
722 tcc_get_debug_info(s1
, sym
->type
.ref
, &debug_str
);
723 put_stabs_r(s1
, debug_str
.data
, N_FUN
, 0, f
->line_num
, 0, cur_text_section
, sym
->c
);
724 cstr_free (&debug_str
);
729 /* put function size */
730 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
734 tcc_debug_stabn(N_RBRAC
, size
);
735 tcc_debug_finish (s1
, debug_info_root
);
738 /* put alternative filename */
739 ST_FUNC
void tcc_debug_putfile(TCCState
*s1
, const char *filename
)
741 if (0 == strcmp(file
->filename
, filename
))
743 pstrcpy(file
->filename
, sizeof(file
->filename
), filename
);
747 /* begin of #include */
748 ST_FUNC
void tcc_debug_bincl(TCCState
*s1
)
752 put_stabs(s1
, file
->filename
, N_BINCL
, 0, 0, 0);
756 /* end of #include */
757 ST_FUNC
void tcc_debug_eincl(TCCState
*s1
)
761 put_stabn(s1
, N_EINCL
, 0, 0, 0);
765 /* ------------------------------------------------------------------------- */
766 /* initialize vstack and types. This must be done also for tcc -E */
767 ST_FUNC
void tccgen_init(TCCState
*s1
)
770 memset(vtop
, 0, sizeof *vtop
);
772 /* define some often used types */
774 char_pointer_type
.t
= VT_BYTE
;
775 mk_pointer(&char_pointer_type
);
776 func_old_type
.t
= VT_FUNC
;
777 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
778 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
779 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
780 #ifdef precedence_parser
786 ST_FUNC
int tccgen_compile(TCCState
*s1
)
788 cur_text_section
= NULL
;
790 anon_sym
= SYM_FIRST_ANOM
;
793 nocode_wanted
= 0x80000000;
797 #ifdef TCC_TARGET_ARM
801 printf("%s: **** new file\n", file
->filename
);
803 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
806 gen_inline_functions(s1
);
808 /* end of translation unit info */
813 ST_FUNC
void tccgen_finish(TCCState
*s1
)
816 free_inline_functions(s1
);
817 sym_pop(&global_stack
, NULL
, 0);
818 sym_pop(&local_stack
, NULL
, 0);
819 /* free preprocessor macros */
822 dynarray_reset(&sym_pools
, &nb_sym_pools
);
823 sym_free_first
= NULL
;
826 /* ------------------------------------------------------------------------- */
827 ST_FUNC ElfSym
*elfsym(Sym
*s
)
831 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
834 /* apply storage attributes to Elf symbol */
835 ST_FUNC
void update_storage(Sym
*sym
)
838 int sym_bind
, old_sym_bind
;
844 if (sym
->a
.visibility
)
845 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
848 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
849 sym_bind
= STB_LOCAL
;
850 else if (sym
->a
.weak
)
853 sym_bind
= STB_GLOBAL
;
854 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
855 if (sym_bind
!= old_sym_bind
) {
856 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
860 if (sym
->a
.dllimport
)
861 esym
->st_other
|= ST_PE_IMPORT
;
862 if (sym
->a
.dllexport
)
863 esym
->st_other
|= ST_PE_EXPORT
;
867 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
868 get_tok_str(sym
->v
, NULL
),
869 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
877 /* ------------------------------------------------------------------------- */
878 /* update sym->c so that it points to an external symbol in section
879 'section' with value 'value' */
881 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
882 addr_t value
, unsigned long size
,
883 int can_add_underscore
)
885 int sym_type
, sym_bind
, info
, other
, t
;
889 #ifdef CONFIG_TCC_BCHECK
893 name
= get_tok_str(sym
->v
, NULL
);
894 #ifdef CONFIG_TCC_BCHECK
895 if (tcc_state
->do_bounds_check
) {
896 /* XXX: avoid doing that for statics ? */
897 /* if bound checking is activated, we change some function
898 names by adding the "__bound" prefix */
899 #if defined(TCC_TARGET_ARM) && defined(TCC_ARM_EABI)
900 if (strcmp (name
, "memcpy") == 0 ||
901 strcmp (name
, "memmove") == 0 ||
902 strcmp (name
, "memset") == 0)
907 /* XXX: we rely only on malloc hooks */
916 #if defined(TCC_TARGET_ARM) && defined(TCC_ARM_EABI)
930 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
936 #ifndef TCC_TARGET_PE
939 #if defined(TCC_TARGET_ARM) && defined(TCC_ARM_EABI)
942 strcpy(buf
, "__bound_");
950 if ((t
& VT_BTYPE
) == VT_FUNC
) {
952 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
953 sym_type
= STT_NOTYPE
;
955 sym_type
= STT_OBJECT
;
957 if (t
& (VT_STATIC
| VT_INLINE
))
958 sym_bind
= STB_LOCAL
;
960 sym_bind
= STB_GLOBAL
;
963 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
964 Sym
*ref
= sym
->type
.ref
;
965 if (ref
->a
.nodecorate
) {
966 can_add_underscore
= 0;
968 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
969 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
971 other
|= ST_PE_STDCALL
;
972 can_add_underscore
= 0;
976 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
978 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
982 name
= get_tok_str(sym
->asm_label
, NULL
);
983 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
984 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
986 if (tcc_state
->do_debug
987 && sym_type
!= STT_FUNC
988 && sym
->v
< SYM_FIRST_ANOM
)
989 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
);
993 esym
->st_value
= value
;
994 esym
->st_size
= size
;
995 esym
->st_shndx
= sh_num
;
1000 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
1001 addr_t value
, unsigned long size
)
1003 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
1004 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
1007 /* add a new relocation entry to symbol 'sym' in section 's' */
1008 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
1013 if (nocode_wanted
&& s
== cur_text_section
)
1018 put_extern_sym(sym
, NULL
, 0, 0);
1022 /* now we can add ELF relocation info */
1023 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
1027 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
1029 greloca(s
, sym
, offset
, type
, 0);
1033 /* ------------------------------------------------------------------------- */
1034 /* symbol allocator */
1035 static Sym
*__sym_malloc(void)
1037 Sym
*sym_pool
, *sym
, *last_sym
;
1040 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
1041 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
1043 last_sym
= sym_free_first
;
1045 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
1046 sym
->next
= last_sym
;
1050 sym_free_first
= last_sym
;
1054 static inline Sym
*sym_malloc(void)
1058 sym
= sym_free_first
;
1060 sym
= __sym_malloc();
1061 sym_free_first
= sym
->next
;
1064 sym
= tcc_malloc(sizeof(Sym
));
1069 ST_INLN
void sym_free(Sym
*sym
)
1072 sym
->next
= sym_free_first
;
1073 sym_free_first
= sym
;
1079 /* push, without hashing */
1080 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
1085 memset(s
, 0, sizeof *s
);
1095 /* find a symbol and return its associated structure. 's' is the top
1096 of the symbol stack */
1097 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
1102 else if (s
->v
== -1)
1109 /* structure lookup */
1110 ST_INLN Sym
*struct_find(int v
)
1113 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1115 return table_ident
[v
]->sym_struct
;
1118 /* find an identifier */
1119 ST_INLN Sym
*sym_find(int v
)
1122 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1124 return table_ident
[v
]->sym_identifier
;
1127 static int sym_scope(Sym
*s
)
1129 if (IS_ENUM_VAL (s
->type
.t
))
1130 return s
->type
.ref
->sym_scope
;
1132 return s
->sym_scope
;
1135 /* push a given symbol on the symbol stack */
1136 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
1145 s
= sym_push2(ps
, v
, type
->t
, c
);
1146 s
->type
.ref
= type
->ref
;
1148 /* don't record fields or anonymous symbols */
1150 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1151 /* record symbol in token array */
1152 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1154 ps
= &ts
->sym_struct
;
1156 ps
= &ts
->sym_identifier
;
1159 s
->sym_scope
= local_scope
;
1160 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
1161 tcc_error("redeclaration of '%s'",
1162 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
1167 /* push a global identifier */
1168 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
1171 s
= sym_push2(&global_stack
, v
, t
, c
);
1172 s
->r
= VT_CONST
| VT_SYM
;
1173 /* don't record anonymous symbol */
1174 if (v
< SYM_FIRST_ANOM
) {
1175 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
1176 /* modify the top most local identifier, so that sym_identifier will
1177 point to 's' when popped; happens when called from inline asm */
1178 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
1179 ps
= &(*ps
)->prev_tok
;
1186 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1187 pop them yet from the list, but do remove them from the token array. */
1188 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
1198 /* remove symbol in token array */
1200 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1201 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1203 ps
= &ts
->sym_struct
;
1205 ps
= &ts
->sym_identifier
;
1216 /* ------------------------------------------------------------------------- */
1217 static void vcheck_cmp(void)
1219 /* cannot let cpu flags if other instruction are generated. Also
1220 avoid leaving VT_JMP anywhere except on the top of the stack
1221 because it would complicate the code generator.
1223 Don't do this when nocode_wanted. vtop might come from
1224 !nocode_wanted regions (see 88_codeopt.c) and transforming
1225 it to a register without actually generating code is wrong
1226 as their value might still be used for real. All values
1227 we push under nocode_wanted will eventually be popped
1228 again, so that the VT_CMP/VT_JMP value will be in vtop
1229 when code is unsuppressed again. */
1231 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
1235 static void vsetc(CType
*type
, int r
, CValue
*vc
)
1237 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1238 tcc_error("memory full (vstack)");
1243 vtop
->r2
= VT_CONST
;
1248 ST_FUNC
void vswap(void)
1258 /* pop stack value */
1259 ST_FUNC
void vpop(void)
1262 v
= vtop
->r
& VT_VALMASK
;
1263 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1264 /* for x86, we need to pop the FP stack */
1265 if (v
== TREG_ST0
) {
1266 o(0xd8dd); /* fstp %st(0) */
1270 /* need to put correct jump if && or || without test */
1277 /* push constant of type "type" with useless value */
1278 static void vpush(CType
*type
)
1280 vset(type
, VT_CONST
, 0);
1283 /* push arbitrary 64bit constant */
1284 static void vpush64(int ty
, unsigned long long v
)
1291 vsetc(&ctype
, VT_CONST
, &cval
);
1294 /* push integer constant */
1295 ST_FUNC
void vpushi(int v
)
1300 /* push a pointer sized constant */
1301 static void vpushs(addr_t v
)
1303 vpush64(VT_SIZE_T
, v
);
1306 /* push long long constant */
1307 static inline void vpushll(long long v
)
1309 vpush64(VT_LLONG
, v
);
1312 ST_FUNC
void vset(CType
*type
, int r
, int v
)
1316 vsetc(type
, r
, &cval
);
1319 static void vseti(int r
, int v
)
1327 ST_FUNC
void vpushv(SValue
*v
)
1329 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1330 tcc_error("memory full (vstack)");
1335 static void vdup(void)
1340 /* rotate n first stack elements to the bottom
1341 I1 ... In -> I2 ... In I1 [top is right]
1343 ST_FUNC
void vrotb(int n
)
1350 for(i
=-n
+1;i
!=0;i
++)
1351 vtop
[i
] = vtop
[i
+1];
1355 /* rotate the n elements before entry e towards the top
1356 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1358 ST_FUNC
void vrote(SValue
*e
, int n
)
1365 for(i
= 0;i
< n
- 1; i
++)
1370 /* rotate n first stack elements to the top
1371 I1 ... In -> In I1 ... I(n-1) [top is right]
1373 ST_FUNC
void vrott(int n
)
1378 /* ------------------------------------------------------------------------- */
1379 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1381 /* called from generators to set the result from relational ops */
1382 ST_FUNC
void vset_VT_CMP(int op
)
1390 /* called once before asking generators to load VT_CMP to a register */
1391 static void vset_VT_JMP(void)
1393 int op
= vtop
->cmp_op
;
1395 if (vtop
->jtrue
|| vtop
->jfalse
) {
1396 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1397 int inv
= op
& (op
< 2); /* small optimization */
1398 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1400 /* otherwise convert flags (rsp. 0/1) to register */
1402 if (op
< 2) /* doesn't seem to happen */
1407 /* Set CPU Flags, doesn't yet jump */
1408 static void gvtst_set(int inv
, int t
)
1412 if (vtop
->r
!= VT_CMP
) {
1415 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1416 vset_VT_CMP(vtop
->c
.i
!= 0);
1419 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1420 *p
= gjmp_append(*p
, t
);
1423 /* Generate value test
1425 * Generate a test for any value (jump, comparison and integers) */
1426 static int gvtst(int inv
, int t
)
1431 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1433 x
= u
, u
= t
, t
= x
;
1436 /* jump to the wanted target */
1438 t
= gjmp_cond(op
^ inv
, t
);
1441 /* resolve complementary jumps to here */
1448 /* generate a zero or nozero test */
1449 static void gen_test_zero(int op
)
1451 if (vtop
->r
== VT_CMP
) {
1455 vtop
->jfalse
= vtop
->jtrue
;
1465 /* ------------------------------------------------------------------------- */
1466 /* push a symbol value of TYPE */
1467 static inline void vpushsym(CType
*type
, Sym
*sym
)
1471 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1475 /* Return a static symbol pointing to a section */
1476 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1482 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1483 sym
->type
.t
|= VT_STATIC
;
1484 put_extern_sym(sym
, sec
, offset
, size
);
1488 /* push a reference to a section offset by adding a dummy symbol */
1489 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1491 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1494 /* define a new external reference to a symbol 'v' of type 'u' */
1495 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1501 /* push forward reference */
1502 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1503 s
->type
.ref
= type
->ref
;
1504 } else if (IS_ASM_SYM(s
)) {
1505 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1506 s
->type
.ref
= type
->ref
;
1512 /* Merge symbol attributes. */
1513 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1515 if (sa1
->aligned
&& !sa
->aligned
)
1516 sa
->aligned
= sa1
->aligned
;
1517 sa
->packed
|= sa1
->packed
;
1518 sa
->weak
|= sa1
->weak
;
1519 if (sa1
->visibility
!= STV_DEFAULT
) {
1520 int vis
= sa
->visibility
;
1521 if (vis
== STV_DEFAULT
1522 || vis
> sa1
->visibility
)
1523 vis
= sa1
->visibility
;
1524 sa
->visibility
= vis
;
1526 sa
->dllexport
|= sa1
->dllexport
;
1527 sa
->nodecorate
|= sa1
->nodecorate
;
1528 sa
->dllimport
|= sa1
->dllimport
;
1531 /* Merge function attributes. */
1532 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1534 if (fa1
->func_call
&& !fa
->func_call
)
1535 fa
->func_call
= fa1
->func_call
;
1536 if (fa1
->func_type
&& !fa
->func_type
)
1537 fa
->func_type
= fa1
->func_type
;
1538 if (fa1
->func_args
&& !fa
->func_args
)
1539 fa
->func_args
= fa1
->func_args
;
1540 if (fa1
->func_noreturn
)
1541 fa
->func_noreturn
= 1;
1548 /* Merge attributes. */
1549 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1551 merge_symattr(&ad
->a
, &ad1
->a
);
1552 merge_funcattr(&ad
->f
, &ad1
->f
);
1555 ad
->section
= ad1
->section
;
1556 if (ad1
->alias_target
)
1557 ad
->alias_target
= ad1
->alias_target
;
1559 ad
->asm_label
= ad1
->asm_label
;
1561 ad
->attr_mode
= ad1
->attr_mode
;
1564 /* Merge some type attributes. */
1565 static void patch_type(Sym
*sym
, CType
*type
)
1567 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1568 if (!(sym
->type
.t
& VT_EXTERN
))
1569 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1570 sym
->type
.t
&= ~VT_EXTERN
;
1573 if (IS_ASM_SYM(sym
)) {
1574 /* stay static if both are static */
1575 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1576 sym
->type
.ref
= type
->ref
;
1579 if (!is_compatible_types(&sym
->type
, type
)) {
1580 tcc_error("incompatible types for redefinition of '%s'",
1581 get_tok_str(sym
->v
, NULL
));
1583 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1584 int static_proto
= sym
->type
.t
& VT_STATIC
;
1585 /* warn if static follows non-static function declaration */
1586 if ((type
->t
& VT_STATIC
) && !static_proto
1587 /* XXX this test for inline shouldn't be here. Until we
1588 implement gnu-inline mode again it silences a warning for
1589 mingw caused by our workarounds. */
1590 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1591 tcc_warning("static storage ignored for redefinition of '%s'",
1592 get_tok_str(sym
->v
, NULL
));
1594 /* set 'inline' if both agree or if one has static */
1595 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1596 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1597 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1598 static_proto
|= VT_INLINE
;
1601 if (0 == (type
->t
& VT_EXTERN
)) {
1602 struct FuncAttr f
= sym
->type
.ref
->f
;
1603 /* put complete type, use static from prototype */
1604 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1605 sym
->type
.ref
= type
->ref
;
1606 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1608 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1611 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1612 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1613 sym
->type
.ref
= type
->ref
;
1617 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1618 /* set array size if it was omitted in extern declaration */
1619 sym
->type
.ref
->c
= type
->ref
->c
;
1621 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1622 tcc_warning("storage mismatch for redefinition of '%s'",
1623 get_tok_str(sym
->v
, NULL
));
1627 /* Merge some storage attributes. */
1628 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1631 patch_type(sym
, type
);
1633 #ifdef TCC_TARGET_PE
1634 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1635 tcc_error("incompatible dll linkage for redefinition of '%s'",
1636 get_tok_str(sym
->v
, NULL
));
1638 merge_symattr(&sym
->a
, &ad
->a
);
1640 sym
->asm_label
= ad
->asm_label
;
1641 update_storage(sym
);
1644 /* copy sym to other stack */
1645 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1648 s
= sym_malloc(), *s
= *s0
;
1649 s
->prev
= *ps
, *ps
= s
;
1650 if (s
->v
< SYM_FIRST_ANOM
) {
1651 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1652 s
->prev_tok
= *ps
, *ps
= s
;
1657 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1658 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1660 int bt
= s
->type
.t
& VT_BTYPE
;
1661 if (bt
== VT_FUNC
|| bt
== VT_PTR
) {
1662 Sym
**sp
= &s
->type
.ref
;
1663 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1664 Sym
*s2
= sym_copy(s
, ps
);
1665 sp
= &(*sp
= s2
)->next
;
1666 sym_copy_ref(s2
, ps
);
1671 /* define a new external reference to a symbol 'v' */
1672 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1676 /* look for global symbol */
1678 while (s
&& s
->sym_scope
)
1682 /* push forward reference */
1683 s
= global_identifier_push(v
, type
->t
, 0);
1686 s
->asm_label
= ad
->asm_label
;
1687 s
->type
.ref
= type
->ref
;
1688 /* copy type to the global stack */
1690 sym_copy_ref(s
, &global_stack
);
1692 patch_storage(s
, ad
, type
);
1694 /* push variables on local_stack if any */
1695 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1696 s
= sym_copy(s
, &local_stack
);
1700 /* push a reference to global symbol v */
1701 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1703 vpushsym(type
, external_global_sym(v
, type
));
1706 /* save registers up to (vtop - n) stack entry */
1707 ST_FUNC
void save_regs(int n
)
1710 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1714 /* save r to the memory stack, and mark it as being free */
1715 ST_FUNC
void save_reg(int r
)
1717 save_reg_upstack(r
, 0);
1720 /* save r to the memory stack, and mark it as being free,
1721 if seen up to (vtop - n) stack entry */
1722 ST_FUNC
void save_reg_upstack(int r
, int n
)
1724 int l
, size
, align
, bt
;
1727 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1732 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1733 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1734 /* must save value on stack if not already done */
1736 bt
= p
->type
.t
& VT_BTYPE
;
1739 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1742 size
= type_size(&sv
.type
, &align
);
1743 l
= get_temp_local_var(size
,align
);
1744 sv
.r
= VT_LOCAL
| VT_LVAL
;
1746 store(p
->r
& VT_VALMASK
, &sv
);
1747 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1748 /* x86 specific: need to pop fp register ST0 if saved */
1749 if (r
== TREG_ST0
) {
1750 o(0xd8dd); /* fstp %st(0) */
1753 /* special long long case */
1754 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1759 /* mark that stack entry as being saved on the stack */
1760 if (p
->r
& VT_LVAL
) {
1761 /* also clear the bounded flag because the
1762 relocation address of the function was stored in
1764 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1766 p
->r
= VT_LVAL
| VT_LOCAL
;
1774 #ifdef TCC_TARGET_ARM
1775 /* find a register of class 'rc2' with at most one reference on stack.
1776 * If none, call get_reg(rc) */
1777 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1782 for(r
=0;r
<NB_REGS
;r
++) {
1783 if (reg_classes
[r
] & rc2
) {
1786 for(p
= vstack
; p
<= vtop
; p
++) {
1787 if ((p
->r
& VT_VALMASK
) == r
||
1799 /* find a free register of class 'rc'. If none, save one register */
1800 ST_FUNC
int get_reg(int rc
)
1805 /* find a free register */
1806 for(r
=0;r
<NB_REGS
;r
++) {
1807 if (reg_classes
[r
] & rc
) {
1810 for(p
=vstack
;p
<=vtop
;p
++) {
1811 if ((p
->r
& VT_VALMASK
) == r
||
1820 /* no register left : free the first one on the stack (VERY
1821 IMPORTANT to start from the bottom to ensure that we don't
1822 spill registers used in gen_opi()) */
1823 for(p
=vstack
;p
<=vtop
;p
++) {
1824 /* look at second register (if long long) */
1826 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1828 r
= p
->r
& VT_VALMASK
;
1829 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1835 /* Should never comes here */
1839 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1840 static int get_temp_local_var(int size
,int align
){
1842 struct temp_local_variable
*temp_var
;
1849 for(i
=0;i
<nb_temp_local_vars
;i
++){
1850 temp_var
=&arr_temp_local_vars
[i
];
1851 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1854 /*check if temp_var is free*/
1856 for(p
=vstack
;p
<=vtop
;p
++) {
1858 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1859 if(p
->c
.i
==temp_var
->location
){
1866 found_var
=temp_var
->location
;
1872 loc
= (loc
- size
) & -align
;
1873 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1874 temp_var
=&arr_temp_local_vars
[i
];
1875 temp_var
->location
=loc
;
1876 temp_var
->size
=size
;
1877 temp_var
->align
=align
;
1878 nb_temp_local_vars
++;
1885 static void clear_temp_local_var_list(){
1886 nb_temp_local_vars
=0;
1889 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1891 static void move_reg(int r
, int s
, int t
)
1905 /* get address of vtop (vtop MUST BE an lvalue) */
1906 ST_FUNC
void gaddrof(void)
1908 vtop
->r
&= ~VT_LVAL
;
1909 /* tricky: if saved lvalue, then we can go back to lvalue */
1910 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1911 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1914 #ifdef CONFIG_TCC_BCHECK
1915 /* generate lvalue bound code */
1916 static void gbound(void)
1920 vtop
->r
&= ~VT_MUSTBOUND
;
1921 /* if lvalue, then use checking code before dereferencing */
1922 if (vtop
->r
& VT_LVAL
) {
1923 /* if not VT_BOUNDED value, then make one */
1924 if (!(vtop
->r
& VT_BOUNDED
)) {
1925 /* must save type because we must set it to int to get pointer */
1927 vtop
->type
.t
= VT_PTR
;
1930 gen_bounded_ptr_add();
1934 /* then check for dereferencing */
1935 gen_bounded_ptr_deref();
1939 /* we need to call __bound_ptr_add before we start to load function
1940 args into registers */
1941 ST_FUNC
void gbound_args(int nb_args
)
1944 for (i
= 1; i
<= nb_args
; ++i
)
1945 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1952 /* Add bounds for local symbols from S to E (via ->prev) */
1953 static void add_local_bounds(Sym
*s
, Sym
*e
)
1955 for (; s
!= e
; s
= s
->prev
) {
1956 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1958 /* Add arrays/structs/unions because we always take address */
1959 if ((s
->type
.t
& VT_ARRAY
)
1960 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1961 || s
->a
.addrtaken
) {
1962 /* add local bound info */
1963 int align
, size
= type_size(&s
->type
, &align
);
1964 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1965 2 * sizeof(addr_t
));
1966 bounds_ptr
[0] = s
->c
;
1967 bounds_ptr
[1] = size
;
1973 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1974 static void pop_local_syms(Sym
**ptop
, Sym
*b
, int keep
, int ellipsis
)
1976 #ifdef CONFIG_TCC_BCHECK
1977 if (tcc_state
->do_bounds_check
&& !ellipsis
&& !keep
)
1978 add_local_bounds(*ptop
, b
);
1980 if (tcc_state
->do_debug
)
1981 tcc_add_debug_info (tcc_state
, !local_scope
, *ptop
, b
);
1982 sym_pop(ptop
, b
, keep
);
1985 static void incr_bf_adr(int o
)
1987 vtop
->type
= char_pointer_type
;
1991 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1995 /* single-byte load mode for packed or otherwise unaligned bitfields */
1996 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1999 save_reg_upstack(vtop
->r
, 1);
2000 vpush64(type
->t
& VT_BTYPE
, 0); // B X
2001 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2010 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
2012 vpushi((1 << n
) - 1), gen_op('&');
2015 vpushi(bits
), gen_op(TOK_SHL
);
2018 bits
+= n
, bit_size
-= n
, o
= 1;
2021 if (!(type
->t
& VT_UNSIGNED
)) {
2022 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
2023 vpushi(n
), gen_op(TOK_SHL
);
2024 vpushi(n
), gen_op(TOK_SAR
);
2028 /* single-byte store mode for packed or otherwise unaligned bitfields */
2029 static void store_packed_bf(int bit_pos
, int bit_size
)
2031 int bits
, n
, o
, m
, c
;
2033 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2035 save_reg_upstack(vtop
->r
, 1);
2036 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2038 incr_bf_adr(o
); // X B
2040 c
? vdup() : gv_dup(); // B V X
2043 vpushi(bits
), gen_op(TOK_SHR
);
2045 vpushi(bit_pos
), gen_op(TOK_SHL
);
2050 m
= ((1 << n
) - 1) << bit_pos
;
2051 vpushi(m
), gen_op('&'); // X B V1
2052 vpushv(vtop
-1); // X B V1 B
2053 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
2054 gen_op('&'); // X B V1 B1
2055 gen_op('|'); // X B V2
2057 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
2058 vstore(), vpop(); // X B
2059 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
2064 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
2067 if (0 == sv
->type
.ref
)
2069 t
= sv
->type
.ref
->auxtype
;
2070 if (t
!= -1 && t
!= VT_STRUCT
) {
2071 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
2077 /* store vtop a register belonging to class 'rc'. lvalues are
2078 converted to values. Cannot be used if cannot be converted to
2079 register value (such as structures). */
2080 ST_FUNC
int gv(int rc
)
2082 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
2083 int bit_pos
, bit_size
, size
, align
;
2085 /* NOTE: get_reg can modify vstack[] */
2086 if (vtop
->type
.t
& VT_BITFIELD
) {
2089 bit_pos
= BIT_POS(vtop
->type
.t
);
2090 bit_size
= BIT_SIZE(vtop
->type
.t
);
2091 /* remove bit field info to avoid loops */
2092 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
2095 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
2096 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
2097 type
.t
|= VT_UNSIGNED
;
2099 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
2101 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2106 if (r
== VT_STRUCT
) {
2107 load_packed_bf(&type
, bit_pos
, bit_size
);
2109 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
2110 /* cast to int to propagate signedness in following ops */
2112 /* generate shifts */
2113 vpushi(bits
- (bit_pos
+ bit_size
));
2115 vpushi(bits
- bit_size
);
2116 /* NOTE: transformed to SHR if unsigned */
2121 if (is_float(vtop
->type
.t
) &&
2122 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2123 unsigned long offset
;
2124 /* CPUs usually cannot use float constants, so we store them
2125 generically in data segment */
2126 size
= type_size(&vtop
->type
, &align
);
2128 size
= 0, align
= 1;
2129 offset
= section_add(data_section
, size
, align
);
2130 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
2132 init_putv(&vtop
->type
, data_section
, offset
);
2135 #ifdef CONFIG_TCC_BCHECK
2136 if (vtop
->r
& VT_MUSTBOUND
)
2140 bt
= vtop
->type
.t
& VT_BTYPE
;
2142 #ifdef TCC_TARGET_RISCV64
2144 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
2147 rc2
= RC2_TYPE(bt
, rc
);
2149 /* need to reload if:
2151 - lvalue (need to dereference pointer)
2152 - already a register, but not in the right class */
2153 r
= vtop
->r
& VT_VALMASK
;
2154 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
2155 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
2157 if (!r_ok
|| !r2_ok
) {
2161 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
2162 int original_type
= vtop
->type
.t
;
2164 /* two register type load :
2165 expand to two words temporarily */
2166 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2168 unsigned long long ll
= vtop
->c
.i
;
2169 vtop
->c
.i
= ll
; /* first word */
2171 vtop
->r
= r
; /* save register value */
2172 vpushi(ll
>> 32); /* second word */
2173 } else if (vtop
->r
& VT_LVAL
) {
2174 /* We do not want to modifier the long long pointer here.
2175 So we save any other instances down the stack */
2176 save_reg_upstack(vtop
->r
, 1);
2177 /* load from memory */
2178 vtop
->type
.t
= load_type
;
2181 vtop
[-1].r
= r
; /* save register value */
2182 /* increment pointer to get second word */
2183 vtop
->type
.t
= VT_PTRDIFF_T
;
2188 vtop
->type
.t
= load_type
;
2190 /* move registers */
2193 if (r2_ok
&& vtop
->r2
< VT_CONST
)
2196 vtop
[-1].r
= r
; /* save register value */
2197 vtop
->r
= vtop
[-1].r2
;
2199 /* Allocate second register. Here we rely on the fact that
2200 get_reg() tries first to free r2 of an SValue. */
2204 /* write second register */
2207 vtop
->type
.t
= original_type
;
2209 if (vtop
->r
== VT_CMP
)
2211 /* one register type load */
2216 #ifdef TCC_TARGET_C67
2217 /* uses register pairs for doubles */
2218 if (bt
== VT_DOUBLE
)
2225 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2226 ST_FUNC
void gv2(int rc1
, int rc2
)
2228 /* generate more generic register first. But VT_JMP or VT_CMP
2229 values must be generated first in all cases to avoid possible
2231 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
2236 /* test if reload is needed for first register */
2237 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
2247 /* test if reload is needed for first register */
2248 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
2255 /* expand 64bit on stack in two ints */
2256 ST_FUNC
void lexpand(void)
2259 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
2260 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
2261 if (v
== VT_CONST
) {
2264 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
2270 vtop
[0].r
= vtop
[-1].r2
;
2271 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2273 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2278 /* build a long long from two ints */
2279 static void lbuild(int t
)
2281 gv2(RC_INT
, RC_INT
);
2282 vtop
[-1].r2
= vtop
[0].r
;
2283 vtop
[-1].type
.t
= t
;
2288 /* convert stack entry to register and duplicate its value in another
2290 static void gv_dup(void)
2296 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2297 if (t
& VT_BITFIELD
) {
2307 /* stack: H L L1 H1 */
2317 /* duplicate value */
2327 /* generate CPU independent (unsigned) long long operations */
2328 static void gen_opl(int op
)
2330 int t
, a
, b
, op1
, c
, i
;
2332 unsigned short reg_iret
= REG_IRET
;
2333 unsigned short reg_lret
= REG_IRE2
;
2339 func
= TOK___divdi3
;
2342 func
= TOK___udivdi3
;
2345 func
= TOK___moddi3
;
2348 func
= TOK___umoddi3
;
2355 /* call generic long long function */
2356 vpush_global_sym(&func_old_type
, func
);
2361 vtop
->r2
= reg_lret
;
2369 //pv("gen_opl A",0,2);
2375 /* stack: L1 H1 L2 H2 */
2380 vtop
[-2] = vtop
[-3];
2383 /* stack: H1 H2 L1 L2 */
2384 //pv("gen_opl B",0,4);
2390 /* stack: H1 H2 L1 L2 ML MH */
2393 /* stack: ML MH H1 H2 L1 L2 */
2397 /* stack: ML MH H1 L2 H2 L1 */
2402 /* stack: ML MH M1 M2 */
2405 } else if (op
== '+' || op
== '-') {
2406 /* XXX: add non carry method too (for MIPS or alpha) */
2412 /* stack: H1 H2 (L1 op L2) */
2415 gen_op(op1
+ 1); /* TOK_xxxC2 */
2418 /* stack: H1 H2 (L1 op L2) */
2421 /* stack: (L1 op L2) H1 H2 */
2423 /* stack: (L1 op L2) (H1 op H2) */
2431 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2432 t
= vtop
[-1].type
.t
;
2436 /* stack: L H shift */
2438 /* constant: simpler */
2439 /* NOTE: all comments are for SHL. the other cases are
2440 done by swapping words */
2451 if (op
!= TOK_SAR
) {
2484 /* XXX: should provide a faster fallback on x86 ? */
2487 func
= TOK___ashrdi3
;
2490 func
= TOK___lshrdi3
;
2493 func
= TOK___ashldi3
;
2499 /* compare operations */
2505 /* stack: L1 H1 L2 H2 */
2507 vtop
[-1] = vtop
[-2];
2509 /* stack: L1 L2 H1 H2 */
2513 /* when values are equal, we need to compare low words. since
2514 the jump is inverted, we invert the test too. */
2517 else if (op1
== TOK_GT
)
2519 else if (op1
== TOK_ULT
)
2521 else if (op1
== TOK_UGT
)
2531 /* generate non equal test */
2533 vset_VT_CMP(TOK_NE
);
2537 /* compare low. Always unsigned */
2541 else if (op1
== TOK_LE
)
2543 else if (op1
== TOK_GT
)
2545 else if (op1
== TOK_GE
)
2548 #if 0//def TCC_TARGET_I386
2549 if (op
== TOK_NE
) { gsym(b
); break; }
2550 if (op
== TOK_EQ
) { gsym(a
); break; }
/* signed 64-bit division performed with unsigned arithmetic: divide
   the magnitudes, then negate the quotient when the operand signs
   differ.  This truncates toward zero and avoids relying on the
   host compiler's signed-division behavior. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
    return (a ^ b) >> 63 ? -x : x;
}
/* signed 64-bit '<' on unsigned operands: XOR-ing the sign bit maps
   the signed range onto the unsigned range in an order-preserving way */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
}
2570 /* handle integer constant optimizations and various machine
2572 static void gen_opic(int op
)
2574 SValue
*v1
= vtop
- 1;
2576 int t1
= v1
->type
.t
& VT_BTYPE
;
2577 int t2
= v2
->type
.t
& VT_BTYPE
;
2578 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2579 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2580 uint64_t l1
= c1
? v1
->c
.i
: 0;
2581 uint64_t l2
= c2
? v2
->c
.i
: 0;
2582 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2584 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2585 l1
= ((uint32_t)l1
|
2586 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2587 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2588 l2
= ((uint32_t)l2
|
2589 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2593 case '+': l1
+= l2
; break;
2594 case '-': l1
-= l2
; break;
2595 case '&': l1
&= l2
; break;
2596 case '^': l1
^= l2
; break;
2597 case '|': l1
|= l2
; break;
2598 case '*': l1
*= l2
; break;
2605 /* if division by zero, generate explicit division */
2607 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2608 tcc_error("division by zero in constant");
2612 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2613 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2614 case TOK_UDIV
: l1
= l1
/ l2
; break;
2615 case TOK_UMOD
: l1
= l1
% l2
; break;
2618 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2619 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2621 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2624 case TOK_ULT
: l1
= l1
< l2
; break;
2625 case TOK_UGE
: l1
= l1
>= l2
; break;
2626 case TOK_EQ
: l1
= l1
== l2
; break;
2627 case TOK_NE
: l1
= l1
!= l2
; break;
2628 case TOK_ULE
: l1
= l1
<= l2
; break;
2629 case TOK_UGT
: l1
= l1
> l2
; break;
2630 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2631 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2632 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2633 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2635 case TOK_LAND
: l1
= l1
&& l2
; break;
2636 case TOK_LOR
: l1
= l1
|| l2
; break;
2640 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2641 l1
= ((uint32_t)l1
|
2642 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2646 /* if commutative ops, put c2 as constant */
2647 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2648 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2650 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2651 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2653 if (!const_wanted
&&
2655 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2656 (l1
== -1 && op
== TOK_SAR
))) {
2657 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2659 } else if (!const_wanted
&&
2660 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2662 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2663 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2664 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2669 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2672 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2673 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2676 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2677 /* filter out NOP operations like x*1, x-0, x&-1... */
2679 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2680 /* try to use shifts instead of muls or divs */
2681 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2690 else if (op
== TOK_PDIV
)
2696 } else if (c2
&& (op
== '+' || op
== '-') &&
2697 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2698 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2699 /* symbol + constant case */
2703 /* The backends can't always deal with addends to symbols
2704 larger than +-1<<31. Don't construct such. */
2711 /* call low level op generator */
2712 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2713 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2721 /* generate a floating point operation with constant propagation */
2722 static void gen_opif(int op
)
2726 #if defined _MSC_VER && defined __x86_64__
2727 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2734 /* currently, we cannot do computations with forward symbols */
2735 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2736 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2738 if (v1
->type
.t
== VT_FLOAT
) {
2741 } else if (v1
->type
.t
== VT_DOUBLE
) {
2749 /* NOTE: we only do constant propagation if finite number (not
2750 NaN or infinity) (ANSI spec) */
2751 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2755 case '+': f1
+= f2
; break;
2756 case '-': f1
-= f2
; break;
2757 case '*': f1
*= f2
; break;
2760 /* If not in initializer we need to potentially generate
2761 FP exceptions at runtime, otherwise we want to fold. */
2767 /* XXX: also handles tests ? */
2771 /* XXX: overflow test ? */
2772 if (v1
->type
.t
== VT_FLOAT
) {
2774 } else if (v1
->type
.t
== VT_DOUBLE
) {
2786 /* print a type. If 'varstr' is not NULL, then the variable is also
2787 printed in the type */
2789 /* XXX: add array and function pointers */
2790 static void type_to_str(char *buf
, int buf_size
,
2791 CType
*type
, const char *varstr
)
2803 pstrcat(buf
, buf_size
, "extern ");
2805 pstrcat(buf
, buf_size
, "static ");
2807 pstrcat(buf
, buf_size
, "typedef ");
2809 pstrcat(buf
, buf_size
, "inline ");
2810 if (t
& VT_VOLATILE
)
2811 pstrcat(buf
, buf_size
, "volatile ");
2812 if (t
& VT_CONSTANT
)
2813 pstrcat(buf
, buf_size
, "const ");
2815 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2816 || ((t
& VT_UNSIGNED
)
2817 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2820 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2822 buf_size
-= strlen(buf
);
2858 tstr
= "long double";
2860 pstrcat(buf
, buf_size
, tstr
);
2867 pstrcat(buf
, buf_size
, tstr
);
2868 v
= type
->ref
->v
& ~SYM_STRUCT
;
2869 if (v
>= SYM_FIRST_ANOM
)
2870 pstrcat(buf
, buf_size
, "<anonymous>");
2872 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2877 if (varstr
&& '*' == *varstr
) {
2878 pstrcat(buf1
, sizeof(buf1
), "(");
2879 pstrcat(buf1
, sizeof(buf1
), varstr
);
2880 pstrcat(buf1
, sizeof(buf1
), ")");
2882 pstrcat(buf1
, buf_size
, "(");
2884 while (sa
!= NULL
) {
2886 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2887 pstrcat(buf1
, sizeof(buf1
), buf2
);
2890 pstrcat(buf1
, sizeof(buf1
), ", ");
2892 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2893 pstrcat(buf1
, sizeof(buf1
), ", ...");
2894 pstrcat(buf1
, sizeof(buf1
), ")");
2895 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2900 if (varstr
&& '*' == *varstr
)
2901 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2903 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2904 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2907 pstrcpy(buf1
, sizeof(buf1
), "*");
2908 if (t
& VT_CONSTANT
)
2909 pstrcat(buf1
, buf_size
, "const ");
2910 if (t
& VT_VOLATILE
)
2911 pstrcat(buf1
, buf_size
, "volatile ");
2913 pstrcat(buf1
, sizeof(buf1
), varstr
);
2914 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2918 pstrcat(buf
, buf_size
, " ");
2919 pstrcat(buf
, buf_size
, varstr
);
2924 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2926 char buf1
[256], buf2
[256];
2927 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2928 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2929 tcc_error(fmt
, buf1
, buf2
);
2932 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2934 char buf1
[256], buf2
[256];
2935 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2936 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2937 tcc_warning(fmt
, buf1
, buf2
);
2940 static int pointed_size(CType
*type
)
2943 return type_size(pointed_type(type
), &align
);
2946 static void vla_runtime_pointed_size(CType
*type
)
2949 vla_runtime_type_size(pointed_type(type
), &align
);
2952 static inline int is_null_pointer(SValue
*p
)
2954 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2956 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2957 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2958 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2959 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2960 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2961 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2965 /* compare function types. OLD functions match any new functions */
2966 static int is_compatible_func(CType
*type1
, CType
*type2
)
2972 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2974 if (s1
->f
.func_type
!= s2
->f
.func_type
2975 && s1
->f
.func_type
!= FUNC_OLD
2976 && s2
->f
.func_type
!= FUNC_OLD
)
2978 /* we should check the function return type for FUNC_OLD too
2979 but that causes problems with the internally used support
2980 functions such as TOK_memmove */
2981 if (s1
->f
.func_type
== FUNC_OLD
&& !s1
->next
)
2983 if (s2
->f
.func_type
== FUNC_OLD
&& !s2
->next
)
2986 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2997 /* return true if type1 and type2 are the same. If unqualified is
2998 true, qualifiers on the types are ignored.
3000 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3004 t1
= type1
->t
& VT_TYPE
;
3005 t2
= type2
->t
& VT_TYPE
;
3007 /* strip qualifiers before comparing */
3008 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3009 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3012 /* Default Vs explicit signedness only matters for char */
3013 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3017 /* XXX: bitfields ? */
3022 && !(type1
->ref
->c
< 0
3023 || type2
->ref
->c
< 0
3024 || type1
->ref
->c
== type2
->ref
->c
))
3027 /* test more complicated cases */
3028 bt1
= t1
& VT_BTYPE
;
3029 if (bt1
== VT_PTR
) {
3030 type1
= pointed_type(type1
);
3031 type2
= pointed_type(type2
);
3032 return is_compatible_types(type1
, type2
);
3033 } else if (bt1
== VT_STRUCT
) {
3034 return (type1
->ref
== type2
->ref
);
3035 } else if (bt1
== VT_FUNC
) {
3036 return is_compatible_func(type1
, type2
);
3037 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
3038 /* If both are enums then they must be the same, if only one is then
3039 t1 and t2 must be equal, which was checked above already. */
3040 return type1
->ref
== type2
->ref
;
3046 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3047 type is stored in DEST if non-null (except for pointer plus/minus) . */
3048 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
3050 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
3051 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
3057 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
3058 ret
= op
== '?' ? 1 : 0;
3059 /* NOTE: as an extension, we accept void on only one side */
3061 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3062 if (op
== '+') ; /* Handled in caller */
3063 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3064 /* If one is a null ptr constant the result type is the other. */
3065 else if (is_null_pointer (op2
)) type
= *type1
;
3066 else if (is_null_pointer (op1
)) type
= *type2
;
3067 else if (bt1
!= bt2
) {
3068 /* accept comparison or cond-expr between pointer and integer
3070 if ((op
== '?' || TOK_ISCOND(op
))
3071 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
3072 tcc_warning("pointer/integer mismatch in %s",
3073 op
== '?' ? "conditional expression" : "comparison");
3074 else if (op
!= '-' || !is_integer_btype(bt2
))
3076 type
= *(bt1
== VT_PTR
? type1
: type2
);
3078 CType
*pt1
= pointed_type(type1
);
3079 CType
*pt2
= pointed_type(type2
);
3080 int pbt1
= pt1
->t
& VT_BTYPE
;
3081 int pbt2
= pt2
->t
& VT_BTYPE
;
3082 int newquals
, copied
= 0;
3083 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
3084 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
3085 if (op
!= '?' && !TOK_ISCOND(op
))
3088 type_incompatibility_warning(type1
, type2
,
3090 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3091 : "pointer type mismatch in comparison('%s' and '%s')");
3094 /* pointers to void get preferred, otherwise the
3095 pointed to types minus qualifs should be compatible */
3096 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
3097 /* combine qualifs */
3098 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
3099 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3102 /* copy the pointer target symbol */
3103 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3106 pointed_type(&type
)->t
|= newquals
;
3108 /* pointers to incomplete arrays get converted to
3109 pointers to completed ones if possible */
3110 if (pt1
->t
& VT_ARRAY
3111 && pt2
->t
& VT_ARRAY
3112 && pointed_type(&type
)->ref
->c
< 0
3113 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
3116 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3118 pointed_type(&type
)->ref
=
3119 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
3120 0, pointed_type(&type
)->ref
->c
);
3121 pointed_type(&type
)->ref
->c
=
3122 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
3128 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
3129 if (op
!= '?' || !compare_types(type1
, type2
, 1))
3132 } else if (is_float(bt1
) || is_float(bt2
)) {
3133 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
3134 type
.t
= VT_LDOUBLE
;
3135 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
3140 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
3141 /* cast to biggest op */
3142 type
.t
= VT_LLONG
| VT_LONG
;
3143 if (bt1
== VT_LLONG
)
3145 if (bt2
== VT_LLONG
)
3147 /* convert to unsigned if it does not fit in a long long */
3148 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
3149 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
3150 type
.t
|= VT_UNSIGNED
;
3152 /* integer operations */
3153 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
3154 /* convert to unsigned if it does not fit in an integer */
3155 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
3156 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
3157 type
.t
|= VT_UNSIGNED
;
3164 /* generic gen_op: handles types problems */
3165 ST_FUNC
void gen_op(int op
)
3167 int u
, t1
, t2
, bt1
, bt2
, t
;
3168 CType type1
, combtype
;
3171 t1
= vtop
[-1].type
.t
;
3172 t2
= vtop
[0].type
.t
;
3173 bt1
= t1
& VT_BTYPE
;
3174 bt2
= t2
& VT_BTYPE
;
3176 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
3177 if (bt2
== VT_FUNC
) {
3178 mk_pointer(&vtop
->type
);
3181 if (bt1
== VT_FUNC
) {
3183 mk_pointer(&vtop
->type
);
3188 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
3189 tcc_error_noabort("invalid operand types for binary operation");
3191 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3192 /* at least one operand is a pointer */
3193 /* relational op: must be both pointers */
3196 /* if both pointers, then it must be the '-' op */
3197 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
3199 tcc_error("cannot use pointers here");
3200 if (vtop
[-1].type
.t
& VT_VLA
) {
3201 vla_runtime_pointed_size(&vtop
[-1].type
);
3203 vpushi(pointed_size(&vtop
[-1].type
));
3207 vtop
->type
.t
= VT_PTRDIFF_T
;
3211 /* exactly one pointer : must be '+' or '-'. */
3212 if (op
!= '-' && op
!= '+')
3213 tcc_error("cannot use pointers here");
3214 /* Put pointer as first operand */
3215 if (bt2
== VT_PTR
) {
3217 t
= t1
, t1
= t2
, t2
= t
;
3220 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
3221 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3224 type1
= vtop
[-1].type
;
3225 if (vtop
[-1].type
.t
& VT_VLA
)
3226 vla_runtime_pointed_size(&vtop
[-1].type
);
3228 u
= pointed_size(&vtop
[-1].type
);
3230 tcc_error("unknown array element size");
3234 /* XXX: cast to int ? (long long case) */
3239 #ifdef CONFIG_TCC_BCHECK
3240 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
3241 /* if bounded pointers, we generate a special code to
3248 gen_bounded_ptr_add();
3254 type1
.t
&= ~VT_ARRAY
;
3255 /* put again type if gen_opic() swaped operands */
3259 /* floats can only be used for a few operations */
3260 if (is_float(combtype
.t
)
3261 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3263 tcc_error("invalid operands for binary operation");
3264 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
3265 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
3266 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
3268 t
|= (VT_LONG
& t1
);
3272 t
= t2
= combtype
.t
;
3273 /* XXX: currently, some unsigned operations are explicit, so
3274 we modify them here */
3275 if (t
& VT_UNSIGNED
) {
3282 else if (op
== TOK_LT
)
3284 else if (op
== TOK_GT
)
3286 else if (op
== TOK_LE
)
3288 else if (op
== TOK_GE
)
3294 /* special case for shifts and long long: we keep the shift as
3296 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
3303 if (TOK_ISCOND(op
)) {
3304 /* relational op: the result is an int */
3305 vtop
->type
.t
= VT_INT
;
3310 // Make sure that we have converted to an rvalue:
3311 if (vtop
->r
& VT_LVAL
)
3312 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3315 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3316 #define gen_cvt_itof1 gen_cvt_itof
3318 /* generic itof for unsigned long long case */
3319 static void gen_cvt_itof1(int t
)
3321 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3322 (VT_LLONG
| VT_UNSIGNED
)) {
3325 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
3326 #if LDOUBLE_SIZE != 8
3327 else if (t
== VT_LDOUBLE
)
3328 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
3331 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
3342 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3343 #define gen_cvt_ftoi1 gen_cvt_ftoi
3345 /* generic ftoi for unsigned long long case */
3346 static void gen_cvt_ftoi1(int t
)
3349 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3350 /* not handled natively */
3351 st
= vtop
->type
.t
& VT_BTYPE
;
3353 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
3354 #if LDOUBLE_SIZE != 8
3355 else if (st
== VT_LDOUBLE
)
3356 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
3359 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
3370 /* special delayed cast for char/short */
3371 static void force_charshort_cast(void)
3373 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3374 int dbt
= vtop
->type
.t
;
3375 vtop
->r
&= ~VT_MUSTCAST
;
3377 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3381 static void gen_cast_s(int t
)
3389 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3390 static void gen_cast(CType
*type
)
3392 int sbt
, dbt
, sf
, df
, c
;
3393 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3395 /* special delayed cast for char/short */
3396 if (vtop
->r
& VT_MUSTCAST
)
3397 force_charshort_cast();
3399 /* bitfields first get cast to ints */
3400 if (vtop
->type
.t
& VT_BITFIELD
)
3403 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3404 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3412 dbt_bt
= dbt
& VT_BTYPE
;
3413 sbt_bt
= sbt
& VT_BTYPE
;
3415 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3416 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3417 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3420 /* constant case: we can do it now */
3421 /* XXX: in ISOC, cannot do it if error in convert */
3422 if (sbt
== VT_FLOAT
)
3423 vtop
->c
.ld
= vtop
->c
.f
;
3424 else if (sbt
== VT_DOUBLE
)
3425 vtop
->c
.ld
= vtop
->c
.d
;
3428 if (sbt_bt
== VT_LLONG
) {
3429 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3430 vtop
->c
.ld
= vtop
->c
.i
;
3432 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3434 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3435 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3437 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3440 if (dbt
== VT_FLOAT
)
3441 vtop
->c
.f
= (float)vtop
->c
.ld
;
3442 else if (dbt
== VT_DOUBLE
)
3443 vtop
->c
.d
= (double)vtop
->c
.ld
;
3444 } else if (sf
&& dbt
== VT_BOOL
) {
3445 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3448 vtop
->c
.i
= vtop
->c
.ld
;
3449 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3451 else if (sbt
& VT_UNSIGNED
)
3452 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3454 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3456 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3458 else if (dbt
== VT_BOOL
)
3459 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3461 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3462 dbt_bt
== VT_SHORT
? 0xffff :
3465 if (!(dbt
& VT_UNSIGNED
))
3466 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3471 } else if (dbt
== VT_BOOL
3472 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3473 == (VT_CONST
| VT_SYM
)) {
3474 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3480 /* cannot generate code for global or static initializers */
3481 if (STATIC_DATA_WANTED
)
3484 /* non constant case: generate code */
3485 if (dbt
== VT_BOOL
) {
3486 gen_test_zero(TOK_NE
);
3492 /* convert from fp to fp */
3495 /* convert int to fp */
3498 /* convert fp to int */
3500 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3503 goto again
; /* may need char/short cast */
3508 ds
= btype_size(dbt_bt
);
3509 ss
= btype_size(sbt_bt
);
3510 if (ds
== 0 || ss
== 0) {
3511 if (dbt_bt
== VT_VOID
)
3513 cast_error(&vtop
->type
, type
);
3515 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3516 tcc_error("cast to incomplete type");
3518 /* same size and no sign conversion needed */
3519 if (ds
== ss
&& ds
>= 4)
3521 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3522 tcc_warning("cast between pointer and integer of different size");
3523 if (sbt_bt
== VT_PTR
) {
3524 /* put integer type to allow logical operations below */
3525 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3529 /* processor allows { int a = 0, b = *(char*)&a; }
3530 That means that if we cast to less width, we can just
3531 change the type and read it still later. */
3532 #define ALLOW_SUBTYPE_ACCESS 1
3534 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3535 /* value still in memory */
3541 goto done
; /* no 64bit envolved */
3549 /* generate high word */
3550 if (sbt
& VT_UNSIGNED
) {
3559 } else if (ss
== 8) {
3560 /* from long long: just take low order word */
3568 /* need to convert from 32bit to 64bit */
3569 if (sbt
& VT_UNSIGNED
) {
3570 #if defined(TCC_TARGET_RISCV64)
3571 /* RISC-V keeps 32bit vals in registers sign-extended.
3572 So here we need a zero-extension. */
3581 ss
= ds
, ds
= 4, dbt
= sbt
;
3582 } else if (ss
== 8) {
3583 /* XXX some architectures (e.g. risc-v) would like it
3584 better for this merely being a 32-to-64 sign or zero-
3586 trunc
= 32; /* zero upper 32 bits */
3594 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3600 bits
= (ss
- ds
) * 8;
3601 /* for unsigned, gen_op will convert SAR to SHR */
3602 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3605 vpushi(bits
- trunc
);
3612 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3615 /* return type size as known at compile time. Put alignment at 'a' */
3616 ST_FUNC
int type_size(CType
*type
, int *a
)
3621 bt
= type
->t
& VT_BTYPE
;
3622 if (bt
== VT_STRUCT
) {
3627 } else if (bt
== VT_PTR
) {
3628 if (type
->t
& VT_ARRAY
) {
3632 ts
= type_size(&s
->type
, a
);
3634 if (ts
< 0 && s
->c
< 0)
3642 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3643 return -1; /* incomplete enum */
3644 } else if (bt
== VT_LDOUBLE
) {
3646 return LDOUBLE_SIZE
;
3647 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3648 #ifdef TCC_TARGET_I386
3649 #ifdef TCC_TARGET_PE
3654 #elif defined(TCC_TARGET_ARM)
3664 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3667 } else if (bt
== VT_SHORT
) {
3670 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3674 /* char, void, function, _Bool */
3680 /* push type size as known at runtime time on top of value stack. Put
3682 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
3684 if (type
->t
& VT_VLA
) {
3685 type_size(&type
->ref
->type
, a
);
3686 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3688 vpushi(type_size(type
, a
));
3692 /* return the pointed type of t */
3693 static inline CType
*pointed_type(CType
*type
)
3695 return &type
->ref
->type
;
3698 /* modify type so that its it is a pointer to type. */
3699 ST_FUNC
void mk_pointer(CType
*type
)
3702 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3703 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3707 /* return true if type1 and type2 are exactly the same (including
3710 static int is_compatible_types(CType
*type1
, CType
*type2
)
3712 return compare_types(type1
,type2
,0);
3715 /* return true if type1 and type2 are the same (ignoring qualifiers).
3717 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3719 return compare_types(type1
,type2
,1);
3722 static void cast_error(CType
*st
, CType
*dt
)
3724 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3727 /* verify type compatibility to store vtop in 'dt' type */
3728 static void verify_assign_cast(CType
*dt
)
3730 CType
*st
, *type1
, *type2
;
3731 int dbt
, sbt
, qualwarn
, lvl
;
3733 st
= &vtop
->type
; /* source type */
3734 dbt
= dt
->t
& VT_BTYPE
;
3735 sbt
= st
->t
& VT_BTYPE
;
3736 if (dt
->t
& VT_CONSTANT
)
3737 tcc_warning("assignment of read-only location");
3741 tcc_error("assignment to void expression");
3744 /* special cases for pointers */
3745 /* '0' can also be a pointer */
3746 if (is_null_pointer(vtop
))
3748 /* accept implicit pointer to integer cast with warning */
3749 if (is_integer_btype(sbt
)) {
3750 tcc_warning("assignment makes pointer from integer without a cast");
3753 type1
= pointed_type(dt
);
3755 type2
= pointed_type(st
);
3756 else if (sbt
== VT_FUNC
)
3757 type2
= st
; /* a function is implicitly a function pointer */
3760 if (is_compatible_types(type1
, type2
))
3762 for (qualwarn
= lvl
= 0;; ++lvl
) {
3763 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3764 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3766 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3767 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3768 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3770 type1
= pointed_type(type1
);
3771 type2
= pointed_type(type2
);
3773 if (!is_compatible_unqualified_types(type1
, type2
)) {
3774 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3775 /* void * can match anything */
3776 } else if (dbt
== sbt
3777 && is_integer_btype(sbt
& VT_BTYPE
)
3778 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3779 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3780 /* Like GCC don't warn by default for merely changes
3781 in pointer target signedness. Do warn for different
3782 base types, though, in particular for unsigned enums
3783 and signed int targets. */
3785 tcc_warning("assignment from incompatible pointer type");
3790 tcc_warning("assignment discards qualifiers from pointer target type");
3796 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3797 tcc_warning("assignment makes integer from pointer without a cast");
3798 } else if (sbt
== VT_STRUCT
) {
3799 goto case_VT_STRUCT
;
3801 /* XXX: more tests */
3805 if (!is_compatible_unqualified_types(dt
, st
)) {
3813 static void gen_assign_cast(CType
*dt
)
3815 verify_assign_cast(dt
);
3819 /* store vtop in lvalue pushed on stack */
3820 ST_FUNC
void vstore(void)
3822 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3824 ft
= vtop
[-1].type
.t
;
3825 sbt
= vtop
->type
.t
& VT_BTYPE
;
3826 dbt
= ft
& VT_BTYPE
;
3828 verify_assign_cast(&vtop
[-1].type
);
3830 if (sbt
== VT_STRUCT
) {
3831 /* if structure, only generate pointer */
3832 /* structure assignment : generate memcpy */
3833 /* XXX: optimize if small size */
3834 size
= type_size(&vtop
->type
, &align
);
3838 #ifdef CONFIG_TCC_BCHECK
3839 if (vtop
->r
& VT_MUSTBOUND
)
3840 gbound(); /* check would be wrong after gaddrof() */
3842 vtop
->type
.t
= VT_PTR
;
3845 /* address of memcpy() */
3848 vpush_global_sym(&func_old_type
, TOK_memmove8
);
3849 else if(!(align
& 3))
3850 vpush_global_sym(&func_old_type
, TOK_memmove4
);
3853 /* Use memmove, rather than memcpy, as dest and src may be same: */
3854 vpush_global_sym(&func_old_type
, TOK_memmove
);
3859 #ifdef CONFIG_TCC_BCHECK
3860 if (vtop
->r
& VT_MUSTBOUND
)
3863 vtop
->type
.t
= VT_PTR
;
3868 /* leave source on stack */
3870 } else if (ft
& VT_BITFIELD
) {
3871 /* bitfield store handling */
3873 /* save lvalue as expression result (example: s.b = s.a = n;) */
3874 vdup(), vtop
[-1] = vtop
[-2];
3876 bit_pos
= BIT_POS(ft
);
3877 bit_size
= BIT_SIZE(ft
);
3878 /* remove bit field info to avoid loops */
3879 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3881 if (dbt
== VT_BOOL
) {
3882 gen_cast(&vtop
[-1].type
);
3883 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3885 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3886 if (dbt
!= VT_BOOL
) {
3887 gen_cast(&vtop
[-1].type
);
3888 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3890 if (r
== VT_STRUCT
) {
3891 store_packed_bf(bit_pos
, bit_size
);
3893 unsigned long long mask
= (1ULL << bit_size
) - 1;
3894 if (dbt
!= VT_BOOL
) {
3896 if (dbt
== VT_LLONG
)
3899 vpushi((unsigned)mask
);
3906 /* duplicate destination */
3909 /* load destination, mask and or with source */
3910 if (dbt
== VT_LLONG
)
3911 vpushll(~(mask
<< bit_pos
));
3913 vpushi(~((unsigned)mask
<< bit_pos
));
3918 /* ... and discard */
3921 } else if (dbt
== VT_VOID
) {
3924 /* optimize char/short casts */
3926 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3927 && is_integer_btype(sbt
)
3929 if ((vtop
->r
& VT_MUSTCAST
)
3930 && btype_size(dbt
) > btype_size(sbt
)
3932 force_charshort_cast();
3935 gen_cast(&vtop
[-1].type
);
3938 #ifdef CONFIG_TCC_BCHECK
3939 /* bound check case */
3940 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3946 gv(RC_TYPE(dbt
)); /* generate value */
3949 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3950 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3951 vtop
->type
.t
= ft
& VT_TYPE
;
3954 /* if lvalue was saved on stack, must read it */
3955 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3957 r
= get_reg(RC_INT
);
3958 sv
.type
.t
= VT_PTRDIFF_T
;
3959 sv
.r
= VT_LOCAL
| VT_LVAL
;
3960 sv
.c
.i
= vtop
[-1].c
.i
;
3962 vtop
[-1].r
= r
| VT_LVAL
;
3965 r
= vtop
->r
& VT_VALMASK
;
3966 /* two word case handling :
3967 store second register at word + 4 (or +8 for x86-64) */
3968 if (USING_TWO_WORDS(dbt
)) {
3969 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3970 vtop
[-1].type
.t
= load_type
;
3973 /* convert to int to increment easily */
3974 vtop
->type
.t
= VT_PTRDIFF_T
;
3980 vtop
[-1].type
.t
= load_type
;
3981 /* XXX: it works because r2 is spilled last ! */
3982 store(vtop
->r2
, vtop
- 1);
3988 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3992 /* post defines POST/PRE add. c is the token ++ or -- */
3993 ST_FUNC
void inc(int post
, int c
)
3996 vdup(); /* save lvalue */
3998 gv_dup(); /* duplicate value */
4003 vpushi(c
- TOK_MID
);
4005 vstore(); /* store value */
4007 vpop(); /* if post op, return saved value */
4010 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
4012 /* read the string */
4016 while (tok
== TOK_STR
) {
4017 /* XXX: add \0 handling too ? */
4018 cstr_cat(astr
, tokc
.str
.data
, -1);
4021 cstr_ccat(astr
, '\0');
4024 /* If I is >= 1 and a power of two, returns log2(i)+1.
4025 If I is 0 returns 0. */
4026 ST_FUNC
int exact_log2p1(int i
)
4031 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
4042 /* Parse __attribute__((...)) GNUC extension. */
4043 static void parse_attribute(AttributeDef
*ad
)
4049 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
4054 while (tok
!= ')') {
4055 if (tok
< TOK_IDENT
)
4056 expect("attribute name");
4068 tcc_warning("implicit declaration of function '%s'",
4069 get_tok_str(tok
, &tokc
));
4070 s
= external_global_sym(tok
, &func_old_type
);
4071 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4072 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
4073 ad
->cleanup_func
= s
;
4078 case TOK_CONSTRUCTOR1
:
4079 case TOK_CONSTRUCTOR2
:
4080 ad
->f
.func_ctor
= 1;
4082 case TOK_DESTRUCTOR1
:
4083 case TOK_DESTRUCTOR2
:
4084 ad
->f
.func_dtor
= 1;
4086 case TOK_ALWAYS_INLINE1
:
4087 case TOK_ALWAYS_INLINE2
:
4088 ad
->f
.func_alwinl
= 1;
4093 parse_mult_str(&astr
, "section name");
4094 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
4101 parse_mult_str(&astr
, "alias(\"target\")");
4102 ad
->alias_target
= /* save string as token, for later */
4103 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
4107 case TOK_VISIBILITY1
:
4108 case TOK_VISIBILITY2
:
4110 parse_mult_str(&astr
,
4111 "visibility(\"default|hidden|internal|protected\")");
4112 if (!strcmp (astr
.data
, "default"))
4113 ad
->a
.visibility
= STV_DEFAULT
;
4114 else if (!strcmp (astr
.data
, "hidden"))
4115 ad
->a
.visibility
= STV_HIDDEN
;
4116 else if (!strcmp (astr
.data
, "internal"))
4117 ad
->a
.visibility
= STV_INTERNAL
;
4118 else if (!strcmp (astr
.data
, "protected"))
4119 ad
->a
.visibility
= STV_PROTECTED
;
4121 expect("visibility(\"default|hidden|internal|protected\")");
4130 if (n
<= 0 || (n
& (n
- 1)) != 0)
4131 tcc_error("alignment must be a positive power of two");
4136 ad
->a
.aligned
= exact_log2p1(n
);
4137 if (n
!= 1 << (ad
->a
.aligned
- 1))
4138 tcc_error("alignment of %d is larger than implemented", n
);
4150 /* currently, no need to handle it because tcc does not
4151 track unused objects */
4155 ad
->f
.func_noreturn
= 1;
4160 ad
->f
.func_call
= FUNC_CDECL
;
4165 ad
->f
.func_call
= FUNC_STDCALL
;
4167 #ifdef TCC_TARGET_I386
4177 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
4183 ad
->f
.func_call
= FUNC_FASTCALLW
;
4190 ad
->attr_mode
= VT_LLONG
+ 1;
4193 ad
->attr_mode
= VT_BYTE
+ 1;
4196 ad
->attr_mode
= VT_SHORT
+ 1;
4200 ad
->attr_mode
= VT_INT
+ 1;
4203 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4210 ad
->a
.dllexport
= 1;
4212 case TOK_NODECORATE
:
4213 ad
->a
.nodecorate
= 1;
4216 ad
->a
.dllimport
= 1;
4219 if (tcc_state
->warn_unsupported
)
4220 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
4221 /* skip parameters */
4223 int parenthesis
= 0;
4227 else if (tok
== ')')
4230 } while (parenthesis
&& tok
!= -1);
4243 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
4247 while ((s
= s
->next
) != NULL
) {
4248 if ((s
->v
& SYM_FIELD
) &&
4249 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
4250 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
4251 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
4263 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4265 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4266 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4267 int pcc
= !tcc_state
->ms_bitfields
;
4268 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4275 prevbt
= VT_STRUCT
; /* make it never match */
4280 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4281 if (f
->type
.t
& VT_BITFIELD
)
4282 bit_size
= BIT_SIZE(f
->type
.t
);
4285 size
= type_size(&f
->type
, &align
);
4286 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4289 if (pcc
&& bit_size
== 0) {
4290 /* in pcc mode, packing does not affect zero-width bitfields */
4293 /* in pcc mode, attribute packed overrides if set. */
4294 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4297 /* pragma pack overrides align if lesser and packs bitfields always */
4300 if (pragma_pack
< align
)
4301 align
= pragma_pack
;
4302 /* in pcc mode pragma pack also overrides individual align */
4303 if (pcc
&& pragma_pack
< a
)
4307 /* some individual align was specified */
4311 if (type
->ref
->type
.t
== VT_UNION
) {
4312 if (pcc
&& bit_size
>= 0)
4313 size
= (bit_size
+ 7) >> 3;
4318 } else if (bit_size
< 0) {
4320 c
+= (bit_pos
+ 7) >> 3;
4321 c
= (c
+ align
- 1) & -align
;
4330 /* A bit-field. Layout is more complicated. There are two
4331 options: PCC (GCC) compatible and MS compatible */
4333 /* In PCC layout a bit-field is placed adjacent to the
4334 preceding bit-fields, except if:
4336 - an individual alignment was given
4337 - it would overflow its base type container and
4338 there is no packing */
4339 if (bit_size
== 0) {
4341 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4343 } else if (f
->a
.aligned
) {
4345 } else if (!packed
) {
4347 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4348 if (ofs
> size
/ align
)
4352 /* in pcc mode, long long bitfields have type int if they fit */
4353 if (size
== 8 && bit_size
<= 32)
4354 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4356 while (bit_pos
>= align
* 8)
4357 c
+= align
, bit_pos
-= align
* 8;
4360 /* In PCC layout named bit-fields influence the alignment
4361 of the containing struct using the base types alignment,
4362 except for packed fields (which here have correct align). */
4363 if (f
->v
& SYM_FIRST_ANOM
4364 // && bit_size // ??? gcc on ARM/rpi does that
4369 bt
= f
->type
.t
& VT_BTYPE
;
4370 if ((bit_pos
+ bit_size
> size
* 8)
4371 || (bit_size
> 0) == (bt
!= prevbt
)
4373 c
= (c
+ align
- 1) & -align
;
4376 /* In MS bitfield mode a bit-field run always uses
4377 at least as many bits as the underlying type.
4378 To start a new run it's also required that this
4379 or the last bit-field had non-zero width. */
4380 if (bit_size
|| prev_bit_size
)
4383 /* In MS layout the records alignment is normally
4384 influenced by the field, except for a zero-width
4385 field at the start of a run (but by further zero-width
4386 fields it is again). */
4387 if (bit_size
== 0 && prevbt
!= bt
)
4390 prev_bit_size
= bit_size
;
4393 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4394 | (bit_pos
<< VT_STRUCT_SHIFT
);
4395 bit_pos
+= bit_size
;
4397 if (align
> maxalign
)
4401 printf("set field %s offset %-2d size %-2d align %-2d",
4402 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4403 if (f
->type
.t
& VT_BITFIELD
) {
4404 printf(" pos %-2d bits %-2d",
4417 c
+= (bit_pos
+ 7) >> 3;
4419 /* store size and alignment */
4420 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4424 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4425 /* can happen if individual align for some member was given. In
4426 this case MSVC ignores maxalign when aligning the size */
4431 c
= (c
+ a
- 1) & -a
;
4435 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4438 /* check whether we can access bitfields by their type */
4439 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4443 if (0 == (f
->type
.t
& VT_BITFIELD
))
4447 bit_size
= BIT_SIZE(f
->type
.t
);
4450 bit_pos
= BIT_POS(f
->type
.t
);
4451 size
= type_size(&f
->type
, &align
);
4452 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
4455 /* try to access the field using a different type */
4456 c0
= -1, s
= align
= 1;
4459 px
= f
->c
* 8 + bit_pos
;
4460 cx
= (px
>> 3) & -align
;
4461 px
= px
- (cx
<< 3);
4464 s
= (px
+ bit_size
+ 7) >> 3;
4474 s
= type_size(&t
, &align
);
4478 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
4479 /* update offset and bit position */
4482 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4483 | (bit_pos
<< VT_STRUCT_SHIFT
);
4487 printf("FIX field %s offset %-2d size %-2d align %-2d "
4488 "pos %-2d bits %-2d\n",
4489 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4490 cx
, s
, align
, px
, bit_size
);
4493 /* fall back to load/store single-byte wise */
4494 f
->auxtype
= VT_STRUCT
;
4496 printf("FIX field %s : load byte-wise\n",
4497 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4503 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4504 static void struct_decl(CType
*type
, int u
)
4506 int v
, c
, size
, align
, flexible
;
4507 int bit_size
, bsize
, bt
;
4509 AttributeDef ad
, ad1
;
4512 memset(&ad
, 0, sizeof ad
);
4514 parse_attribute(&ad
);
4518 /* struct already defined ? return it */
4520 expect("struct/union/enum name");
4522 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4525 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4527 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4532 /* Record the original enum/struct/union token. */
4533 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4535 /* we put an undefined size for struct/union */
4536 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4537 s
->r
= 0; /* default alignment is zero as gcc */
4539 type
->t
= s
->type
.t
;
4545 tcc_error("struct/union/enum already defined");
4547 /* cannot be empty */
4548 /* non empty enums are not allowed */
4551 long long ll
= 0, pl
= 0, nl
= 0;
4554 /* enum symbols have static storage */
4555 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4559 expect("identifier");
4561 if (ss
&& !local_stack
)
4562 tcc_error("redefinition of enumerator '%s'",
4563 get_tok_str(v
, NULL
));
4567 ll
= expr_const64();
4569 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4571 *ps
= ss
, ps
= &ss
->next
;
4580 /* NOTE: we accept a trailing comma */
4585 /* set integral type of the enum */
4588 if (pl
!= (unsigned)pl
)
4589 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4591 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4592 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4593 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4595 /* set type for enum members */
4596 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4598 if (ll
== (int)ll
) /* default is int if it fits */
4600 if (t
.t
& VT_UNSIGNED
) {
4601 ss
->type
.t
|= VT_UNSIGNED
;
4602 if (ll
== (unsigned)ll
)
4605 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4606 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4611 while (tok
!= '}') {
4612 if (!parse_btype(&btype
, &ad1
)) {
4618 tcc_error("flexible array member '%s' not at the end of struct",
4619 get_tok_str(v
, NULL
));
4625 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4627 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4628 expect("identifier");
4630 int v
= btype
.ref
->v
;
4631 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4632 if (tcc_state
->ms_extensions
== 0)
4633 expect("identifier");
4637 if (type_size(&type1
, &align
) < 0) {
4638 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4641 tcc_error("field '%s' has incomplete type",
4642 get_tok_str(v
, NULL
));
4644 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4645 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4646 (type1
.t
& VT_STORAGE
))
4647 tcc_error("invalid type for '%s'",
4648 get_tok_str(v
, NULL
));
4652 bit_size
= expr_const();
4653 /* XXX: handle v = 0 case for messages */
4655 tcc_error("negative width in bit-field '%s'",
4656 get_tok_str(v
, NULL
));
4657 if (v
&& bit_size
== 0)
4658 tcc_error("zero width for bit-field '%s'",
4659 get_tok_str(v
, NULL
));
4660 parse_attribute(&ad1
);
4662 size
= type_size(&type1
, &align
);
4663 if (bit_size
>= 0) {
4664 bt
= type1
.t
& VT_BTYPE
;
4670 tcc_error("bitfields must have scalar type");
4672 if (bit_size
> bsize
) {
4673 tcc_error("width of '%s' exceeds its type",
4674 get_tok_str(v
, NULL
));
4675 } else if (bit_size
== bsize
4676 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4677 /* no need for bit fields */
4679 } else if (bit_size
== 64) {
4680 tcc_error("field width 64 not implemented");
4682 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4684 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4687 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4688 /* Remember we've seen a real field to check
4689 for placement of flexible array member. */
4692 /* If member is a struct or bit-field, enforce
4693 placing into the struct (as anonymous). */
4695 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4700 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4705 if (tok
== ';' || tok
== TOK_EOF
)
4712 parse_attribute(&ad
);
4713 if (ad
.cleanup_func
) {
4714 tcc_warning("attribute '__cleanup__' ignored on type");
4716 struct_layout(type
, &ad
);
4721 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4723 merge_symattr(&ad
->a
, &s
->a
);
4724 merge_funcattr(&ad
->f
, &s
->f
);
4727 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4728 are added to the element type, copied because it could be a typedef. */
4729 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4731 while (type
->t
& VT_ARRAY
) {
4732 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4733 type
= &type
->ref
->type
;
4735 type
->t
|= qualifiers
;
4738 /* return 0 if no type declaration. otherwise, return the basic type
4741 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4743 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4747 memset(ad
, 0, sizeof(AttributeDef
));
4757 /* currently, we really ignore extension */
4767 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4768 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4769 tmbt
: tcc_error("too many basic types");
4772 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4777 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4794 memset(&ad1
, 0, sizeof(AttributeDef
));
4795 if (parse_btype(&type1
, &ad1
)) {
4796 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4798 n
= 1 << (ad1
.a
.aligned
- 1);
4800 type_size(&type1
, &n
);
4803 if (n
<= 0 || (n
& (n
- 1)) != 0)
4804 tcc_error("alignment must be a positive power of two");
4807 ad
->a
.aligned
= exact_log2p1(n
);
4811 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4812 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4813 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4814 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4821 #ifdef TCC_TARGET_ARM64
4823 /* GCC's __uint128_t appears in some Linux header files. Make it a
4824 synonym for long double to get the size and alignment right. */
4835 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4836 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4844 struct_decl(&type1
, VT_ENUM
);
4847 type
->ref
= type1
.ref
;
4850 struct_decl(&type1
, VT_STRUCT
);
4853 struct_decl(&type1
, VT_UNION
);
4856 /* type modifiers */
4861 parse_btype_qualify(type
, VT_CONSTANT
);
4869 parse_btype_qualify(type
, VT_VOLATILE
);
4876 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4877 tcc_error("signed and unsigned modifier");
4890 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4891 tcc_error("signed and unsigned modifier");
4892 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4908 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4909 tcc_error("multiple storage classes");
4921 ad
->f
.func_noreturn
= 1;
4923 /* GNUC attribute */
4924 case TOK_ATTRIBUTE1
:
4925 case TOK_ATTRIBUTE2
:
4926 parse_attribute(ad
);
4927 if (ad
->attr_mode
) {
4928 u
= ad
->attr_mode
-1;
4929 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4937 parse_expr_type(&type1
);
4938 /* remove all storage modifiers except typedef */
4939 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4941 sym_to_attr(ad
, type1
.ref
);
4947 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4951 if (tok
== ':' && !in_generic
) {
4952 /* ignore if it's a label */
4957 t
&= ~(VT_BTYPE
|VT_LONG
);
4958 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4959 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4960 type
->ref
= s
->type
.ref
;
4962 parse_btype_qualify(type
, t
);
4964 /* get attributes from typedef */
4973 if (tcc_state
->char_is_unsigned
) {
4974 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4977 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4978 bt
= t
& (VT_BTYPE
|VT_LONG
);
4980 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4981 #ifdef TCC_TARGET_PE
4982 if (bt
== VT_LDOUBLE
)
4983 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4989 /* convert a function parameter type (array to pointer and function to
4990 function pointer) */
4991 static inline void convert_parameter_type(CType
*pt
)
4993 /* remove const and volatile qualifiers (XXX: const could be used
4994 to indicate a const function parameter */
4995 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4996 /* array must be transformed to pointer according to ANSI C */
4998 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
5003 ST_FUNC
void parse_asm_str(CString
*astr
)
5006 parse_mult_str(astr
, "string constant");
5009 /* Parse an asm label and return the token */
5010 static int asm_label_instr(void)
5016 parse_asm_str(&astr
);
5019 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
5021 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
5026 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
5028 int n
, l
, t1
, arg_size
, align
, unused_align
;
5029 Sym
**plast
, *s
, *first
;
5034 /* function type, or recursive declarator (return if so) */
5036 if (td
&& !(td
& TYPE_ABSTRACT
))
5040 else if (parse_btype(&pt
, &ad1
))
5043 merge_attr (ad
, &ad1
);
5052 /* read param name and compute offset */
5053 if (l
!= FUNC_OLD
) {
5054 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
5056 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
5057 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
5058 tcc_error("parameter declared as void");
5062 expect("identifier");
5063 pt
.t
= VT_VOID
; /* invalid type */
5067 convert_parameter_type(&pt
);
5068 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
5069 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
5075 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
5080 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
5081 tcc_error("invalid type");
5084 /* if no parameters, then old type prototype */
5087 /* NOTE: const is ignored in returned type as it has a special
5088 meaning in gcc / C++ */
5089 type
->t
&= ~VT_CONSTANT
;
5090 /* some ancient pre-K&R C allows a function to return an array
5091 and the array brackets to be put after the arguments, such
5092 that "int c()[]" means something like "int[] c()" */
5095 skip(']'); /* only handle simple "[]" */
5098 /* we push a anonymous symbol which will contain the function prototype */
5099 ad
->f
.func_args
= arg_size
;
5100 ad
->f
.func_type
= l
;
5101 s
= sym_push(SYM_FIELD
, type
, 0, 0);
5107 } else if (tok
== '[') {
5108 int saved_nocode_wanted
= nocode_wanted
;
5109 /* array definition */
5112 /* XXX The optional type-quals and static should only be accepted
5113 in parameter decls. The '*' as well, and then even only
5114 in prototypes (not function defs). */
5116 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5131 if (!local_stack
|| (storage
& VT_STATIC
))
5132 vpushi(expr_const());
5134 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5135 length must always be evaluated, even under nocode_wanted,
5136 so that its size slot is initialized (e.g. under sizeof
5141 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5144 tcc_error("invalid array size");
5146 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5147 tcc_error("size of variable length array should be an integer");
5153 /* parse next post type */
5154 post_type(type
, ad
, storage
, 0);
5156 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5157 tcc_error("declaration of an array of functions");
5158 if ((type
->t
& VT_BTYPE
) == VT_VOID
5159 || type_size(type
, &unused_align
) < 0)
5160 tcc_error("declaration of an array of incomplete type elements");
5162 t1
|= type
->t
& VT_VLA
;
5166 tcc_error("need explicit inner array size in VLAs");
5167 loc
-= type_size(&int_type
, &align
);
5171 vla_runtime_type_size(type
, &align
);
5173 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5179 nocode_wanted
= saved_nocode_wanted
;
5181 /* we push an anonymous symbol which will contain the array
5183 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5184 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5190 /* Parse a type declarator (except basic type), and return the type
5191 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5192 expected. 'type' should contain the basic type. 'ad' is the
5193 attribute definition of the basic type. It can be modified by
5194 type_decl(). If this (possibly abstract) declarator is a pointer chain
5195 it returns the innermost pointed to type (equals *type, but is a different
5196 pointer), otherwise returns type itself, that's used for recursive calls. */
5197 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5200 int qualifiers
, storage
;
5202 /* recursive type, remove storage bits first, apply them later again */
5203 storage
= type
->t
& VT_STORAGE
;
5204 type
->t
&= ~VT_STORAGE
;
5207 while (tok
== '*') {
5215 qualifiers
|= VT_CONSTANT
;
5220 qualifiers
|= VT_VOLATILE
;
5226 /* XXX: clarify attribute handling */
5227 case TOK_ATTRIBUTE1
:
5228 case TOK_ATTRIBUTE2
:
5229 parse_attribute(ad
);
5233 type
->t
|= qualifiers
;
5235 /* innermost pointed to type is the one for the first derivation */
5236 ret
= pointed_type(type
);
5240 /* This is possibly a parameter type list for abstract declarators
5241 ('int ()'), use post_type for testing this. */
5242 if (!post_type(type
, ad
, 0, td
)) {
5243 /* It's not, so it's a nested declarator, and the post operations
5244 apply to the innermost pointed to type (if any). */
5245 /* XXX: this is not correct to modify 'ad' at this point, but
5246 the syntax is not clear */
5247 parse_attribute(ad
);
5248 post
= type_decl(type
, ad
, v
, td
);
5252 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5253 /* type identifier */
5258 if (!(td
& TYPE_ABSTRACT
))
5259 expect("identifier");
5262 post_type(post
, ad
, storage
, 0);
5263 parse_attribute(ad
);
5268 /* indirection with full error checking and bound check */
5269 ST_FUNC
void indir(void)
5271 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5272 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5276 if (vtop
->r
& VT_LVAL
)
5278 vtop
->type
= *pointed_type(&vtop
->type
);
5279 /* Arrays and functions are never lvalues */
5280 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5281 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5283 /* if bound checking, the referenced pointer must be checked */
5284 #ifdef CONFIG_TCC_BCHECK
5285 if (tcc_state
->do_bounds_check
)
5286 vtop
->r
|= VT_MUSTBOUND
;
5291 /* pass a parameter to a function and do type checking and casting */
5292 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5297 func_type
= func
->f
.func_type
;
5298 if (func_type
== FUNC_OLD
||
5299 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5300 /* default casting : only need to convert float to double */
5301 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5302 gen_cast_s(VT_DOUBLE
);
5303 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5304 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5305 type
.ref
= vtop
->type
.ref
;
5307 } else if (vtop
->r
& VT_MUSTCAST
) {
5308 force_charshort_cast();
5310 } else if (arg
== NULL
) {
5311 tcc_error("too many arguments to function");
5314 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5315 gen_assign_cast(&type
);
5319 /* parse an expression and return its type without any side effect. */
5320 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5329 /* parse an expression of the form '(type)' or '(expr)' and return its
5331 static void parse_expr_type(CType
*type
)
5337 if (parse_btype(type
, &ad
)) {
5338 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5340 expr_type(type
, gexpr
);
5345 static void parse_type(CType
*type
)
5350 if (!parse_btype(type
, &ad
)) {
5353 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5356 static void parse_builtin_params(int nc
, const char *args
)
5363 while ((c
= *args
++)) {
5367 case 'e': expr_eq(); continue;
5368 case 't': parse_type(&t
); vpush(&t
); continue;
5369 default: tcc_error("internal error"); break;
5377 ST_FUNC
void unary(void)
5379 int n
, t
, align
, size
, r
, sizeof_caller
;
5384 /* generate line number info */
5385 if (tcc_state
->do_debug
)
5386 tcc_debug_line(tcc_state
);
5388 sizeof_caller
= in_sizeof
;
5391 /* XXX: GCC 2.95.3 does not generate a table although it should be
5399 #ifdef TCC_TARGET_PE
5400 t
= VT_SHORT
|VT_UNSIGNED
;
5408 vsetc(&type
, VT_CONST
, &tokc
);
5412 t
= VT_INT
| VT_UNSIGNED
;
5418 t
= VT_LLONG
| VT_UNSIGNED
;
5430 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5433 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5435 case TOK___FUNCTION__
:
5437 goto tok_identifier
;
5443 /* special function name identifier */
5444 len
= strlen(funcname
) + 1;
5445 /* generate char[len] type */
5450 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
5451 if (!NODATA_WANTED
) {
5452 ptr
= section_ptr_add(data_section
, len
);
5453 memcpy(ptr
, funcname
, len
);
5459 #ifdef TCC_TARGET_PE
5460 t
= VT_SHORT
| VT_UNSIGNED
;
5466 /* string parsing */
5468 if (tcc_state
->char_is_unsigned
)
5469 t
= VT_BYTE
| VT_UNSIGNED
;
5471 if (tcc_state
->warn_write_strings
)
5476 memset(&ad
, 0, sizeof(AttributeDef
));
5477 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5482 if (parse_btype(&type
, &ad
)) {
5483 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5485 /* check ISOC99 compound literal */
5487 /* data is allocated locally by default */
5492 /* all except arrays are lvalues */
5493 if (!(type
.t
& VT_ARRAY
))
5495 memset(&ad
, 0, sizeof(AttributeDef
));
5496 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5498 if (sizeof_caller
) {
5505 } else if (tok
== '{') {
5506 int saved_nocode_wanted
= nocode_wanted
;
5507 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5508 tcc_error("expected constant");
5509 /* save all registers */
5511 /* statement expression : we do not accept break/continue
5512 inside as GCC does. We do retain the nocode_wanted state,
5513 as statement expressions can't ever be entered from the
5514 outside, so any reactivation of code emission (from labels
5515 or loop heads) can be disabled again after the end of it. */
5517 nocode_wanted
= saved_nocode_wanted
;
5532 /* functions names must be treated as function pointers,
5533 except for unary '&' and sizeof. Since we consider that
5534 functions are not lvalues, we only have to handle it
5535 there and in function calls. */
5536 /* arrays can also be used although they are not lvalues */
5537 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5538 !(vtop
->type
.t
& VT_ARRAY
))
5541 vtop
->sym
->a
.addrtaken
= 1;
5542 mk_pointer(&vtop
->type
);
5548 gen_test_zero(TOK_EQ
);
5559 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5560 tcc_error("pointer not accepted for unary plus");
5561 /* In order to force cast, we add zero, except for floating point
5562 where we really need an noop (otherwise -0.0 will be transformed
5564 if (!is_float(vtop
->type
.t
)) {
5576 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5578 if (vtop
[1].r
& VT_SYM
)
5579 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5580 size
= type_size(&type
, &align
);
5581 if (s
&& s
->a
.aligned
)
5582 align
= 1 << (s
->a
.aligned
- 1);
5583 if (t
== TOK_SIZEOF
) {
5584 if (!(type
.t
& VT_VLA
)) {
5586 tcc_error("sizeof applied to an incomplete type");
5589 vla_runtime_type_size(&type
, &align
);
5594 vtop
->type
.t
|= VT_UNSIGNED
;
5597 case TOK_builtin_expect
:
5598 /* __builtin_expect is a no-op for now */
5599 parse_builtin_params(0, "ee");
5602 case TOK_builtin_types_compatible_p
:
5603 parse_builtin_params(0, "tt");
5604 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5605 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5606 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5610 case TOK_builtin_choose_expr
:
5637 case TOK_builtin_constant_p
:
5638 parse_builtin_params(1, "e");
5639 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5643 case TOK_builtin_frame_address
:
5644 case TOK_builtin_return_address
:
5650 if (tok
!= TOK_CINT
) {
5651 tcc_error("%s only takes positive integers",
5652 tok1
== TOK_builtin_return_address
?
5653 "__builtin_return_address" :
5654 "__builtin_frame_address");
5656 level
= (uint32_t)tokc
.i
;
5661 vset(&type
, VT_LOCAL
, 0); /* local frame */
5663 #ifdef TCC_TARGET_RISCV64
5667 mk_pointer(&vtop
->type
);
5668 indir(); /* -> parent frame */
5670 if (tok1
== TOK_builtin_return_address
) {
5671 // assume return address is just above frame pointer on stack
5672 #ifdef TCC_TARGET_ARM
5675 #elif defined TCC_TARGET_RISCV64
5682 mk_pointer(&vtop
->type
);
5687 #ifdef TCC_TARGET_RISCV64
5688 case TOK_builtin_va_start
:
5689 parse_builtin_params(0, "ee");
5690 r
= vtop
->r
& VT_VALMASK
;
5694 tcc_error("__builtin_va_start expects a local variable");
5699 #ifdef TCC_TARGET_X86_64
5700 #ifdef TCC_TARGET_PE
5701 case TOK_builtin_va_start
:
5702 parse_builtin_params(0, "ee");
5703 r
= vtop
->r
& VT_VALMASK
;
5707 tcc_error("__builtin_va_start expects a local variable");
5709 vtop
->type
= char_pointer_type
;
5714 case TOK_builtin_va_arg_types
:
5715 parse_builtin_params(0, "t");
5716 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5723 #ifdef TCC_TARGET_ARM64
5724 case TOK_builtin_va_start
: {
5725 parse_builtin_params(0, "ee");
5729 vtop
->type
.t
= VT_VOID
;
5732 case TOK_builtin_va_arg
: {
5733 parse_builtin_params(0, "et");
5741 case TOK___arm64_clear_cache
: {
5742 parse_builtin_params(0, "ee");
5745 vtop
->type
.t
= VT_VOID
;
5749 /* pre operations */
5760 t
= vtop
->type
.t
& VT_BTYPE
;
5762 /* In IEEE negate(x) isn't subtract(0,x), but rather
5766 vtop
->c
.f
= -1.0 * 0.0;
5767 else if (t
== VT_DOUBLE
)
5768 vtop
->c
.d
= -1.0 * 0.0;
5770 vtop
->c
.ld
= -1.0 * 0.0;
5778 goto tok_identifier
;
5780 /* allow to take the address of a label */
5781 if (tok
< TOK_UIDENT
)
5782 expect("label identifier");
5783 s
= label_find(tok
);
5785 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5787 if (s
->r
== LABEL_DECLARED
)
5788 s
->r
= LABEL_FORWARD
;
5791 s
->type
.t
= VT_VOID
;
5792 mk_pointer(&s
->type
);
5793 s
->type
.t
|= VT_STATIC
;
5795 vpushsym(&s
->type
, s
);
5801 CType controlling_type
;
5802 int has_default
= 0;
5805 TokenString
*str
= NULL
;
5806 int saved_const_wanted
= const_wanted
;
5811 expr_type(&controlling_type
, expr_eq
);
5812 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5813 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5814 mk_pointer(&controlling_type
);
5815 const_wanted
= saved_const_wanted
;
5819 if (tok
== TOK_DEFAULT
) {
5821 tcc_error("too many 'default'");
5827 AttributeDef ad_tmp
;
5832 parse_btype(&cur_type
, &ad_tmp
);
5835 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5836 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5838 tcc_error("type match twice");
5848 skip_or_save_block(&str
);
5850 skip_or_save_block(NULL
);
5857 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5858 tcc_error("type '%s' does not match any association", buf
);
5860 begin_macro(str
, 1);
5869 // special qnan , snan and infinity values
5874 vtop
->type
.t
= VT_FLOAT
;
5879 goto special_math_val
;
5882 goto special_math_val
;
5889 expect("identifier");
5891 if (!s
|| IS_ASM_SYM(s
)) {
5892 const char *name
= get_tok_str(t
, NULL
);
5894 tcc_error("'%s' undeclared", name
);
5895 /* for simple function calls, we tolerate undeclared
5896 external reference to int() function */
5897 if (tcc_state
->warn_implicit_function_declaration
5898 #ifdef TCC_TARGET_PE
5899 /* people must be warned about using undeclared WINAPI functions
5900 (which usually start with uppercase letter) */
5901 || (name
[0] >= 'A' && name
[0] <= 'Z')
5904 tcc_warning("implicit declaration of function '%s'", name
);
5905 s
= external_global_sym(t
, &func_old_type
);
5909 /* A symbol that has a register is a local register variable,
5910 which starts out as VT_LOCAL value. */
5911 if ((r
& VT_VALMASK
) < VT_CONST
)
5912 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5914 vset(&s
->type
, r
, s
->c
);
5915 /* Point to s as backpointer (even without r&VT_SYM).
5916 Will be used by at least the x86 inline asm parser for
5922 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5923 vtop
->c
.i
= s
->enum_val
;
5928 /* post operations */
5930 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5933 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5934 int qualifiers
, cumofs
= 0;
5936 if (tok
== TOK_ARROW
)
5938 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5941 /* expect pointer on structure */
5942 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5943 expect("struct or union");
5944 if (tok
== TOK_CDOUBLE
)
5945 expect("field name");
5947 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5948 expect("field name");
5949 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5951 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5952 /* add field offset to pointer */
5953 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5954 vpushi(cumofs
+ s
->c
);
5956 /* change type to field type, and set to lvalue */
5957 vtop
->type
= s
->type
;
5958 vtop
->type
.t
|= qualifiers
;
5959 /* an array is never an lvalue */
5960 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5962 #ifdef CONFIG_TCC_BCHECK
5963 /* if bound checking, the referenced pointer must be checked */
5964 if (tcc_state
->do_bounds_check
)
5965 vtop
->r
|= VT_MUSTBOUND
;
5969 } else if (tok
== '[') {
5975 } else if (tok
== '(') {
5978 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5981 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5982 /* pointer test (no array accepted) */
5983 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5984 vtop
->type
= *pointed_type(&vtop
->type
);
5985 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5989 expect("function pointer");
5992 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5994 /* get return type */
5997 sa
= s
->next
; /* first parameter */
5998 nb_args
= regsize
= 0;
6000 /* compute first implicit argument if a structure is returned */
6001 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
6002 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
6003 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6004 &ret_align
, ®size
);
6005 if (ret_nregs
<= 0) {
6006 /* get some space for the returned structure */
6007 size
= type_size(&s
->type
, &align
);
6008 #ifdef TCC_TARGET_ARM64
6009 /* On arm64, a small struct is return in registers.
6010 It is much easier to write it to memory if we know
6011 that we are allowed to write some extra bytes, so
6012 round the allocated space up to a power of 2: */
6014 while (size
& (size
- 1))
6015 size
= (size
| (size
- 1)) + 1;
6017 loc
= (loc
- size
) & -align
;
6019 ret
.r
= VT_LOCAL
| VT_LVAL
;
6020 /* pass it as 'int' to avoid structure arg passing
6022 vseti(VT_LOCAL
, loc
);
6034 if (ret_nregs
> 0) {
6035 /* return in register */
6037 PUT_R_RET(&ret
, ret
.type
.t
);
6042 gfunc_param_typed(s
, sa
);
6052 tcc_error("too few arguments to function");
6054 #ifdef CONFIG_TCC_BCHECK
6055 if (tcc_state
->do_bounds_check
&&
6056 (nb_args
== 1 || nb_args
== 2) &&
6057 (vtop
[-nb_args
].r
& VT_SYM
) &&
6058 (vtop
[-nb_args
].sym
->v
== TOK_setjmp
||
6059 vtop
[-nb_args
].sym
->v
== TOK__setjmp
6060 #ifndef TCC_TARGET_PE
6061 || vtop
[-nb_args
].sym
->v
== TOK_sigsetjmp
6062 || vtop
[-nb_args
].sym
->v
== TOK___sigsetjmp
6065 vpush_global_sym(&func_old_type
, TOK___bound_setjmp
);
6066 vpushv(vtop
- nb_args
);
6068 vpushv(vtop
- nb_args
);
6069 gfunc_call(nb_args
);
6072 gfunc_call(nb_args
);
6074 if (ret_nregs
< 0) {
6075 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6076 #ifdef TCC_TARGET_RISCV64
6077 arch_transfer_ret_regs(1);
6081 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6082 vsetc(&ret
.type
, r
, &ret
.c
);
6083 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6086 /* handle packed struct return */
6087 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6090 size
= type_size(&s
->type
, &align
);
6091 /* We're writing whole regs often, make sure there's enough
6092 space. Assume register size is power of 2. */
6093 if (regsize
> align
)
6095 loc
= (loc
- size
) & -align
;
6099 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6103 if (--ret_nregs
== 0)
6107 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6110 /* Promote char/short return values. This is matters only
6111 for calling function that were not compiled by TCC and
6112 only on some architectures. For those where it doesn't
6113 matter we expect things to be already promoted to int,
6115 t
= s
->type
.t
& VT_BTYPE
;
6116 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6118 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6120 vtop
->type
.t
= VT_INT
;
6124 if (s
->f
.func_noreturn
)
6132 #ifndef precedence_parser /* original top-down parser */
6134 static void expr_prod(void)
6139 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6146 static void expr_sum(void)
6151 while ((t
= tok
) == '+' || t
== '-') {
6158 static void expr_shift(void)
6163 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6170 static void expr_cmp(void)
6175 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6176 t
== TOK_ULT
|| t
== TOK_UGE
) {
6183 static void expr_cmpeq(void)
6188 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6195 static void expr_and(void)
6198 while (tok
== '&') {
6205 static void expr_xor(void)
6208 while (tok
== '^') {
6215 static void expr_or(void)
6218 while (tok
== '|') {
6225 static void expr_landor(int op
);
6227 static void expr_land(void)
6230 if (tok
== TOK_LAND
)
6234 static void expr_lor(void)
6241 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6242 #else /* defined precedence_parser */
6243 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6244 # define expr_lor() unary(), expr_infix(1)
6246 static int precedence(int tok
)
6249 case TOK_LOR
: return 1;
6250 case TOK_LAND
: return 2;
6254 case TOK_EQ
: case TOK_NE
: return 6;
6255 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6256 case TOK_SHL
: case TOK_SAR
: return 8;
6257 case '+': case '-': return 9;
6258 case '*': case '/': case '%': return 10;
6260 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6265 static unsigned char prec
[256];
6266 static void init_prec(void)
6269 for (i
= 0; i
< 256; i
++)
6270 prec
[i
] = precedence(i
);
6272 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6274 static void expr_landor(int op
);
6276 static void expr_infix(int p
)
6279 while ((p2
= precedence(t
)) >= p
) {
6280 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6285 if (precedence(tok
) > p2
)
6294 /* Assuming vtop is a value used in a conditional context
6295 (i.e. compared with zero) return 0 if it's false, 1 if
6296 true and -1 if it can't be statically determined. */
6297 static int condition_3way(void)
6300 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6301 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6303 gen_cast_s(VT_BOOL
);
6310 static void expr_landor(int op
)
6312 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6314 c
= f
? i
: condition_3way();
6316 save_regs(1), cc
= 0;
6318 nocode_wanted
++, f
= 1;
6326 expr_landor_next(op
);
6338 static int is_cond_bool(SValue
*sv
)
6340 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6341 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6342 return (unsigned)sv
->c
.i
< 2;
6343 if (sv
->r
== VT_CMP
)
6348 static void expr_cond(void)
6350 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6358 c
= condition_3way();
6359 g
= (tok
== ':' && gnu_ext
);
6369 /* needed to avoid having different registers saved in
6376 ncw_prev
= nocode_wanted
;
6382 if (c
< 0 && vtop
->r
== VT_CMP
) {
6389 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6390 mk_pointer(&vtop
->type
);
6391 sv
= *vtop
; /* save value to handle it later */
6392 vtop
--; /* no vpop so that FP stack is not flushed */
6402 nocode_wanted
= ncw_prev
;
6408 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6409 if (sv
.r
== VT_CMP
) {
6420 nocode_wanted
= ncw_prev
;
6421 // tcc_warning("two conditions expr_cond");
6425 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6426 mk_pointer(&vtop
->type
);
6428 /* cast operands to correct type according to ISOC rules */
6429 if (!combine_types(&type
, &sv
, vtop
, '?'))
6430 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6431 "type mismatch in conditional expression (have '%s' and '%s')");
6432 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6433 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6434 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6436 /* now we convert second operand */
6440 mk_pointer(&vtop
->type
);
6442 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6446 rc
= RC_TYPE(type
.t
);
6447 /* for long longs, we use fixed registers to avoid having
6448 to handle a complicated move */
6449 if (USING_TWO_WORDS(type
.t
))
6450 rc
= RC_RET(type
.t
);
6458 nocode_wanted
= ncw_prev
;
6460 /* this is horrible, but we must also convert first
6466 mk_pointer(&vtop
->type
);
6468 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6474 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6484 static void expr_eq(void)
6489 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6497 gen_op(TOK_ASSIGN_OP(t
));
6503 ST_FUNC
void gexpr(void)
6514 /* parse a constant expression and return value in vtop. */
6515 static void expr_const1(void)
6518 nocode_wanted
+= unevalmask
+ 1;
6520 nocode_wanted
-= unevalmask
+ 1;
6524 /* parse an integer constant and return its value. */
6525 static inline int64_t expr_const64(void)
6529 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6530 expect("constant expression");
6536 /* parse an integer constant and return its value.
6537 Complain if it doesn't fit 32bit (signed or unsigned). */
/* NOTE(review): extraction fragment -- the declaration and assignment
   of 'c' (the 32-bit truncation of wc) are missing here. */
6538 ST_FUNC
int expr_const(void)
6541 int64_t wc
= expr_const64();
/* error out unless the 64-bit value round-trips through either a
   signed or an unsigned 32-bit truncation */
6543 if (c
!= wc
&& (unsigned)c
!= wc
)
6544 tcc_error("constant exceeds 32 bit");
6548 /* ------------------------------------------------------------------------- */
6549 /* return from function */
/* Emit code returning the value on top of the value stack from the
   current function, according to func_type.  Struct returns are either
   copied through the implicit result pointer (func_vc) or packed into
   return registers as described by gfunc_sret()/RC_RET().
   NOTE(review): extraction fragment -- many interior lines are missing
   (original line numbers are not contiguous); also note the mojibake
   ",&regsize" rendered as ", ®size" below -- verify against upstream. */
6551 #ifndef TCC_TARGET_ARM64
6552 static void gfunc_return(CType
*func_type
)
6554 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6555 CType type
, ret_type
;
6556 int ret_align
, ret_nregs
, regsize
;
/* ask the target how this struct is returned: <0 = target hook,
   0 = via implicit pointer, >0 = packed into ret_nregs registers */
6557 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6558 &ret_align
, ®size
);
6559 if (ret_nregs
< 0) {
6560 #ifdef TCC_TARGET_RISCV64
6561 arch_transfer_ret_regs(0);
6563 } else if (0 == ret_nregs
) {
6564 /* if returning structure, must copy it to implicit
6565 first pointer arg location */
6568 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6571 /* copy structure value to pointer */
6574 /* returning structure packed into registers */
6575 int size
, addr
, align
, rc
;
6576 size
= type_size(func_type
,&align
);
/* if the value is not suitably aligned for register loads, spill it
   to a freshly aligned stack slot first */
6577 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6578 (vtop
->c
.i
& (ret_align
-1)))
6579 && (align
& (ret_align
-1))) {
6580 loc
= (loc
- size
) & -ret_align
;
6583 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6587 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6589 vtop
->type
= ret_type
;
6590 rc
= RC_RET(ret_type
.t
);
6598 if (--ret_nregs
== 0)
6600 /* We assume that when a structure is returned in multiple
6601 registers, their classes are consecutive values of the
6604 vtop
->c
.i
+= regsize
;
/* scalar return: load the value into the return register class */
6609 gv(RC_RET(func_type
->t
));
6611 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
/* At the end of a function body: nothing to do for void functions;
   for main() returning int, synthesize the implicit 'return 0';
   otherwise warn that the function might return no value.
   NOTE(review): extraction fragment -- interior lines are missing. */
6615 static void check_func_return(void)
6617 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6619 if (!strcmp (funcname
, "main")
6620 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6621 /* main returns 0 by default */
6623 gen_assign_cast(&func_vt
);
6624 gfunc_return(&func_vt
);
6626 tcc_warning("function might return no value: '%s'", funcname
);
6630 /* ------------------------------------------------------------------------- */
6633 static int case_cmp(const void *pa
, const void *pb
)
6635 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6636 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6637 return a
< b
? -1 : a
> b
;
/* Emit a conditional test of the value on top of the value stack
   (combining with jump chain 't' via gvtst) and patch the resulting
   forward jump so that it lands at address 'a'. */
static void gtst_addr(int t, int a)
{
    int jmp_chain = gvtst(0, t);
    gsym_addr(jmp_chain, a);
}
/* Generate compare/jump code for a sorted array of 'len' case ranges
   (binary search over base[]); *bsym accumulates the jump chain taken
   when no case matches.  'll' records whether the switch value is a
   long long.  NOTE(review): extraction fragment -- interior lines are
   missing. */
6645 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6649 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6666 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
/* recurse into the lower half of the sorted case table */
6668 gcase(base
, len
/2, bsym
);
6672 base
+= e
; len
-= e
;
6682 if (p
->v1
== p
->v2
) {
6684 gtst_addr(0, p
->sym
);
6694 gtst_addr(0, p
->sym
);
/* no case matched: extend the default/exit jump chain */
6698 *bsym
= gjmp(*bsym
);
6701 /* ------------------------------------------------------------------------- */
6702 /* __attribute__((cleanup(fn))) */
/* Call the __attribute__((cleanup(fn))) handlers registered on the
   current scope's cleanup chain, walking cur_scope->cl.s until 'stop'.
   For each entry, fs is the cleanup function symbol and vs the
   variable it applies to; the variable's address (mk_pointer of its
   lvalue) is passed as the single argument.
   NOTE(review): extraction fragment -- interior lines (the actual call
   emission) are missing. */
6704 static void try_call_scope_cleanup(Sym
*stop
)
6706 Sym
*cls
= cur_scope
->cl
.s
;
6708 for (; cls
!= stop
; cls
= cls
->ncl
) {
6709 Sym
*fs
= cls
->next
;
6710 Sym
*vs
= cls
->prev_tok
;
6712 vpushsym(&fs
->type
, fs
);
6713 vset(&vs
->type
, vs
->r
, vs
->c
);
6715 mk_pointer(&vtop
->type
);
/* Before a goto to an already-defined label: run the cleanups of every
   scope the goto leaves.  Finds the nearest common ancestor of the
   current cleanup chain (depth cur_scope->cl.n) and the chain recorded
   at the label (cleanupstate; its depth is encoded in ->v minus
   SYM_FIELD), then calls cleanups down to that ancestor.
   NOTE(review): extraction fragment -- interior lines are missing. */
6721 static void try_call_cleanup_goto(Sym
*cleanupstate
)
/* nothing to do if the current scope has no cleanups */
6726 if (!cur_scope
->cl
.s
)
6729 /* search NCA of both cleanup chains given parents and initial depth */
6730 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
/* walk the deeper chain up until both depths match */
6731 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6733 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
/* equal depth: advance both chains in lockstep until they meet */
6735 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6738 try_call_scope_cleanup(cc
);
6741 /* call 'func' for each __attribute__((cleanup(func))) */
/* On leaving scope 'o': resolve pending gotos whose recorded cleanup
   depth (g->c) is deeper than o's.  Forward gotos get the scope's
   cleanups emitted here and are removed from the pending_gotos list;
   finally the scope's own cleanups run.
   NOTE(review): extraction fragment -- interior lines (including the
   remove_pending label) are missing. */
6742 static void block_cleanup(struct scope
*o
)
6746 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6747 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6752 try_call_scope_cleanup(o
->cl
.s
);
6753 pcl
->jnext
= gjmp(0);
6755 goto remove_pending
;
6765 try_call_scope_cleanup(o
->cl
.s
);
6768 /* ------------------------------------------------------------------------- */
/* Restore the stack pointer from the VLA save slot at frame offset
   'loc'.  NOTE(review): extraction fragment -- the original guards
   this call (skips it when no save slot exists); that guard line is
   missing here, so do not read this as an unconditional call --
   verify against the full source. */
6771 static void vla_restore(int loc
)
6774 gen_vla_sp_restore(loc
);
6777 static void vla_leave(struct scope
*o
)
6779 if (o
->vla
.num
< cur_scope
->vla
.num
)
6780 vla_restore(o
->vla
.loc
);
6783 /* ------------------------------------------------------------------------- */
/* Enter a new block scope 'o': link it to the previous scope, record
   the current local symbol and local label stack positions so they can
   be unwound by prev_scope(), and emit an N_LBRAC stab when debug info
   is enabled.  NOTE(review): extraction fragment -- some interior
   lines (e.g. the cur_scope assignment) are missing. */
6786 void new_scope(struct scope
*o
)
6788 /* copy and link previous scope */
6790 o
->prev
= cur_scope
;
6793 /* record local declaration stack position */
6794 o
->lstk
= local_stack
;
6795 o
->llstk
= local_label_stack
;
6799 if (tcc_state
->do_debug
)
6800 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
/* Leave block scope 'o': run block cleanups if the cleanup chain
   changed inside the scope, pop the local labels and local symbols
   recorded by new_scope(), restore cur_scope, and emit an N_RBRAC
   stab when debug info is enabled.
   NOTE(review): extraction fragment -- interior lines are missing. */
6803 void prev_scope(struct scope
*o
, int is_expr
)
6807 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6808 block_cleanup(o
->prev
);
6810 /* pop locally defined labels */
6811 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6813 /* In the is_expr case (a statement expression is finished here),
6814 vtop might refer to symbols on the local_stack. Either via the
6815 type or via vtop->sym. We can't pop those nor any that in turn
6816 might be referred to. To make it easier we don't roll back
6817 any symbols in that case; some upper level call to block() will
6818 do that. We do have to remove such symbols from the lookup
6819 tables, though. sym_pop will do that. */
6821 /* pop locally defined symbols */
6822 pop_local_syms(&local_stack
, o
->lstk
, is_expr
, 0);
6823 cur_scope
= o
->prev
;
6826 if (tcc_state
->do_debug
)
6827 tcc_debug_stabn(N_RBRAC
, ind
- func_ind
);
6830 /* leave a scope via break/continue(/goto) */
/* Runs the scope's registered cleanup handlers.  NOTE(review):
   extraction fragment -- interior lines are missing (e.g. any null
   check or VLA restore); verify against the full source. */
6831 void leave_scope(struct scope
*o
)
6835 try_call_scope_cleanup(o
->cl
.s
);
6839 /* ------------------------------------------------------------------------- */
6840 /* call block from 'for do while' loops */
/* Run block() for the body of a for/do/while loop, temporarily
   installing the given break (*bsym) and continue (*csym) jump chains
   on the current scope.  NOTE(review): extraction fragment -- only the
   local save of loop_scope and of the previous break/continue chains
   is visible; the swap/restore lines are missing. */
6842 static void lblock(int *bsym
, int *csym
)
6844 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6845 int *b
= co
->bsym
, *c
= co
->csym
;
6859 static void block(int is_expr
)
6861 int a
, b
, c
, d
, e
, t
;
6866 /* default return value is (void) */
6868 vtop
->type
.t
= VT_VOID
;
6880 if (tok
== TOK_ELSE
) {
6885 gsym(d
); /* patch else jmp */
6890 } else if (t
== TOK_WHILE
) {
6902 } else if (t
== '{') {
6905 /* handle local labels declarations */
6906 while (tok
== TOK_LABEL
) {
6909 if (tok
< TOK_UIDENT
)
6910 expect("label identifier");
6911 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6913 } while (tok
== ',');
6917 while (tok
!= '}') {
6926 prev_scope(&o
, is_expr
);
6929 else if (!nocode_wanted
)
6930 check_func_return();
6932 } else if (t
== TOK_RETURN
) {
6933 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6937 gen_assign_cast(&func_vt
);
6939 if (vtop
->type
.t
!= VT_VOID
)
6940 tcc_warning("void function returns a value");
6944 tcc_warning("'return' with no value");
6947 leave_scope(root_scope
);
6949 gfunc_return(&func_vt
);
6951 /* jump unless last stmt in top-level block */
6952 if (tok
!= '}' || local_scope
!= 1)
6956 } else if (t
== TOK_BREAK
) {
6958 if (!cur_scope
->bsym
)
6959 tcc_error("cannot break");
6960 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
6961 leave_scope(cur_switch
->scope
);
6963 leave_scope(loop_scope
);
6964 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6967 } else if (t
== TOK_CONTINUE
) {
6969 if (!cur_scope
->csym
)
6970 tcc_error("cannot continue");
6971 leave_scope(loop_scope
);
6972 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6975 } else if (t
== TOK_FOR
) {
6980 /* c99 for-loop init decl? */
6981 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6982 /* no, regular for-loop init expr */
7010 } else if (t
== TOK_DO
) {
7024 } else if (t
== TOK_SWITCH
) {
7025 struct switch_t
*sw
;
7027 sw
= tcc_mallocz(sizeof *sw
);
7029 sw
->scope
= cur_scope
;
7030 sw
->prev
= cur_switch
;
7036 sw
->sv
= *vtop
--; /* save switch value */
7039 b
= gjmp(0); /* jump to first case */
7041 a
= gjmp(a
); /* add implicit break */
7045 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmp
);
7046 for (b
= 1; b
< sw
->n
; b
++)
7047 if (sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7048 tcc_error("duplicate case value");
7050 /* Our switch table sorting is signed, so the compared
7051 value needs to be as well when it's 64bit. */
7053 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
7054 vtop
->type
.t
&= ~VT_UNSIGNED
;
7056 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7059 gsym_addr(d
, sw
->def_sym
);
7065 dynarray_reset(&sw
->p
, &sw
->n
);
7066 cur_switch
= sw
->prev
;
7069 } else if (t
== TOK_CASE
) {
7070 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7073 cr
->v1
= cr
->v2
= expr_const64();
7074 if (gnu_ext
&& tok
== TOK_DOTS
) {
7076 cr
->v2
= expr_const64();
7077 if (cr
->v2
< cr
->v1
)
7078 tcc_warning("empty case range");
7081 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7084 goto block_after_label
;
7086 } else if (t
== TOK_DEFAULT
) {
7089 if (cur_switch
->def_sym
)
7090 tcc_error("too many 'default'");
7091 cur_switch
->def_sym
= gind();
7094 goto block_after_label
;
7096 } else if (t
== TOK_GOTO
) {
7097 vla_restore(root_scope
->vla
.loc
);
7098 if (tok
== '*' && gnu_ext
) {
7102 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7106 } else if (tok
>= TOK_UIDENT
) {
7107 s
= label_find(tok
);
7108 /* put forward definition if needed */
7110 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7111 else if (s
->r
== LABEL_DECLARED
)
7112 s
->r
= LABEL_FORWARD
;
7114 if (s
->r
& LABEL_FORWARD
) {
7115 /* start new goto chain for cleanups, linked via label->next */
7116 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7117 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7118 pending_gotos
->prev_tok
= s
;
7119 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7120 pending_gotos
->next
= s
;
7122 s
->jnext
= gjmp(s
->jnext
);
7124 try_call_cleanup_goto(s
->cleanupstate
);
7125 gjmp_addr(s
->jnext
);
7130 expect("label identifier");
7134 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7138 if (tok
== ':' && t
>= TOK_UIDENT
) {
7143 if (s
->r
== LABEL_DEFINED
)
7144 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7145 s
->r
= LABEL_DEFINED
;
7147 Sym
*pcl
; /* pending cleanup goto */
7148 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7150 sym_pop(&s
->next
, NULL
, 0);
7154 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7157 s
->cleanupstate
= cur_scope
->cl
.s
;
7160 vla_restore(cur_scope
->vla
.loc
);
7161 /* we accept this, but it is a mistake */
7163 tcc_warning("deprecated use of label at end of compound statement");
7169 /* expression case */
7185 /* This skips over a stream of tokens containing balanced {} and ()
7186 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7187 with a '{'). If STR then allocates and stores the skipped tokens
7188 in *STR. This doesn't check if () and {} are nested correctly,
7189 i.e. "({)}" is accepted. */
/* NOTE(review): extraction fragment -- interior lines (nesting-level
   updates and the token advance) are missing. */
7190 static void skip_or_save_block(TokenString
**str
)
7192 int braces
= tok
== '{';
7195 *str
= tok_str_alloc();
7197 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
7199 if (tok
== TOK_EOF
) {
/* EOF is only acceptable for bare skipping at outer level */
7200 if (str
|| level
> 0)
7201 tcc_error("unexpected end of file");
7206 tok_str_add_tok(*str
);
7209 if (t
== '{' || t
== '(') {
7211 } else if (t
== '}' || t
== ')') {
7213 if (level
== 0 && braces
&& t
== '}')
7218 tok_str_add(*str
, -1);
7219 tok_str_add(*str
, 0);
7223 #define EXPR_CONST 1
/* Parse one initializer element with constness requirement 'expr_type'
   (EXPR_CONST or EXPR_ANY).  In the constant case, compound literals
   are forced to global allocation (global_expr is saved and restored
   around the parse) and the parsed value must be an address constant:
   a VT_CONST, or a symbol lvalue of an anonymous symbol (compound
   literal / string literal); on PE targets dllimport symbols are also
   rejected.  NOTE(review): extraction fragment -- interior lines are
   missing. */
7226 static void parse_init_elem(int expr_type
)
7228 int saved_global_expr
;
7231 /* compound literals must be allocated globally in this case */
7232 saved_global_expr
= global_expr
;
7235 global_expr
= saved_global_expr
;
7236 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7237 (compound literals). */
7238 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7239 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7240 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7241 #ifdef TCC_TARGET_PE
7242 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7245 tcc_error("initializer element is not constant");
7253 /* put zeros for variable based init */
/* For static storage (sec != NULL) nothing is emitted because section
   data is already zero-initialized; for automatic storage the zeroing
   is done at runtime via a call to memset.
   NOTE(review): extraction fragment -- interior lines (argument pushes
   and the call emission) are missing. */
7254 static void init_putz(Section
*sec
, unsigned long c
, int size
)
7257 /* nothing to do because globals are already set to zero */
7259 vpush_global_sym(&func_old_type
, TOK_memset
);
7261 #ifdef TCC_TARGET_ARM
7273 #define DIF_SIZE_ONLY 2
7274 #define DIF_HAVE_ELEM 4
7276 /* t is the array or struct type. c is the array or struct
7277 address. cur_field is the pointer to the current
7278 field, for arrays the 'c' member contains the current start
7279 index. 'flags' is as in decl_initializer.
7280 'al' contains the already initialized length of the
7281 current container (starting at c). This returns the new length of that. */
7282 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
7283 Sym
**cur_field
, int flags
, int al
)
7286 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7287 unsigned long corig
= c
;
7292 if (flags
& DIF_HAVE_ELEM
)
7295 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7302 /* NOTE: we only support ranges for last designator */
7303 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7305 if (!(type
->t
& VT_ARRAY
))
7306 expect("array type");
7308 index
= index_last
= expr_const();
7309 if (tok
== TOK_DOTS
&& gnu_ext
) {
7311 index_last
= expr_const();
7315 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
7317 tcc_error("invalid index");
7319 (*cur_field
)->c
= index_last
;
7320 type
= pointed_type(type
);
7321 elem_size
= type_size(type
, &align
);
7322 c
+= index
* elem_size
;
7323 nb_elems
= index_last
- index
+ 1;
7330 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7331 expect("struct/union type");
7333 f
= find_field(type
, l
, &cumofs
);
7346 } else if (!gnu_ext
) {
7351 if (type
->t
& VT_ARRAY
) {
7352 index
= (*cur_field
)->c
;
7353 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
7354 tcc_error("index too large");
7355 type
= pointed_type(type
);
7356 c
+= index
* type_size(type
, &align
);
7359 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7360 *cur_field
= f
= f
->next
;
7362 tcc_error("too many field init");
7367 /* must put zero in holes (note that doing it that way
7368 ensures that it even works with designators) */
7369 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
7370 init_putz(sec
, corig
+ al
, c
- corig
- al
);
7371 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
7373 /* XXX: make it more general */
7374 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7375 unsigned long c_end
;
7380 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7381 for (i
= 1; i
< nb_elems
; i
++) {
7382 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
7387 } else if (!NODATA_WANTED
) {
7388 c_end
= c
+ nb_elems
* elem_size
;
7389 if (c_end
> sec
->data_allocated
)
7390 section_realloc(sec
, c_end
);
7391 src
= sec
->data
+ c
;
7393 for(i
= 1; i
< nb_elems
; i
++) {
7395 memcpy(dst
, src
, elem_size
);
7399 c
+= nb_elems
* type_size(type
, &align
);
7405 /* store a value or an expression directly in global data or in local array */
7406 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
7413 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7417 /* XXX: not portable */
7418 /* XXX: generate error if incorrect relocation */
7419 gen_assign_cast(&dtype
);
7420 bt
= type
->t
& VT_BTYPE
;
7422 if ((vtop
->r
& VT_SYM
)
7425 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7426 || (type
->t
& VT_BITFIELD
))
7427 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7429 tcc_error("initializer element is not computable at load time");
7431 if (NODATA_WANTED
) {
7436 size
= type_size(type
, &align
);
7437 section_reserve(sec
, c
+ size
);
7438 ptr
= sec
->data
+ c
;
7440 /* XXX: make code faster ? */
7441 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7442 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7443 /* XXX This rejects compound literals like
7444 '(void *){ptr}'. The problem is that '&sym' is
7445 represented the same way, which would be ruled out
7446 by the SYM_FIRST_ANOM check above, but also '"string"'
7447 in 'char *p = "string"' is represented the same
7448 with the type being VT_PTR and the symbol being an
7449 anonymous one. That is, there's no difference in vtop
7450 between '(void *){x}' and '&(void *){x}'. Ignore
7451 pointer typed entities here. Hopefully no real code
7452 will ever use compound literals with scalar type. */
7453 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7454 /* These come from compound literals, memcpy stuff over. */
7458 esym
= elfsym(vtop
->sym
);
7459 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7460 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7462 /* We need to copy over all memory contents, and that
7463 includes relocations. Use the fact that relocs are
7464 created it order, so look from the end of relocs
7465 until we hit one before the copied region. */
7466 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
7467 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
7468 while (num_relocs
--) {
7470 if (rel
->r_offset
>= esym
->st_value
+ size
)
7472 if (rel
->r_offset
< esym
->st_value
)
7474 /* Note: if the same fields are initialized multiple
7475 times (possible with designators) then we possibly
7476 add multiple relocations for the same offset here.
7477 That would lead to wrong code, the last reloc needs
7478 to win. We clean this up later after the whole
7479 initializer is parsed. */
7480 put_elf_reloca(symtab_section
, sec
,
7481 c
+ rel
->r_offset
- esym
->st_value
,
7482 ELFW(R_TYPE
)(rel
->r_info
),
7483 ELFW(R_SYM
)(rel
->r_info
),
7493 if (type
->t
& VT_BITFIELD
) {
7494 int bit_pos
, bit_size
, bits
, n
;
7495 unsigned char *p
, v
, m
;
7496 bit_pos
= BIT_POS(vtop
->type
.t
);
7497 bit_size
= BIT_SIZE(vtop
->type
.t
);
7498 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7499 bit_pos
&= 7, bits
= 0;
7504 v
= vtop
->c
.i
>> bits
<< bit_pos
;
7505 m
= ((1 << n
) - 1) << bit_pos
;
7506 *p
= (*p
& ~m
) | (v
& m
);
7507 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7511 /* XXX: when cross-compiling we assume that each type has the
7512 same representation on host and target, which is likely to
7513 be wrong in the case of long double */
7515 vtop
->c
.i
= vtop
->c
.i
!= 0;
7517 *(char *)ptr
|= vtop
->c
.i
;
7520 *(short *)ptr
|= vtop
->c
.i
;
7523 *(float*)ptr
= vtop
->c
.f
;
7526 *(double *)ptr
= vtop
->c
.d
;
7529 #if defined TCC_IS_NATIVE_387
7530 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7531 memcpy(ptr
, &vtop
->c
.ld
, 10);
7533 else if (sizeof (long double) == sizeof (double))
7534 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7536 else if (vtop
->c
.ld
== 0.0)
7540 if (sizeof(long double) == LDOUBLE_SIZE
)
7541 *(long double*)ptr
= vtop
->c
.ld
;
7542 else if (sizeof(double) == LDOUBLE_SIZE
)
7543 *(double *)ptr
= (double)vtop
->c
.ld
;
7545 tcc_error("can't cross compile long double constants");
7549 *(long long *)ptr
|= vtop
->c
.i
;
7556 addr_t val
= vtop
->c
.i
;
7558 if (vtop
->r
& VT_SYM
)
7559 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7561 *(addr_t
*)ptr
|= val
;
7563 if (vtop
->r
& VT_SYM
)
7564 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7565 *(addr_t
*)ptr
|= val
;
7571 int val
= vtop
->c
.i
;
7573 if (vtop
->r
& VT_SYM
)
7574 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7578 if (vtop
->r
& VT_SYM
)
7579 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7588 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7595 /* 't' contains the type and storage info. 'c' is the offset of the
7596 object in section 'sec'. If 'sec' is NULL, it means stack based
7597 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7598 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7599 size only evaluation is wanted (only for arrays). */
7600 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
7603 int len
, n
, no_oblock
, i
;
7609 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7610 /* In case of strings we have special handling for arrays, so
7611 don't consume them as initializer value (which would commit them
7612 to some anonymous symbol). */
7613 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7614 !(flags
& DIF_SIZE_ONLY
)) {
7615 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7616 flags
|= DIF_HAVE_ELEM
;
7619 if ((flags
& DIF_HAVE_ELEM
) &&
7620 !(type
->t
& VT_ARRAY
) &&
7621 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7622 The source type might have VT_CONSTANT set, which is
7623 of course assignable to non-const elements. */
7624 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7625 init_putv(type
, sec
, c
);
7626 } else if (type
->t
& VT_ARRAY
) {
7629 t1
= pointed_type(type
);
7630 size1
= type_size(t1
, &align1
);
7633 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7636 tcc_error("character array initializer must be a literal,"
7637 " optionally enclosed in braces");
7642 /* only parse strings here if correct type (otherwise: handle
7643 them as ((w)char *) expressions */
7644 if ((tok
== TOK_LSTR
&&
7645 #ifdef TCC_TARGET_PE
7646 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7648 (t1
->t
& VT_BTYPE
) == VT_INT
7650 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7653 cstr_reset(&initstr
);
7654 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7655 tcc_error("unhandled string literal merging");
7656 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7658 initstr
.size
-= size1
;
7660 len
+= tokc
.str
.size
;
7662 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7664 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7667 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7668 && tok
!= TOK_EOF
) {
7669 /* Not a lone literal but part of a bigger expression. */
7670 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7671 tokc
.str
.size
= initstr
.size
;
7672 tokc
.str
.data
= initstr
.data
;
7678 if (n
>= 0 && len
> n
)
7680 if (!(flags
& DIF_SIZE_ONLY
)) {
7682 tcc_warning("initializer-string for array is too long");
7683 /* in order to go faster for common case (char
7684 string in global variable, we handle it
7686 if (sec
&& size1
== 1) {
7688 memcpy(sec
->data
+ c
, initstr
.data
, nb
);
7692 ch
= ((unsigned char *)initstr
.data
)[i
];
7694 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7696 init_putv(t1
, sec
, c
+ i
* size1
);
7700 /* only add trailing zero if enough storage (no
7701 warning in this case since it is standard) */
7702 if (n
< 0 || len
< n
) {
7703 if (!(flags
& DIF_SIZE_ONLY
)) {
7705 init_putv(t1
, sec
, c
+ (len
* size1
));
7716 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7717 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7718 flags
&= ~DIF_HAVE_ELEM
;
7719 if (type
->t
& VT_ARRAY
) {
7721 /* special test for multi dimensional arrays (may not
7722 be strictly correct if designators are used at the
7724 if (no_oblock
&& len
>= n
*size1
)
7727 if (s
->type
.t
== VT_UNION
)
7731 if (no_oblock
&& f
== NULL
)
7740 /* put zeros at the end */
7741 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7742 init_putz(sec
, c
+ len
, n
*size1
- len
);
7745 /* patch type size if needed, which happens only for array types */
7747 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7748 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7751 if ((flags
& DIF_FIRST
) || tok
== '{') {
7759 } else if (tok
== '{') {
7760 if (flags
& DIF_HAVE_ELEM
)
7763 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7765 } else if ((flags
& DIF_SIZE_ONLY
)) {
7766 /* If we supported only ISO C we wouldn't have to accept calling
7767 this on anything than an array if DIF_SIZE_ONLY (and even then
7768 only on the outermost level, so no recursion would be needed),
7769 because initializing a flex array member isn't supported.
7770 But GNU C supports it, so we need to recurse even into
7771 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7772 /* just skip expression */
7773 skip_or_save_block(NULL
);
7775 if (!(flags
& DIF_HAVE_ELEM
)) {
7776 /* This should happen only when we haven't parsed
7777 the init element above for fear of committing a
7778 string constant to memory too early. */
7779 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7780 expect("string constant");
7781 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7783 init_putv(type
, sec
, c
);
7787 /* parse an initializer for type 't' if 'has_init' is non zero, and
7788 allocate space in local or global data space ('r' is either
7789 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7790 variable 'v' of scope 'scope' is declared before initializers
7791 are parsed. If 'v' is zero, then a reference to the new object
7792 is put in the value stack. If 'has_init' is 2, a special parsing
7793 is done to handle string constants. */
7794 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7795 int has_init
, int v
, int scope
)
7797 int size
, align
, addr
;
7798 TokenString
*init_str
= NULL
;
7801 Sym
*flexible_array
;
7803 int saved_nocode_wanted
= nocode_wanted
;
7804 #ifdef CONFIG_TCC_BCHECK
7805 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7808 /* Always allocate static or global variables */
7809 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7810 nocode_wanted
|= 0x80000000;
7812 flexible_array
= NULL
;
7813 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7814 Sym
*field
= type
->ref
->next
;
7817 field
= field
->next
;
7818 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7819 flexible_array
= field
;
7823 size
= type_size(type
, &align
);
7824 /* If unknown size, we must evaluate it before
7825 evaluating initializers because
7826 initializers can generate global data too
7827 (e.g. string pointers or ISOC99 compound
7828 literals). It also simplifies local
7829 initializers handling */
7830 if (size
< 0 || (flexible_array
&& has_init
)) {
7832 tcc_error("unknown type size");
7833 /* get all init string */
7834 if (has_init
== 2) {
7835 init_str
= tok_str_alloc();
7836 /* only get strings */
7837 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7838 tok_str_add_tok(init_str
);
7841 tok_str_add(init_str
, -1);
7842 tok_str_add(init_str
, 0);
7844 skip_or_save_block(&init_str
);
7849 begin_macro(init_str
, 1);
7851 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7852 /* prepare second initializer parsing */
7853 macro_ptr
= init_str
->str
;
7856 /* if still unknown size, error */
7857 size
= type_size(type
, &align
);
7859 tcc_error("unknown type size");
7861 /* If there's a flex member and it was used in the initializer
7863 if (flexible_array
&&
7864 flexible_array
->type
.ref
->c
> 0)
7865 size
+= flexible_array
->type
.ref
->c
7866 * pointed_size(&flexible_array
->type
);
7867 /* take into account specified alignment if bigger */
7868 if (ad
->a
.aligned
) {
7869 int speca
= 1 << (ad
->a
.aligned
- 1);
7872 } else if (ad
->a
.packed
) {
7876 if (!v
&& NODATA_WANTED
)
7877 size
= 0, align
= 1;
7879 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7881 #ifdef CONFIG_TCC_BCHECK
7883 /* add padding between stack variables for bound checking */
7887 loc
= (loc
- size
) & -align
;
7889 #ifdef CONFIG_TCC_BCHECK
7891 /* add padding between stack variables for bound checking */
7896 /* local variable */
7897 #ifdef CONFIG_TCC_ASM
7898 if (ad
->asm_label
) {
7899 int reg
= asm_parse_regvar(ad
->asm_label
);
7901 r
= (r
& ~VT_VALMASK
) | reg
;
7904 sym
= sym_push(v
, type
, r
, addr
);
7905 if (ad
->cleanup_func
) {
7906 Sym
*cls
= sym_push2(&all_cleanups
,
7907 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7908 cls
->prev_tok
= sym
;
7909 cls
->next
= ad
->cleanup_func
;
7910 cls
->ncl
= cur_scope
->cl
.s
;
7911 cur_scope
->cl
.s
= cls
;
7916 /* push local reference */
7917 vset(type
, r
, addr
);
7920 if (v
&& scope
== VT_CONST
) {
7921 /* see if the symbol was already defined */
7924 patch_storage(sym
, ad
, type
);
7925 /* we accept several definitions of the same global variable. */
7926 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7931 /* allocate symbol in corresponding section */
7936 else if (tcc_state
->nocommon
)
7941 addr
= section_add(sec
, size
, align
);
7942 #ifdef CONFIG_TCC_BCHECK
7943 /* add padding if bound check */
7945 section_add(sec
, 1, 1);
7948 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7949 sec
= common_section
;
7954 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7955 patch_storage(sym
, ad
, NULL
);
7957 /* update symbol definition */
7958 put_extern_sym(sym
, sec
, addr
, size
);
7960 /* push global reference */
7961 vpush_ref(type
, sec
, addr
, size
);
7966 #ifdef CONFIG_TCC_BCHECK
7967 /* handles bounds now because the symbol must be defined
7968 before for the relocation */
7972 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7973 /* then add global bound info */
7974 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7975 bounds_ptr
[0] = 0; /* relocated */
7976 bounds_ptr
[1] = size
;
7981 if (type
->t
& VT_VLA
) {
7987 /* save current stack pointer */
7988 if (root_scope
->vla
.loc
== 0) {
7989 struct scope
*v
= cur_scope
;
7990 gen_vla_sp_save(loc
-= PTR_SIZE
);
7991 do v
->vla
.loc
= loc
; while ((v
= v
->prev
));
7994 vla_runtime_type_size(type
, &a
);
7995 gen_vla_alloc(type
, a
);
7996 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7997 /* on _WIN64, because of the function args scratch area, the
7998 result of alloca differs from RSP and is returned in RAX. */
7999 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8001 gen_vla_sp_save(addr
);
8002 cur_scope
->vla
.loc
= addr
;
8003 cur_scope
->vla
.num
++;
8004 } else if (has_init
) {
8005 size_t oldreloc_offset
= 0;
8006 if (sec
&& sec
->reloc
)
8007 oldreloc_offset
= sec
->reloc
->data_offset
;
8008 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
8009 if (sec
&& sec
->reloc
)
8010 squeeze_multi_relocs(sec
, oldreloc_offset
);
8011 /* patch flexible array member size back to -1, */
8012 /* for possible subsequent similar declarations */
8014 flexible_array
->type
.ref
->c
= -1;
8018 /* restore parse state if needed */
8024 nocode_wanted
= saved_nocode_wanted
;
8027 /* parse a function defined by symbol 'sym' and generate its code in
8028 'cur_text_section' */
8029 static void gen_function(Sym
*sym
)
8031 /* Initialize VLA state */
8032 struct scope f
= { 0 };
8033 cur_scope
= root_scope
= &f
;
8036 ind
= cur_text_section
->data_offset
;
8037 if (sym
->a
.aligned
) {
8038 size_t newoff
= section_add(cur_text_section
, 0,
8039 1 << (sym
->a
.aligned
- 1));
8040 gen_fill_nops(newoff
- ind
);
8042 /* NOTE: we patch the symbol size later */
8043 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8044 if (sym
->type
.ref
->f
.func_ctor
)
8045 add_array (tcc_state
, ".init_array", sym
->c
);
8046 if (sym
->type
.ref
->f
.func_dtor
)
8047 add_array (tcc_state
, ".fini_array", sym
->c
);
8049 funcname
= get_tok_str(sym
->v
, NULL
);
8051 func_vt
= sym
->type
.ref
->type
;
8052 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8054 /* put debug symbol */
8055 tcc_debug_funcstart(tcc_state
, sym
);
8056 /* push a dummy symbol to enable local sym storage */
8057 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8058 local_scope
= 1; /* for function parameters */
8062 clear_temp_local_var_list();
8066 /* reset local stack */
8067 pop_local_syms(&local_stack
, NULL
, 0, func_var
);
8069 cur_text_section
->data_offset
= ind
;
8071 label_pop(&global_label_stack
, NULL
, 0);
8072 sym_pop(&all_cleanups
, NULL
, 0);
8073 /* patch symbol size */
8074 elfsym(sym
)->st_size
= ind
- func_ind
;
8075 /* end of function */
8076 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8077 /* It's better to crash than to generate wrong code */
8078 cur_text_section
= NULL
;
8079 funcname
= ""; /* for safety */
8080 func_vt
.t
= VT_VOID
; /* for safety */
8081 func_var
= 0; /* for safety */
8082 ind
= 0; /* for safety */
8083 nocode_wanted
= 0x80000000;
8085 /* do this after funcend debug info */
/* Generate code for all referenced inline functions of state 's'.
   Repeats whole passes until one pass generates nothing new, since an
   inline function generated in one pass may reference another.  A
   function is generated when its symbol was used (sym->c) or when it
   is no longer marked VT_INLINE.
   NOTE(review): extraction fragment -- interior lines are missing. */
8089 static void gen_inline_functions(TCCState
*s
)
8092 int inline_generated
, i
;
8093 struct InlineFunc
*fn
;
8095 tcc_open_bf(s
, ":inline:", 0);
8096 /* iterate while inline function are referenced */
8098 inline_generated
= 0;
8099 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8100 fn
= s
->inline_fns
[i
];
8102 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8103 /* the function was used or forced (and then not internal):
8104 generate its code and convert it to a normal function */
8106 tcc_debug_putfile(s
, fn
->filename
);
/* replay the saved token stream of the inline body */
8107 begin_macro(fn
->func_str
, 1);
8109 cur_text_section
= text_section
;
8113 inline_generated
= 1;
8116 } while (inline_generated
);
/* Release the saved token strings of inline functions that were never
   emitted, then drop the whole inline_fns array.
   NOTE(review): original lines 8121-8122 (opening brace and the
   declaration of 'i'), 8126, 8128 and the closing braces are missing
   from this extract. */
8120 static void free_inline_functions(TCCState
*s
)
8123 /* free tokens of unused inline functions */
8124 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8125 struct InlineFunc
*fn
= s
->inline_fns
[i
];
/* fn->func_str is freed here; presumably guarded by a check (line 8126,
   missing here) that the function was not emitted — TODO confirm */
8127 tok_str_free(fn
->func_str
);
/* free the dynarray itself and reset the count */
8129 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
8132 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8133 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* NOTE(review): this is the main declaration parser. The extract has
   many gaps in the embedded original numbering (e.g. 8135-8138,
   8143-8151, 8162-8166, 8172-8176, 8179-8181, 8197-8204, 8252-8258,
   8269-8276, 8280-8283, 8297, 8314-8318, 8343-8348, 8352-8353,
   8376-8390, 8395-8409), so whole branches, declarations ('btype',
   'type', 'v', 'r', 'buf', 'sym', 'esym', 'alias_target', 'has_init',
   'error_str') and most braces are missing. Comments below describe only
   the visible statements. */
8134 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
8139 AttributeDef ad
, adbase
;
/* _Static_assert(expr, "message") handling: on a false constant
   expression report the failure (the expression evaluation at 8143-8151
   is missing from this extract) */
8142 if (tok
== TOK_STATIC_ASSERT
) {
8152 tcc_error("_Static_assert fail");
8154 goto static_assert_out
;
/* two-argument form: parse the concatenated string literal and use it
   as the error message */
8158 parse_mult_str(&error_str
, "string constant");
8160 tcc_error("%s", (char *)error_str
.data
);
8161 cstr_free(&error_str
);
/* no base type could be parsed: decide whether this is the end of a
   for-init declaration, a stray ';', a global asm block, or an error */
8167 if (!parse_btype(&btype
, &adbase
)) {
8168 if (is_for_loop_init
)
8170 /* skip redundant ';' if not in old parameter decl scope */
8171 if (tok
== ';' && l
!= VT_CMP
) {
8177 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8178 /* global asm block */
8182 if (tok
>= TOK_UIDENT
) {
8183 /* special test for old K&R protos without explicit int
8184 type. Only accepted when defining global data */
8188 expect("declaration");
/* warn about 'struct { ... };' style declarations that declare a tag
   but no object: anonymous tag and no instance name */
8193 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8194 int v
= btype
.ref
->v
;
8195 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8196 tcc_warning("unnamed struct/union that defines no instances");
8200 if (IS_ENUM(btype
.t
)) {
8205 while (1) { /* iterate thru each declaration */
8207 /* If the base type itself was an array type of unspecified
8208 size (like in 'typedef int arr[]; arr x = {1};') then
8209 we will overwrite the unknown size by the real one for
8210 this decl. We need to unshare the ref symbol holding
8212 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
8213 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
/* parse the declarator: fills in 'type', attribute info 'ad' and the
   declared identifier token 'v' */
8216 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed type (presumably under a verbosity guard on
   a missing line — TODO confirm) */
8220 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8221 printf("type = '%s'\n", buf
);
8224 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8225 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
8226 tcc_error("function without file scope cannot be static");
8227 /* if old style function prototype, we accept a
8230 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
/* recurse with VT_CMP to parse the K&R parameter declaration list */
8231 decl0(VT_CMP
, 0, sym
);
8232 if (sym
->f
.func_alwinl
8233 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8234 == (VT_EXTERN
| VT_INLINE
))) {
8235 /* always_inline functions must be handled as if they
8236 don't generate multiple global defs, even if extern
8237 inline, i.e. GNU inline semantics for those. Rewrite
8238 them into static inline. */
8239 type
.t
&= ~VT_EXTERN
;
8240 type
.t
|= VT_STATIC
;
8242 /* always compile 'extern inline' */
8243 if (type
.t
& VT_EXTERN
)
8244 type
.t
&= ~VT_INLINE
;
/* GNU extension: asm label after the declarator, e.g.
   int f() __asm__("name"); */
8247 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8248 ad
.asm_label
= asm_label_instr();
8249 /* parse one last attribute list, after asm label */
8250 parse_attribute(&ad
);
8252 /* gcc does not allow __asm__("label") with function definition,
8259 #ifdef TCC_TARGET_PE
/* PE-only: dllimport/dllexport attribute checks */
8260 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8261 if (type
.t
& VT_STATIC
)
8262 tcc_error("cannot have dll linkage with static");
8263 if (type
.t
& VT_TYPEDEF
) {
/* dropping the attribute: the comma expression clears the flag while
   selecting the attribute name for the warning message */
8264 tcc_warning("'%s' attribute ignored for typedef",
8265 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8266 (ad
.a
.dllexport
= 0, "dllexport"));
8267 } else if (ad
.a
.dllimport
) {
8268 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8271 type
.t
|= VT_EXTERN
;
/* ---- function DEFINITION path (the '{' test is on a missing line) */
8277 tcc_error("cannot use local functions");
8278 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8279 expect("function definition");
8281 /* reject abstract declarators in function definition
8282 make old style params without decl have int type */
8284 while ((sym
= sym
->next
) != NULL
) {
8285 if (!(sym
->v
& ~SYM_FIELD
))
8286 expect("identifier");
8287 if (sym
->type
.t
== VT_VOID
)
8288 sym
->type
= int_type
;
8291 /* apply post-declaraton attributes */
8292 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8294 /* put function symbol */
8295 type
.t
&= ~VT_EXTERN
;
8296 sym
= external_sym(v
, &type
, 0, &ad
);
8298 /* static inline functions are just recorded as a kind
8299 of macro. Their code will be emitted at the end of
8300 the compilation unit only if they are used */
8301 if (sym
->type
.t
& VT_INLINE
) {
8302 struct InlineFunc
*fn
;
/* InlineFunc ends in a flexible filename buffer: allocate struct plus
   room for the file name (the terminating NUL is covered by the struct's
   own array — TODO confirm against the InlineFunc declaration) */
8303 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8304 strcpy(fn
->filename
, file
->filename
);
/* save the function body as a token string instead of compiling it */
8306 skip_or_save_block(&fn
->func_str
);
8307 dynarray_add(&tcc_state
->inline_fns
,
8308 &tcc_state
->nb_inline_fns
, fn
);
8310 /* compute text section */
8311 cur_text_section
= ad
.section
;
8312 if (!cur_text_section
)
8313 cur_text_section
= text_section
;
/* ---- K&R parameter declaration path (l == VT_CMP) */
8319 /* find parameter in function parameter list */
8320 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
8321 if ((sym
->v
& ~SYM_FIELD
) == v
)
8323 tcc_error("declaration for parameter '%s' but no such parameter",
8324 get_tok_str(v
, NULL
));
8326 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8327 tcc_error("storage class specified for '%s'",
8328 get_tok_str(v
, NULL
));
/* a parameter still typed VT_VOID has not been declared yet; anything
   else is a duplicate declaration */
8329 if (sym
->type
.t
!= VT_VOID
)
8330 tcc_error("redefinition of parameter '%s'",
8331 get_tok_str(v
, NULL
));
8332 convert_parameter_type(&type
);
8334 } else if (type
.t
& VT_TYPEDEF
) {
8335 /* save typedefed type */
8336 /* XXX: test storage specifiers ? */
/* a typedef may be repeated in the same scope only if the types match */
8338 if (sym
&& sym
->sym_scope
== local_scope
) {
8339 if (!is_compatible_types(&sym
->type
, &type
)
8340 || !(sym
->type
.t
& VT_TYPEDEF
))
8341 tcc_error("incompatible redefinition of '%s'",
8342 get_tok_str(v
, NULL
));
8345 sym
= sym_push(v
, &type
, 0, 0);
8349 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8350 && !(type
.t
& VT_EXTERN
)) {
8351 tcc_error("declaration of void object");
8354 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8355 /* external function definition */
8356 /* specific case for func_call attribute */
8358 } else if (!(type
.t
& VT_ARRAY
)) {
8359 /* not lvalue if array */
8362 has_init
= (tok
== '=');
8363 if (has_init
&& (type
.t
& VT_VLA
))
8364 tcc_error("variable length array cannot be initialized");
/* treat as extern: explicit 'extern' without a file-scope initializer,
   any function declaration, or ... */
8365 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8366 || (type
.t
& VT_BTYPE
) == VT_FUNC
8367 /* as with GCC, uninitialized global arrays with no size
8368 are considered extern: */
8369 || ((type
.t
& VT_ARRAY
) && !has_init
8370 && l
== VT_CONST
&& type
.ref
->c
< 0)
8372 /* external variable or function */
8373 type
.t
|= VT_EXTERN
;
8374 sym
= external_sym(v
, &type
, r
, &ad
);
/* __attribute__((alias("target"))): bind this symbol to the target's
   section/value/size instead of allocating storage */
8375 if (ad
.alias_target
) {
8378 alias_target
= sym_find(ad
.alias_target
);
8379 esym
= elfsym(alias_target
);
8381 tcc_error("unsupported forward __alias__ attribute");
8382 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
8385 if (type
.t
& VT_STATIC
)
8391 else if (l
== VT_CONST
)
8392 /* uninitialized global variables may be overridden */
8393 type
.t
|= VT_EXTERN
;
/* allocate storage and parse the initializer, if any */
8394 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
8398 if (is_for_loop_init
)
/* Entry point for parsing declarations with default storage 'l'.
   NOTE(review): the body (original lines 8411-8414) is missing from this
   extract; presumably it forwards to decl0 — verify against the full
   source. */
8410 static void decl(int l
)
8415 /* ------------------------------------------------------------------------- */
8418 /* ------------------------------------------------------------------------- */