/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
24 /********************************************************/
25 /* global variables */
/* loc : local variable index
   ind : output code index
   anon_sym: anonymous symbol index */
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
51 static SValue _vstack
[1 + VSTACK_SIZE
];
52 #define vstack (_vstack + 1)
54 ST_DATA
int const_wanted
; /* true if constant wanted */
55 ST_DATA
int nocode_wanted
; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
/* Automagical code suppression ----> */
/* Set/clear a dedicated bit in 'nocode_wanted' to suppress or re-enable
   code generation without disturbing the other suppression bits. */
#define CODE_OFF() (nocode_wanted |= 0x20000000)
#define CODE_ON() (nocode_wanted &= ~0x20000000)
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
66 static int gind(void) { CODE_ON(); return ind
; }
/* Set 'nocode_wanted' after unconditional jumps */
/* Emit an unconditional jump to known address 't', then suppress code
   generation since the following code is unreachable. */
static void gjmp_addr_acs(int t)
{
    gjmp_addr(t);
    CODE_OFF();
}
/* Emit a forward jump chained to 't', suppress code generation for the
   unreachable code that follows, and return the new chain head. */
static int gjmp_acs(int t)
{
    t = gjmp(t);
    CODE_OFF();
    return t;
}
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
77 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
78 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
79 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
81 static int last_line_num
, new_file
, func_ind
; /* debug info control */
82 ST_DATA
const char *funcname
;
83 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
84 static CString initstr
;
87 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
88 #define VT_PTRDIFF_T VT_INT
90 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_LLONG
93 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
97 ST_DATA
struct switch_t
{
101 } **p
; int n
; /* list of case ranges */
102 int def_sym
; /* default symbol */
105 struct switch_t
*prev
;
107 } *cur_switch
; /* current switch */
109 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
110 /*list of temporary local variables on the stack in current function. */
111 ST_DATA
struct temp_local_variable
{
112 int location
; //offset on stack. Svalue.c.i
115 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
116 short nb_temp_local_vars
;
118 static struct scope
{
120 struct { int loc
, num
; } vla
;
121 struct { Sym
*s
; int n
; } cl
;
124 } *cur_scope
, *loop_scope
, *root_scope
;
126 /********************************************************/
127 /* stab debug support */
129 static const struct {
132 } default_debug
[] = {
133 { VT_INT
, "int:t1=r1;-2147483648;2147483647;" },
134 { VT_BYTE
, "char:t2=r2;0;127;" },
136 { VT_LONG
| VT_INT
, "long int:t3=r3;-2147483648;2147483647;" },
138 { VT_LLONG
| VT_LONG
, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
140 { VT_INT
| VT_UNSIGNED
, "unsigned int:t4=r4;0;037777777777;" },
142 { VT_LONG
| VT_INT
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;037777777777;" },
144 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
145 { VT_LLONG
| VT_LONG
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;01777777777777777777777;" },
147 { VT_QLONG
, "__int128:t6=r6;0;-1;" },
148 { VT_QLONG
| VT_UNSIGNED
, "__int128 unsigned:t7=r7;0;-1;" },
149 { VT_LLONG
, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
150 { VT_LLONG
| VT_UNSIGNED
, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
151 { VT_SHORT
, "short int:t10=r10;-32768;32767;" },
152 { VT_SHORT
| VT_UNSIGNED
, "short unsigned int:t11=r11;0;65535;" },
153 { VT_BYTE
| VT_DEFSIGN
, "signed char:t12=r12;-128;127;" },
154 { VT_BYTE
| VT_DEFSIGN
| VT_UNSIGNED
, "unsigned char:t13=r13;0;255;" },
155 { VT_FLOAT
, "float:t14=r1;4;0;" },
156 { VT_DOUBLE
, "double:t15=r1;8;0;" },
157 { VT_LDOUBLE
, "long double:t16=r1;16;0;" },
158 { -1, "_Float32:t17=r1;4;0;" },
159 { -1, "_Float64:t18=r1;8;0;" },
160 { -1, "_Float128:t19=r1;16;0;" },
161 { -1, "_Float32x:t20=r1;8;0;" },
162 { -1, "_Float64x:t21=r1;16;0;" },
163 { -1, "_Decimal32:t22=r1;4;0;" },
164 { -1, "_Decimal64:t23=r1;8;0;" },
165 { -1, "_Decimal128:t24=r1;16;0;" },
166 /* if default char is unsigned */
167 { VT_BYTE
| VT_UNSIGNED
, "unsigned char:t25=r25;0;255;" },
168 { VT_VOID
, "void:t26=26" },
171 static int debug_next_type
;
173 static struct debug_hash
{
178 static int n_debug_hash
;
180 static struct debug_info
{
191 struct debug_info
*child
, *next
, *last
, *parent
;
192 } *debug_info
, *debug_info_root
;
194 /********************************************************/
196 #define precedence_parser
197 static void init_prec(void);
199 /********************************************************/
200 #ifndef CONFIG_TCC_ASM
201 ST_FUNC
void asm_instr(void)
203 tcc_error("inline asm() not supported");
205 ST_FUNC
void asm_global_instr(void)
207 tcc_error("inline asm() not supported");
211 /* ------------------------------------------------------------------------- */
212 static void gen_cast(CType
*type
);
213 static void gen_cast_s(int t
);
214 static inline CType
*pointed_type(CType
*type
);
215 static int is_compatible_types(CType
*type1
, CType
*type2
);
216 static int parse_btype(CType
*type
, AttributeDef
*ad
);
217 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
218 static void parse_expr_type(CType
*type
);
219 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
220 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
221 static void block(int is_expr
);
222 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
223 static void decl(int l
);
224 static int decl0(int l
, int is_for_loop_init
, Sym
*);
225 static void expr_eq(void);
226 static void vla_runtime_type_size(CType
*type
, int *a
);
227 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
228 static inline int64_t expr_const64(void);
229 static void vpush64(int ty
, unsigned long long v
);
230 static void vpush(CType
*type
);
231 static int gvtst(int inv
, int t
);
232 static void gen_inline_functions(TCCState
*s
);
233 static void free_inline_functions(TCCState
*s
);
234 static void skip_or_save_block(TokenString
**str
);
235 static void gv_dup(void);
236 static int get_temp_local_var(int size
,int align
);
237 static void clear_temp_local_var_list();
238 static void cast_error(CType
*st
, CType
*dt
);
240 ST_INLN
int is_float(int t
)
242 int bt
= t
& VT_BTYPE
;
243 return bt
== VT_LDOUBLE
249 static inline int is_integer_btype(int bt
)
258 static int btype_size(int bt
)
260 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
264 bt
== VT_PTR
? PTR_SIZE
: 0;
267 /* returns function return register from type */
268 static int R_RET(int t
)
272 #ifdef TCC_TARGET_X86_64
273 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
275 #elif defined TCC_TARGET_RISCV64
276 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
282 /* returns 2nd function return register, if any */
283 static int R2_RET(int t
)
289 #elif defined TCC_TARGET_X86_64
294 #elif defined TCC_TARGET_RISCV64
301 /* returns true for two-word types */
302 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
304 /* put function return registers to stack value */
305 static void PUT_R_RET(SValue
*sv
, int t
)
307 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
310 /* returns function return register class for type t */
311 static int RC_RET(int t
)
313 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
316 /* returns generic register class for type t */
317 static int RC_TYPE(int t
)
321 #ifdef TCC_TARGET_X86_64
322 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
324 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
326 #elif defined TCC_TARGET_RISCV64
327 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
333 /* returns 2nd register class corresponding to t and rc */
334 static int RC2_TYPE(int t
, int rc
)
336 if (!USING_TWO_WORDS(t
))
351 /* we use our own 'finite' function to avoid potential problems with
352 non standard math libs */
353 /* XXX: endianness dependent */
354 ST_FUNC
int ieee_finite(double d
)
357 memcpy(p
, &d
, sizeof(double));
358 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
361 /* compiling intel long double natively */
362 #if (defined __i386__ || defined __x86_64__) \
363 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
364 # define TCC_IS_NATIVE_387
367 ST_FUNC
void test_lvalue(void)
369 if (!(vtop
->r
& VT_LVAL
))
373 ST_FUNC
void check_vstack(void)
375 if (vtop
!= vstack
- 1)
376 tcc_error("internal compiler error: vstack leak (%d)",
377 (int)(vtop
- vstack
+ 1));
380 /* ------------------------------------------------------------------------- */
381 /* vstack debugging aid */
384 void pv (const char *lbl
, int a
, int b
)
387 for (i
= a
; i
< a
+ b
; ++i
) {
388 SValue
*p
= &vtop
[-i
];
389 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
390 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
395 /* ------------------------------------------------------------------------- */
396 /* start of translation unit info */
397 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
403 /* file info: full path + filename */
404 section_sym
= put_elf_sym(symtab_section
, 0, 0,
405 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
406 text_section
->sh_num
, NULL
);
407 getcwd(buf
, sizeof(buf
));
409 normalize_slashes(buf
);
411 pstrcat(buf
, sizeof(buf
), "/");
412 put_stabs_r(s1
, buf
, N_SO
, 0, 0,
413 text_section
->data_offset
, text_section
, section_sym
);
414 put_stabs_r(s1
, file
->prev
->filename
, N_SO
, 0, 0,
415 text_section
->data_offset
, text_section
, section_sym
);
416 for (i
= 0; i
< sizeof (default_debug
) / sizeof (default_debug
[0]); i
++)
417 put_stabs(s1
, default_debug
[i
].name
, N_LSYM
, 0, 0, 0);
419 new_file
= last_line_num
= 0;
421 debug_next_type
= sizeof(default_debug
) / sizeof(default_debug
[0]);
425 /* we're currently 'including' the <command line> */
429 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
430 symbols can be safely used */
431 put_elf_sym(symtab_section
, 0, 0,
432 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
433 SHN_ABS
, file
->filename
);
436 static void tcc_debug_stabs (TCCState
*s1
, const char *str
, int type
, unsigned long value
,
437 Section
*sec
, int sym_index
)
443 (struct debug_sym
*)tcc_realloc (debug_info
->sym
,
444 sizeof(struct debug_sym
) *
445 (debug_info
->n_sym
+ 1));
446 s
= debug_info
->sym
+ debug_info
->n_sym
++;
449 s
->str
= tcc_strdup(str
);
451 s
->sym_index
= sym_index
;
454 put_stabs_r (s1
, str
, type
, 0, 0, value
, sec
, sym_index
);
456 put_stabs (s1
, str
, type
, 0, 0, value
);
459 static void tcc_debug_stabn(int type
, int value
)
461 if (type
== N_LBRAC
) {
462 struct debug_info
*info
=
463 (struct debug_info
*) tcc_mallocz(sizeof (*info
));
466 info
->parent
= debug_info
;
468 if (debug_info
->child
) {
469 if (debug_info
->child
->last
)
470 debug_info
->child
->last
->next
= info
;
472 debug_info
->child
->next
= info
;
473 debug_info
->child
->last
= info
;
476 debug_info
->child
= info
;
479 debug_info_root
= info
;
483 debug_info
->end
= value
;
484 debug_info
= debug_info
->parent
;
488 static void tcc_get_debug_info(TCCState
*s1
, Sym
*s
, CString
*result
)
497 type
= t
->type
.t
& ~(VT_EXTERN
| VT_STATIC
| VT_CONSTANT
| VT_VOLATILE
);
498 if ((type
& VT_BTYPE
) != VT_BYTE
)
500 if (type
== VT_PTR
|| type
== (VT_PTR
| VT_ARRAY
))
501 n
++, t
= t
->type
.ref
;
505 if ((type
& VT_BTYPE
) == VT_STRUCT
) {
509 for (i
= 0; i
< n_debug_hash
; i
++) {
510 if (t
== debug_hash
[i
].type
) {
511 debug_type
= debug_hash
[i
].debug_type
;
515 if (debug_type
== -1) {
516 debug_type
= ++debug_next_type
;
517 debug_hash
= (struct debug_hash
*)
518 tcc_realloc (debug_hash
,
519 (n_debug_hash
+ 1) * sizeof(*debug_hash
));
520 debug_hash
[n_debug_hash
].debug_type
= debug_type
;
521 debug_hash
[n_debug_hash
++].type
= t
;
523 cstr_printf (&str
, "%s:T%d=%c%d",
524 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
525 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
527 IS_UNION (t
->type
.t
) ? 'u' : 's',
530 int pos
, size
, align
;
533 cstr_printf (&str
, "%s:",
534 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
535 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
536 tcc_get_debug_info (s1
, t
, &str
);
537 if (t
->type
.t
& VT_BITFIELD
) {
538 pos
= t
->c
* 8 + BIT_POS(t
->type
.t
);
539 size
= BIT_SIZE(t
->type
.t
);
543 size
= type_size(&t
->type
, &align
) * 8;
545 cstr_printf (&str
, ",%d,%d;", pos
, size
);
547 cstr_printf (&str
, ";");
548 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
552 else if (IS_ENUM(type
)) {
553 Sym
*e
= t
= t
->type
.ref
;
555 debug_type
= ++debug_next_type
;
557 cstr_printf (&str
, "%s:T%d=e",
558 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
559 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
563 cstr_printf (&str
, "%s:",
564 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
565 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
566 cstr_printf (&str
, e
->type
.t
& VT_UNSIGNED
? "%u," : "%d,",
569 cstr_printf (&str
, ";");
570 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
573 else if ((type
& VT_BTYPE
) != VT_FUNC
) {
574 type
&= ~VT_STRUCT_MASK
;
576 debug_type
<= sizeof(default_debug
) / sizeof(default_debug
[0]);
578 if (default_debug
[debug_type
- 1].type
== type
)
580 if (debug_type
> sizeof(default_debug
) / sizeof(default_debug
[0]))
584 cstr_printf (result
, "%d=", ++debug_next_type
);
587 type
= t
->type
.t
& ~(VT_EXTERN
| VT_STATIC
| VT_CONSTANT
| VT_VOLATILE
);
588 if ((type
& VT_BTYPE
) != VT_BYTE
)
591 cstr_printf (result
, "%d=*", ++debug_next_type
);
592 else if (type
== (VT_PTR
| VT_ARRAY
))
593 cstr_printf (result
, "%d=ar1;0;%d;",
594 ++debug_next_type
, t
->type
.ref
->c
- 1);
595 else if (type
== VT_FUNC
) {
596 cstr_printf (result
, "%d=f", ++debug_next_type
);
597 tcc_get_debug_info (s1
, t
->type
.ref
, result
);
604 cstr_printf (result
, "%d", debug_type
);
607 static void tcc_debug_finish (TCCState
*s1
, struct debug_info
*cur
)
611 struct debug_info
*next
= cur
->next
;
613 for (i
= 0; i
< cur
->n_sym
; i
++) {
614 struct debug_sym
*s
= &cur
->sym
[i
];
617 put_stabs_r(s1
, s
->str
, s
->type
, 0, 0, s
->value
,
618 s
->sec
, s
->sym_index
);
620 put_stabs(s1
, s
->str
, s
->type
, 0, 0, s
->value
);
624 put_stabn(s1
, N_LBRAC
, 0, 0, cur
->start
);
625 tcc_debug_finish (s1
, cur
->child
);
626 put_stabn(s1
, N_RBRAC
, 0, 0, cur
->end
);
632 static void tcc_add_debug_info(TCCState
*s1
, int param
, Sym
*s
, Sym
*e
)
635 cstr_new (&debug_str
);
636 for (; s
!= e
; s
= s
->prev
) {
637 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
639 cstr_reset (&debug_str
);
640 cstr_printf (&debug_str
, "%s:%s", get_tok_str(s
->v
, NULL
), param
? "p" : "");
641 tcc_get_debug_info(s1
, s
, &debug_str
);
642 tcc_debug_stabs(s1
, debug_str
.data
, param
? N_PSYM
: N_LSYM
, s
->c
, NULL
, 0);
644 cstr_free (&debug_str
);
647 static void tcc_debug_extern_sym(TCCState
*s1
, Sym
*sym
, int sh_num
, int sym_bind
)
649 Section
*s
= s1
->sections
[sh_num
];
653 cstr_printf (&str
, "%s:%c",
654 get_tok_str(sym
->v
, NULL
),
655 sym_bind
== STB_GLOBAL
? 'G' : local_scope
? 'V' : 'S'
657 tcc_get_debug_info(s1
, sym
, &str
);
658 if (sym_bind
== STB_GLOBAL
)
659 tcc_debug_stabs(s1
, str
.data
, N_GSYM
, 0, NULL
, 0);
661 tcc_debug_stabs(s1
, str
.data
,
662 (sym
->type
.t
& VT_STATIC
) && data_section
== s
663 ? N_STSYM
: N_LCSYM
, 0, s
, sym
->c
);
667 /* put end of translation unit info */
668 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
672 put_stabs_r(s1
, NULL
, N_SO
, 0, 0,
673 text_section
->data_offset
, text_section
, section_sym
);
674 tcc_free(debug_hash
);
677 static BufferedFile
* put_new_file(TCCState
*s1
)
679 BufferedFile
*f
= file
;
680 /* use upper file if from inline ":asm:" */
681 if (f
->filename
[0] == ':')
684 put_stabs_r(s1
, f
->filename
, N_SOL
, 0, 0, ind
, text_section
, section_sym
);
685 new_file
= last_line_num
= 0;
690 /* generate line number info */
691 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
695 || cur_text_section
!= text_section
696 || !(f
= put_new_file(s1
))
697 || last_line_num
== f
->line_num
)
699 if (func_ind
!= -1) {
700 put_stabn(s1
, N_SLINE
, 0, f
->line_num
, ind
- func_ind
);
702 /* from tcc_assemble */
703 put_stabs_r(s1
, NULL
, N_SLINE
, 0, f
->line_num
, ind
, text_section
, section_sym
);
705 last_line_num
= f
->line_num
;
708 /* put function symbol */
709 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
715 debug_info_root
= NULL
;
717 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
718 if (!(f
= put_new_file(s1
)))
720 cstr_new (&debug_str
);
721 cstr_printf(&debug_str
, "%s:%c", funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
722 tcc_get_debug_info(s1
, sym
->type
.ref
, &debug_str
);
723 put_stabs_r(s1
, debug_str
.data
, N_FUN
, 0, f
->line_num
, 0, cur_text_section
, sym
->c
);
724 cstr_free (&debug_str
);
729 /* put function size */
730 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
734 tcc_debug_stabn(N_RBRAC
, size
);
735 tcc_debug_finish (s1
, debug_info_root
);
738 /* put alternative filename */
739 ST_FUNC
void tcc_debug_putfile(TCCState
*s1
, const char *filename
)
741 if (0 == strcmp(file
->filename
, filename
))
743 pstrcpy(file
->filename
, sizeof(file
->filename
), filename
);
747 /* begin of #include */
748 ST_FUNC
void tcc_debug_bincl(TCCState
*s1
)
752 put_stabs(s1
, file
->filename
, N_BINCL
, 0, 0, 0);
756 /* end of #include */
757 ST_FUNC
void tcc_debug_eincl(TCCState
*s1
)
761 put_stabn(s1
, N_EINCL
, 0, 0, 0);
765 /* ------------------------------------------------------------------------- */
766 /* initialize vstack and types. This must be done also for tcc -E */
767 ST_FUNC
void tccgen_init(TCCState
*s1
)
770 memset(vtop
, 0, sizeof *vtop
);
772 /* define some often used types */
775 char_type
.t
= VT_BYTE
;
776 if (s1
->char_is_unsigned
)
777 char_type
.t
|= VT_UNSIGNED
;
778 char_pointer_type
= char_type
;
779 mk_pointer(&char_pointer_type
);
781 func_old_type
.t
= VT_FUNC
;
782 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
783 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
784 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
785 #ifdef precedence_parser
791 ST_FUNC
int tccgen_compile(TCCState
*s1
)
793 cur_text_section
= NULL
;
795 anon_sym
= SYM_FIRST_ANOM
;
798 nocode_wanted
= 0x80000000;
802 #ifdef TCC_TARGET_ARM
806 printf("%s: **** new file\n", file
->filename
);
808 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
811 gen_inline_functions(s1
);
813 /* end of translation unit info */
818 ST_FUNC
void tccgen_finish(TCCState
*s1
)
821 free_inline_functions(s1
);
822 sym_pop(&global_stack
, NULL
, 0);
823 sym_pop(&local_stack
, NULL
, 0);
824 /* free preprocessor macros */
827 dynarray_reset(&sym_pools
, &nb_sym_pools
);
828 sym_free_first
= NULL
;
831 /* ------------------------------------------------------------------------- */
832 ST_FUNC ElfSym
*elfsym(Sym
*s
)
836 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
839 /* apply storage attributes to Elf symbol */
840 ST_FUNC
void update_storage(Sym
*sym
)
843 int sym_bind
, old_sym_bind
;
849 if (sym
->a
.visibility
)
850 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
853 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
854 sym_bind
= STB_LOCAL
;
855 else if (sym
->a
.weak
)
858 sym_bind
= STB_GLOBAL
;
859 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
860 if (sym_bind
!= old_sym_bind
) {
861 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
865 if (sym
->a
.dllimport
)
866 esym
->st_other
|= ST_PE_IMPORT
;
867 if (sym
->a
.dllexport
)
868 esym
->st_other
|= ST_PE_EXPORT
;
872 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
873 get_tok_str(sym
->v
, NULL
),
874 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
882 /* ------------------------------------------------------------------------- */
883 /* update sym->c so that it points to an external symbol in section
884 'section' with value 'value' */
886 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
887 addr_t value
, unsigned long size
,
888 int can_add_underscore
)
890 int sym_type
, sym_bind
, info
, other
, t
;
896 name
= get_tok_str(sym
->v
, NULL
);
898 if ((t
& VT_BTYPE
) == VT_FUNC
) {
900 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
901 sym_type
= STT_NOTYPE
;
903 sym_type
= STT_OBJECT
;
905 if (t
& (VT_STATIC
| VT_INLINE
))
906 sym_bind
= STB_LOCAL
;
908 sym_bind
= STB_GLOBAL
;
912 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
913 Sym
*ref
= sym
->type
.ref
;
914 if (ref
->a
.nodecorate
) {
915 can_add_underscore
= 0;
917 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
918 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
920 other
|= ST_PE_STDCALL
;
921 can_add_underscore
= 0;
926 if (sym
->asm_label
) {
927 name
= get_tok_str(sym
->asm_label
& ~SYM_FIELD
, NULL
);
928 /* with SYM_FIELD it was __attribute__((alias("..."))) actually */
929 if (!(sym
->asm_label
& SYM_FIELD
))
930 can_add_underscore
= 0;
933 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
935 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
939 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
940 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
942 if (tcc_state
->do_debug
943 && sym_type
!= STT_FUNC
944 && sym
->v
< SYM_FIRST_ANOM
)
945 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
);
949 esym
->st_value
= value
;
950 esym
->st_size
= size
;
951 esym
->st_shndx
= sh_num
;
956 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
957 addr_t value
, unsigned long size
)
959 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
960 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
963 /* add a new relocation entry to symbol 'sym' in section 's' */
964 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
969 if (nocode_wanted
&& s
== cur_text_section
)
974 put_extern_sym(sym
, NULL
, 0, 0);
978 /* now we can add ELF relocation info */
979 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
983 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
985 greloca(s
, sym
, offset
, type
, 0);
989 /* ------------------------------------------------------------------------- */
990 /* symbol allocator */
991 static Sym
*__sym_malloc(void)
993 Sym
*sym_pool
, *sym
, *last_sym
;
996 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
997 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
999 last_sym
= sym_free_first
;
1001 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
1002 sym
->next
= last_sym
;
1006 sym_free_first
= last_sym
;
1010 static inline Sym
*sym_malloc(void)
1014 sym
= sym_free_first
;
1016 sym
= __sym_malloc();
1017 sym_free_first
= sym
->next
;
1020 sym
= tcc_malloc(sizeof(Sym
));
1025 ST_INLN
void sym_free(Sym
*sym
)
1028 sym
->next
= sym_free_first
;
1029 sym_free_first
= sym
;
1035 /* push, without hashing */
1036 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
1041 memset(s
, 0, sizeof *s
);
1051 /* find a symbol and return its associated structure. 's' is the top
1052 of the symbol stack */
1053 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
1058 else if (s
->v
== -1)
1065 /* structure lookup */
1066 ST_INLN Sym
*struct_find(int v
)
1069 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1071 return table_ident
[v
]->sym_struct
;
1074 /* find an identifier */
1075 ST_INLN Sym
*sym_find(int v
)
1078 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1080 return table_ident
[v
]->sym_identifier
;
1083 static int sym_scope(Sym
*s
)
1085 if (IS_ENUM_VAL (s
->type
.t
))
1086 return s
->type
.ref
->sym_scope
;
1088 return s
->sym_scope
;
1091 /* push a given symbol on the symbol stack */
1092 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
1101 s
= sym_push2(ps
, v
, type
->t
, c
);
1102 s
->type
.ref
= type
->ref
;
1104 /* don't record fields or anonymous symbols */
1106 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1107 /* record symbol in token array */
1108 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1110 ps
= &ts
->sym_struct
;
1112 ps
= &ts
->sym_identifier
;
1115 s
->sym_scope
= local_scope
;
1116 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
1117 tcc_error("redeclaration of '%s'",
1118 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
1123 /* push a global identifier */
1124 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
1127 s
= sym_push2(&global_stack
, v
, t
, c
);
1128 s
->r
= VT_CONST
| VT_SYM
;
1129 /* don't record anonymous symbol */
1130 if (v
< SYM_FIRST_ANOM
) {
1131 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
1132 /* modify the top most local identifier, so that sym_identifier will
1133 point to 's' when popped; happens when called from inline asm */
1134 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
1135 ps
= &(*ps
)->prev_tok
;
1142 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1143 pop them yet from the list, but do remove them from the token array. */
1144 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
1154 /* remove symbol in token array */
1156 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1157 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1159 ps
= &ts
->sym_struct
;
1161 ps
= &ts
->sym_identifier
;
1172 /* ------------------------------------------------------------------------- */
1173 static void vcheck_cmp(void)
1175 /* cannot let cpu flags if other instruction are generated. Also
1176 avoid leaving VT_JMP anywhere except on the top of the stack
1177 because it would complicate the code generator.
1179 Don't do this when nocode_wanted. vtop might come from
1180 !nocode_wanted regions (see 88_codeopt.c) and transforming
1181 it to a register without actually generating code is wrong
1182 as their value might still be used for real. All values
1183 we push under nocode_wanted will eventually be popped
1184 again, so that the VT_CMP/VT_JMP value will be in vtop
1185 when code is unsuppressed again. */
1187 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
1191 static void vsetc(CType
*type
, int r
, CValue
*vc
)
1193 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1194 tcc_error("memory full (vstack)");
1199 vtop
->r2
= VT_CONST
;
1204 ST_FUNC
void vswap(void)
1214 /* pop stack value */
1215 ST_FUNC
void vpop(void)
1218 v
= vtop
->r
& VT_VALMASK
;
1219 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1220 /* for x86, we need to pop the FP stack */
1221 if (v
== TREG_ST0
) {
1222 o(0xd8dd); /* fstp %st(0) */
1226 /* need to put correct jump if && or || without test */
1233 /* push constant of type "type" with useless value */
1234 static void vpush(CType
*type
)
1236 vset(type
, VT_CONST
, 0);
1239 /* push arbitrary 64bit constant */
1240 static void vpush64(int ty
, unsigned long long v
)
1247 vsetc(&ctype
, VT_CONST
, &cval
);
1250 /* push integer constant */
1251 ST_FUNC
void vpushi(int v
)
1256 /* push a pointer sized constant */
1257 static void vpushs(addr_t v
)
1259 vpush64(VT_SIZE_T
, v
);
1262 /* push long long constant */
1263 static inline void vpushll(long long v
)
1265 vpush64(VT_LLONG
, v
);
1268 ST_FUNC
void vset(CType
*type
, int r
, int v
)
1272 vsetc(type
, r
, &cval
);
1275 static void vseti(int r
, int v
)
1283 ST_FUNC
void vpushv(SValue
*v
)
1285 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1286 tcc_error("memory full (vstack)");
1291 static void vdup(void)
1296 /* rotate n first stack elements to the bottom
1297 I1 ... In -> I2 ... In I1 [top is right]
1299 ST_FUNC
void vrotb(int n
)
1306 for(i
=-n
+1;i
!=0;i
++)
1307 vtop
[i
] = vtop
[i
+1];
1311 /* rotate the n elements before entry e towards the top
1312 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1314 ST_FUNC
void vrote(SValue
*e
, int n
)
1321 for(i
= 0;i
< n
- 1; i
++)
1326 /* rotate n first stack elements to the top
1327 I1 ... In -> In I1 ... I(n-1) [top is right]
1329 ST_FUNC
void vrott(int n
)
1334 /* ------------------------------------------------------------------------- */
1335 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1337 /* called from generators to set the result from relational ops */
1338 ST_FUNC
void vset_VT_CMP(int op
)
1346 /* called once before asking generators to load VT_CMP to a register */
1347 static void vset_VT_JMP(void)
1349 int op
= vtop
->cmp_op
;
1351 if (vtop
->jtrue
|| vtop
->jfalse
) {
1352 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1353 int inv
= op
& (op
< 2); /* small optimization */
1354 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1356 /* otherwise convert flags (rsp. 0/1) to register */
1358 if (op
< 2) /* doesn't seem to happen */
1363 /* Set CPU Flags, doesn't yet jump */
1364 static void gvtst_set(int inv
, int t
)
1368 if (vtop
->r
!= VT_CMP
) {
1371 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1372 vset_VT_CMP(vtop
->c
.i
!= 0);
1375 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1376 *p
= gjmp_append(*p
, t
);
1379 /* Generate value test
1381 * Generate a test for any value (jump, comparison and integers) */
1382 static int gvtst(int inv
, int t
)
1387 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1389 x
= u
, u
= t
, t
= x
;
1392 /* jump to the wanted target */
1394 t
= gjmp_cond(op
^ inv
, t
);
1397 /* resolve complementary jumps to here */
1404 /* generate a zero or nozero test */
1405 static void gen_test_zero(int op
)
1407 if (vtop
->r
== VT_CMP
) {
1411 vtop
->jfalse
= vtop
->jtrue
;
1421 /* ------------------------------------------------------------------------- */
1422 /* push a symbol value of TYPE */
1423 static inline void vpushsym(CType
*type
, Sym
*sym
)
1427 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1431 /* Return a static symbol pointing to a section */
1432 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1438 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1439 sym
->type
.t
|= VT_STATIC
;
1440 put_extern_sym(sym
, sec
, offset
, size
);
1444 /* push a reference to a section offset by adding a dummy symbol */
1445 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1447 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1450 /* define a new external reference to a symbol 'v' of type 'u' */
1451 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1457 /* push forward reference */
1458 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1459 s
->type
.ref
= type
->ref
;
1460 } else if (IS_ASM_SYM(s
)) {
1461 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1462 s
->type
.ref
= type
->ref
;
1468 /* Merge symbol attributes. */
1469 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1471 if (sa1
->aligned
&& !sa
->aligned
)
1472 sa
->aligned
= sa1
->aligned
;
1473 sa
->packed
|= sa1
->packed
;
1474 sa
->weak
|= sa1
->weak
;
1475 if (sa1
->visibility
!= STV_DEFAULT
) {
1476 int vis
= sa
->visibility
;
1477 if (vis
== STV_DEFAULT
1478 || vis
> sa1
->visibility
)
1479 vis
= sa1
->visibility
;
1480 sa
->visibility
= vis
;
1482 sa
->dllexport
|= sa1
->dllexport
;
1483 sa
->nodecorate
|= sa1
->nodecorate
;
1484 sa
->dllimport
|= sa1
->dllimport
;
1487 /* Merge function attributes. */
1488 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1490 if (fa1
->func_call
&& !fa
->func_call
)
1491 fa
->func_call
= fa1
->func_call
;
1492 if (fa1
->func_type
&& !fa
->func_type
)
1493 fa
->func_type
= fa1
->func_type
;
1494 if (fa1
->func_args
&& !fa
->func_args
)
1495 fa
->func_args
= fa1
->func_args
;
1496 if (fa1
->func_noreturn
)
1497 fa
->func_noreturn
= 1;
1504 /* Merge attributes. */
1505 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1507 merge_symattr(&ad
->a
, &ad1
->a
);
1508 merge_funcattr(&ad
->f
, &ad1
->f
);
1511 ad
->section
= ad1
->section
;
1513 ad
->asm_label
= ad1
->asm_label
;
1515 ad
->attr_mode
= ad1
->attr_mode
;
1518 /* Merge some type attributes. */
1519 static void patch_type(Sym
*sym
, CType
*type
)
1521 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1522 if (!(sym
->type
.t
& VT_EXTERN
))
1523 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1524 sym
->type
.t
&= ~VT_EXTERN
;
1527 if (IS_ASM_SYM(sym
)) {
1528 /* stay static if both are static */
1529 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1530 sym
->type
.ref
= type
->ref
;
1533 if (!is_compatible_types(&sym
->type
, type
)) {
1534 tcc_error("incompatible types for redefinition of '%s'",
1535 get_tok_str(sym
->v
, NULL
));
1537 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1538 int static_proto
= sym
->type
.t
& VT_STATIC
;
1539 /* warn if static follows non-static function declaration */
1540 if ((type
->t
& VT_STATIC
) && !static_proto
1541 /* XXX this test for inline shouldn't be here. Until we
1542 implement gnu-inline mode again it silences a warning for
1543 mingw caused by our workarounds. */
1544 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1545 tcc_warning("static storage ignored for redefinition of '%s'",
1546 get_tok_str(sym
->v
, NULL
));
1548 /* set 'inline' if both agree or if one has static */
1549 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1550 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1551 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1552 static_proto
|= VT_INLINE
;
1555 if (0 == (type
->t
& VT_EXTERN
)) {
1556 struct FuncAttr f
= sym
->type
.ref
->f
;
1557 /* put complete type, use static from prototype */
1558 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1559 sym
->type
.ref
= type
->ref
;
1560 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1562 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1565 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1566 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1567 sym
->type
.ref
= type
->ref
;
1571 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1572 /* set array size if it was omitted in extern declaration */
1573 sym
->type
.ref
->c
= type
->ref
->c
;
1575 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1576 tcc_warning("storage mismatch for redefinition of '%s'",
1577 get_tok_str(sym
->v
, NULL
));
1581 /* Merge some storage attributes. */
1582 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1585 patch_type(sym
, type
);
1587 #ifdef TCC_TARGET_PE
1588 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1589 tcc_error("incompatible dll linkage for redefinition of '%s'",
1590 get_tok_str(sym
->v
, NULL
));
1592 merge_symattr(&sym
->a
, &ad
->a
);
1594 sym
->asm_label
= ad
->asm_label
;
1595 update_storage(sym
);
1598 /* copy sym to other stack */
1599 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1602 s
= sym_malloc(), *s
= *s0
;
1603 s
->prev
= *ps
, *ps
= s
;
1604 if (s
->v
< SYM_FIRST_ANOM
) {
1605 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1606 s
->prev_tok
= *ps
, *ps
= s
;
1611 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1612 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1614 int bt
= s
->type
.t
& VT_BTYPE
;
1615 if (bt
== VT_FUNC
|| bt
== VT_PTR
) {
1616 Sym
**sp
= &s
->type
.ref
;
1617 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1618 Sym
*s2
= sym_copy(s
, ps
);
1619 sp
= &(*sp
= s2
)->next
;
1620 sym_copy_ref(s2
, ps
);
1625 /* define a new external reference to a symbol 'v' */
1626 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1630 /* look for global symbol */
1632 while (s
&& s
->sym_scope
)
1636 /* push forward reference */
1637 s
= global_identifier_push(v
, type
->t
, 0);
1640 s
->asm_label
= ad
->asm_label
;
1641 s
->type
.ref
= type
->ref
;
1642 /* copy type to the global stack */
1644 sym_copy_ref(s
, &global_stack
);
1646 patch_storage(s
, ad
, type
);
1648 /* push variables on local_stack if any */
1649 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1650 s
= sym_copy(s
, &local_stack
);
1654 /* push a reference to global symbol v */
1655 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1657 vpushsym(type
, external_global_sym(v
, type
));
1660 /* save registers up to (vtop - n) stack entry */
1661 ST_FUNC
void save_regs(int n
)
1664 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1668 /* save r to the memory stack, and mark it as being free */
1669 ST_FUNC
void save_reg(int r
)
1671 save_reg_upstack(r
, 0);
1674 /* save r to the memory stack, and mark it as being free,
1675 if seen up to (vtop - n) stack entry */
1676 ST_FUNC
void save_reg_upstack(int r
, int n
)
1678 int l
, size
, align
, bt
;
1681 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1686 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1687 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1688 /* must save value on stack if not already done */
1690 bt
= p
->type
.t
& VT_BTYPE
;
1693 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1696 size
= type_size(&sv
.type
, &align
);
1697 l
= get_temp_local_var(size
,align
);
1698 sv
.r
= VT_LOCAL
| VT_LVAL
;
1700 store(p
->r
& VT_VALMASK
, &sv
);
1701 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1702 /* x86 specific: need to pop fp register ST0 if saved */
1703 if (r
== TREG_ST0
) {
1704 o(0xd8dd); /* fstp %st(0) */
1707 /* special long long case */
1708 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1713 /* mark that stack entry as being saved on the stack */
1714 if (p
->r
& VT_LVAL
) {
1715 /* also clear the bounded flag because the
1716 relocation address of the function was stored in
1718 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1720 p
->r
= VT_LVAL
| VT_LOCAL
;
1728 #ifdef TCC_TARGET_ARM
1729 /* find a register of class 'rc2' with at most one reference on stack.
1730 * If none, call get_reg(rc) */
1731 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1736 for(r
=0;r
<NB_REGS
;r
++) {
1737 if (reg_classes
[r
] & rc2
) {
1740 for(p
= vstack
; p
<= vtop
; p
++) {
1741 if ((p
->r
& VT_VALMASK
) == r
||
1753 /* find a free register of class 'rc'. If none, save one register */
1754 ST_FUNC
int get_reg(int rc
)
1759 /* find a free register */
1760 for(r
=0;r
<NB_REGS
;r
++) {
1761 if (reg_classes
[r
] & rc
) {
1764 for(p
=vstack
;p
<=vtop
;p
++) {
1765 if ((p
->r
& VT_VALMASK
) == r
||
1774 /* no register left : free the first one on the stack (VERY
1775 IMPORTANT to start from the bottom to ensure that we don't
1776 spill registers used in gen_opi()) */
1777 for(p
=vstack
;p
<=vtop
;p
++) {
1778 /* look at second register (if long long) */
1780 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1782 r
= p
->r
& VT_VALMASK
;
1783 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1789 /* Should never comes here */
1793 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1794 static int get_temp_local_var(int size
,int align
){
1796 struct temp_local_variable
*temp_var
;
1803 for(i
=0;i
<nb_temp_local_vars
;i
++){
1804 temp_var
=&arr_temp_local_vars
[i
];
1805 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1808 /*check if temp_var is free*/
1810 for(p
=vstack
;p
<=vtop
;p
++) {
1812 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1813 if(p
->c
.i
==temp_var
->location
){
1820 found_var
=temp_var
->location
;
1826 loc
= (loc
- size
) & -align
;
1827 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1828 temp_var
=&arr_temp_local_vars
[i
];
1829 temp_var
->location
=loc
;
1830 temp_var
->size
=size
;
1831 temp_var
->align
=align
;
1832 nb_temp_local_vars
++;
1839 static void clear_temp_local_var_list(){
1840 nb_temp_local_vars
=0;
1843 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1845 static void move_reg(int r
, int s
, int t
)
1859 /* get address of vtop (vtop MUST BE an lvalue) */
1860 ST_FUNC
void gaddrof(void)
1862 vtop
->r
&= ~VT_LVAL
;
1863 /* tricky: if saved lvalue, then we can go back to lvalue */
1864 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1865 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1868 #ifdef CONFIG_TCC_BCHECK
1869 /* generate lvalue bound code */
1870 static void gbound(void)
1874 vtop
->r
&= ~VT_MUSTBOUND
;
1875 /* if lvalue, then use checking code before dereferencing */
1876 if (vtop
->r
& VT_LVAL
) {
1877 /* if not VT_BOUNDED value, then make one */
1878 if (!(vtop
->r
& VT_BOUNDED
)) {
1879 /* must save type because we must set it to int to get pointer */
1881 vtop
->type
.t
= VT_PTR
;
1884 gen_bounded_ptr_add();
1888 /* then check for dereferencing */
1889 gen_bounded_ptr_deref();
1893 /* we need to call __bound_ptr_add before we start to load function
1894 args into registers */
1895 ST_FUNC
void gbound_args(int nb_args
)
1900 for (i
= 1; i
<= nb_args
; ++i
)
1901 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1907 sv
= vtop
- nb_args
;
1908 if (sv
->r
& VT_SYM
) {
1912 #ifndef TCC_TARGET_PE
1913 || v
== TOK_sigsetjmp
1914 || v
== TOK___sigsetjmp
1917 vpush_global_sym(&func_old_type
, TOK___bound_setjmp
);
1920 func_bound_add_epilog
= 1;
1922 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1923 if (v
== TOK_alloca
)
1924 func_bound_add_epilog
= 1;
1929 /* Add bounds for local symbols from S to E (via ->prev) */
1930 static void add_local_bounds(Sym
*s
, Sym
*e
)
1932 for (; s
!= e
; s
= s
->prev
) {
1933 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1935 /* Add arrays/structs/unions because we always take address */
1936 if ((s
->type
.t
& VT_ARRAY
)
1937 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1938 || s
->a
.addrtaken
) {
1939 /* add local bound info */
1940 int align
, size
= type_size(&s
->type
, &align
);
1941 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1942 2 * sizeof(addr_t
));
1943 bounds_ptr
[0] = s
->c
;
1944 bounds_ptr
[1] = size
;
1950 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1951 static void pop_local_syms(Sym
**ptop
, Sym
*b
, int keep
, int ellipsis
)
1953 #ifdef CONFIG_TCC_BCHECK
1954 if (tcc_state
->do_bounds_check
&& !ellipsis
&& !keep
)
1955 add_local_bounds(*ptop
, b
);
1957 if (tcc_state
->do_debug
)
1958 tcc_add_debug_info (tcc_state
, !local_scope
, *ptop
, b
);
1959 sym_pop(ptop
, b
, keep
);
1962 static void incr_bf_adr(int o
)
1964 vtop
->type
= char_pointer_type
;
1968 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1972 /* single-byte load mode for packed or otherwise unaligned bitfields */
1973 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1976 save_reg_upstack(vtop
->r
, 1);
1977 vpush64(type
->t
& VT_BTYPE
, 0); // B X
1978 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
1987 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
1989 vpushi((1 << n
) - 1), gen_op('&');
1992 vpushi(bits
), gen_op(TOK_SHL
);
1995 bits
+= n
, bit_size
-= n
, o
= 1;
1998 if (!(type
->t
& VT_UNSIGNED
)) {
1999 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
2000 vpushi(n
), gen_op(TOK_SHL
);
2001 vpushi(n
), gen_op(TOK_SAR
);
2005 /* single-byte store mode for packed or otherwise unaligned bitfields */
2006 static void store_packed_bf(int bit_pos
, int bit_size
)
2008 int bits
, n
, o
, m
, c
;
2010 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2012 save_reg_upstack(vtop
->r
, 1);
2013 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2015 incr_bf_adr(o
); // X B
2017 c
? vdup() : gv_dup(); // B V X
2020 vpushi(bits
), gen_op(TOK_SHR
);
2022 vpushi(bit_pos
), gen_op(TOK_SHL
);
2027 m
= ((1 << n
) - 1) << bit_pos
;
2028 vpushi(m
), gen_op('&'); // X B V1
2029 vpushv(vtop
-1); // X B V1 B
2030 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
2031 gen_op('&'); // X B V1 B1
2032 gen_op('|'); // X B V2
2034 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
2035 vstore(), vpop(); // X B
2036 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
2041 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
2044 if (0 == sv
->type
.ref
)
2046 t
= sv
->type
.ref
->auxtype
;
2047 if (t
!= -1 && t
!= VT_STRUCT
) {
2048 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
2054 /* store vtop a register belonging to class 'rc'. lvalues are
2055 converted to values. Cannot be used if cannot be converted to
2056 register value (such as structures). */
2057 ST_FUNC
int gv(int rc
)
2059 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
2060 int bit_pos
, bit_size
, size
, align
;
2062 /* NOTE: get_reg can modify vstack[] */
2063 if (vtop
->type
.t
& VT_BITFIELD
) {
2066 bit_pos
= BIT_POS(vtop
->type
.t
);
2067 bit_size
= BIT_SIZE(vtop
->type
.t
);
2068 /* remove bit field info to avoid loops */
2069 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
2072 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
2073 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
2074 type
.t
|= VT_UNSIGNED
;
2076 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
2078 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2083 if (r
== VT_STRUCT
) {
2084 load_packed_bf(&type
, bit_pos
, bit_size
);
2086 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
2087 /* cast to int to propagate signedness in following ops */
2089 /* generate shifts */
2090 vpushi(bits
- (bit_pos
+ bit_size
));
2092 vpushi(bits
- bit_size
);
2093 /* NOTE: transformed to SHR if unsigned */
2098 if (is_float(vtop
->type
.t
) &&
2099 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2100 unsigned long offset
;
2101 /* CPUs usually cannot use float constants, so we store them
2102 generically in data segment */
2103 size
= type_size(&vtop
->type
, &align
);
2105 size
= 0, align
= 1;
2106 offset
= section_add(data_section
, size
, align
);
2107 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
2109 init_putv(&vtop
->type
, data_section
, offset
);
2112 #ifdef CONFIG_TCC_BCHECK
2113 if (vtop
->r
& VT_MUSTBOUND
)
2117 bt
= vtop
->type
.t
& VT_BTYPE
;
2119 #ifdef TCC_TARGET_RISCV64
2121 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
2124 rc2
= RC2_TYPE(bt
, rc
);
2126 /* need to reload if:
2128 - lvalue (need to dereference pointer)
2129 - already a register, but not in the right class */
2130 r
= vtop
->r
& VT_VALMASK
;
2131 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
2132 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
2134 if (!r_ok
|| !r2_ok
) {
2138 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
2139 int original_type
= vtop
->type
.t
;
2141 /* two register type load :
2142 expand to two words temporarily */
2143 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2145 unsigned long long ll
= vtop
->c
.i
;
2146 vtop
->c
.i
= ll
; /* first word */
2148 vtop
->r
= r
; /* save register value */
2149 vpushi(ll
>> 32); /* second word */
2150 } else if (vtop
->r
& VT_LVAL
) {
2151 /* We do not want to modifier the long long pointer here.
2152 So we save any other instances down the stack */
2153 save_reg_upstack(vtop
->r
, 1);
2154 /* load from memory */
2155 vtop
->type
.t
= load_type
;
2158 vtop
[-1].r
= r
; /* save register value */
2159 /* increment pointer to get second word */
2160 vtop
->type
.t
= VT_PTRDIFF_T
;
2165 vtop
->type
.t
= load_type
;
2167 /* move registers */
2170 if (r2_ok
&& vtop
->r2
< VT_CONST
)
2173 vtop
[-1].r
= r
; /* save register value */
2174 vtop
->r
= vtop
[-1].r2
;
2176 /* Allocate second register. Here we rely on the fact that
2177 get_reg() tries first to free r2 of an SValue. */
2181 /* write second register */
2184 vtop
->type
.t
= original_type
;
2186 if (vtop
->r
== VT_CMP
)
2188 /* one register type load */
2193 #ifdef TCC_TARGET_C67
2194 /* uses register pairs for doubles */
2195 if (bt
== VT_DOUBLE
)
2202 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2203 ST_FUNC
void gv2(int rc1
, int rc2
)
2205 /* generate more generic register first. But VT_JMP or VT_CMP
2206 values must be generated first in all cases to avoid possible
2208 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
2213 /* test if reload is needed for first register */
2214 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
2224 /* test if reload is needed for first register */
2225 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
2232 /* expand 64bit on stack in two ints */
2233 ST_FUNC
void lexpand(void)
2236 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
2237 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
2238 if (v
== VT_CONST
) {
2241 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
2247 vtop
[0].r
= vtop
[-1].r2
;
2248 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2250 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2255 /* build a long long from two ints */
2256 static void lbuild(int t
)
2258 gv2(RC_INT
, RC_INT
);
2259 vtop
[-1].r2
= vtop
[0].r
;
2260 vtop
[-1].type
.t
= t
;
2265 /* convert stack entry to register and duplicate its value in another
2267 static void gv_dup(void)
2273 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2274 if (t
& VT_BITFIELD
) {
2284 /* stack: H L L1 H1 */
2294 /* duplicate value */
2304 /* generate CPU independent (unsigned) long long operations */
2305 static void gen_opl(int op
)
2307 int t
, a
, b
, op1
, c
, i
;
2309 unsigned short reg_iret
= REG_IRET
;
2310 unsigned short reg_lret
= REG_IRE2
;
2316 func
= TOK___divdi3
;
2319 func
= TOK___udivdi3
;
2322 func
= TOK___moddi3
;
2325 func
= TOK___umoddi3
;
2332 /* call generic long long function */
2333 vpush_global_sym(&func_old_type
, func
);
2338 vtop
->r2
= reg_lret
;
2346 //pv("gen_opl A",0,2);
2352 /* stack: L1 H1 L2 H2 */
2357 vtop
[-2] = vtop
[-3];
2360 /* stack: H1 H2 L1 L2 */
2361 //pv("gen_opl B",0,4);
2367 /* stack: H1 H2 L1 L2 ML MH */
2370 /* stack: ML MH H1 H2 L1 L2 */
2374 /* stack: ML MH H1 L2 H2 L1 */
2379 /* stack: ML MH M1 M2 */
2382 } else if (op
== '+' || op
== '-') {
2383 /* XXX: add non carry method too (for MIPS or alpha) */
2389 /* stack: H1 H2 (L1 op L2) */
2392 gen_op(op1
+ 1); /* TOK_xxxC2 */
2395 /* stack: H1 H2 (L1 op L2) */
2398 /* stack: (L1 op L2) H1 H2 */
2400 /* stack: (L1 op L2) (H1 op H2) */
2408 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2409 t
= vtop
[-1].type
.t
;
2413 /* stack: L H shift */
2415 /* constant: simpler */
2416 /* NOTE: all comments are for SHL. the other cases are
2417 done by swapping words */
2428 if (op
!= TOK_SAR
) {
2461 /* XXX: should provide a faster fallback on x86 ? */
2464 func
= TOK___ashrdi3
;
2467 func
= TOK___lshrdi3
;
2470 func
= TOK___ashldi3
;
2476 /* compare operations */
2482 /* stack: L1 H1 L2 H2 */
2484 vtop
[-1] = vtop
[-2];
2486 /* stack: L1 L2 H1 H2 */
2490 /* when values are equal, we need to compare low words. since
2491 the jump is inverted, we invert the test too. */
2494 else if (op1
== TOK_GT
)
2496 else if (op1
== TOK_ULT
)
2498 else if (op1
== TOK_UGT
)
2508 /* generate non equal test */
2510 vset_VT_CMP(TOK_NE
);
2514 /* compare low. Always unsigned */
2518 else if (op1
== TOK_LE
)
2520 else if (op1
== TOK_GT
)
2522 else if (op1
== TOK_GE
)
2525 #if 0//def TCC_TARGET_I386
2526 if (op
== TOK_NE
) { gsym(b
); break; }
2527 if (op
== TOK_EQ
) { gsym(a
); break; }
/* Signed 64-bit division carried out on unsigned operands (used for
   constant folding): divide the magnitudes, then negate the quotient
   when the operand signs differ. Unsigned negation wraps, so this is
   well-defined for all inputs; b must be non-zero (caller checks). */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ma = (a >> 63) ? 0 - a : a;   /* |a| */
    uint64_t mb = (b >> 63) ? 0 - b : b;   /* |b| */
    uint64_t q = ma / mb;
    return ((a ^ b) >> 63) ? 0 - q : q;
}
/* Signed "less than" on values stored in uint64_t: toggling the sign
   bit maps the signed range onto the unsigned range while preserving
   order, so a plain unsigned compare gives the signed result. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign = (uint64_t)1 << 63;
    return (a ^ sign) < (b ^ sign);
}
2547 /* handle integer constant optimizations and various machine
2549 static void gen_opic(int op
)
2551 SValue
*v1
= vtop
- 1;
2553 int t1
= v1
->type
.t
& VT_BTYPE
;
2554 int t2
= v2
->type
.t
& VT_BTYPE
;
2555 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2556 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2557 uint64_t l1
= c1
? v1
->c
.i
: 0;
2558 uint64_t l2
= c2
? v2
->c
.i
: 0;
2559 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2561 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2562 l1
= ((uint32_t)l1
|
2563 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2564 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2565 l2
= ((uint32_t)l2
|
2566 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2570 case '+': l1
+= l2
; break;
2571 case '-': l1
-= l2
; break;
2572 case '&': l1
&= l2
; break;
2573 case '^': l1
^= l2
; break;
2574 case '|': l1
|= l2
; break;
2575 case '*': l1
*= l2
; break;
2582 /* if division by zero, generate explicit division */
2584 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2585 tcc_error("division by zero in constant");
2589 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2590 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2591 case TOK_UDIV
: l1
= l1
/ l2
; break;
2592 case TOK_UMOD
: l1
= l1
% l2
; break;
2595 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2596 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2598 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2601 case TOK_ULT
: l1
= l1
< l2
; break;
2602 case TOK_UGE
: l1
= l1
>= l2
; break;
2603 case TOK_EQ
: l1
= l1
== l2
; break;
2604 case TOK_NE
: l1
= l1
!= l2
; break;
2605 case TOK_ULE
: l1
= l1
<= l2
; break;
2606 case TOK_UGT
: l1
= l1
> l2
; break;
2607 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2608 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2609 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2610 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2612 case TOK_LAND
: l1
= l1
&& l2
; break;
2613 case TOK_LOR
: l1
= l1
|| l2
; break;
2617 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2618 l1
= ((uint32_t)l1
|
2619 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2623 /* if commutative ops, put c2 as constant */
2624 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2625 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2627 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2628 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2630 if (!const_wanted
&&
2632 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2633 (l1
== -1 && op
== TOK_SAR
))) {
2634 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2636 } else if (!const_wanted
&&
2637 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2639 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2640 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2641 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2646 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2649 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2650 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2653 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2654 /* filter out NOP operations like x*1, x-0, x&-1... */
2656 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2657 /* try to use shifts instead of muls or divs */
2658 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2667 else if (op
== TOK_PDIV
)
2673 } else if (c2
&& (op
== '+' || op
== '-') &&
2674 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2675 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2676 /* symbol + constant case */
2680 /* The backends can't always deal with addends to symbols
2681 larger than +-1<<31. Don't construct such. */
2688 /* call low level op generator */
2689 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2690 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2698 /* generate a floating point operation with constant propagation */
2699 static void gen_opif(int op
)
2703 #if defined _MSC_VER && defined __x86_64__
2704 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2711 /* currently, we cannot do computations with forward symbols */
2712 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2713 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2715 if (v1
->type
.t
== VT_FLOAT
) {
2718 } else if (v1
->type
.t
== VT_DOUBLE
) {
2726 /* NOTE: we only do constant propagation if finite number (not
2727 NaN or infinity) (ANSI spec) */
2728 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2732 case '+': f1
+= f2
; break;
2733 case '-': f1
-= f2
; break;
2734 case '*': f1
*= f2
; break;
2737 /* If not in initializer we need to potentially generate
2738 FP exceptions at runtime, otherwise we want to fold. */
2744 /* XXX: also handles tests ? */
2748 /* XXX: overflow test ? */
2749 if (v1
->type
.t
== VT_FLOAT
) {
2751 } else if (v1
->type
.t
== VT_DOUBLE
) {
2763 /* print a type. If 'varstr' is not NULL, then the variable is also
2764 printed in the type */
2766 /* XXX: add array and function pointers */
2767 static void type_to_str(char *buf
, int buf_size
,
2768 CType
*type
, const char *varstr
)
2780 pstrcat(buf
, buf_size
, "extern ");
2782 pstrcat(buf
, buf_size
, "static ");
2784 pstrcat(buf
, buf_size
, "typedef ");
2786 pstrcat(buf
, buf_size
, "inline ");
2787 if (t
& VT_VOLATILE
)
2788 pstrcat(buf
, buf_size
, "volatile ");
2789 if (t
& VT_CONSTANT
)
2790 pstrcat(buf
, buf_size
, "const ");
2792 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2793 || ((t
& VT_UNSIGNED
)
2794 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2797 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2799 buf_size
-= strlen(buf
);
2835 tstr
= "long double";
2837 pstrcat(buf
, buf_size
, tstr
);
2844 pstrcat(buf
, buf_size
, tstr
);
2845 v
= type
->ref
->v
& ~SYM_STRUCT
;
2846 if (v
>= SYM_FIRST_ANOM
)
2847 pstrcat(buf
, buf_size
, "<anonymous>");
2849 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2854 if (varstr
&& '*' == *varstr
) {
2855 pstrcat(buf1
, sizeof(buf1
), "(");
2856 pstrcat(buf1
, sizeof(buf1
), varstr
);
2857 pstrcat(buf1
, sizeof(buf1
), ")");
2859 pstrcat(buf1
, buf_size
, "(");
2861 while (sa
!= NULL
) {
2863 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2864 pstrcat(buf1
, sizeof(buf1
), buf2
);
2867 pstrcat(buf1
, sizeof(buf1
), ", ");
2869 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2870 pstrcat(buf1
, sizeof(buf1
), ", ...");
2871 pstrcat(buf1
, sizeof(buf1
), ")");
2872 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2877 if (varstr
&& '*' == *varstr
)
2878 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2880 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2881 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2884 pstrcpy(buf1
, sizeof(buf1
), "*");
2885 if (t
& VT_CONSTANT
)
2886 pstrcat(buf1
, buf_size
, "const ");
2887 if (t
& VT_VOLATILE
)
2888 pstrcat(buf1
, buf_size
, "volatile ");
2890 pstrcat(buf1
, sizeof(buf1
), varstr
);
2891 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2895 pstrcat(buf
, buf_size
, " ");
2896 pstrcat(buf
, buf_size
, varstr
);
2901 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2903 char buf1
[256], buf2
[256];
2904 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2905 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2906 tcc_error(fmt
, buf1
, buf2
);
2909 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2911 char buf1
[256], buf2
[256];
2912 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2913 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2914 tcc_warning(fmt
, buf1
, buf2
);
2917 static int pointed_size(CType
*type
)
2920 return type_size(pointed_type(type
), &align
);
2923 static void vla_runtime_pointed_size(CType
*type
)
2926 vla_runtime_type_size(pointed_type(type
), &align
);
2929 static inline int is_null_pointer(SValue
*p
)
2931 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2933 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2934 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2935 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2936 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2937 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2938 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2942 /* compare function types. OLD functions match any new functions */
2943 static int is_compatible_func(CType
*type1
, CType
*type2
)
2949 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2951 if (s1
->f
.func_type
!= s2
->f
.func_type
2952 && s1
->f
.func_type
!= FUNC_OLD
2953 && s2
->f
.func_type
!= FUNC_OLD
)
2955 /* we should check the function return type for FUNC_OLD too
2956 but that causes problems with the internally used support
2957 functions such as TOK_memmove */
2958 if (s1
->f
.func_type
== FUNC_OLD
&& !s1
->next
)
2960 if (s2
->f
.func_type
== FUNC_OLD
&& !s2
->next
)
2963 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2974 /* return true if type1 and type2 are the same. If unqualified is
2975 true, qualifiers on the types are ignored.
2977 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
2981 t1
= type1
->t
& VT_TYPE
;
2982 t2
= type2
->t
& VT_TYPE
;
2984 /* strip qualifiers before comparing */
2985 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2986 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
2989 /* Default Vs explicit signedness only matters for char */
2990 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
2994 /* XXX: bitfields ? */
2999 && !(type1
->ref
->c
< 0
3000 || type2
->ref
->c
< 0
3001 || type1
->ref
->c
== type2
->ref
->c
))
3004 /* test more complicated cases */
3005 bt1
= t1
& VT_BTYPE
;
3006 if (bt1
== VT_PTR
) {
3007 type1
= pointed_type(type1
);
3008 type2
= pointed_type(type2
);
3009 return is_compatible_types(type1
, type2
);
3010 } else if (bt1
== VT_STRUCT
) {
3011 return (type1
->ref
== type2
->ref
);
3012 } else if (bt1
== VT_FUNC
) {
3013 return is_compatible_func(type1
, type2
);
3014 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
3015 /* If both are enums then they must be the same, if only one is then
3016 t1 and t2 must be equal, which was checked above already. */
3017 return type1
->ref
== type2
->ref
;
3023 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3024 type is stored in DEST if non-null (except for pointer plus/minus) . */
3025 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
3027 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
3028 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
3034 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
3035 ret
= op
== '?' ? 1 : 0;
3036 /* NOTE: as an extension, we accept void on only one side */
3038 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3039 if (op
== '+') ; /* Handled in caller */
3040 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3041 /* If one is a null ptr constant the result type is the other. */
3042 else if (is_null_pointer (op2
)) type
= *type1
;
3043 else if (is_null_pointer (op1
)) type
= *type2
;
3044 else if (bt1
!= bt2
) {
3045 /* accept comparison or cond-expr between pointer and integer
3047 if ((op
== '?' || TOK_ISCOND(op
))
3048 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
3049 tcc_warning("pointer/integer mismatch in %s",
3050 op
== '?' ? "conditional expression" : "comparison");
3051 else if (op
!= '-' || !is_integer_btype(bt2
))
3053 type
= *(bt1
== VT_PTR
? type1
: type2
);
3055 CType
*pt1
= pointed_type(type1
);
3056 CType
*pt2
= pointed_type(type2
);
3057 int pbt1
= pt1
->t
& VT_BTYPE
;
3058 int pbt2
= pt2
->t
& VT_BTYPE
;
3059 int newquals
, copied
= 0;
3060 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
3061 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
3062 if (op
!= '?' && !TOK_ISCOND(op
))
3065 type_incompatibility_warning(type1
, type2
,
3067 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3068 : "pointer type mismatch in comparison('%s' and '%s')");
3071 /* pointers to void get preferred, otherwise the
3072 pointed to types minus qualifs should be compatible */
3073 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
3074 /* combine qualifs */
3075 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
3076 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3079 /* copy the pointer target symbol */
3080 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3083 pointed_type(&type
)->t
|= newquals
;
3085 /* pointers to incomplete arrays get converted to
3086 pointers to completed ones if possible */
3087 if (pt1
->t
& VT_ARRAY
3088 && pt2
->t
& VT_ARRAY
3089 && pointed_type(&type
)->ref
->c
< 0
3090 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
3093 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3095 pointed_type(&type
)->ref
=
3096 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
3097 0, pointed_type(&type
)->ref
->c
);
3098 pointed_type(&type
)->ref
->c
=
3099 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
3105 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
3106 if (op
!= '?' || !compare_types(type1
, type2
, 1))
3109 } else if (is_float(bt1
) || is_float(bt2
)) {
3110 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
3111 type
.t
= VT_LDOUBLE
;
3112 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
3117 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
3118 /* cast to biggest op */
3119 type
.t
= VT_LLONG
| VT_LONG
;
3120 if (bt1
== VT_LLONG
)
3122 if (bt2
== VT_LLONG
)
3124 /* convert to unsigned if it does not fit in a long long */
3125 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
3126 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
3127 type
.t
|= VT_UNSIGNED
;
3129 /* integer operations */
3130 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
3131 /* convert to unsigned if it does not fit in an integer */
3132 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
3133 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
3134 type
.t
|= VT_UNSIGNED
;
3141 /* generic gen_op: handles types problems */
3142 ST_FUNC
void gen_op(int op
)
3144 int u
, t1
, t2
, bt1
, bt2
, t
;
3145 CType type1
, combtype
;
3148 t1
= vtop
[-1].type
.t
;
3149 t2
= vtop
[0].type
.t
;
3150 bt1
= t1
& VT_BTYPE
;
3151 bt2
= t2
& VT_BTYPE
;
3153 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
3154 if (bt2
== VT_FUNC
) {
3155 mk_pointer(&vtop
->type
);
3158 if (bt1
== VT_FUNC
) {
3160 mk_pointer(&vtop
->type
);
3165 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
3166 tcc_error_noabort("invalid operand types for binary operation");
3168 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3169 /* at least one operand is a pointer */
3170 /* relational op: must be both pointers */
3173 /* if both pointers, then it must be the '-' op */
3174 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
3176 tcc_error("cannot use pointers here");
3177 if (vtop
[-1].type
.t
& VT_VLA
) {
3178 vla_runtime_pointed_size(&vtop
[-1].type
);
3180 vpushi(pointed_size(&vtop
[-1].type
));
3184 vtop
->type
.t
= VT_PTRDIFF_T
;
3188 /* exactly one pointer : must be '+' or '-'. */
3189 if (op
!= '-' && op
!= '+')
3190 tcc_error("cannot use pointers here");
3191 /* Put pointer as first operand */
3192 if (bt2
== VT_PTR
) {
3194 t
= t1
, t1
= t2
, t2
= t
;
3197 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
3198 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3201 type1
= vtop
[-1].type
;
3202 if (vtop
[-1].type
.t
& VT_VLA
)
3203 vla_runtime_pointed_size(&vtop
[-1].type
);
3205 u
= pointed_size(&vtop
[-1].type
);
3207 tcc_error("unknown array element size");
3211 /* XXX: cast to int ? (long long case) */
3216 #ifdef CONFIG_TCC_BCHECK
3217 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
3218 /* if bounded pointers, we generate a special code to
3225 gen_bounded_ptr_add();
3231 type1
.t
&= ~VT_ARRAY
;
3232 /* put again type if gen_opic() swaped operands */
3236 /* floats can only be used for a few operations */
3237 if (is_float(combtype
.t
)
3238 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3240 tcc_error("invalid operands for binary operation");
3241 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
3242 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
3243 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
3245 t
|= (VT_LONG
& t1
);
3249 t
= t2
= combtype
.t
;
3250 /* XXX: currently, some unsigned operations are explicit, so
3251 we modify them here */
3252 if (t
& VT_UNSIGNED
) {
3259 else if (op
== TOK_LT
)
3261 else if (op
== TOK_GT
)
3263 else if (op
== TOK_LE
)
3265 else if (op
== TOK_GE
)
3271 /* special case for shifts and long long: we keep the shift as
3273 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
3280 if (TOK_ISCOND(op
)) {
3281 /* relational op: the result is an int */
3282 vtop
->type
.t
= VT_INT
;
3287 // Make sure that we have converted to an rvalue:
3288 if (vtop
->r
& VT_LVAL
)
3289 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3292 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3293 #define gen_cvt_itof1 gen_cvt_itof
3295 /* generic itof for unsigned long long case */
3296 static void gen_cvt_itof1(int t
)
3298 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3299 (VT_LLONG
| VT_UNSIGNED
)) {
3302 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
3303 #if LDOUBLE_SIZE != 8
3304 else if (t
== VT_LDOUBLE
)
3305 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
3308 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
3319 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3320 #define gen_cvt_ftoi1 gen_cvt_ftoi
3322 /* generic ftoi for unsigned long long case */
3323 static void gen_cvt_ftoi1(int t
)
3326 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3327 /* not handled natively */
3328 st
= vtop
->type
.t
& VT_BTYPE
;
3330 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
3331 #if LDOUBLE_SIZE != 8
3332 else if (st
== VT_LDOUBLE
)
3333 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
3336 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
3347 /* special delayed cast for char/short */
3348 static void force_charshort_cast(void)
3350 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3351 int dbt
= vtop
->type
.t
;
3352 vtop
->r
&= ~VT_MUSTCAST
;
3354 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3358 static void gen_cast_s(int t
)
3366 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3367 static void gen_cast(CType
*type
)
3369 int sbt
, dbt
, sf
, df
, c
;
3370 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3372 /* special delayed cast for char/short */
3373 if (vtop
->r
& VT_MUSTCAST
)
3374 force_charshort_cast();
3376 /* bitfields first get cast to ints */
3377 if (vtop
->type
.t
& VT_BITFIELD
)
3380 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3381 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3389 dbt_bt
= dbt
& VT_BTYPE
;
3390 sbt_bt
= sbt
& VT_BTYPE
;
3392 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3393 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3394 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3397 /* constant case: we can do it now */
3398 /* XXX: in ISOC, cannot do it if error in convert */
3399 if (sbt
== VT_FLOAT
)
3400 vtop
->c
.ld
= vtop
->c
.f
;
3401 else if (sbt
== VT_DOUBLE
)
3402 vtop
->c
.ld
= vtop
->c
.d
;
3405 if (sbt_bt
== VT_LLONG
) {
3406 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3407 vtop
->c
.ld
= vtop
->c
.i
;
3409 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3411 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3412 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3414 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3417 if (dbt
== VT_FLOAT
)
3418 vtop
->c
.f
= (float)vtop
->c
.ld
;
3419 else if (dbt
== VT_DOUBLE
)
3420 vtop
->c
.d
= (double)vtop
->c
.ld
;
3421 } else if (sf
&& dbt
== VT_BOOL
) {
3422 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3425 vtop
->c
.i
= vtop
->c
.ld
;
3426 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3428 else if (sbt
& VT_UNSIGNED
)
3429 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3431 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3433 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3435 else if (dbt
== VT_BOOL
)
3436 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3438 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3439 dbt_bt
== VT_SHORT
? 0xffff :
3442 if (!(dbt
& VT_UNSIGNED
))
3443 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3448 } else if (dbt
== VT_BOOL
3449 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3450 == (VT_CONST
| VT_SYM
)) {
3451 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3457 /* cannot generate code for global or static initializers */
3458 if (STATIC_DATA_WANTED
)
3461 /* non constant case: generate code */
3462 if (dbt
== VT_BOOL
) {
3463 gen_test_zero(TOK_NE
);
3469 /* convert from fp to fp */
3472 /* convert int to fp */
3475 /* convert fp to int */
3477 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3480 goto again
; /* may need char/short cast */
3485 ds
= btype_size(dbt_bt
);
3486 ss
= btype_size(sbt_bt
);
3487 if (ds
== 0 || ss
== 0) {
3488 if (dbt_bt
== VT_VOID
)
3490 cast_error(&vtop
->type
, type
);
3492 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3493 tcc_error("cast to incomplete type");
3495 /* same size and no sign conversion needed */
3496 if (ds
== ss
&& ds
>= 4)
3498 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3499 tcc_warning("cast between pointer and integer of different size");
3500 if (sbt_bt
== VT_PTR
) {
3501 /* put integer type to allow logical operations below */
3502 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3506 /* processor allows { int a = 0, b = *(char*)&a; }
3507 That means that if we cast to less width, we can just
3508 change the type and read it still later. */
3509 #define ALLOW_SUBTYPE_ACCESS 1
3511 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3512 /* value still in memory */
3518 goto done
; /* no 64bit envolved */
3526 /* generate high word */
3527 if (sbt
& VT_UNSIGNED
) {
3536 } else if (ss
== 8) {
3537 /* from long long: just take low order word */
3545 /* need to convert from 32bit to 64bit */
3546 if (sbt
& VT_UNSIGNED
) {
3547 #if defined(TCC_TARGET_RISCV64)
3548 /* RISC-V keeps 32bit vals in registers sign-extended.
3549 So here we need a zero-extension. */
3558 ss
= ds
, ds
= 4, dbt
= sbt
;
3559 } else if (ss
== 8) {
3560 /* XXX some architectures (e.g. risc-v) would like it
3561 better for this merely being a 32-to-64 sign or zero-
3563 trunc
= 32; /* zero upper 32 bits */
3571 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3577 bits
= (ss
- ds
) * 8;
3578 /* for unsigned, gen_op will convert SAR to SHR */
3579 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3582 vpushi(bits
- trunc
);
3589 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3592 /* return type size as known at compile time. Put alignment at 'a' */
3593 ST_FUNC
int type_size(CType
*type
, int *a
)
3598 bt
= type
->t
& VT_BTYPE
;
3599 if (bt
== VT_STRUCT
) {
3604 } else if (bt
== VT_PTR
) {
3605 if (type
->t
& VT_ARRAY
) {
3609 ts
= type_size(&s
->type
, a
);
3611 if (ts
< 0 && s
->c
< 0)
3619 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3620 return -1; /* incomplete enum */
3621 } else if (bt
== VT_LDOUBLE
) {
3623 return LDOUBLE_SIZE
;
3624 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3625 #ifdef TCC_TARGET_I386
3626 #ifdef TCC_TARGET_PE
3631 #elif defined(TCC_TARGET_ARM)
3641 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3644 } else if (bt
== VT_SHORT
) {
3647 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3651 /* char, void, function, _Bool */
3657 /* push type size as known at runtime time on top of value stack. Put
3659 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
3661 if (type
->t
& VT_VLA
) {
3662 type_size(&type
->ref
->type
, a
);
3663 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3665 vpushi(type_size(type
, a
));
3669 /* return the pointed type of t */
3670 static inline CType
*pointed_type(CType
*type
)
3672 return &type
->ref
->type
;
3675 /* modify type so that its it is a pointer to type. */
3676 ST_FUNC
void mk_pointer(CType
*type
)
3679 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3680 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3684 /* return true if type1 and type2 are exactly the same (including
3687 static int is_compatible_types(CType
*type1
, CType
*type2
)
3689 return compare_types(type1
,type2
,0);
3692 /* return true if type1 and type2 are the same (ignoring qualifiers).
3694 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3696 return compare_types(type1
,type2
,1);
3699 static void cast_error(CType
*st
, CType
*dt
)
3701 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3704 /* verify type compatibility to store vtop in 'dt' type */
3705 static void verify_assign_cast(CType
*dt
)
3707 CType
*st
, *type1
, *type2
;
3708 int dbt
, sbt
, qualwarn
, lvl
;
3710 st
= &vtop
->type
; /* source type */
3711 dbt
= dt
->t
& VT_BTYPE
;
3712 sbt
= st
->t
& VT_BTYPE
;
3713 if (dt
->t
& VT_CONSTANT
)
3714 tcc_warning("assignment of read-only location");
3718 tcc_error("assignment to void expression");
3721 /* special cases for pointers */
3722 /* '0' can also be a pointer */
3723 if (is_null_pointer(vtop
))
3725 /* accept implicit pointer to integer cast with warning */
3726 if (is_integer_btype(sbt
)) {
3727 tcc_warning("assignment makes pointer from integer without a cast");
3730 type1
= pointed_type(dt
);
3732 type2
= pointed_type(st
);
3733 else if (sbt
== VT_FUNC
)
3734 type2
= st
; /* a function is implicitly a function pointer */
3737 if (is_compatible_types(type1
, type2
))
3739 for (qualwarn
= lvl
= 0;; ++lvl
) {
3740 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3741 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3743 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3744 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3745 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3747 type1
= pointed_type(type1
);
3748 type2
= pointed_type(type2
);
3750 if (!is_compatible_unqualified_types(type1
, type2
)) {
3751 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3752 /* void * can match anything */
3753 } else if (dbt
== sbt
3754 && is_integer_btype(sbt
& VT_BTYPE
)
3755 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3756 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3757 /* Like GCC don't warn by default for merely changes
3758 in pointer target signedness. Do warn for different
3759 base types, though, in particular for unsigned enums
3760 and signed int targets. */
3762 tcc_warning("assignment from incompatible pointer type");
3767 tcc_warning("assignment discards qualifiers from pointer target type");
3773 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3774 tcc_warning("assignment makes integer from pointer without a cast");
3775 } else if (sbt
== VT_STRUCT
) {
3776 goto case_VT_STRUCT
;
3778 /* XXX: more tests */
3782 if (!is_compatible_unqualified_types(dt
, st
)) {
3790 static void gen_assign_cast(CType
*dt
)
3792 verify_assign_cast(dt
);
3796 /* store vtop in lvalue pushed on stack */
3797 ST_FUNC
void vstore(void)
3799 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3801 ft
= vtop
[-1].type
.t
;
3802 sbt
= vtop
->type
.t
& VT_BTYPE
;
3803 dbt
= ft
& VT_BTYPE
;
3805 verify_assign_cast(&vtop
[-1].type
);
3807 if (sbt
== VT_STRUCT
) {
3808 /* if structure, only generate pointer */
3809 /* structure assignment : generate memcpy */
3810 /* XXX: optimize if small size */
3811 size
= type_size(&vtop
->type
, &align
);
3815 #ifdef CONFIG_TCC_BCHECK
3816 if (vtop
->r
& VT_MUSTBOUND
)
3817 gbound(); /* check would be wrong after gaddrof() */
3819 vtop
->type
.t
= VT_PTR
;
3822 /* address of memcpy() */
3825 vpush_global_sym(&func_old_type
, TOK_memmove8
);
3826 else if(!(align
& 3))
3827 vpush_global_sym(&func_old_type
, TOK_memmove4
);
3830 /* Use memmove, rather than memcpy, as dest and src may be same: */
3831 vpush_global_sym(&func_old_type
, TOK_memmove
);
3836 #ifdef CONFIG_TCC_BCHECK
3837 if (vtop
->r
& VT_MUSTBOUND
)
3840 vtop
->type
.t
= VT_PTR
;
3845 /* leave source on stack */
3847 } else if (ft
& VT_BITFIELD
) {
3848 /* bitfield store handling */
3850 /* save lvalue as expression result (example: s.b = s.a = n;) */
3851 vdup(), vtop
[-1] = vtop
[-2];
3853 bit_pos
= BIT_POS(ft
);
3854 bit_size
= BIT_SIZE(ft
);
3855 /* remove bit field info to avoid loops */
3856 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3858 if (dbt
== VT_BOOL
) {
3859 gen_cast(&vtop
[-1].type
);
3860 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3862 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3863 if (dbt
!= VT_BOOL
) {
3864 gen_cast(&vtop
[-1].type
);
3865 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3867 if (r
== VT_STRUCT
) {
3868 store_packed_bf(bit_pos
, bit_size
);
3870 unsigned long long mask
= (1ULL << bit_size
) - 1;
3871 if (dbt
!= VT_BOOL
) {
3873 if (dbt
== VT_LLONG
)
3876 vpushi((unsigned)mask
);
3883 /* duplicate destination */
3886 /* load destination, mask and or with source */
3887 if (dbt
== VT_LLONG
)
3888 vpushll(~(mask
<< bit_pos
));
3890 vpushi(~((unsigned)mask
<< bit_pos
));
3895 /* ... and discard */
3898 } else if (dbt
== VT_VOID
) {
3901 /* optimize char/short casts */
3903 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3904 && is_integer_btype(sbt
)
3906 if ((vtop
->r
& VT_MUSTCAST
)
3907 && btype_size(dbt
) > btype_size(sbt
)
3909 force_charshort_cast();
3912 gen_cast(&vtop
[-1].type
);
3915 #ifdef CONFIG_TCC_BCHECK
3916 /* bound check case */
3917 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3923 gv(RC_TYPE(dbt
)); /* generate value */
3926 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3927 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3928 vtop
->type
.t
= ft
& VT_TYPE
;
3931 /* if lvalue was saved on stack, must read it */
3932 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3934 r
= get_reg(RC_INT
);
3935 sv
.type
.t
= VT_PTRDIFF_T
;
3936 sv
.r
= VT_LOCAL
| VT_LVAL
;
3937 sv
.c
.i
= vtop
[-1].c
.i
;
3939 vtop
[-1].r
= r
| VT_LVAL
;
3942 r
= vtop
->r
& VT_VALMASK
;
3943 /* two word case handling :
3944 store second register at word + 4 (or +8 for x86-64) */
3945 if (USING_TWO_WORDS(dbt
)) {
3946 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3947 vtop
[-1].type
.t
= load_type
;
3950 /* convert to int to increment easily */
3951 vtop
->type
.t
= VT_PTRDIFF_T
;
3957 vtop
[-1].type
.t
= load_type
;
3958 /* XXX: it works because r2 is spilled last ! */
3959 store(vtop
->r2
, vtop
- 1);
3965 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3969 /* post defines POST/PRE add. c is the token ++ or -- */
3970 ST_FUNC
void inc(int post
, int c
)
3973 vdup(); /* save lvalue */
3975 gv_dup(); /* duplicate value */
3980 vpushi(c
- TOK_MID
);
3982 vstore(); /* store value */
3984 vpop(); /* if post op, return saved value */
3987 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
3989 /* read the string */
3993 while (tok
== TOK_STR
) {
3994 /* XXX: add \0 handling too ? */
3995 cstr_cat(astr
, tokc
.str
.data
, -1);
3998 cstr_ccat(astr
, '\0');
4001 /* If I is >= 1 and a power of two, returns log2(i)+1.
4002 If I is 0 returns 0. */
4003 ST_FUNC
int exact_log2p1(int i
)
4008 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
4019 /* Parse __attribute__((...)) GNUC extension. */
4020 static void parse_attribute(AttributeDef
*ad
)
4026 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
4031 while (tok
!= ')') {
4032 if (tok
< TOK_IDENT
)
4033 expect("attribute name");
4045 tcc_warning("implicit declaration of function '%s'",
4046 get_tok_str(tok
, &tokc
));
4047 s
= external_global_sym(tok
, &func_old_type
);
4048 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4049 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
4050 ad
->cleanup_func
= s
;
4055 case TOK_CONSTRUCTOR1
:
4056 case TOK_CONSTRUCTOR2
:
4057 ad
->f
.func_ctor
= 1;
4059 case TOK_DESTRUCTOR1
:
4060 case TOK_DESTRUCTOR2
:
4061 ad
->f
.func_dtor
= 1;
4063 case TOK_ALWAYS_INLINE1
:
4064 case TOK_ALWAYS_INLINE2
:
4065 ad
->f
.func_alwinl
= 1;
4070 parse_mult_str(&astr
, "section name");
4071 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
4078 parse_mult_str(&astr
, "alias(\"target\")");
4079 ad
->asm_label
= /* save string as token, for later */
4080 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
| SYM_FIELD
;
4084 case TOK_VISIBILITY1
:
4085 case TOK_VISIBILITY2
:
4087 parse_mult_str(&astr
,
4088 "visibility(\"default|hidden|internal|protected\")");
4089 if (!strcmp (astr
.data
, "default"))
4090 ad
->a
.visibility
= STV_DEFAULT
;
4091 else if (!strcmp (astr
.data
, "hidden"))
4092 ad
->a
.visibility
= STV_HIDDEN
;
4093 else if (!strcmp (astr
.data
, "internal"))
4094 ad
->a
.visibility
= STV_INTERNAL
;
4095 else if (!strcmp (astr
.data
, "protected"))
4096 ad
->a
.visibility
= STV_PROTECTED
;
4098 expect("visibility(\"default|hidden|internal|protected\")");
4107 if (n
<= 0 || (n
& (n
- 1)) != 0)
4108 tcc_error("alignment must be a positive power of two");
4113 ad
->a
.aligned
= exact_log2p1(n
);
4114 if (n
!= 1 << (ad
->a
.aligned
- 1))
4115 tcc_error("alignment of %d is larger than implemented", n
);
4127 /* currently, no need to handle it because tcc does not
4128 track unused objects */
4132 ad
->f
.func_noreturn
= 1;
4137 ad
->f
.func_call
= FUNC_CDECL
;
4142 ad
->f
.func_call
= FUNC_STDCALL
;
4144 #ifdef TCC_TARGET_I386
4154 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
4160 ad
->f
.func_call
= FUNC_FASTCALLW
;
4167 ad
->attr_mode
= VT_LLONG
+ 1;
4170 ad
->attr_mode
= VT_BYTE
+ 1;
4173 ad
->attr_mode
= VT_SHORT
+ 1;
4177 ad
->attr_mode
= VT_INT
+ 1;
4180 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4187 ad
->a
.dllexport
= 1;
4189 case TOK_NODECORATE
:
4190 ad
->a
.nodecorate
= 1;
4193 ad
->a
.dllimport
= 1;
4196 if (tcc_state
->warn_unsupported
)
4197 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
4198 /* skip parameters */
4200 int parenthesis
= 0;
4204 else if (tok
== ')')
4207 } while (parenthesis
&& tok
!= -1);
4220 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
4224 while ((s
= s
->next
) != NULL
) {
4225 if ((s
->v
& SYM_FIELD
) &&
4226 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
4227 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
4228 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
4240 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4242 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4243 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4244 int pcc
= !tcc_state
->ms_bitfields
;
4245 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4252 prevbt
= VT_STRUCT
; /* make it never match */
4257 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4258 if (f
->type
.t
& VT_BITFIELD
)
4259 bit_size
= BIT_SIZE(f
->type
.t
);
4262 size
= type_size(&f
->type
, &align
);
4263 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4266 if (pcc
&& bit_size
== 0) {
4267 /* in pcc mode, packing does not affect zero-width bitfields */
4270 /* in pcc mode, attribute packed overrides if set. */
4271 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4274 /* pragma pack overrides align if lesser and packs bitfields always */
4277 if (pragma_pack
< align
)
4278 align
= pragma_pack
;
4279 /* in pcc mode pragma pack also overrides individual align */
4280 if (pcc
&& pragma_pack
< a
)
4284 /* some individual align was specified */
4288 if (type
->ref
->type
.t
== VT_UNION
) {
4289 if (pcc
&& bit_size
>= 0)
4290 size
= (bit_size
+ 7) >> 3;
4295 } else if (bit_size
< 0) {
4297 c
+= (bit_pos
+ 7) >> 3;
4298 c
= (c
+ align
- 1) & -align
;
4307 /* A bit-field. Layout is more complicated. There are two
4308 options: PCC (GCC) compatible and MS compatible */
4310 /* In PCC layout a bit-field is placed adjacent to the
4311 preceding bit-fields, except if:
4313 - an individual alignment was given
4314 - it would overflow its base type container and
4315 there is no packing */
4316 if (bit_size
== 0) {
4318 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4320 } else if (f
->a
.aligned
) {
4322 } else if (!packed
) {
4324 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4325 if (ofs
> size
/ align
)
4329 /* in pcc mode, long long bitfields have type int if they fit */
4330 if (size
== 8 && bit_size
<= 32)
4331 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4333 while (bit_pos
>= align
* 8)
4334 c
+= align
, bit_pos
-= align
* 8;
4337 /* In PCC layout named bit-fields influence the alignment
4338 of the containing struct using the base types alignment,
4339 except for packed fields (which here have correct align). */
4340 if (f
->v
& SYM_FIRST_ANOM
4341 // && bit_size // ??? gcc on ARM/rpi does that
4346 bt
= f
->type
.t
& VT_BTYPE
;
4347 if ((bit_pos
+ bit_size
> size
* 8)
4348 || (bit_size
> 0) == (bt
!= prevbt
)
4350 c
= (c
+ align
- 1) & -align
;
4353 /* In MS bitfield mode a bit-field run always uses
4354 at least as many bits as the underlying type.
4355 To start a new run it's also required that this
4356 or the last bit-field had non-zero width. */
4357 if (bit_size
|| prev_bit_size
)
4360 /* In MS layout the records alignment is normally
4361 influenced by the field, except for a zero-width
4362 field at the start of a run (but by further zero-width
4363 fields it is again). */
4364 if (bit_size
== 0 && prevbt
!= bt
)
4367 prev_bit_size
= bit_size
;
4370 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4371 | (bit_pos
<< VT_STRUCT_SHIFT
);
4372 bit_pos
+= bit_size
;
4374 if (align
> maxalign
)
4378 printf("set field %s offset %-2d size %-2d align %-2d",
4379 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4380 if (f
->type
.t
& VT_BITFIELD
) {
4381 printf(" pos %-2d bits %-2d",
4394 c
+= (bit_pos
+ 7) >> 3;
4396 /* store size and alignment */
4397 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4401 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4402 /* can happen if individual align for some member was given. In
4403 this case MSVC ignores maxalign when aligning the size */
4408 c
= (c
+ a
- 1) & -a
;
4412 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4415 /* check whether we can access bitfields by their type */
4416 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4420 if (0 == (f
->type
.t
& VT_BITFIELD
))
4424 bit_size
= BIT_SIZE(f
->type
.t
);
4427 bit_pos
= BIT_POS(f
->type
.t
);
4428 size
= type_size(&f
->type
, &align
);
4429 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
4432 /* try to access the field using a different type */
4433 c0
= -1, s
= align
= 1;
4436 px
= f
->c
* 8 + bit_pos
;
4437 cx
= (px
>> 3) & -align
;
4438 px
= px
- (cx
<< 3);
4441 s
= (px
+ bit_size
+ 7) >> 3;
4451 s
= type_size(&t
, &align
);
4455 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
4456 /* update offset and bit position */
4459 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4460 | (bit_pos
<< VT_STRUCT_SHIFT
);
4464 printf("FIX field %s offset %-2d size %-2d align %-2d "
4465 "pos %-2d bits %-2d\n",
4466 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4467 cx
, s
, align
, px
, bit_size
);
4470 /* fall back to load/store single-byte wise */
4471 f
->auxtype
= VT_STRUCT
;
4473 printf("FIX field %s : load byte-wise\n",
4474 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4480 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4481 static void struct_decl(CType
*type
, int u
)
4483 int v
, c
, size
, align
, flexible
;
4484 int bit_size
, bsize
, bt
;
4486 AttributeDef ad
, ad1
;
4489 memset(&ad
, 0, sizeof ad
);
4491 parse_attribute(&ad
);
4495 /* struct already defined ? return it */
4497 expect("struct/union/enum name");
4499 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4502 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4504 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4509 /* Record the original enum/struct/union token. */
4510 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4512 /* we put an undefined size for struct/union */
4513 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4514 s
->r
= 0; /* default alignment is zero as gcc */
4516 type
->t
= s
->type
.t
;
4522 tcc_error("struct/union/enum already defined");
4524 /* cannot be empty */
4525 /* non empty enums are not allowed */
4528 long long ll
= 0, pl
= 0, nl
= 0;
4531 /* enum symbols have static storage */
4532 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4536 expect("identifier");
4538 if (ss
&& !local_stack
)
4539 tcc_error("redefinition of enumerator '%s'",
4540 get_tok_str(v
, NULL
));
4544 ll
= expr_const64();
4546 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4548 *ps
= ss
, ps
= &ss
->next
;
4557 /* NOTE: we accept a trailing comma */
4562 /* set integral type of the enum */
4565 if (pl
!= (unsigned)pl
)
4566 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4568 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4569 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4570 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4572 /* set type for enum members */
4573 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4575 if (ll
== (int)ll
) /* default is int if it fits */
4577 if (t
.t
& VT_UNSIGNED
) {
4578 ss
->type
.t
|= VT_UNSIGNED
;
4579 if (ll
== (unsigned)ll
)
4582 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4583 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4588 while (tok
!= '}') {
4589 if (!parse_btype(&btype
, &ad1
)) {
4595 tcc_error("flexible array member '%s' not at the end of struct",
4596 get_tok_str(v
, NULL
));
4602 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4604 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4605 expect("identifier");
4607 int v
= btype
.ref
->v
;
4608 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4609 if (tcc_state
->ms_extensions
== 0)
4610 expect("identifier");
4614 if (type_size(&type1
, &align
) < 0) {
4615 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4618 tcc_error("field '%s' has incomplete type",
4619 get_tok_str(v
, NULL
));
4621 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4622 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4623 (type1
.t
& VT_STORAGE
))
4624 tcc_error("invalid type for '%s'",
4625 get_tok_str(v
, NULL
));
4629 bit_size
= expr_const();
4630 /* XXX: handle v = 0 case for messages */
4632 tcc_error("negative width in bit-field '%s'",
4633 get_tok_str(v
, NULL
));
4634 if (v
&& bit_size
== 0)
4635 tcc_error("zero width for bit-field '%s'",
4636 get_tok_str(v
, NULL
));
4637 parse_attribute(&ad1
);
4639 size
= type_size(&type1
, &align
);
4640 if (bit_size
>= 0) {
4641 bt
= type1
.t
& VT_BTYPE
;
4647 tcc_error("bitfields must have scalar type");
4649 if (bit_size
> bsize
) {
4650 tcc_error("width of '%s' exceeds its type",
4651 get_tok_str(v
, NULL
));
4652 } else if (bit_size
== bsize
4653 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4654 /* no need for bit fields */
4656 } else if (bit_size
== 64) {
4657 tcc_error("field width 64 not implemented");
4659 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4661 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4664 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4665 /* Remember we've seen a real field to check
4666 for placement of flexible array member. */
4669 /* If member is a struct or bit-field, enforce
4670 placing into the struct (as anonymous). */
4672 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4677 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4682 if (tok
== ';' || tok
== TOK_EOF
)
4689 parse_attribute(&ad
);
4690 if (ad
.cleanup_func
) {
4691 tcc_warning("attribute '__cleanup__' ignored on type");
4693 struct_layout(type
, &ad
);
4698 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4700 merge_symattr(&ad
->a
, &s
->a
);
4701 merge_funcattr(&ad
->f
, &s
->f
);
4704 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4705 are added to the element type, copied because it could be a typedef. */
4706 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4708 while (type
->t
& VT_ARRAY
) {
4709 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4710 type
= &type
->ref
->type
;
4712 type
->t
|= qualifiers
;
4715 /* return 0 if no type declaration. otherwise, return the basic type
4718 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4720 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4724 memset(ad
, 0, sizeof(AttributeDef
));
4734 /* currently, we really ignore extension */
4744 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4745 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4746 tmbt
: tcc_error("too many basic types");
4749 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4754 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4771 memset(&ad1
, 0, sizeof(AttributeDef
));
4772 if (parse_btype(&type1
, &ad1
)) {
4773 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4775 n
= 1 << (ad1
.a
.aligned
- 1);
4777 type_size(&type1
, &n
);
4780 if (n
<= 0 || (n
& (n
- 1)) != 0)
4781 tcc_error("alignment must be a positive power of two");
4784 ad
->a
.aligned
= exact_log2p1(n
);
4788 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4789 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4790 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4791 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4798 #ifdef TCC_TARGET_ARM64
4800 /* GCC's __uint128_t appears in some Linux header files. Make it a
4801 synonym for long double to get the size and alignment right. */
4812 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4813 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4821 struct_decl(&type1
, VT_ENUM
);
4824 type
->ref
= type1
.ref
;
4827 struct_decl(&type1
, VT_STRUCT
);
4830 struct_decl(&type1
, VT_UNION
);
4833 /* type modifiers */
4838 parse_btype_qualify(type
, VT_CONSTANT
);
4846 parse_btype_qualify(type
, VT_VOLATILE
);
4853 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4854 tcc_error("signed and unsigned modifier");
4867 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4868 tcc_error("signed and unsigned modifier");
4869 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4885 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4886 tcc_error("multiple storage classes");
4898 ad
->f
.func_noreturn
= 1;
4900 /* GNUC attribute */
4901 case TOK_ATTRIBUTE1
:
4902 case TOK_ATTRIBUTE2
:
4903 parse_attribute(ad
);
4904 if (ad
->attr_mode
) {
4905 u
= ad
->attr_mode
-1;
4906 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4914 parse_expr_type(&type1
);
4915 /* remove all storage modifiers except typedef */
4916 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4918 sym_to_attr(ad
, type1
.ref
);
4924 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4928 if (tok
== ':' && !in_generic
) {
4929 /* ignore if it's a label */
4934 t
&= ~(VT_BTYPE
|VT_LONG
);
4935 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4936 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4937 type
->ref
= s
->type
.ref
;
4939 parse_btype_qualify(type
, t
);
4941 /* get attributes from typedef */
4950 if (tcc_state
->char_is_unsigned
) {
4951 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4954 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4955 bt
= t
& (VT_BTYPE
|VT_LONG
);
4957 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4958 #if defined TCC_TARGET_PE || (defined _WIN32 && defined _MSC_VER)
4959 if (bt
== VT_LDOUBLE
)
4960 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4966 /* convert a function parameter type (array to pointer and function to
4967 function pointer) */
4968 static inline void convert_parameter_type(CType
*pt
)
4970 /* remove const and volatile qualifiers (XXX: const could be used
4971 to indicate a const function parameter */
4972 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4973 /* array must be transformed to pointer according to ANSI C */
4975 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
/* Parse an asm string operand (possibly several concatenated string
   literals) into 'astr' via parse_mult_str.
   NOTE(review): garbled extraction — the original lines between "4980"
   and "4983" (the function's opening brace and, presumably, the opening
   token consumption) are absent from this chunk; the fragment below is
   not compilable as-is. Recover the missing lines from the upstream
   file before editing. */
4980 ST_FUNC
void parse_asm_str(CString
*astr
)
4983 parse_mult_str(astr
, "string constant");
4986 /* Parse an asm label and return the token */
4987 static int asm_label_instr(void)
4993 parse_asm_str(&astr
);
4996 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
4998 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
5003 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
5005 int n
, l
, t1
, arg_size
, align
, unused_align
;
5006 Sym
**plast
, *s
, *first
;
5011 /* function type, or recursive declarator (return if so) */
5013 if (td
&& !(td
& TYPE_ABSTRACT
))
5017 else if (parse_btype(&pt
, &ad1
))
5020 merge_attr (ad
, &ad1
);
5029 /* read param name and compute offset */
5030 if (l
!= FUNC_OLD
) {
5031 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
5033 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
5034 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
5035 tcc_error("parameter declared as void");
5039 expect("identifier");
5040 pt
.t
= VT_VOID
; /* invalid type */
5044 convert_parameter_type(&pt
);
5045 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
5046 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
5052 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
5057 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
5058 tcc_error("invalid type");
5061 /* if no parameters, then old type prototype */
5064 /* NOTE: const is ignored in returned type as it has a special
5065 meaning in gcc / C++ */
5066 type
->t
&= ~VT_CONSTANT
;
5067 /* some ancient pre-K&R C allows a function to return an array
5068 and the array brackets to be put after the arguments, such
5069 that "int c()[]" means something like "int[] c()" */
5072 skip(']'); /* only handle simple "[]" */
5075 /* we push a anonymous symbol which will contain the function prototype */
5076 ad
->f
.func_args
= arg_size
;
5077 ad
->f
.func_type
= l
;
5078 s
= sym_push(SYM_FIELD
, type
, 0, 0);
5084 } else if (tok
== '[') {
5085 int saved_nocode_wanted
= nocode_wanted
;
5086 /* array definition */
5089 /* XXX The optional type-quals and static should only be accepted
5090 in parameter decls. The '*' as well, and then even only
5091 in prototypes (not function defs). */
5093 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5108 if (!local_stack
|| (storage
& VT_STATIC
))
5109 vpushi(expr_const());
5111 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5112 length must always be evaluated, even under nocode_wanted,
5113 so that its size slot is initialized (e.g. under sizeof
5118 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5121 tcc_error("invalid array size");
5123 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5124 tcc_error("size of variable length array should be an integer");
5130 /* parse next post type */
5131 post_type(type
, ad
, storage
, 0);
5133 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5134 tcc_error("declaration of an array of functions");
5135 if ((type
->t
& VT_BTYPE
) == VT_VOID
5136 || type_size(type
, &unused_align
) < 0)
5137 tcc_error("declaration of an array of incomplete type elements");
5139 t1
|= type
->t
& VT_VLA
;
5143 tcc_error("need explicit inner array size in VLAs");
5144 loc
-= type_size(&int_type
, &align
);
5148 vla_runtime_type_size(type
, &align
);
5150 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5156 nocode_wanted
= saved_nocode_wanted
;
5158 /* we push an anonymous symbol which will contain the array
5160 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5161 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5167 /* Parse a type declarator (except basic type), and return the type
5168 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5169 expected. 'type' should contain the basic type. 'ad' is the
5170 attribute definition of the basic type. It can be modified by
5171 type_decl(). If this (possibly abstract) declarator is a pointer chain
5172 it returns the innermost pointed to type (equals *type, but is a different
5173 pointer), otherwise returns type itself, that's used for recursive calls. */
5174 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5177 int qualifiers
, storage
;
5179 /* recursive type, remove storage bits first, apply them later again */
5180 storage
= type
->t
& VT_STORAGE
;
5181 type
->t
&= ~VT_STORAGE
;
5184 while (tok
== '*') {
5192 qualifiers
|= VT_CONSTANT
;
5197 qualifiers
|= VT_VOLATILE
;
5203 /* XXX: clarify attribute handling */
5204 case TOK_ATTRIBUTE1
:
5205 case TOK_ATTRIBUTE2
:
5206 parse_attribute(ad
);
5210 type
->t
|= qualifiers
;
5212 /* innermost pointed to type is the one for the first derivation */
5213 ret
= pointed_type(type
);
5217 /* This is possibly a parameter type list for abstract declarators
5218 ('int ()'), use post_type for testing this. */
5219 if (!post_type(type
, ad
, 0, td
)) {
5220 /* It's not, so it's a nested declarator, and the post operations
5221 apply to the innermost pointed to type (if any). */
5222 /* XXX: this is not correct to modify 'ad' at this point, but
5223 the syntax is not clear */
5224 parse_attribute(ad
);
5225 post
= type_decl(type
, ad
, v
, td
);
5229 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5230 /* type identifier */
5235 if (!(td
& TYPE_ABSTRACT
))
5236 expect("identifier");
5239 post_type(post
, ad
, storage
, 0);
5240 parse_attribute(ad
);
5245 /* indirection with full error checking and bound check */
5246 ST_FUNC
void indir(void)
5248 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5249 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5253 if (vtop
->r
& VT_LVAL
)
5255 vtop
->type
= *pointed_type(&vtop
->type
);
5256 /* Arrays and functions are never lvalues */
5257 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5258 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5260 /* if bound checking, the referenced pointer must be checked */
5261 #ifdef CONFIG_TCC_BCHECK
5262 if (tcc_state
->do_bounds_check
)
5263 vtop
->r
|= VT_MUSTBOUND
;
5268 /* pass a parameter to a function and do type checking and casting */
5269 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5274 func_type
= func
->f
.func_type
;
5275 if (func_type
== FUNC_OLD
||
5276 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5277 /* default casting : only need to convert float to double */
5278 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5279 gen_cast_s(VT_DOUBLE
);
5280 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5281 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5282 type
.ref
= vtop
->type
.ref
;
5284 } else if (vtop
->r
& VT_MUSTCAST
) {
5285 force_charshort_cast();
5287 } else if (arg
== NULL
) {
5288 tcc_error("too many arguments to function");
5291 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5292 gen_assign_cast(&type
);
5296 /* parse an expression and return its type without any side effect. */
5297 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5306 /* parse an expression of the form '(type)' or '(expr)' and return its
5308 static void parse_expr_type(CType
*type
)
5314 if (parse_btype(type
, &ad
)) {
5315 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5317 expr_type(type
, gexpr
);
5322 static void parse_type(CType
*type
)
5327 if (!parse_btype(type
, &ad
)) {
5330 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5333 static void parse_builtin_params(int nc
, const char *args
)
5342 while ((c
= *args
++)) {
5357 type
.t
= VT_CONSTANT
;
5363 type
.t
= VT_CONSTANT
;
5365 type
.t
|= char_type
.t
;
5375 tcc_error("internal error");
5377 gen_assign_cast(&type
);
5384 ST_FUNC
void unary(void)
5386 int n
, t
, align
, size
, r
, sizeof_caller
;
5391 /* generate line number info */
5392 if (tcc_state
->do_debug
)
5393 tcc_debug_line(tcc_state
);
5395 sizeof_caller
= in_sizeof
;
5398 /* XXX: GCC 2.95.3 does not generate a table although it should be
5406 #ifdef TCC_TARGET_PE
5407 t
= VT_SHORT
|VT_UNSIGNED
;
5415 vsetc(&type
, VT_CONST
, &tokc
);
5419 t
= VT_INT
| VT_UNSIGNED
;
5425 t
= VT_LLONG
| VT_UNSIGNED
;
5437 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5440 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5442 case TOK___FUNCTION__
:
5444 goto tok_identifier
;
5450 /* special function name identifier */
5451 len
= strlen(funcname
) + 1;
5452 /* generate char[len] type */
5457 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
5458 if (!NODATA_WANTED
) {
5459 ptr
= section_ptr_add(data_section
, len
);
5460 memcpy(ptr
, funcname
, len
);
5466 #ifdef TCC_TARGET_PE
5467 t
= VT_SHORT
| VT_UNSIGNED
;
5473 /* string parsing */
5475 if (tcc_state
->char_is_unsigned
)
5476 t
= VT_BYTE
| VT_UNSIGNED
;
5478 if (tcc_state
->warn_write_strings
)
5483 memset(&ad
, 0, sizeof(AttributeDef
));
5484 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5489 if (parse_btype(&type
, &ad
)) {
5490 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5492 /* check ISOC99 compound literal */
5494 /* data is allocated locally by default */
5499 /* all except arrays are lvalues */
5500 if (!(type
.t
& VT_ARRAY
))
5502 memset(&ad
, 0, sizeof(AttributeDef
));
5503 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5505 if (sizeof_caller
) {
5512 } else if (tok
== '{') {
5513 int saved_nocode_wanted
= nocode_wanted
;
5514 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5515 tcc_error("expected constant");
5516 /* save all registers */
5518 /* statement expression : we do not accept break/continue
5519 inside as GCC does. We do retain the nocode_wanted state,
5520 as statement expressions can't ever be entered from the
5521 outside, so any reactivation of code emission (from labels
5522 or loop heads) can be disabled again after the end of it. */
5524 nocode_wanted
= saved_nocode_wanted
;
5539 /* functions names must be treated as function pointers,
5540 except for unary '&' and sizeof. Since we consider that
5541 functions are not lvalues, we only have to handle it
5542 there and in function calls. */
5543 /* arrays can also be used although they are not lvalues */
5544 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5545 !(vtop
->type
.t
& VT_ARRAY
))
5548 vtop
->sym
->a
.addrtaken
= 1;
5549 mk_pointer(&vtop
->type
);
5555 gen_test_zero(TOK_EQ
);
5566 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5567 tcc_error("pointer not accepted for unary plus");
5568 /* In order to force cast, we add zero, except for floating point
5569 where we really need an noop (otherwise -0.0 will be transformed
5571 if (!is_float(vtop
->type
.t
)) {
5583 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5585 if (vtop
[1].r
& VT_SYM
)
5586 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5587 size
= type_size(&type
, &align
);
5588 if (s
&& s
->a
.aligned
)
5589 align
= 1 << (s
->a
.aligned
- 1);
5590 if (t
== TOK_SIZEOF
) {
5591 if (!(type
.t
& VT_VLA
)) {
5593 tcc_error("sizeof applied to an incomplete type");
5596 vla_runtime_type_size(&type
, &align
);
5601 vtop
->type
.t
|= VT_UNSIGNED
;
5604 case TOK_builtin_expect
:
5605 /* __builtin_expect is a no-op for now */
5606 parse_builtin_params(0, "ee");
5609 case TOK_builtin_types_compatible_p
:
5610 parse_builtin_params(0, "tt");
5611 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5612 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5613 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5617 case TOK_builtin_choose_expr
:
5644 case TOK_builtin_constant_p
:
5645 parse_builtin_params(1, "e");
5646 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5650 case TOK_builtin_frame_address
:
5651 case TOK_builtin_return_address
:
5657 if (tok
!= TOK_CINT
) {
5658 tcc_error("%s only takes positive integers",
5659 tok1
== TOK_builtin_return_address
?
5660 "__builtin_return_address" :
5661 "__builtin_frame_address");
5663 level
= (uint32_t)tokc
.i
;
5668 vset(&type
, VT_LOCAL
, 0); /* local frame */
5670 #ifdef TCC_TARGET_RISCV64
5674 mk_pointer(&vtop
->type
);
5675 indir(); /* -> parent frame */
5677 if (tok1
== TOK_builtin_return_address
) {
5678 // assume return address is just above frame pointer on stack
5679 #ifdef TCC_TARGET_ARM
5682 #elif defined TCC_TARGET_RISCV64
5689 mk_pointer(&vtop
->type
);
5694 #ifdef TCC_TARGET_RISCV64
5695 case TOK_builtin_va_start
:
5696 parse_builtin_params(0, "ee");
5697 r
= vtop
->r
& VT_VALMASK
;
5701 tcc_error("__builtin_va_start expects a local variable");
5706 #ifdef TCC_TARGET_X86_64
5707 #ifdef TCC_TARGET_PE
5708 case TOK_builtin_va_start
:
5709 parse_builtin_params(0, "ee");
5710 r
= vtop
->r
& VT_VALMASK
;
5714 tcc_error("__builtin_va_start expects a local variable");
5716 vtop
->type
= char_pointer_type
;
5721 case TOK_builtin_va_arg_types
:
5722 parse_builtin_params(0, "t");
5723 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5730 #ifdef TCC_TARGET_ARM64
5731 case TOK_builtin_va_start
: {
5732 parse_builtin_params(0, "ee");
5736 vtop
->type
.t
= VT_VOID
;
5739 case TOK_builtin_va_arg
: {
5740 parse_builtin_params(0, "et");
5748 case TOK___arm64_clear_cache
: {
5749 parse_builtin_params(0, "ee");
5752 vtop
->type
.t
= VT_VOID
;
5757 /* pre operations */
5768 t
= vtop
->type
.t
& VT_BTYPE
;
5770 /* In IEEE negate(x) isn't subtract(0,x), but rather
5774 vtop
->c
.f
= -1.0 * 0.0;
5775 else if (t
== VT_DOUBLE
)
5776 vtop
->c
.d
= -1.0 * 0.0;
5778 vtop
->c
.ld
= -1.0 * 0.0;
5786 goto tok_identifier
;
5788 /* allow to take the address of a label */
5789 if (tok
< TOK_UIDENT
)
5790 expect("label identifier");
5791 s
= label_find(tok
);
5793 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5795 if (s
->r
== LABEL_DECLARED
)
5796 s
->r
= LABEL_FORWARD
;
5799 s
->type
.t
= VT_VOID
;
5800 mk_pointer(&s
->type
);
5801 s
->type
.t
|= VT_STATIC
;
5803 vpushsym(&s
->type
, s
);
5809 CType controlling_type
;
5810 int has_default
= 0;
5813 TokenString
*str
= NULL
;
5814 int saved_const_wanted
= const_wanted
;
5819 expr_type(&controlling_type
, expr_eq
);
5820 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5821 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5822 mk_pointer(&controlling_type
);
5823 const_wanted
= saved_const_wanted
;
5827 if (tok
== TOK_DEFAULT
) {
5829 tcc_error("too many 'default'");
5835 AttributeDef ad_tmp
;
5840 parse_btype(&cur_type
, &ad_tmp
);
5843 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5844 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5846 tcc_error("type match twice");
5856 skip_or_save_block(&str
);
5858 skip_or_save_block(NULL
);
5865 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5866 tcc_error("type '%s' does not match any association", buf
);
5868 begin_macro(str
, 1);
5877 // special qnan , snan and infinity values
5882 vtop
->type
.t
= VT_FLOAT
;
5887 goto special_math_val
;
5890 goto special_math_val
;
5897 expect("identifier");
5899 if (!s
|| IS_ASM_SYM(s
)) {
5900 const char *name
= get_tok_str(t
, NULL
);
5902 tcc_error("'%s' undeclared", name
);
5903 /* for simple function calls, we tolerate undeclared
5904 external reference to int() function */
5905 if (tcc_state
->warn_implicit_function_declaration
5906 #ifdef TCC_TARGET_PE
5907 /* people must be warned about using undeclared WINAPI functions
5908 (which usually start with uppercase letter) */
5909 || (name
[0] >= 'A' && name
[0] <= 'Z')
5912 tcc_warning("implicit declaration of function '%s'", name
);
5913 s
= external_global_sym(t
, &func_old_type
);
5917 /* A symbol that has a register is a local register variable,
5918 which starts out as VT_LOCAL value. */
5919 if ((r
& VT_VALMASK
) < VT_CONST
)
5920 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5922 vset(&s
->type
, r
, s
->c
);
5923 /* Point to s as backpointer (even without r&VT_SYM).
5924 Will be used by at least the x86 inline asm parser for
5930 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5931 vtop
->c
.i
= s
->enum_val
;
5936 /* post operations */
5938 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5941 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5942 int qualifiers
, cumofs
= 0;
5944 if (tok
== TOK_ARROW
)
5946 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5949 /* expect pointer on structure */
5950 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5951 expect("struct or union");
5952 if (tok
== TOK_CDOUBLE
)
5953 expect("field name");
5955 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5956 expect("field name");
5957 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5959 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5960 /* add field offset to pointer */
5961 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5962 vpushi(cumofs
+ s
->c
);
5964 /* change type to field type, and set to lvalue */
5965 vtop
->type
= s
->type
;
5966 vtop
->type
.t
|= qualifiers
;
5967 /* an array is never an lvalue */
5968 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5970 #ifdef CONFIG_TCC_BCHECK
5971 /* if bound checking, the referenced pointer must be checked */
5972 if (tcc_state
->do_bounds_check
)
5973 vtop
->r
|= VT_MUSTBOUND
;
5977 } else if (tok
== '[') {
5983 } else if (tok
== '(') {
5986 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5989 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5990 /* pointer test (no array accepted) */
5991 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5992 vtop
->type
= *pointed_type(&vtop
->type
);
5993 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5997 expect("function pointer");
6000 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
6002 /* get return type */
6005 sa
= s
->next
; /* first parameter */
6006 nb_args
= regsize
= 0;
6008 /* compute first implicit argument if a structure is returned */
6009 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
6010 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
6011 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6012 &ret_align
, ®size
);
6013 if (ret_nregs
<= 0) {
6014 /* get some space for the returned structure */
6015 size
= type_size(&s
->type
, &align
);
6016 #ifdef TCC_TARGET_ARM64
6017 /* On arm64, a small struct is return in registers.
6018 It is much easier to write it to memory if we know
6019 that we are allowed to write some extra bytes, so
6020 round the allocated space up to a power of 2: */
6022 while (size
& (size
- 1))
6023 size
= (size
| (size
- 1)) + 1;
6025 loc
= (loc
- size
) & -align
;
6027 ret
.r
= VT_LOCAL
| VT_LVAL
;
6028 /* pass it as 'int' to avoid structure arg passing
6030 vseti(VT_LOCAL
, loc
);
6042 if (ret_nregs
> 0) {
6043 /* return in register */
6045 PUT_R_RET(&ret
, ret
.type
.t
);
6050 gfunc_param_typed(s
, sa
);
6060 tcc_error("too few arguments to function");
6062 gfunc_call(nb_args
);
6064 if (ret_nregs
< 0) {
6065 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6066 #ifdef TCC_TARGET_RISCV64
6067 arch_transfer_ret_regs(1);
6071 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6072 vsetc(&ret
.type
, r
, &ret
.c
);
6073 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6076 /* handle packed struct return */
6077 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6080 size
= type_size(&s
->type
, &align
);
6081 /* We're writing whole regs often, make sure there's enough
6082 space. Assume register size is power of 2. */
6083 if (regsize
> align
)
6085 loc
= (loc
- size
) & -align
;
6089 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6093 if (--ret_nregs
== 0)
6097 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6100 /* Promote char/short return values. This is matters only
6101 for calling function that were not compiled by TCC and
6102 only on some architectures. For those where it doesn't
6103 matter we expect things to be already promoted to int,
6105 t
= s
->type
.t
& VT_BTYPE
;
6106 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6108 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6110 vtop
->type
.t
= VT_INT
;
6114 if (s
->f
.func_noreturn
)
6122 #ifndef precedence_parser /* original top-down parser */
6124 static void expr_prod(void)
6129 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6136 static void expr_sum(void)
6141 while ((t
= tok
) == '+' || t
== '-') {
6148 static void expr_shift(void)
6153 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6160 static void expr_cmp(void)
6165 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6166 t
== TOK_ULT
|| t
== TOK_UGE
) {
6173 static void expr_cmpeq(void)
6178 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6185 static void expr_and(void)
6188 while (tok
== '&') {
6195 static void expr_xor(void)
6198 while (tok
== '^') {
6205 static void expr_or(void)
6208 while (tok
== '|') {
6215 static void expr_landor(int op
);
6217 static void expr_land(void)
6220 if (tok
== TOK_LAND
)
6224 static void expr_lor(void)
6231 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6232 #else /* defined precedence_parser */
6233 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6234 # define expr_lor() unary(), expr_infix(1)
6236 static int precedence(int tok
)
6239 case TOK_LOR
: return 1;
6240 case TOK_LAND
: return 2;
6244 case TOK_EQ
: case TOK_NE
: return 6;
6245 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6246 case TOK_SHL
: case TOK_SAR
: return 8;
6247 case '+': case '-': return 9;
6248 case '*': case '/': case '%': return 10;
6250 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6255 static unsigned char prec
[256];
6256 static void init_prec(void)
6259 for (i
= 0; i
< 256; i
++)
6260 prec
[i
] = precedence(i
);
6262 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6264 static void expr_landor(int op
);
6266 static void expr_infix(int p
)
6269 while ((p2
= precedence(t
)) >= p
) {
6270 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6275 if (precedence(tok
) > p2
)
6284 /* Assuming vtop is a value used in a conditional context
6285 (i.e. compared with zero) return 0 if it's false, 1 if
6286 true and -1 if it can't be statically determined. */
6287 static int condition_3way(void)
6290 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6291 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6293 gen_cast_s(VT_BOOL
);
6300 static void expr_landor(int op
)
6302 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6304 c
= f
? i
: condition_3way();
6306 save_regs(1), cc
= 0;
6308 nocode_wanted
++, f
= 1;
6316 expr_landor_next(op
);
6328 static int is_cond_bool(SValue
*sv
)
6330 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6331 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6332 return (unsigned)sv
->c
.i
< 2;
6333 if (sv
->r
== VT_CMP
)
6338 static void expr_cond(void)
6340 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6348 c
= condition_3way();
6349 g
= (tok
== ':' && gnu_ext
);
6359 /* needed to avoid having different registers saved in
6366 ncw_prev
= nocode_wanted
;
6372 if (c
< 0 && vtop
->r
== VT_CMP
) {
6379 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6380 mk_pointer(&vtop
->type
);
6381 sv
= *vtop
; /* save value to handle it later */
6382 vtop
--; /* no vpop so that FP stack is not flushed */
6392 nocode_wanted
= ncw_prev
;
6398 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6399 if (sv
.r
== VT_CMP
) {
6410 nocode_wanted
= ncw_prev
;
6411 // tcc_warning("two conditions expr_cond");
6415 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6416 mk_pointer(&vtop
->type
);
6418 /* cast operands to correct type according to ISOC rules */
6419 if (!combine_types(&type
, &sv
, vtop
, '?'))
6420 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6421 "type mismatch in conditional expression (have '%s' and '%s')");
6422 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6423 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6424 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6426 /* now we convert second operand */
6430 mk_pointer(&vtop
->type
);
6432 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6436 rc
= RC_TYPE(type
.t
);
6437 /* for long longs, we use fixed registers to avoid having
6438 to handle a complicated move */
6439 if (USING_TWO_WORDS(type
.t
))
6440 rc
= RC_RET(type
.t
);
6448 nocode_wanted
= ncw_prev
;
6450 /* this is horrible, but we must also convert first
6456 mk_pointer(&vtop
->type
);
6458 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6464 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6474 static void expr_eq(void)
6479 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6487 gen_op(TOK_ASSIGN_OP(t
));
6493 ST_FUNC
void gexpr(void)
6504 /* parse a constant expression and return value in vtop. */
/* Temporarily raises nocode_wanted past 'unevalmask' so that no code is
   emitted while the constant expression is evaluated, then restores it.
   NOTE(review): garbled extraction — the expression-parsing call between
   the two nocode_wanted adjustments (original line 6509) is missing from
   this chunk. */
6505 static void expr_const1(void)
6508 nocode_wanted
+= unevalmask
+ 1;
6510 nocode_wanted
-= unevalmask
+ 1;
6514 /* parse an integer constant and return its value. */
/* Rejects anything that did not fold to a plain constant (no lvalue, no
   symbol-relative value).
   NOTE(review): garbled extraction — the statements surrounding this
   check (evaluating the expression, reading vtop->c.i, popping the
   value stack, and the return) are missing from this chunk. */
6515 static inline int64_t expr_const64(void)
6519 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6520 expect("constant expression");
6526 /* parse an integer constant and return its value.
6527 Complain if it doesn't fit 32bit (signed or unsigned). */
/* The double comparison accepts values representable either as int32 or
   as uint32; anything wider is reported as an error.
   NOTE(review): garbled extraction — the declaration and assignment of
   'c' (truncated copy of 'wc') and the final 'return c;' are missing
   from this chunk. */
6528 ST_FUNC
int expr_const(void)
6531 int64_t wc
= expr_const64();
6533 if (c
!= wc
&& (unsigned)c
!= wc
)
6534 tcc_error("constant exceeds 32 bit");
6538 /* ------------------------------------------------------------------------- */
6539 /* return from function */
6541 #ifndef TCC_TARGET_ARM64
6542 static void gfunc_return(CType
*func_type
)
6544 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6545 CType type
, ret_type
;
6546 int ret_align
, ret_nregs
, regsize
;
6547 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6548 &ret_align
, ®size
);
6549 if (ret_nregs
< 0) {
6550 #ifdef TCC_TARGET_RISCV64
6551 arch_transfer_ret_regs(0);
6553 } else if (0 == ret_nregs
) {
6554 /* if returning structure, must copy it to implicit
6555 first pointer arg location */
6558 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6561 /* copy structure value to pointer */
6564 /* returning structure packed into registers */
6565 int size
, addr
, align
, rc
;
6566 size
= type_size(func_type
,&align
);
6567 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6568 (vtop
->c
.i
& (ret_align
-1)))
6569 && (align
& (ret_align
-1))) {
6570 loc
= (loc
- size
) & -ret_align
;
6573 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6577 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6579 vtop
->type
= ret_type
;
6580 rc
= RC_RET(ret_type
.t
);
6588 if (--ret_nregs
== 0)
6590 /* We assume that when a structure is returned in multiple
6591 registers, their classes are consecutive values of the
6594 vtop
->c
.i
+= regsize
;
6599 gv(RC_RET(func_type
->t
));
6601 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6605 static void check_func_return(void)
6607 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6609 if (!strcmp (funcname
, "main")
6610 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6611 /* main returns 0 by default */
6613 gen_assign_cast(&func_vt
);
6614 gfunc_return(&func_vt
);
6616 tcc_warning("function might return no value: '%s'", funcname
);
6620 /* ------------------------------------------------------------------------- */
6623 static int case_cmp(const void *pa
, const void *pb
)
6625 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6626 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6627 return a
< b
? -1 : a
> b
;
6630 static void gtst_addr(int t
, int a
)
6632 gsym_addr(gvtst(0, t
), a
);
6635 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6639 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6656 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6658 gcase(base
, len
/2, bsym
);
6662 base
+= e
; len
-= e
;
6672 if (p
->v1
== p
->v2
) {
6674 gtst_addr(0, p
->sym
);
6684 gtst_addr(0, p
->sym
);
6688 *bsym
= gjmp(*bsym
);
6691 /* ------------------------------------------------------------------------- */
6692 /* __attribute__((cleanup(fn))) */
6694 static void try_call_scope_cleanup(Sym
*stop
)
6696 Sym
*cls
= cur_scope
->cl
.s
;
6698 for (; cls
!= stop
; cls
= cls
->ncl
) {
6699 Sym
*fs
= cls
->next
;
6700 Sym
*vs
= cls
->prev_tok
;
6702 vpushsym(&fs
->type
, fs
);
6703 vset(&vs
->type
, vs
->r
, vs
->c
);
6705 mk_pointer(&vtop
->type
);
6711 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6716 if (!cur_scope
->cl
.s
)
6719 /* search NCA of both cleanup chains given parents and initial depth */
6720 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6721 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6723 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6725 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6728 try_call_scope_cleanup(cc
);
6731 /* call 'func' for each __attribute__((cleanup(func))) */
6732 static void block_cleanup(struct scope
*o
)
6736 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6737 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6742 try_call_scope_cleanup(o
->cl
.s
);
6743 pcl
->jnext
= gjmp(0);
6745 goto remove_pending
;
6755 try_call_scope_cleanup(o
->cl
.s
);
6758 /* ------------------------------------------------------------------------- */
6761 static void vla_restore(int loc
)
6764 gen_vla_sp_restore(loc
);
6767 static void vla_leave(struct scope
*o
)
6769 if (o
->vla
.num
< cur_scope
->vla
.num
)
6770 vla_restore(o
->vla
.loc
);
6773 /* ------------------------------------------------------------------------- */
6776 void new_scope(struct scope
*o
)
6778 /* copy and link previous scope */
6780 o
->prev
= cur_scope
;
6783 /* record local declaration stack position */
6784 o
->lstk
= local_stack
;
6785 o
->llstk
= local_label_stack
;
6789 if (tcc_state
->do_debug
)
6790 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
6793 void prev_scope(struct scope
*o
, int is_expr
)
6797 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6798 block_cleanup(o
->prev
);
6800 /* pop locally defined labels */
6801 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6803 /* In the is_expr case (a statement expression is finished here),
6804 vtop might refer to symbols on the local_stack. Either via the
6805 type or via vtop->sym. We can't pop those nor any that in turn
6806 might be referred to. To make it easier we don't roll back
6807 any symbols in that case; some upper level call to block() will
6808 do that. We do have to remove such symbols from the lookup
6809 tables, though. sym_pop will do that. */
6811 /* pop locally defined symbols */
6812 pop_local_syms(&local_stack
, o
->lstk
, is_expr
, 0);
6813 cur_scope
= o
->prev
;
6816 if (tcc_state
->do_debug
)
6817 tcc_debug_stabn(N_RBRAC
, ind
- func_ind
);
6820 /* leave a scope via break/continue(/goto) */
6821 void leave_scope(struct scope
*o
)
6825 try_call_scope_cleanup(o
->cl
.s
);
6829 /* ------------------------------------------------------------------------- */
6830 /* call block from 'for do while' loops */
6832 static void lblock(int *bsym
, int *csym
)
6834 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6835 int *b
= co
->bsym
, *c
= co
->csym
;
6849 static void block(int is_expr
)
6851 int a
, b
, c
, d
, e
, t
;
6856 /* default return value is (void) */
6858 vtop
->type
.t
= VT_VOID
;
6870 if (tok
== TOK_ELSE
) {
6875 gsym(d
); /* patch else jmp */
6880 } else if (t
== TOK_WHILE
) {
6892 } else if (t
== '{') {
6895 /* handle local labels declarations */
6896 while (tok
== TOK_LABEL
) {
6899 if (tok
< TOK_UIDENT
)
6900 expect("label identifier");
6901 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6903 } while (tok
== ',');
6907 while (tok
!= '}') {
6916 prev_scope(&o
, is_expr
);
6919 else if (!nocode_wanted
)
6920 check_func_return();
6922 } else if (t
== TOK_RETURN
) {
6923 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6927 gen_assign_cast(&func_vt
);
6929 if (vtop
->type
.t
!= VT_VOID
)
6930 tcc_warning("void function returns a value");
6934 tcc_warning("'return' with no value");
6937 leave_scope(root_scope
);
6939 gfunc_return(&func_vt
);
6941 /* jump unless last stmt in top-level block */
6942 if (tok
!= '}' || local_scope
!= 1)
6946 } else if (t
== TOK_BREAK
) {
6948 if (!cur_scope
->bsym
)
6949 tcc_error("cannot break");
6950 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
6951 leave_scope(cur_switch
->scope
);
6953 leave_scope(loop_scope
);
6954 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6957 } else if (t
== TOK_CONTINUE
) {
6959 if (!cur_scope
->csym
)
6960 tcc_error("cannot continue");
6961 leave_scope(loop_scope
);
6962 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6965 } else if (t
== TOK_FOR
) {
6970 /* c99 for-loop init decl? */
6971 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6972 /* no, regular for-loop init expr */
7000 } else if (t
== TOK_DO
) {
7014 } else if (t
== TOK_SWITCH
) {
7015 struct switch_t
*sw
;
7017 sw
= tcc_mallocz(sizeof *sw
);
7019 sw
->scope
= cur_scope
;
7020 sw
->prev
= cur_switch
;
7026 sw
->sv
= *vtop
--; /* save switch value */
7029 b
= gjmp(0); /* jump to first case */
7031 a
= gjmp(a
); /* add implicit break */
7035 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmp
);
7036 for (b
= 1; b
< sw
->n
; b
++)
7037 if (sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7038 tcc_error("duplicate case value");
7040 /* Our switch table sorting is signed, so the compared
7041 value needs to be as well when it's 64bit. */
7043 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
7044 vtop
->type
.t
&= ~VT_UNSIGNED
;
7046 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7049 gsym_addr(d
, sw
->def_sym
);
7055 dynarray_reset(&sw
->p
, &sw
->n
);
7056 cur_switch
= sw
->prev
;
7059 } else if (t
== TOK_CASE
) {
7060 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7063 cr
->v1
= cr
->v2
= expr_const64();
7064 if (gnu_ext
&& tok
== TOK_DOTS
) {
7066 cr
->v2
= expr_const64();
7067 if (cr
->v2
< cr
->v1
)
7068 tcc_warning("empty case range");
7071 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7074 goto block_after_label
;
7076 } else if (t
== TOK_DEFAULT
) {
7079 if (cur_switch
->def_sym
)
7080 tcc_error("too many 'default'");
7081 cur_switch
->def_sym
= gind();
7084 goto block_after_label
;
7086 } else if (t
== TOK_GOTO
) {
7087 vla_restore(root_scope
->vla
.loc
);
7088 if (tok
== '*' && gnu_ext
) {
7092 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7096 } else if (tok
>= TOK_UIDENT
) {
7097 s
= label_find(tok
);
7098 /* put forward definition if needed */
7100 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7101 else if (s
->r
== LABEL_DECLARED
)
7102 s
->r
= LABEL_FORWARD
;
7104 if (s
->r
& LABEL_FORWARD
) {
7105 /* start new goto chain for cleanups, linked via label->next */
7106 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7107 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7108 pending_gotos
->prev_tok
= s
;
7109 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7110 pending_gotos
->next
= s
;
7112 s
->jnext
= gjmp(s
->jnext
);
7114 try_call_cleanup_goto(s
->cleanupstate
);
7115 gjmp_addr(s
->jnext
);
7120 expect("label identifier");
7124 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7128 if (tok
== ':' && t
>= TOK_UIDENT
) {
7133 if (s
->r
== LABEL_DEFINED
)
7134 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7135 s
->r
= LABEL_DEFINED
;
7137 Sym
*pcl
; /* pending cleanup goto */
7138 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7140 sym_pop(&s
->next
, NULL
, 0);
7144 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7147 s
->cleanupstate
= cur_scope
->cl
.s
;
7150 vla_restore(cur_scope
->vla
.loc
);
7151 /* we accept this, but it is a mistake */
7153 tcc_warning("deprecated use of label at end of compound statement");
7159 /* expression case */
7175 /* This skips over a stream of tokens containing balanced {} and ()
7176 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7177 with a '{'). If STR then allocates and stores the skipped tokens
7178 in *STR. This doesn't check if () and {} are nested correctly,
7179 i.e. "({)}" is accepted. */
7180 static void skip_or_save_block(TokenString
**str
)
7182 int braces
= tok
== '{';
7185 *str
= tok_str_alloc();
7187 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
7189 if (tok
== TOK_EOF
) {
7190 if (str
|| level
> 0)
7191 tcc_error("unexpected end of file");
7196 tok_str_add_tok(*str
);
7199 if (t
== '{' || t
== '(') {
7201 } else if (t
== '}' || t
== ')') {
7203 if (level
== 0 && braces
&& t
== '}')
7208 tok_str_add(*str
, -1);
7209 tok_str_add(*str
, 0);
7213 #define EXPR_CONST 1
7216 static void parse_init_elem(int expr_type
)
7218 int saved_global_expr
;
7221 /* compound literals must be allocated globally in this case */
7222 saved_global_expr
= global_expr
;
7225 global_expr
= saved_global_expr
;
7226 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7227 (compound literals). */
7228 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7229 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7230 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7231 #ifdef TCC_TARGET_PE
7232 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7235 tcc_error("initializer element is not constant");
7243 /* put zeros for variable based init */
7244 static void init_putz(Section
*sec
, unsigned long c
, int size
)
7247 /* nothing to do because globals are already set to zero */
7249 vpush_global_sym(&func_old_type
, TOK_memset
);
7251 #ifdef TCC_TARGET_ARM
7263 #define DIF_SIZE_ONLY 2
7264 #define DIF_HAVE_ELEM 4
7266 /* t is the array or struct type. c is the array or struct
7267 address. cur_field is the pointer to the current
7268 field, for arrays the 'c' member contains the current start
7269 index. 'flags' is as in decl_initializer.
7270 'al' contains the already initialized length of the
7271 current container (starting at c). This returns the new length of that. */
7272 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
7273 Sym
**cur_field
, int flags
, int al
)
7276 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7277 unsigned long corig
= c
;
7282 if (flags
& DIF_HAVE_ELEM
)
7285 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7292 /* NOTE: we only support ranges for last designator */
7293 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7295 if (!(type
->t
& VT_ARRAY
))
7296 expect("array type");
7298 index
= index_last
= expr_const();
7299 if (tok
== TOK_DOTS
&& gnu_ext
) {
7301 index_last
= expr_const();
7305 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
7307 tcc_error("invalid index");
7309 (*cur_field
)->c
= index_last
;
7310 type
= pointed_type(type
);
7311 elem_size
= type_size(type
, &align
);
7312 c
+= index
* elem_size
;
7313 nb_elems
= index_last
- index
+ 1;
7320 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7321 expect("struct/union type");
7323 f
= find_field(type
, l
, &cumofs
);
7336 } else if (!gnu_ext
) {
7341 if (type
->t
& VT_ARRAY
) {
7342 index
= (*cur_field
)->c
;
7343 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
7344 tcc_error("index too large");
7345 type
= pointed_type(type
);
7346 c
+= index
* type_size(type
, &align
);
7349 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7350 *cur_field
= f
= f
->next
;
7352 tcc_error("too many field init");
7357 /* must put zero in holes (note that doing it that way
7358 ensures that it even works with designators) */
7359 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
7360 init_putz(sec
, corig
+ al
, c
- corig
- al
);
7361 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
7363 /* XXX: make it more general */
7364 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7365 unsigned long c_end
;
7370 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7371 for (i
= 1; i
< nb_elems
; i
++) {
7372 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
7377 } else if (!NODATA_WANTED
) {
7378 c_end
= c
+ nb_elems
* elem_size
;
7379 if (c_end
> sec
->data_allocated
)
7380 section_realloc(sec
, c_end
);
7381 src
= sec
->data
+ c
;
7383 for(i
= 1; i
< nb_elems
; i
++) {
7385 memcpy(dst
, src
, elem_size
);
7389 c
+= nb_elems
* type_size(type
, &align
);
7395 /* store a value or an expression directly in global data or in local array */
7396 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
7403 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7407 /* XXX: not portable */
7408 /* XXX: generate error if incorrect relocation */
7409 gen_assign_cast(&dtype
);
7410 bt
= type
->t
& VT_BTYPE
;
7412 if ((vtop
->r
& VT_SYM
)
7415 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7416 || (type
->t
& VT_BITFIELD
))
7417 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7419 tcc_error("initializer element is not computable at load time");
7421 if (NODATA_WANTED
) {
7426 size
= type_size(type
, &align
);
7427 section_reserve(sec
, c
+ size
);
7428 ptr
= sec
->data
+ c
;
7430 /* XXX: make code faster ? */
7431 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7432 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7433 /* XXX This rejects compound literals like
7434 '(void *){ptr}'. The problem is that '&sym' is
7435 represented the same way, which would be ruled out
7436 by the SYM_FIRST_ANOM check above, but also '"string"'
7437 in 'char *p = "string"' is represented the same
7438 with the type being VT_PTR and the symbol being an
7439 anonymous one. That is, there's no difference in vtop
7440 between '(void *){x}' and '&(void *){x}'. Ignore
7441 pointer typed entities here. Hopefully no real code
7442 will ever use compound literals with scalar type. */
7443 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7444 /* These come from compound literals, memcpy stuff over. */
7448 esym
= elfsym(vtop
->sym
);
7449 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7450 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7452 /* We need to copy over all memory contents, and that
7453 includes relocations. Use the fact that relocs are
7454 created it order, so look from the end of relocs
7455 until we hit one before the copied region. */
7456 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
7457 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
7458 while (num_relocs
--) {
7460 if (rel
->r_offset
>= esym
->st_value
+ size
)
7462 if (rel
->r_offset
< esym
->st_value
)
7464 /* Note: if the same fields are initialized multiple
7465 times (possible with designators) then we possibly
7466 add multiple relocations for the same offset here.
7467 That would lead to wrong code, the last reloc needs
7468 to win. We clean this up later after the whole
7469 initializer is parsed. */
7470 put_elf_reloca(symtab_section
, sec
,
7471 c
+ rel
->r_offset
- esym
->st_value
,
7472 ELFW(R_TYPE
)(rel
->r_info
),
7473 ELFW(R_SYM
)(rel
->r_info
),
7483 if (type
->t
& VT_BITFIELD
) {
7484 int bit_pos
, bit_size
, bits
, n
;
7485 unsigned char *p
, v
, m
;
7486 bit_pos
= BIT_POS(vtop
->type
.t
);
7487 bit_size
= BIT_SIZE(vtop
->type
.t
);
7488 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7489 bit_pos
&= 7, bits
= 0;
7494 v
= vtop
->c
.i
>> bits
<< bit_pos
;
7495 m
= ((1 << n
) - 1) << bit_pos
;
7496 *p
= (*p
& ~m
) | (v
& m
);
7497 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7501 /* XXX: when cross-compiling we assume that each type has the
7502 same representation on host and target, which is likely to
7503 be wrong in the case of long double */
7505 vtop
->c
.i
= vtop
->c
.i
!= 0;
7507 *(char *)ptr
|= vtop
->c
.i
;
7510 *(short *)ptr
|= vtop
->c
.i
;
7513 *(float*)ptr
= vtop
->c
.f
;
7516 *(double *)ptr
= vtop
->c
.d
;
7519 #if defined TCC_IS_NATIVE_387
7520 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7521 memcpy(ptr
, &vtop
->c
.ld
, 10);
7523 else if (sizeof (long double) == sizeof (double))
7524 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7526 else if (vtop
->c
.ld
== 0.0)
7530 if (sizeof(long double) == LDOUBLE_SIZE
)
7531 *(long double*)ptr
= vtop
->c
.ld
;
7532 else if (sizeof(double) == LDOUBLE_SIZE
)
7533 *(double *)ptr
= (double)vtop
->c
.ld
;
7535 tcc_error("can't cross compile long double constants");
7539 *(long long *)ptr
|= vtop
->c
.i
;
7546 addr_t val
= vtop
->c
.i
;
7548 if (vtop
->r
& VT_SYM
)
7549 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7551 *(addr_t
*)ptr
|= val
;
7553 if (vtop
->r
& VT_SYM
)
7554 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7555 *(addr_t
*)ptr
|= val
;
7561 int val
= vtop
->c
.i
;
7563 if (vtop
->r
& VT_SYM
)
7564 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7568 if (vtop
->r
& VT_SYM
)
7569 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7578 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7585 /* 't' contains the type and storage info. 'c' is the offset of the
7586 object in section 'sec'. If 'sec' is NULL, it means stack based
7587 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7588 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7589 size only evaluation is wanted (only for arrays). */
7590 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
7593 int len
, n
, no_oblock
, i
;
7599 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7600 /* In case of strings we have special handling for arrays, so
7601 don't consume them as initializer value (which would commit them
7602 to some anonymous symbol). */
7603 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7604 !(flags
& DIF_SIZE_ONLY
)) {
7605 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7606 flags
|= DIF_HAVE_ELEM
;
7609 if ((flags
& DIF_HAVE_ELEM
) &&
7610 !(type
->t
& VT_ARRAY
) &&
7611 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7612 The source type might have VT_CONSTANT set, which is
7613 of course assignable to non-const elements. */
7614 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7615 init_putv(type
, sec
, c
);
7616 } else if (type
->t
& VT_ARRAY
) {
7619 t1
= pointed_type(type
);
7620 size1
= type_size(t1
, &align1
);
7623 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7626 tcc_error("character array initializer must be a literal,"
7627 " optionally enclosed in braces");
7632 /* only parse strings here if correct type (otherwise: handle
7633 them as ((w)char *) expressions */
7634 if ((tok
== TOK_LSTR
&&
7635 #ifdef TCC_TARGET_PE
7636 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7638 (t1
->t
& VT_BTYPE
) == VT_INT
7640 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7643 cstr_reset(&initstr
);
7644 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7645 tcc_error("unhandled string literal merging");
7646 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7648 initstr
.size
-= size1
;
7650 len
+= tokc
.str
.size
;
7652 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7654 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7657 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7658 && tok
!= TOK_EOF
) {
7659 /* Not a lone literal but part of a bigger expression. */
7660 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7661 tokc
.str
.size
= initstr
.size
;
7662 tokc
.str
.data
= initstr
.data
;
7668 if (n
>= 0 && len
> n
)
7670 if (!(flags
& DIF_SIZE_ONLY
)) {
7671 if (sec
&& !NODATA_WANTED
&&
7672 (c
+ nb
> sec
->data_allocated
))
7673 nb
= sec
->data_allocated
- c
;
7675 tcc_warning("initializer-string for array is too long");
7676 /* in order to go faster for common case (char
7677 string in global variable, we handle it
7679 if (sec
&& size1
== 1) {
7681 memcpy(sec
->data
+ c
, initstr
.data
, nb
);
7685 ch
= ((unsigned char *)initstr
.data
)[i
];
7687 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7689 init_putv(t1
, sec
, c
+ i
* size1
);
7693 /* only add trailing zero if enough storage (no
7694 warning in this case since it is standard) */
7695 if (n
< 0 || len
< n
) {
7696 if (!(flags
& DIF_SIZE_ONLY
)) {
7698 init_putv(t1
, sec
, c
+ (len
* size1
));
7709 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7710 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7711 flags
&= ~DIF_HAVE_ELEM
;
7712 if (type
->t
& VT_ARRAY
) {
7714 /* special test for multi dimensional arrays (may not
7715 be strictly correct if designators are used at the
7717 if (no_oblock
&& len
>= n
*size1
)
7720 if (s
->type
.t
== VT_UNION
)
7724 if (no_oblock
&& f
== NULL
)
7733 /* put zeros at the end */
7734 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7735 init_putz(sec
, c
+ len
, n
*size1
- len
);
7738 /* patch type size if needed, which happens only for array types */
7740 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7741 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7744 if ((flags
& DIF_FIRST
) || tok
== '{') {
7752 } else if (tok
== '{') {
7753 if (flags
& DIF_HAVE_ELEM
)
7756 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7758 } else if ((flags
& DIF_SIZE_ONLY
)) {
7759 /* If we supported only ISO C we wouldn't have to accept calling
7760 this on anything than an array if DIF_SIZE_ONLY (and even then
7761 only on the outermost level, so no recursion would be needed),
7762 because initializing a flex array member isn't supported.
7763 But GNU C supports it, so we need to recurse even into
7764 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7765 /* just skip expression */
7766 skip_or_save_block(NULL
);
7768 if (!(flags
& DIF_HAVE_ELEM
)) {
7769 /* This should happen only when we haven't parsed
7770 the init element above for fear of committing a
7771 string constant to memory too early. */
7772 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7773 expect("string constant");
7774 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7776 init_putv(type
, sec
, c
);
7780 /* parse an initializer for type 't' if 'has_init' is non zero, and
7781 allocate space in local or global data space ('r' is either
7782 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7783 variable 'v' of scope 'scope' is declared before initializers
7784 are parsed. If 'v' is zero, then a reference to the new object
7785 is put in the value stack. If 'has_init' is 2, a special parsing
7786 is done to handle string constants. */
7787 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7788 int has_init
, int v
, int scope
)
7790 int size
, align
, addr
;
7791 TokenString
*init_str
= NULL
;
7794 Sym
*flexible_array
;
7796 int saved_nocode_wanted
= nocode_wanted
;
7797 #ifdef CONFIG_TCC_BCHECK
7798 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7801 /* Always allocate static or global variables */
7802 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7803 nocode_wanted
|= 0x80000000;
7805 flexible_array
= NULL
;
7806 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7807 Sym
*field
= type
->ref
->next
;
7810 field
= field
->next
;
7811 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7812 flexible_array
= field
;
7816 size
= type_size(type
, &align
);
7817 /* If unknown size, we must evaluate it before
7818 evaluating initializers because
7819 initializers can generate global data too
7820 (e.g. string pointers or ISOC99 compound
7821 literals). It also simplifies local
7822 initializers handling */
7823 if (size
< 0 || (flexible_array
&& has_init
)) {
7825 tcc_error("unknown type size");
7826 /* get all init string */
7827 if (has_init
== 2) {
7828 init_str
= tok_str_alloc();
7829 /* only get strings */
7830 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7831 tok_str_add_tok(init_str
);
7834 tok_str_add(init_str
, -1);
7835 tok_str_add(init_str
, 0);
7837 skip_or_save_block(&init_str
);
7842 begin_macro(init_str
, 1);
7844 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7845 /* prepare second initializer parsing */
7846 macro_ptr
= init_str
->str
;
7849 /* if still unknown size, error */
7850 size
= type_size(type
, &align
);
7852 tcc_error("unknown type size");
7854 /* If there's a flex member and it was used in the initializer
7856 if (flexible_array
&&
7857 flexible_array
->type
.ref
->c
> 0)
7858 size
+= flexible_array
->type
.ref
->c
7859 * pointed_size(&flexible_array
->type
);
7860 /* take into account specified alignment if bigger */
7861 if (ad
->a
.aligned
) {
7862 int speca
= 1 << (ad
->a
.aligned
- 1);
7865 } else if (ad
->a
.packed
) {
7869 if (!v
&& NODATA_WANTED
)
7870 size
= 0, align
= 1;
7872 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7874 #ifdef CONFIG_TCC_BCHECK
7876 /* add padding between stack variables for bound checking */
7880 loc
= (loc
- size
) & -align
;
7882 #ifdef CONFIG_TCC_BCHECK
7884 /* add padding between stack variables for bound checking */
7889 /* local variable */
7890 #ifdef CONFIG_TCC_ASM
7891 if (ad
->asm_label
) {
7892 int reg
= asm_parse_regvar(ad
->asm_label
);
7894 r
= (r
& ~VT_VALMASK
) | reg
;
7897 sym
= sym_push(v
, type
, r
, addr
);
7898 if (ad
->cleanup_func
) {
7899 Sym
*cls
= sym_push2(&all_cleanups
,
7900 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7901 cls
->prev_tok
= sym
;
7902 cls
->next
= ad
->cleanup_func
;
7903 cls
->ncl
= cur_scope
->cl
.s
;
7904 cur_scope
->cl
.s
= cls
;
7909 /* push local reference */
7910 vset(type
, r
, addr
);
7913 if (v
&& scope
== VT_CONST
) {
7914 /* see if the symbol was already defined */
7917 patch_storage(sym
, ad
, type
);
7918 /* we accept several definitions of the same global variable. */
7919 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7924 /* allocate symbol in corresponding section */
7929 else if (tcc_state
->nocommon
)
7934 addr
= section_add(sec
, size
, align
);
7935 #ifdef CONFIG_TCC_BCHECK
7936 /* add padding if bound check */
7938 section_add(sec
, 1, 1);
7941 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7942 sec
= common_section
;
7947 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7948 patch_storage(sym
, ad
, NULL
);
7950 /* update symbol definition */
7951 put_extern_sym(sym
, sec
, addr
, size
);
7953 /* push global reference */
7954 vpush_ref(type
, sec
, addr
, size
);
7959 #ifdef CONFIG_TCC_BCHECK
7960 /* handles bounds now because the symbol must be defined
7961 before for the relocation */
7965 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7966 /* then add global bound info */
7967 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7968 bounds_ptr
[0] = 0; /* relocated */
7969 bounds_ptr
[1] = size
;
7974 if (type
->t
& VT_VLA
) {
7980 /* save current stack pointer */
7981 if (root_scope
->vla
.loc
== 0) {
7982 struct scope
*v
= cur_scope
;
7983 gen_vla_sp_save(loc
-= PTR_SIZE
);
7984 do v
->vla
.loc
= loc
; while ((v
= v
->prev
));
7987 vla_runtime_type_size(type
, &a
);
7988 gen_vla_alloc(type
, a
);
7989 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7990 /* on _WIN64, because of the function args scratch area, the
7991 result of alloca differs from RSP and is returned in RAX. */
7992 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7994 gen_vla_sp_save(addr
);
7995 cur_scope
->vla
.loc
= addr
;
7996 cur_scope
->vla
.num
++;
7997 } else if (has_init
) {
7998 size_t oldreloc_offset
= 0;
7999 if (sec
&& sec
->reloc
)
8000 oldreloc_offset
= sec
->reloc
->data_offset
;
8001 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
8002 if (sec
&& sec
->reloc
)
8003 squeeze_multi_relocs(sec
, oldreloc_offset
);
8004 /* patch flexible array member size back to -1, */
8005 /* for possible subsequent similar declarations */
8007 flexible_array
->type
.ref
->c
= -1;
8011 /* restore parse state if needed */
8017 nocode_wanted
= saved_nocode_wanted
;
8020 /* parse a function defined by symbol 'sym' and generate its code in
8021 'cur_text_section' */
/* NOTE(review): this chunk is extraction-mangled — statements are split
   across lines, original line numbers are fused into the text, and several
   original lines are missing (gaps in the fused numbering, e.g. the opening
   brace at 8023 and the code between 8055 and 8059 that actually compiles
   the function body).  Code is kept byte-identical; only comments added. */
8022 static void gen_function(Sym
*sym
)
8024 /* Initialize VLA state */
8025 struct scope f
= { 0 };
/* make this fresh scope both the current and the root scope of the function */
8026 cur_scope
= root_scope
= &f
;
/* start emitting at the current end of the text section */
8029 ind
= cur_text_section
->data_offset
;
/* honor __attribute__((aligned(N))): a.aligned stores log2(N)+1, so pad the
   section to 1 << (aligned - 1) and fill the padding with nops */
8030 if (sym
->a
.aligned
) {
8031 size_t newoff
= section_add(cur_text_section
, 0,
8032 1 << (sym
->a
.aligned
- 1));
8033 gen_fill_nops(newoff
- ind
);
8035 /* NOTE: we patch the symbol size later */
8036 put_extern_sym(sym
, cur_text_section
, ind
, 0);
/* register constructor/destructor functions in the ELF init/fini arrays */
8037 if (sym
->type
.ref
->f
.func_ctor
)
8038 add_array (tcc_state
, ".init_array", sym
->c
);
8039 if (sym
->type
.ref
->f
.func_dtor
)
8040 add_array (tcc_state
, ".fini_array", sym
->c
);
/* expose the function's name for error messages / debug info */
8042 funcname
= get_tok_str(sym
->v
, NULL
);
/* func_vt = return type; func_var = nonzero if the function is variadic */
8044 func_vt
= sym
->type
.ref
->type
;
8045 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8047 /* put debug symbol */
8048 tcc_debug_funcstart(tcc_state
, sym
);
8049 /* push a dummy symbol to enable local sym storage */
8050 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8051 local_scope
= 1; /* for function parameters */
8055 clear_temp_local_var_list();
/* NOTE(review): the code that emits the prolog, compiles the body and emits
   the epilog (original lines 8056-8058) is missing from this extraction —
   confirm against the full source. */
8059 /* reset local stack */
8060 pop_local_syms(&local_stack
, NULL
, 0, func_var
);
/* commit the final emitted-code size back to the section */
8062 cur_text_section
->data_offset
= ind
;
/* discard function-local labels and pending cleanup records */
8064 label_pop(&global_label_stack
, NULL
, 0);
8065 sym_pop(&all_cleanups
, NULL
, 0);
8066 /* patch symbol size */
8067 elfsym(sym
)->st_size
= ind
- func_ind
;
8068 /* end of function */
8069 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8070 /* It's better to crash than to generate wrong code */
8071 cur_text_section
= NULL
;
8072 funcname
= ""; /* for safety */
8073 func_vt
.t
= VT_VOID
; /* for safety */
8074 func_var
= 0; /* for safety */
8075 ind
= 0; /* for safety */
/* outside a function: suppress code generation again (sign bit set means
   only static data output is wanted, per STATIC_DATA_WANTED above) */
8076 nocode_wanted
= 0x80000000;
8078 /* do this after funcend debug info */
/* Emit code for all referenced 'static inline' functions that were recorded
   (as saved token strings) during parsing, repeating until a pass generates
   nothing new, since emitting one inline function may reference another.
   NOTE(review): extraction gaps — the opening brace and the declaration /
   assignment of 'sym' (original lines 8083-8084, 8094), the 'do {' opener
   (around 8090), and the call that actually compiles the function (around
   8103-8105) are missing from this view; code kept byte-identical. */
8082 static void gen_inline_functions(TCCState
*s
)
8085 int inline_generated
, i
;
8086 struct InlineFunc
*fn
;
/* open a pseudo input buffer so diagnostics during expansion have a name */
8088 tcc_open_bf(s
, ":inline:", 0);
8089 /* iterate while inline function are referenced */
8091 inline_generated
= 0;
8092 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8093 fn
= s
->inline_fns
[i
];
/* generate only if the symbol was referenced (sym->c set) or the function
   is no longer marked inline-only */
8095 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8096 /* the function was used or forced (and then not internal):
8097 generate its code and convert it to a normal function */
8099 tcc_debug_putfile(s
, fn
->filename
);
/* replay the saved token string of the inline function's body */
8100 begin_macro(fn
->func_str
, 1);
8102 cur_text_section
= text_section
;
/* remember that this pass emitted something: re-run the outer loop in case
   the generated code referenced further inline functions */
8106 inline_generated
= 1;
8109 } while (inline_generated
);
/* Release the saved token strings of inline functions that were never
   emitted, then free the InlineFunc records and the dynarray itself.
   NOTE(review): extraction gaps — the opening brace and the loop-index
   declaration (original lines 8114-8115), a guard condition before the
   free (8119) and the closing braces (8121, 8123-8124) are missing from
   this view; code kept byte-identical. */
8113 static void free_inline_functions(TCCState
*s
)
8116 /* free tokens of unused inline functions */
8117 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8118 struct InlineFunc
*fn
= s
->inline_fns
[i
];
/* free the recorded token string for this inline function */
8120 tok_str_free(fn
->func_str
);
/* frees every element and resets the array pointer/count to NULL/0 */
8122 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
8125 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8126 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* NOTE(review): this block is extraction-mangled — statements are split
   across lines, original line numbers are fused into the text, and large
   runs of original lines are missing throughout (e.g. 8128-8131 locals,
   8136-8144, 8246-8253, 8264-8271, 8309-8313, 8338-8343, 8372-8376,
   8381-8395).  Code is kept byte-identical; only comments are added.
   Confirm every gap against the full tccgen.c before relying on this. */
8127 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
8132 AttributeDef ad
, adbase
;
/* ---- C11 _Static_assert: evaluate the constant, error on failure ---- */
8135 if (tok
== TOK_STATIC_ASSERT
) {
8145 tcc_error("_Static_assert fail");
8147 goto static_assert_out
;
/* parse the (possibly concatenated) message string and report it */
8151 parse_mult_str(&error_str
, "string constant");
8153 tcc_error("%s", (char *)error_str
.data
);
8154 cstr_free(&error_str
);
/* ---- no base type could be parsed at this point ---- */
8160 if (!parse_btype(&btype
, &adbase
)) {
8161 if (is_for_loop_init
)
8163 /* skip redundant ';' if not in old parameter decl scope */
8164 if (tok
== ';' && l
!= VT_CMP
) {
8170 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8171 /* global asm block */
8175 if (tok
>= TOK_UIDENT
) {
8176 /* special test for old K&R protos without explicit int
8177 type. Only accepted when defining global data */
8181 expect("declaration");
/* ---- base type parsed but no declarator follows: warn on useless
   anonymous struct/union declarations ---- */
8186 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8187 int v
= btype
.ref
->v
;
8188 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8189 tcc_warning("unnamed struct/union that defines no instances");
8193 if (IS_ENUM(btype
.t
)) {
8198 while (1) { /* iterate thru each declaration */
8200 /* If the base type itself was an array type of unspecified
8201 size (like in 'typedef int arr[]; arr x = {1};') then
8202 we will overwrite the unknown size by the real one for
8203 this decl. We need to unshare the ref symbol holding
8205 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
/* unshare: push a private copy of the array ref so the size patch
   does not leak into other users of the typedef */
8206 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
/* parse the declarator, yielding the declared token id in 'v' */
8209 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* debug dump of the parsed type (presumably inside a debug-only #if;
   the guard lines 8210-8212 are missing from this extraction) */
8213 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8214 printf("type = '%s'\n", buf
);
/* ---- declarator is a function type ---- */
8217 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8218 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
8219 tcc_error("function without file scope cannot be static");
8220 /* if old style function prototype, we accept a
8223 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
/* recurse with VT_CMP to parse the old-style parameter decl list */
8224 decl0(VT_CMP
, 0, sym
);
8225 #ifdef TCC_TARGET_MACHO
8226 if (sym
->f
.func_alwinl
8227 && ((type
.t
& (VT_EXTERN
| VT_INLINE
))
8228 == (VT_EXTERN
| VT_INLINE
))) {
8229 /* always_inline functions must be handled as if they
8230 don't generate multiple global defs, even if extern
8231 inline, i.e. GNU inline semantics for those. Rewrite
8232 them into static inline. */
8233 type
.t
&= ~VT_EXTERN
;
8234 type
.t
|= VT_STATIC
;
8237 /* always compile 'extern inline' */
8238 if (type
.t
& VT_EXTERN
)
8239 type
.t
&= ~VT_INLINE
;
/* GNU extension: __asm__("label") after the declarator renames the symbol */
8242 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8243 ad
.asm_label
= asm_label_instr();
8244 /* parse one last attribute list, after asm label */
8245 parse_attribute(&ad
);
8247 /* gcc does not allow __asm__("label") with function definition,
8254 #ifdef TCC_TARGET_PE
/* Windows PE: validate dllimport/dllexport attribute combinations */
8255 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8256 if (type
.t
& VT_STATIC
)
8257 tcc_error("cannot have dll linkage with static");
8258 if (type
.t
& VT_TYPEDEF
) {
/* the comma operator clears the offending flag, then yields its name */
8259 tcc_warning("'%s' attribute ignored for typedef",
8260 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8261 (ad
.a
.dllexport
= 0, "dllexport"));
8262 } else if (ad
.a
.dllimport
) {
8263 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8266 type
.t
|= VT_EXTERN
;
/* ---- function DEFINITION (body follows) ---- */
8272 tcc_error("cannot use local functions");
8273 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8274 expect("function definition");
8276 /* reject abstract declarators in function definition
8277 make old style params without decl have int type */
8279 while ((sym
= sym
->next
) != NULL
) {
8280 if (!(sym
->v
& ~SYM_FIELD
))
8281 expect("identifier");
8282 if (sym
->type
.t
== VT_VOID
)
/* K&R parameter with no declaration defaults to int */
8283 sym
->type
= int_type
;
8286 /* apply post-declaraton attributes */
8287 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8289 /* put function symbol */
8290 type
.t
&= ~VT_EXTERN
;
8291 sym
= external_sym(v
, &type
, 0, &ad
);
8293 /* static inline functions are just recorded as a kind
8294 of macro. Their code will be emitted at the end of
8295 the compilation unit only if they are used */
8296 if (sym
->type
.t
& VT_INLINE
) {
8297 struct InlineFunc
*fn
;
/* struct InlineFunc ends with a filename array; allocate the record plus
   room for the current file name (NUL accounted for in sizeof *fn) */
8298 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8299 strcpy(fn
->filename
, file
->filename
);
/* save the body's tokens for possible later generation */
8301 skip_or_save_block(&fn
->func_str
);
8302 dynarray_add(&tcc_state
->inline_fns
,
8303 &tcc_state
->nb_inline_fns
, fn
);
8305 /* compute text section */
8306 cur_text_section
= ad
.section
;
8307 if (!cur_text_section
)
8308 cur_text_section
= text_section
;
/* ---- old-style (K&R) parameter declaration (l == VT_CMP path) ---- */
8314 /* find parameter in function parameter list */
8315 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
8316 if ((sym
->v
& ~SYM_FIELD
) == v
)
8318 tcc_error("declaration for parameter '%s' but no such parameter",
8319 get_tok_str(v
, NULL
));
8321 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8322 tcc_error("storage class specified for '%s'",
8323 get_tok_str(v
, NULL
));
/* VT_VOID here marks "not yet declared"; anything else is a duplicate */
8324 if (sym
->type
.t
!= VT_VOID
)
8325 tcc_error("redefinition of parameter '%s'",
8326 get_tok_str(v
, NULL
));
/* apply the default argument promotions to the parameter type */
8327 convert_parameter_type(&type
);
8329 } else if (type
.t
& VT_TYPEDEF
) {
8330 /* save typedefed type */
8331 /* XXX: test storage specifiers ? */
/* a typedef may be repeated in the same scope only if compatible */
8333 if (sym
&& sym
->sym_scope
== local_scope
) {
8334 if (!is_compatible_types(&sym
->type
, &type
)
8335 || !(sym
->type
.t
& VT_TYPEDEF
))
8336 tcc_error("incompatible redefinition of '%s'",
8337 get_tok_str(v
, NULL
));
8340 sym
= sym_push(v
, &type
, 0, 0);
8344 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8345 && !(type
.t
& VT_EXTERN
)) {
8346 tcc_error("declaration of void object");
/* ---- plain variable or function declaration ---- */
8349 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8350 /* external function definition */
8351 /* specific case for func_call attribute */
8353 } else if (!(type
.t
& VT_ARRAY
)) {
8354 /* not lvalue if array */
8357 has_init
= (tok
== '=');
8358 if (has_init
&& (type
.t
& VT_VLA
))
8359 tcc_error("variable length array cannot be initialized");
/* treat as external: explicit extern without init at file scope, any
   function declaration, or an uninitialized global array of unknown size */
8360 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8361 || (type
.t
& VT_BTYPE
) == VT_FUNC
8362 /* as with GCC, uninitialized global arrays with no size
8363 are considered extern: */
8364 || ((type
.t
& VT_ARRAY
) && !has_init
8365 && l
== VT_CONST
&& type
.ref
->c
< 0)
8367 /* external variable or function */
8368 type
.t
|= VT_EXTERN
;
8369 sym
= external_sym(v
, &type
, r
, &ad
);
8371 if (type
.t
& VT_STATIC
)
8377 else if (l
== VT_CONST
)
8378 /* uninitialized global variables may be overridden */
8379 type
.t
|= VT_EXTERN
;
/* allocate storage and parse the initializer (if any) */
8380 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
/* a for-loop init clause contains exactly one declaration: stop here */
8384 if (is_for_loop_init
)
/* Entry point for parsing a sequence of declarations with default storage
   'l' (VT_CONST for file scope, VT_LOCAL for block scope).
   NOTE(review): the body of decl() (original lines 8397-8400) is missing
   from this extraction; it presumably just forwards to decl0 — confirm
   against the full source. */
8396 static void decl(int l
)
8401 /* ------------------------------------------------------------------------- */
8404 /* ------------------------------------------------------------------------- */