2 * TCC - Tiny C Compiler
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
32 ST_DATA
int rsym
, anon_sym
, ind
, loc
;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
51 static SValue _vstack
[1 + VSTACK_SIZE
];
52 #define vstack (_vstack + 1)
54 ST_DATA
int const_wanted
; /* true if constant wanted */
55 ST_DATA
int nocode_wanted
; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
60 /* Automagical code suppression ----> */
61 #define CODE_OFF() (nocode_wanted |= 0x20000000)
62 #define CODE_ON() (nocode_wanted &= ~0x20000000)
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
66 static int gind(void) { CODE_ON(); return ind
; }
68 /* Set 'nocode_wanted' after unconditional jumps */
/* Emit an unconditional jump to 'target', then suppress code generation:
   anything emitted straight after an unconditional jump is unreachable. */
static void gjmp_addr_acs(int target)
{
    gjmp_addr(target);
    CODE_OFF();
}
/* Append a new unconditional jump to chain 't', suppress code generation
   for the (now unreachable) fall-through path, and return the updated
   chain head. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
77 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializer parsing) */
78 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
79 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
81 static int last_line_num
, new_file
, func_ind
; /* debug info control */
82 ST_DATA
const char *funcname
;
83 ST_DATA CType int_type
, func_old_type
, char_pointer_type
;
84 static CString initstr
;
87 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
88 #define VT_PTRDIFF_T VT_INT
90 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_LLONG
93 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
97 ST_DATA
struct switch_t
{
101 } **p
; int n
; /* list of case ranges */
102 int def_sym
; /* default symbol */
105 struct switch_t
*prev
;
107 } *cur_switch
; /* current switch */
109 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
110 /*list of temporary local variables on the stack in current function. */
111 ST_DATA
struct temp_local_variable
{
112 int location
; //offset on stack. Svalue.c.i
115 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
116 short nb_temp_local_vars
;
118 static struct scope
{
120 struct { int loc
, num
; } vla
;
121 struct { Sym
*s
; int n
; } cl
;
124 } *cur_scope
, *loop_scope
, *root_scope
;
126 /********************************************************/
127 /* stab debug support */
129 static const struct {
132 } default_debug
[] = {
133 { VT_INT
, "int:t1=r1;-2147483648;2147483647;" },
134 { VT_BYTE
, "char:t2=r2;0;127;" },
136 { VT_LONG
| VT_INT
, "long int:t3=r3;-2147483648;2147483647;" },
138 { VT_LLONG
| VT_LONG
, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
140 { VT_INT
| VT_UNSIGNED
, "unsigned int:t4=r4;0;037777777777;" },
142 { VT_LONG
| VT_INT
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;037777777777;" },
144 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
145 { VT_LLONG
| VT_LONG
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;01777777777777777777777;" },
147 { VT_QLONG
, "__int128:t6=r6;0;-1;" },
148 { VT_QLONG
| VT_UNSIGNED
, "__int128 unsigned:t7=r7;0;-1;" },
149 { VT_LLONG
, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
150 { VT_LLONG
| VT_UNSIGNED
, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
151 { VT_SHORT
, "short int:t10=r10;-32768;32767;" },
152 { VT_SHORT
| VT_UNSIGNED
, "short unsigned int:t11=r11;0;65535;" },
153 { VT_BYTE
| VT_DEFSIGN
, "signed char:t12=r12;-128;127;" },
154 { VT_BYTE
| VT_DEFSIGN
| VT_UNSIGNED
, "unsigned char:t13=r13;0;255;" },
155 { VT_FLOAT
, "float:t14=r1;4;0;" },
156 { VT_DOUBLE
, "double:t15=r1;8;0;" },
157 { VT_LDOUBLE
, "long double:t16=r1;16;0;" },
158 { -1, "_Float32:t17=r1;4;0;" },
159 { -1, "_Float64:t18=r1;8;0;" },
160 { -1, "_Float128:t19=r1;16;0;" },
161 { -1, "_Float32x:t20=r1;8;0;" },
162 { -1, "_Float64x:t21=r1;16;0;" },
163 { -1, "_Decimal32:t22=r1;4;0;" },
164 { -1, "_Decimal64:t23=r1;8;0;" },
165 { -1, "_Decimal128:t24=r1;16;0;" },
166 /* if default char is unsigned */
167 { VT_BYTE
| VT_UNSIGNED
, "unsigned char:t25=r25;0;255;" },
168 { VT_VOID
, "void:t26=26" },
171 static int debug_next_type
;
173 static struct debug_hash
{
178 static int n_debug_hash
;
180 static struct debug_info
{
191 struct debug_info
*child
, *next
, *last
, *parent
;
192 } *debug_info
, *debug_info_root
;
194 /********************************************************/
196 #define precedence_parser
197 static void init_prec(void);
199 /********************************************************/
200 #ifndef CONFIG_TCC_ASM
201 ST_FUNC
void asm_instr(void)
203 tcc_error("inline asm() not supported");
205 ST_FUNC
void asm_global_instr(void)
207 tcc_error("inline asm() not supported");
211 /* ------------------------------------------------------------------------- */
212 static void gen_cast(CType
*type
);
213 static void gen_cast_s(int t
);
214 static inline CType
*pointed_type(CType
*type
);
215 static int is_compatible_types(CType
*type1
, CType
*type2
);
216 static int parse_btype(CType
*type
, AttributeDef
*ad
);
217 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
218 static void parse_expr_type(CType
*type
);
219 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
220 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
221 static void block(int is_expr
);
222 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
223 static void decl(int l
);
224 static int decl0(int l
, int is_for_loop_init
, Sym
*);
225 static void expr_eq(void);
226 static void vla_runtime_type_size(CType
*type
, int *a
);
227 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
228 static inline int64_t expr_const64(void);
229 static void vpush64(int ty
, unsigned long long v
);
230 static void vpush(CType
*type
);
231 static int gvtst(int inv
, int t
);
232 static void gen_inline_functions(TCCState
*s
);
233 static void free_inline_functions(TCCState
*s
);
234 static void skip_or_save_block(TokenString
**str
);
235 static void gv_dup(void);
236 static int get_temp_local_var(int size
,int align
);
237 static void clear_temp_local_var_list();
238 static void cast_error(CType
*st
, CType
*dt
);
240 ST_INLN
int is_float(int t
)
242 int bt
= t
& VT_BTYPE
;
243 return bt
== VT_LDOUBLE
249 static inline int is_integer_btype(int bt
)
258 static int btype_size(int bt
)
260 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
264 bt
== VT_PTR
? PTR_SIZE
: 0;
267 /* returns function return register from type */
268 static int R_RET(int t
)
272 #ifdef TCC_TARGET_X86_64
273 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
275 #elif defined TCC_TARGET_RISCV64
276 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
282 /* returns 2nd function return register, if any */
283 static int R2_RET(int t
)
289 #elif defined TCC_TARGET_X86_64
294 #elif defined TCC_TARGET_RISCV64
301 /* returns true for two-word types */
302 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
304 /* put function return registers to stack value */
305 static void PUT_R_RET(SValue
*sv
, int t
)
307 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
310 /* returns function return register class for type t */
311 static int RC_RET(int t
)
313 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
316 /* returns generic register class for type t */
317 static int RC_TYPE(int t
)
321 #ifdef TCC_TARGET_X86_64
322 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
324 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
326 #elif defined TCC_TARGET_RISCV64
327 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
333 /* returns 2nd register class corresponding to t and rc */
334 static int RC2_TYPE(int t
, int rc
)
336 if (!USING_TWO_WORDS(t
))
351 /* we use our own 'finite' function to avoid potential problems with
352 non standard math libs */
353 /* XXX: endianness dependent */
354 ST_FUNC
int ieee_finite(double d
)
357 memcpy(p
, &d
, sizeof(double));
358 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
361 /* compiling intel long double natively */
362 #if (defined __i386__ || defined __x86_64__) \
363 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
364 # define TCC_IS_NATIVE_387
367 ST_FUNC
void test_lvalue(void)
369 if (!(vtop
->r
& VT_LVAL
))
373 ST_FUNC
void check_vstack(void)
375 if (vtop
!= vstack
- 1)
376 tcc_error("internal compiler error: vstack leak (%d)",
377 (int)(vtop
- vstack
+ 1));
380 /* ------------------------------------------------------------------------- */
381 /* vstack debugging aid */
384 void pv (const char *lbl
, int a
, int b
)
387 for (i
= a
; i
< a
+ b
; ++i
) {
388 SValue
*p
= &vtop
[-i
];
389 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
390 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
395 /* ------------------------------------------------------------------------- */
396 /* start of translation unit info */
397 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
403 /* file info: full path + filename */
404 section_sym
= put_elf_sym(symtab_section
, 0, 0,
405 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
406 text_section
->sh_num
, NULL
);
407 getcwd(buf
, sizeof(buf
));
409 normalize_slashes(buf
);
411 pstrcat(buf
, sizeof(buf
), "/");
412 put_stabs_r(s1
, buf
, N_SO
, 0, 0,
413 text_section
->data_offset
, text_section
, section_sym
);
414 put_stabs_r(s1
, file
->prev
->filename
, N_SO
, 0, 0,
415 text_section
->data_offset
, text_section
, section_sym
);
416 for (i
= 0; i
< sizeof (default_debug
) / sizeof (default_debug
[0]); i
++)
417 put_stabs(s1
, default_debug
[i
].name
, N_LSYM
, 0, 0, 0);
419 new_file
= last_line_num
= 0;
421 debug_next_type
= sizeof(default_debug
) / sizeof(default_debug
[0]);
425 /* we're currently 'including' the <command line> */
429 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
430 symbols can be safely used */
431 put_elf_sym(symtab_section
, 0, 0,
432 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
433 SHN_ABS
, file
->filename
);
436 static void tcc_debug_stabs (TCCState
*s1
, const char *str
, int type
, unsigned long value
,
437 Section
*sec
, int sym_index
)
443 (struct debug_sym
*)tcc_realloc (debug_info
->sym
,
444 sizeof(struct debug_sym
) *
445 (debug_info
->n_sym
+ 1));
446 s
= debug_info
->sym
+ debug_info
->n_sym
++;
449 s
->str
= tcc_strdup(str
);
451 s
->sym_index
= sym_index
;
454 put_stabs_r (s1
, str
, type
, 0, 0, value
, sec
, sym_index
);
456 put_stabs (s1
, str
, type
, 0, 0, value
);
459 static void tcc_debug_stabn(int type
, int value
)
461 if (type
== N_LBRAC
) {
462 struct debug_info
*info
=
463 (struct debug_info
*) tcc_mallocz(sizeof (*info
));
466 info
->parent
= debug_info
;
468 if (debug_info
->child
) {
469 if (debug_info
->child
->last
)
470 debug_info
->child
->last
->next
= info
;
472 debug_info
->child
->next
= info
;
473 debug_info
->child
->last
= info
;
476 debug_info
->child
= info
;
479 debug_info_root
= info
;
483 debug_info
->end
= value
;
484 debug_info
= debug_info
->parent
;
488 static void tcc_get_debug_info(TCCState
*s1
, Sym
*s
, CString
*result
)
497 type
= t
->type
.t
& ~(VT_EXTERN
| VT_STATIC
| VT_CONSTANT
| VT_VOLATILE
);
498 if ((type
& VT_BTYPE
) != VT_BYTE
)
500 if (type
== VT_PTR
|| type
== (VT_PTR
| VT_ARRAY
))
501 n
++, t
= t
->type
.ref
;
505 if ((type
& VT_BTYPE
) == VT_STRUCT
) {
509 for (i
= 0; i
< n_debug_hash
; i
++) {
510 if (t
== debug_hash
[i
].type
) {
511 debug_type
= debug_hash
[i
].debug_type
;
515 if (debug_type
== -1) {
516 debug_type
= ++debug_next_type
;
517 debug_hash
= (struct debug_hash
*)
518 tcc_realloc (debug_hash
,
519 (n_debug_hash
+ 1) * sizeof(*debug_hash
));
520 debug_hash
[n_debug_hash
].debug_type
= debug_type
;
521 debug_hash
[n_debug_hash
++].type
= t
;
523 cstr_printf (&str
, "%s:T%d=%c%d",
524 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
525 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
527 IS_UNION (t
->type
.t
) ? 'u' : 's',
530 int pos
, size
, align
;
533 cstr_printf (&str
, "%s:",
534 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
535 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
536 tcc_get_debug_info (s1
, t
, &str
);
537 if (t
->type
.t
& VT_BITFIELD
) {
538 pos
= t
->c
* 8 + BIT_POS(t
->type
.t
);
539 size
= BIT_SIZE(t
->type
.t
);
543 size
= type_size(&t
->type
, &align
) * 8;
545 cstr_printf (&str
, ",%d,%d;", pos
, size
);
547 cstr_printf (&str
, ";");
548 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
552 else if (IS_ENUM(type
)) {
553 Sym
*e
= t
= t
->type
.ref
;
555 debug_type
= ++debug_next_type
;
557 cstr_printf (&str
, "%s:T%d=e",
558 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
559 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
563 cstr_printf (&str
, "%s:",
564 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
565 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
566 cstr_printf (&str
, e
->type
.t
& VT_UNSIGNED
? "%u," : "%d,",
569 cstr_printf (&str
, ";");
570 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
573 else if ((type
& VT_BTYPE
) != VT_FUNC
) {
574 type
&= ~VT_STRUCT_MASK
;
576 debug_type
<= sizeof(default_debug
) / sizeof(default_debug
[0]);
578 if (default_debug
[debug_type
- 1].type
== type
)
580 if (debug_type
> sizeof(default_debug
) / sizeof(default_debug
[0]))
584 cstr_printf (result
, "%d=", ++debug_next_type
);
587 type
= t
->type
.t
& ~(VT_EXTERN
| VT_STATIC
| VT_CONSTANT
| VT_VOLATILE
);
588 if ((type
& VT_BTYPE
) != VT_BYTE
)
591 cstr_printf (result
, "%d=*", ++debug_next_type
);
592 else if (type
== (VT_PTR
| VT_ARRAY
))
593 cstr_printf (result
, "%d=ar1;0;%d;",
594 ++debug_next_type
, t
->type
.ref
->c
- 1);
595 else if (type
== VT_FUNC
) {
596 cstr_printf (result
, "%d=f", ++debug_next_type
);
597 tcc_get_debug_info (s1
, t
->type
.ref
, result
);
604 cstr_printf (result
, "%d", debug_type
);
607 static void tcc_debug_finish (TCCState
*s1
, struct debug_info
*cur
)
611 struct debug_info
*next
= cur
->next
;
613 for (i
= 0; i
< cur
->n_sym
; i
++) {
614 struct debug_sym
*s
= &cur
->sym
[i
];
617 put_stabs_r(s1
, s
->str
, s
->type
, 0, 0, s
->value
,
618 s
->sec
, s
->sym_index
);
620 put_stabs(s1
, s
->str
, s
->type
, 0, 0, s
->value
);
624 put_stabn(s1
, N_LBRAC
, 0, 0, cur
->start
);
625 tcc_debug_finish (s1
, cur
->child
);
626 put_stabn(s1
, N_RBRAC
, 0, 0, cur
->end
);
632 static void tcc_add_debug_info(TCCState
*s1
, int param
, Sym
*s
, Sym
*e
)
635 cstr_new (&debug_str
);
636 for (; s
!= e
; s
= s
->prev
) {
637 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
639 cstr_reset (&debug_str
);
640 cstr_printf (&debug_str
, "%s:%s", get_tok_str(s
->v
, NULL
), param
? "p" : "");
641 tcc_get_debug_info(s1
, s
, &debug_str
);
642 tcc_debug_stabs(s1
, debug_str
.data
, param
? N_PSYM
: N_LSYM
, s
->c
, NULL
, 0);
644 cstr_free (&debug_str
);
647 static void tcc_debug_extern_sym(TCCState
*s1
, Sym
*sym
, int sh_num
, int sym_bind
)
649 Section
*s
= s1
->sections
[sh_num
];
653 cstr_printf (&str
, "%s:%c",
654 get_tok_str(sym
->v
, NULL
),
655 sym_bind
== STB_GLOBAL
? 'G' : local_scope
? 'V' : 'S'
657 tcc_get_debug_info(s1
, sym
, &str
);
658 if (sym_bind
== STB_GLOBAL
)
659 tcc_debug_stabs(s1
, str
.data
, N_GSYM
, 0, NULL
, 0);
661 tcc_debug_stabs(s1
, str
.data
,
662 (sym
->type
.t
& VT_STATIC
) && data_section
== s
663 ? N_STSYM
: N_LCSYM
, 0, s
, sym
->c
);
667 /* put end of translation unit info */
668 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
672 put_stabs_r(s1
, NULL
, N_SO
, 0, 0,
673 text_section
->data_offset
, text_section
, section_sym
);
674 tcc_free(debug_hash
);
677 static BufferedFile
* put_new_file(TCCState
*s1
)
679 BufferedFile
*f
= file
;
680 /* use upper file if from inline ":asm:" */
681 if (f
->filename
[0] == ':')
684 put_stabs_r(s1
, f
->filename
, N_SOL
, 0, 0, ind
, text_section
, section_sym
);
685 new_file
= last_line_num
= 0;
690 /* generate line number info */
691 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
695 || cur_text_section
!= text_section
696 || !(f
= put_new_file(s1
))
697 || last_line_num
== f
->line_num
)
699 if (func_ind
!= -1) {
700 put_stabn(s1
, N_SLINE
, 0, f
->line_num
, ind
- func_ind
);
702 /* from tcc_assemble */
703 put_stabs_r(s1
, NULL
, N_SLINE
, 0, f
->line_num
, ind
, text_section
, section_sym
);
705 last_line_num
= f
->line_num
;
708 /* put function symbol */
709 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
715 debug_info_root
= NULL
;
717 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
718 if (!(f
= put_new_file(s1
)))
720 cstr_new (&debug_str
);
721 cstr_printf(&debug_str
, "%s:%c", funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
722 tcc_get_debug_info(s1
, sym
->type
.ref
, &debug_str
);
723 put_stabs_r(s1
, debug_str
.data
, N_FUN
, 0, f
->line_num
, 0, cur_text_section
, sym
->c
);
724 cstr_free (&debug_str
);
729 /* put function size */
730 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
734 tcc_debug_stabn(N_RBRAC
, size
);
735 tcc_debug_finish (s1
, debug_info_root
);
738 /* put alternative filename */
739 ST_FUNC
void tcc_debug_putfile(TCCState
*s1
, const char *filename
)
741 if (0 == strcmp(file
->filename
, filename
))
743 pstrcpy(file
->filename
, sizeof(file
->filename
), filename
);
747 /* begin of #include */
748 ST_FUNC
void tcc_debug_bincl(TCCState
*s1
)
752 put_stabs(s1
, file
->filename
, N_BINCL
, 0, 0, 0);
756 /* end of #include */
757 ST_FUNC
void tcc_debug_eincl(TCCState
*s1
)
761 put_stabn(s1
, N_EINCL
, 0, 0, 0);
765 /* ------------------------------------------------------------------------- */
766 /* initialize vstack and types. This must be done also for tcc -E */
767 ST_FUNC
void tccgen_init(TCCState
*s1
)
770 memset(vtop
, 0, sizeof *vtop
);
772 /* define some often used types */
774 char_pointer_type
.t
= VT_BYTE
;
775 mk_pointer(&char_pointer_type
);
776 func_old_type
.t
= VT_FUNC
;
777 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
778 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
779 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
780 #ifdef precedence_parser
786 ST_FUNC
int tccgen_compile(TCCState
*s1
)
788 cur_text_section
= NULL
;
790 anon_sym
= SYM_FIRST_ANOM
;
793 nocode_wanted
= 0x80000000;
797 #ifdef TCC_TARGET_ARM
801 printf("%s: **** new file\n", file
->filename
);
803 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
806 gen_inline_functions(s1
);
808 /* end of translation unit info */
813 ST_FUNC
void tccgen_finish(TCCState
*s1
)
816 free_inline_functions(s1
);
817 sym_pop(&global_stack
, NULL
, 0);
818 sym_pop(&local_stack
, NULL
, 0);
819 /* free preprocessor macros */
822 dynarray_reset(&sym_pools
, &nb_sym_pools
);
823 sym_free_first
= NULL
;
826 /* ------------------------------------------------------------------------- */
827 ST_FUNC ElfSym
*elfsym(Sym
*s
)
831 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
834 /* apply storage attributes to Elf symbol */
835 ST_FUNC
void update_storage(Sym
*sym
)
838 int sym_bind
, old_sym_bind
;
844 if (sym
->a
.visibility
)
845 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
848 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
849 sym_bind
= STB_LOCAL
;
850 else if (sym
->a
.weak
)
853 sym_bind
= STB_GLOBAL
;
854 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
855 if (sym_bind
!= old_sym_bind
) {
856 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
860 if (sym
->a
.dllimport
)
861 esym
->st_other
|= ST_PE_IMPORT
;
862 if (sym
->a
.dllexport
)
863 esym
->st_other
|= ST_PE_EXPORT
;
867 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
868 get_tok_str(sym
->v
, NULL
),
869 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
877 /* ------------------------------------------------------------------------- */
878 /* update sym->c so that it points to an external symbol in section
879 'section' with value 'value' */
881 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
882 addr_t value
, unsigned long size
,
883 int can_add_underscore
)
885 int sym_type
, sym_bind
, info
, other
, t
;
889 #ifdef CONFIG_TCC_BCHECK
893 name
= get_tok_str(sym
->v
, NULL
);
894 #ifdef CONFIG_TCC_BCHECK
895 if (tcc_state
->do_bounds_check
) {
896 /* XXX: avoid doing that for statics ? */
897 /* if bound checking is activated, we change some function
898 names by adding the "__bound" prefix */
899 #if defined(TCC_TARGET_ARM) && defined(TCC_ARM_EABI)
900 if (strcmp (name
, "memcpy") == 0 ||
901 strcmp (name
, "memmove") == 0 ||
902 strcmp (name
, "memset") == 0)
907 /* XXX: we rely only on malloc hooks */
916 #if defined(TCC_TARGET_ARM) && defined(TCC_ARM_EABI)
930 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
936 #ifndef TCC_TARGET_PE
939 #if defined(TCC_TARGET_ARM) && defined(TCC_ARM_EABI)
942 strcpy(buf
, "__bound_");
950 if ((t
& VT_BTYPE
) == VT_FUNC
) {
952 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
953 sym_type
= STT_NOTYPE
;
955 sym_type
= STT_OBJECT
;
957 if (t
& (VT_STATIC
| VT_INLINE
))
958 sym_bind
= STB_LOCAL
;
960 sym_bind
= STB_GLOBAL
;
963 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
964 Sym
*ref
= sym
->type
.ref
;
965 if (ref
->a
.nodecorate
) {
966 can_add_underscore
= 0;
968 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
969 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
971 other
|= ST_PE_STDCALL
;
972 can_add_underscore
= 0;
976 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
978 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
982 name
= get_tok_str(sym
->asm_label
, NULL
);
983 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
984 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
986 if (tcc_state
->do_debug
987 && sym_type
!= STT_FUNC
988 && sym
->v
< SYM_FIRST_ANOM
)
989 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
);
993 esym
->st_value
= value
;
994 esym
->st_size
= size
;
995 esym
->st_shndx
= sh_num
;
1000 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
1001 addr_t value
, unsigned long size
)
1003 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
1004 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
1007 /* add a new relocation entry to symbol 'sym' in section 's' */
1008 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
1013 if (nocode_wanted
&& s
== cur_text_section
)
1018 put_extern_sym(sym
, NULL
, 0, 0);
1022 /* now we can add ELF relocation info */
1023 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
1027 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
1029 greloca(s
, sym
, offset
, type
, 0);
1033 /* ------------------------------------------------------------------------- */
1034 /* symbol allocator */
1035 static Sym
*__sym_malloc(void)
1037 Sym
*sym_pool
, *sym
, *last_sym
;
1040 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
1041 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
1043 last_sym
= sym_free_first
;
1045 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
1046 sym
->next
= last_sym
;
1050 sym_free_first
= last_sym
;
1054 static inline Sym
*sym_malloc(void)
1058 sym
= sym_free_first
;
1060 sym
= __sym_malloc();
1061 sym_free_first
= sym
->next
;
1064 sym
= tcc_malloc(sizeof(Sym
));
1069 ST_INLN
void sym_free(Sym
*sym
)
1072 sym
->next
= sym_free_first
;
1073 sym_free_first
= sym
;
1079 /* push, without hashing */
1080 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
1085 memset(s
, 0, sizeof *s
);
1095 /* find a symbol and return its associated structure. 's' is the top
1096 of the symbol stack */
1097 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
1102 else if (s
->v
== -1)
1109 /* structure lookup */
1110 ST_INLN Sym
*struct_find(int v
)
1113 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1115 return table_ident
[v
]->sym_struct
;
1118 /* find an identifier */
1119 ST_INLN Sym
*sym_find(int v
)
1122 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1124 return table_ident
[v
]->sym_identifier
;
1127 static int sym_scope(Sym
*s
)
1129 if (IS_ENUM_VAL (s
->type
.t
))
1130 return s
->type
.ref
->sym_scope
;
1132 return s
->sym_scope
;
1135 /* push a given symbol on the symbol stack */
1136 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
1145 s
= sym_push2(ps
, v
, type
->t
, c
);
1146 s
->type
.ref
= type
->ref
;
1148 /* don't record fields or anonymous symbols */
1150 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1151 /* record symbol in token array */
1152 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1154 ps
= &ts
->sym_struct
;
1156 ps
= &ts
->sym_identifier
;
1159 s
->sym_scope
= local_scope
;
1160 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
1161 tcc_error("redeclaration of '%s'",
1162 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
1167 /* push a global identifier */
1168 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
1171 s
= sym_push2(&global_stack
, v
, t
, c
);
1172 s
->r
= VT_CONST
| VT_SYM
;
1173 /* don't record anonymous symbol */
1174 if (v
< SYM_FIRST_ANOM
) {
1175 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
1176 /* modify the top most local identifier, so that sym_identifier will
1177 point to 's' when popped; happens when called from inline asm */
1178 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
1179 ps
= &(*ps
)->prev_tok
;
1186 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1187 pop them yet from the list, but do remove them from the token array. */
1188 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
1198 /* remove symbol in token array */
1200 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1201 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1203 ps
= &ts
->sym_struct
;
1205 ps
= &ts
->sym_identifier
;
1216 /* ------------------------------------------------------------------------- */
1217 static void vcheck_cmp(void)
1219 /* cannot let cpu flags if other instruction are generated. Also
1220 avoid leaving VT_JMP anywhere except on the top of the stack
1221 because it would complicate the code generator.
1223 Don't do this when nocode_wanted. vtop might come from
1224 !nocode_wanted regions (see 88_codeopt.c) and transforming
1225 it to a register without actually generating code is wrong
1226 as their value might still be used for real. All values
1227 we push under nocode_wanted will eventually be popped
1228 again, so that the VT_CMP/VT_JMP value will be in vtop
1229 when code is unsuppressed again. */
1231 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
1235 static void vsetc(CType
*type
, int r
, CValue
*vc
)
1237 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1238 tcc_error("memory full (vstack)");
1243 vtop
->r2
= VT_CONST
;
1248 ST_FUNC
void vswap(void)
1258 /* pop stack value */
1259 ST_FUNC
void vpop(void)
1262 v
= vtop
->r
& VT_VALMASK
;
1263 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1264 /* for x86, we need to pop the FP stack */
1265 if (v
== TREG_ST0
) {
1266 o(0xd8dd); /* fstp %st(0) */
1270 /* need to put correct jump if && or || without test */
1277 /* push constant of type "type" with useless value */
1278 static void vpush(CType
*type
)
1280 vset(type
, VT_CONST
, 0);
1283 /* push arbitrary 64bit constant */
1284 static void vpush64(int ty
, unsigned long long v
)
1291 vsetc(&ctype
, VT_CONST
, &cval
);
1294 /* push integer constant */
1295 ST_FUNC
void vpushi(int v
)
1300 /* push a pointer sized constant */
1301 static void vpushs(addr_t v
)
1303 vpush64(VT_SIZE_T
, v
);
1306 /* push long long constant */
1307 static inline void vpushll(long long v
)
1309 vpush64(VT_LLONG
, v
);
1312 ST_FUNC
void vset(CType
*type
, int r
, int v
)
1316 vsetc(type
, r
, &cval
);
1319 static void vseti(int r
, int v
)
1327 ST_FUNC
void vpushv(SValue
*v
)
1329 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1330 tcc_error("memory full (vstack)");
1335 static void vdup(void)
1340 /* rotate n first stack elements to the bottom
1341 I1 ... In -> I2 ... In I1 [top is right]
1343 ST_FUNC
void vrotb(int n
)
1350 for(i
=-n
+1;i
!=0;i
++)
1351 vtop
[i
] = vtop
[i
+1];
1355 /* rotate the n elements before entry e towards the top
1356 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1358 ST_FUNC
void vrote(SValue
*e
, int n
)
1365 for(i
= 0;i
< n
- 1; i
++)
1370 /* rotate n first stack elements to the top
1371 I1 ... In -> In I1 ... I(n-1) [top is right]
1373 ST_FUNC
void vrott(int n
)
1378 /* ------------------------------------------------------------------------- */
1379 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1381 /* called from generators to set the result from relational ops */
1382 ST_FUNC
void vset_VT_CMP(int op
)
1390 /* called once before asking generators to load VT_CMP to a register */
1391 static void vset_VT_JMP(void)
1393 int op
= vtop
->cmp_op
;
1395 if (vtop
->jtrue
|| vtop
->jfalse
) {
1396 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1397 int inv
= op
& (op
< 2); /* small optimization */
1398 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1400 /* otherwise convert flags (rsp. 0/1) to register */
1402 if (op
< 2) /* doesn't seem to happen */
1407 /* Set CPU Flags, doesn't yet jump */
1408 static void gvtst_set(int inv
, int t
)
1412 if (vtop
->r
!= VT_CMP
) {
1415 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1416 vset_VT_CMP(vtop
->c
.i
!= 0);
1419 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1420 *p
= gjmp_append(*p
, t
);
1423 /* Generate value test
1425 * Generate a test for any value (jump, comparison and integers) */
1426 static int gvtst(int inv
, int t
)
1431 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1433 x
= u
, u
= t
, t
= x
;
1436 /* jump to the wanted target */
1438 t
= gjmp_cond(op
^ inv
, t
);
1441 /* resolve complementary jumps to here */
1448 /* generate a zero or nozero test */
1449 static void gen_test_zero(int op
)
1451 if (vtop
->r
== VT_CMP
) {
1455 vtop
->jfalse
= vtop
->jtrue
;
1465 /* ------------------------------------------------------------------------- */
1466 /* push a symbol value of TYPE */
1467 static inline void vpushsym(CType
*type
, Sym
*sym
)
1471 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1475 /* Return a static symbol pointing to a section */
1476 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1482 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1483 sym
->type
.t
|= VT_STATIC
;
1484 put_extern_sym(sym
, sec
, offset
, size
);
1488 /* push a reference to a section offset by adding a dummy symbol */
1489 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1491 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1494 /* define a new external reference to a symbol 'v' of type 'u' */
1495 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1501 /* push forward reference */
1502 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1503 s
->type
.ref
= type
->ref
;
1504 } else if (IS_ASM_SYM(s
)) {
1505 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1506 s
->type
.ref
= type
->ref
;
1512 /* Merge symbol attributes. */
1513 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1515 if (sa1
->aligned
&& !sa
->aligned
)
1516 sa
->aligned
= sa1
->aligned
;
1517 sa
->packed
|= sa1
->packed
;
1518 sa
->weak
|= sa1
->weak
;
1519 if (sa1
->visibility
!= STV_DEFAULT
) {
1520 int vis
= sa
->visibility
;
1521 if (vis
== STV_DEFAULT
1522 || vis
> sa1
->visibility
)
1523 vis
= sa1
->visibility
;
1524 sa
->visibility
= vis
;
1526 sa
->dllexport
|= sa1
->dllexport
;
1527 sa
->nodecorate
|= sa1
->nodecorate
;
1528 sa
->dllimport
|= sa1
->dllimport
;
1531 /* Merge function attributes. */
1532 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1534 if (fa1
->func_call
&& !fa
->func_call
)
1535 fa
->func_call
= fa1
->func_call
;
1536 if (fa1
->func_type
&& !fa
->func_type
)
1537 fa
->func_type
= fa1
->func_type
;
1538 if (fa1
->func_args
&& !fa
->func_args
)
1539 fa
->func_args
= fa1
->func_args
;
1540 if (fa1
->func_noreturn
)
1541 fa
->func_noreturn
= 1;
1548 /* Merge attributes. */
1549 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1551 merge_symattr(&ad
->a
, &ad1
->a
);
1552 merge_funcattr(&ad
->f
, &ad1
->f
);
1555 ad
->section
= ad1
->section
;
1556 if (ad1
->alias_target
)
1557 ad
->alias_target
= ad1
->alias_target
;
1559 ad
->asm_label
= ad1
->asm_label
;
1561 ad
->attr_mode
= ad1
->attr_mode
;
1564 /* Merge some type attributes. */
1565 static void patch_type(Sym
*sym
, CType
*type
)
1567 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1568 if (!(sym
->type
.t
& VT_EXTERN
))
1569 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1570 sym
->type
.t
&= ~VT_EXTERN
;
1573 if (IS_ASM_SYM(sym
)) {
1574 /* stay static if both are static */
1575 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1576 sym
->type
.ref
= type
->ref
;
1579 if (!is_compatible_types(&sym
->type
, type
)) {
1580 tcc_error("incompatible types for redefinition of '%s'",
1581 get_tok_str(sym
->v
, NULL
));
1583 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1584 int static_proto
= sym
->type
.t
& VT_STATIC
;
1585 /* warn if static follows non-static function declaration */
1586 if ((type
->t
& VT_STATIC
) && !static_proto
1587 /* XXX this test for inline shouldn't be here. Until we
1588 implement gnu-inline mode again it silences a warning for
1589 mingw caused by our workarounds. */
1590 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1591 tcc_warning("static storage ignored for redefinition of '%s'",
1592 get_tok_str(sym
->v
, NULL
));
1594 /* set 'inline' if both agree or if one has static */
1595 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1596 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1597 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1598 static_proto
|= VT_INLINE
;
1601 if (0 == (type
->t
& VT_EXTERN
)) {
1602 struct FuncAttr f
= sym
->type
.ref
->f
;
1603 /* put complete type, use static from prototype */
1604 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1605 sym
->type
.ref
= type
->ref
;
1606 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1608 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1611 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1612 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1613 sym
->type
.ref
= type
->ref
;
1617 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1618 /* set array size if it was omitted in extern declaration */
1619 sym
->type
.ref
->c
= type
->ref
->c
;
1621 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1622 tcc_warning("storage mismatch for redefinition of '%s'",
1623 get_tok_str(sym
->v
, NULL
));
1627 /* Merge some storage attributes. */
1628 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1631 patch_type(sym
, type
);
1633 #ifdef TCC_TARGET_PE
1634 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1635 tcc_error("incompatible dll linkage for redefinition of '%s'",
1636 get_tok_str(sym
->v
, NULL
));
1638 merge_symattr(&sym
->a
, &ad
->a
);
1640 sym
->asm_label
= ad
->asm_label
;
1641 update_storage(sym
);
1644 /* copy sym to other stack */
1645 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1648 s
= sym_malloc(), *s
= *s0
;
1649 s
->prev
= *ps
, *ps
= s
;
1650 if (s
->v
< SYM_FIRST_ANOM
) {
1651 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1652 s
->prev_tok
= *ps
, *ps
= s
;
1657 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1658 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1660 int bt
= s
->type
.t
& VT_BTYPE
;
1661 if (bt
== VT_FUNC
|| bt
== VT_PTR
) {
1662 Sym
**sp
= &s
->type
.ref
;
1663 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1664 Sym
*s2
= sym_copy(s
, ps
);
1665 sp
= &(*sp
= s2
)->next
;
1666 sym_copy_ref(s2
, ps
);
1671 /* define a new external reference to a symbol 'v' */
1672 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1676 /* look for global symbol */
1678 while (s
&& s
->sym_scope
)
1682 /* push forward reference */
1683 s
= global_identifier_push(v
, type
->t
, 0);
1686 s
->asm_label
= ad
->asm_label
;
1687 s
->type
.ref
= type
->ref
;
1688 /* copy type to the global stack */
1690 sym_copy_ref(s
, &global_stack
);
1692 patch_storage(s
, ad
, type
);
1694 /* push variables on local_stack if any */
1695 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1696 s
= sym_copy(s
, &local_stack
);
1700 /* push a reference to global symbol v */
1701 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1703 vpushsym(type
, external_global_sym(v
, type
));
1706 /* save registers up to (vtop - n) stack entry */
1707 ST_FUNC
void save_regs(int n
)
1710 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1714 /* save r to the memory stack, and mark it as being free */
1715 ST_FUNC
void save_reg(int r
)
1717 save_reg_upstack(r
, 0);
1720 /* save r to the memory stack, and mark it as being free,
1721 if seen up to (vtop - n) stack entry */
1722 ST_FUNC
void save_reg_upstack(int r
, int n
)
1724 int l
, size
, align
, bt
;
1727 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1732 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1733 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1734 /* must save value on stack if not already done */
1736 bt
= p
->type
.t
& VT_BTYPE
;
1739 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1742 size
= type_size(&sv
.type
, &align
);
1743 l
= get_temp_local_var(size
,align
);
1744 sv
.r
= VT_LOCAL
| VT_LVAL
;
1746 store(p
->r
& VT_VALMASK
, &sv
);
1747 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1748 /* x86 specific: need to pop fp register ST0 if saved */
1749 if (r
== TREG_ST0
) {
1750 o(0xd8dd); /* fstp %st(0) */
1753 /* special long long case */
1754 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1759 /* mark that stack entry as being saved on the stack */
1760 if (p
->r
& VT_LVAL
) {
1761 /* also clear the bounded flag because the
1762 relocation address of the function was stored in
1764 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1766 p
->r
= VT_LVAL
| VT_LOCAL
;
1774 #ifdef TCC_TARGET_ARM
1775 /* find a register of class 'rc2' with at most one reference on stack.
1776 * If none, call get_reg(rc) */
1777 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1782 for(r
=0;r
<NB_REGS
;r
++) {
1783 if (reg_classes
[r
] & rc2
) {
1786 for(p
= vstack
; p
<= vtop
; p
++) {
1787 if ((p
->r
& VT_VALMASK
) == r
||
1799 /* find a free register of class 'rc'. If none, save one register */
1800 ST_FUNC
int get_reg(int rc
)
1805 /* find a free register */
1806 for(r
=0;r
<NB_REGS
;r
++) {
1807 if (reg_classes
[r
] & rc
) {
1810 for(p
=vstack
;p
<=vtop
;p
++) {
1811 if ((p
->r
& VT_VALMASK
) == r
||
1820 /* no register left : free the first one on the stack (VERY
1821 IMPORTANT to start from the bottom to ensure that we don't
1822 spill registers used in gen_opi()) */
1823 for(p
=vstack
;p
<=vtop
;p
++) {
1824 /* look at second register (if long long) */
1826 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1828 r
= p
->r
& VT_VALMASK
;
1829 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1835 /* Should never come here */
1839 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1840 static int get_temp_local_var(int size
,int align
){
1842 struct temp_local_variable
*temp_var
;
1849 for(i
=0;i
<nb_temp_local_vars
;i
++){
1850 temp_var
=&arr_temp_local_vars
[i
];
1851 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1854 /*check if temp_var is free*/
1856 for(p
=vstack
;p
<=vtop
;p
++) {
1858 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1859 if(p
->c
.i
==temp_var
->location
){
1866 found_var
=temp_var
->location
;
1872 loc
= (loc
- size
) & -align
;
1873 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1874 temp_var
=&arr_temp_local_vars
[i
];
1875 temp_var
->location
=loc
;
1876 temp_var
->size
=size
;
1877 temp_var
->align
=align
;
1878 nb_temp_local_vars
++;
1885 static void clear_temp_local_var_list(){
1886 nb_temp_local_vars
=0;
1889 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1891 static void move_reg(int r
, int s
, int t
)
1905 /* get address of vtop (vtop MUST BE an lvalue) */
1906 ST_FUNC
void gaddrof(void)
1908 vtop
->r
&= ~VT_LVAL
;
1909 /* tricky: if saved lvalue, then we can go back to lvalue */
1910 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1911 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1914 #ifdef CONFIG_TCC_BCHECK
1915 /* generate lvalue bound code */
1916 static void gbound(void)
1920 vtop
->r
&= ~VT_MUSTBOUND
;
1921 /* if lvalue, then use checking code before dereferencing */
1922 if (vtop
->r
& VT_LVAL
) {
1923 /* if not VT_BOUNDED value, then make one */
1924 if (!(vtop
->r
& VT_BOUNDED
)) {
1925 /* must save type because we must set it to int to get pointer */
1927 vtop
->type
.t
= VT_PTR
;
1930 gen_bounded_ptr_add();
1934 /* then check for dereferencing */
1935 gen_bounded_ptr_deref();
1939 /* we need to call __bound_ptr_add before we start to load function
1940 args into registers */
1941 ST_FUNC
void gbound_args(int nb_args
)
1944 for (i
= 1; i
<= nb_args
; ++i
)
1945 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1952 /* Add bounds for local symbols from S to E (via ->prev) */
1953 static void add_local_bounds(Sym
*s
, Sym
*e
)
1955 for (; s
!= e
; s
= s
->prev
) {
1956 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1958 /* Add arrays/structs/unions because we always take address */
1959 if ((s
->type
.t
& VT_ARRAY
)
1960 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1961 || s
->a
.addrtaken
) {
1962 /* add local bound info */
1963 int align
, size
= type_size(&s
->type
, &align
);
1964 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1965 2 * sizeof(addr_t
));
1966 bounds_ptr
[0] = s
->c
;
1967 bounds_ptr
[1] = size
;
1973 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1974 static void pop_local_syms(Sym
**ptop
, Sym
*b
, int keep
, int ellipsis
)
1976 #ifdef CONFIG_TCC_BCHECK
1977 if (tcc_state
->do_bounds_check
&& !ellipsis
&& !keep
)
1978 add_local_bounds(*ptop
, b
);
1980 if (tcc_state
->do_debug
)
1981 tcc_add_debug_info (tcc_state
, !local_scope
, *ptop
, b
);
1982 sym_pop(ptop
, b
, keep
);
1985 static void incr_bf_adr(int o
)
1987 vtop
->type
= char_pointer_type
;
1991 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
1995 /* single-byte load mode for packed or otherwise unaligned bitfields */
1996 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
1999 save_reg_upstack(vtop
->r
, 1);
2000 vpush64(type
->t
& VT_BTYPE
, 0); // B X
2001 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2010 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
2012 vpushi((1 << n
) - 1), gen_op('&');
2015 vpushi(bits
), gen_op(TOK_SHL
);
2018 bits
+= n
, bit_size
-= n
, o
= 1;
2021 if (!(type
->t
& VT_UNSIGNED
)) {
2022 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
2023 vpushi(n
), gen_op(TOK_SHL
);
2024 vpushi(n
), gen_op(TOK_SAR
);
2028 /* single-byte store mode for packed or otherwise unaligned bitfields */
2029 static void store_packed_bf(int bit_pos
, int bit_size
)
2031 int bits
, n
, o
, m
, c
;
2033 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2035 save_reg_upstack(vtop
->r
, 1);
2036 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2038 incr_bf_adr(o
); // X B
2040 c
? vdup() : gv_dup(); // B V X
2043 vpushi(bits
), gen_op(TOK_SHR
);
2045 vpushi(bit_pos
), gen_op(TOK_SHL
);
2050 m
= ((1 << n
) - 1) << bit_pos
;
2051 vpushi(m
), gen_op('&'); // X B V1
2052 vpushv(vtop
-1); // X B V1 B
2053 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
2054 gen_op('&'); // X B V1 B1
2055 gen_op('|'); // X B V2
2057 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
2058 vstore(), vpop(); // X B
2059 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
2064 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
2067 if (0 == sv
->type
.ref
)
2069 t
= sv
->type
.ref
->auxtype
;
2070 if (t
!= -1 && t
!= VT_STRUCT
) {
2071 sv
->type
.t
= (sv
->type
.t
& ~VT_BTYPE
) | t
;
2077 /* store vtop a register belonging to class 'rc'. lvalues are
2078 converted to values. Cannot be used if cannot be converted to
2079 register value (such as structures). */
2080 ST_FUNC
int gv(int rc
)
2082 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
2083 int bit_pos
, bit_size
, size
, align
;
2085 /* NOTE: get_reg can modify vstack[] */
2086 if (vtop
->type
.t
& VT_BITFIELD
) {
2089 bit_pos
= BIT_POS(vtop
->type
.t
);
2090 bit_size
= BIT_SIZE(vtop
->type
.t
);
2091 /* remove bit field info to avoid loops */
2092 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
2095 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
2096 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
2097 type
.t
|= VT_UNSIGNED
;
2099 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
2101 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2106 if (r
== VT_STRUCT
) {
2107 load_packed_bf(&type
, bit_pos
, bit_size
);
2109 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
2110 /* cast to int to propagate signedness in following ops */
2112 /* generate shifts */
2113 vpushi(bits
- (bit_pos
+ bit_size
));
2115 vpushi(bits
- bit_size
);
2116 /* NOTE: transformed to SHR if unsigned */
2121 if (is_float(vtop
->type
.t
) &&
2122 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2123 unsigned long offset
;
2124 /* CPUs usually cannot use float constants, so we store them
2125 generically in data segment */
2126 size
= type_size(&vtop
->type
, &align
);
2128 size
= 0, align
= 1;
2129 offset
= section_add(data_section
, size
, align
);
2130 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
2132 init_putv(&vtop
->type
, data_section
, offset
);
2135 #ifdef CONFIG_TCC_BCHECK
2136 if (vtop
->r
& VT_MUSTBOUND
)
2140 bt
= vtop
->type
.t
& VT_BTYPE
;
2142 #ifdef TCC_TARGET_RISCV64
2144 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
2147 rc2
= RC2_TYPE(bt
, rc
);
2149 /* need to reload if:
2151 - lvalue (need to dereference pointer)
2152 - already a register, but not in the right class */
2153 r
= vtop
->r
& VT_VALMASK
;
2154 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
2155 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
2157 if (!r_ok
|| !r2_ok
) {
2161 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
2162 int original_type
= vtop
->type
.t
;
2164 /* two register type load :
2165 expand to two words temporarily */
2166 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2168 unsigned long long ll
= vtop
->c
.i
;
2169 vtop
->c
.i
= ll
; /* first word */
2171 vtop
->r
= r
; /* save register value */
2172 vpushi(ll
>> 32); /* second word */
2173 } else if (vtop
->r
& VT_LVAL
) {
2174 /* We do not want to modify the long long pointer here.
2175 So we save any other instances down the stack */
2176 save_reg_upstack(vtop
->r
, 1);
2177 /* load from memory */
2178 vtop
->type
.t
= load_type
;
2181 vtop
[-1].r
= r
; /* save register value */
2182 /* increment pointer to get second word */
2183 vtop
->type
.t
= VT_PTRDIFF_T
;
2188 vtop
->type
.t
= load_type
;
2190 /* move registers */
2193 if (r2_ok
&& vtop
->r2
< VT_CONST
)
2196 vtop
[-1].r
= r
; /* save register value */
2197 vtop
->r
= vtop
[-1].r2
;
2199 /* Allocate second register. Here we rely on the fact that
2200 get_reg() tries first to free r2 of an SValue. */
2204 /* write second register */
2207 vtop
->type
.t
= original_type
;
2209 if (vtop
->r
== VT_CMP
)
2211 /* one register type load */
2216 #ifdef TCC_TARGET_C67
2217 /* uses register pairs for doubles */
2218 if (bt
== VT_DOUBLE
)
2225 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2226 ST_FUNC
void gv2(int rc1
, int rc2
)
2228 /* generate more generic register first. But VT_JMP or VT_CMP
2229 values must be generated first in all cases to avoid possible
2231 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
2236 /* test if reload is needed for first register */
2237 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
2247 /* test if reload is needed for first register */
2248 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
2255 /* expand 64bit on stack in two ints */
2256 ST_FUNC
void lexpand(void)
2259 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
2260 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
2261 if (v
== VT_CONST
) {
2264 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
2270 vtop
[0].r
= vtop
[-1].r2
;
2271 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2273 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2278 /* build a long long from two ints */
2279 static void lbuild(int t
)
2281 gv2(RC_INT
, RC_INT
);
2282 vtop
[-1].r2
= vtop
[0].r
;
2283 vtop
[-1].type
.t
= t
;
2288 /* convert stack entry to register and duplicate its value in another
2290 static void gv_dup(void)
2296 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2297 if (t
& VT_BITFIELD
) {
2307 /* stack: H L L1 H1 */
2317 /* duplicate value */
2327 /* generate CPU independent (unsigned) long long operations */
2328 static void gen_opl(int op
)
2330 int t
, a
, b
, op1
, c
, i
;
2332 unsigned short reg_iret
= REG_IRET
;
2333 unsigned short reg_lret
= REG_IRE2
;
2339 func
= TOK___divdi3
;
2342 func
= TOK___udivdi3
;
2345 func
= TOK___moddi3
;
2348 func
= TOK___umoddi3
;
2355 /* call generic long long function */
2356 vpush_global_sym(&func_old_type
, func
);
2361 vtop
->r2
= reg_lret
;
2369 //pv("gen_opl A",0,2);
2375 /* stack: L1 H1 L2 H2 */
2380 vtop
[-2] = vtop
[-3];
2383 /* stack: H1 H2 L1 L2 */
2384 //pv("gen_opl B",0,4);
2390 /* stack: H1 H2 L1 L2 ML MH */
2393 /* stack: ML MH H1 H2 L1 L2 */
2397 /* stack: ML MH H1 L2 H2 L1 */
2402 /* stack: ML MH M1 M2 */
2405 } else if (op
== '+' || op
== '-') {
2406 /* XXX: add non carry method too (for MIPS or alpha) */
2412 /* stack: H1 H2 (L1 op L2) */
2415 gen_op(op1
+ 1); /* TOK_xxxC2 */
2418 /* stack: H1 H2 (L1 op L2) */
2421 /* stack: (L1 op L2) H1 H2 */
2423 /* stack: (L1 op L2) (H1 op H2) */
2431 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2432 t
= vtop
[-1].type
.t
;
2436 /* stack: L H shift */
2438 /* constant: simpler */
2439 /* NOTE: all comments are for SHL. the other cases are
2440 done by swapping words */
2451 if (op
!= TOK_SAR
) {
2484 /* XXX: should provide a faster fallback on x86 ? */
2487 func
= TOK___ashrdi3
;
2490 func
= TOK___lshrdi3
;
2493 func
= TOK___ashldi3
;
2499 /* compare operations */
2505 /* stack: L1 H1 L2 H2 */
2507 vtop
[-1] = vtop
[-2];
2509 /* stack: L1 L2 H1 H2 */
2513 /* when values are equal, we need to compare low words. since
2514 the jump is inverted, we invert the test too. */
2517 else if (op1
== TOK_GT
)
2519 else if (op1
== TOK_ULT
)
2521 else if (op1
== TOK_UGT
)
2531 /* generate non equal test */
2533 vset_VT_CMP(TOK_NE
);
2537 /* compare low. Always unsigned */
2541 else if (op1
== TOK_LE
)
2543 else if (op1
== TOK_GT
)
2545 else if (op1
== TOK_GE
)
2548 #if 0//def TCC_TARGET_I386
2549 if (op
== TOK_NE
) { gsym(b
); break; }
2550 if (op
== TOK_EQ
) { gsym(a
); break; }
/* Signed 64-bit division on raw two's-complement bit patterns,
   implemented with unsigned ops so it is portable and well-defined:
   divide the magnitudes, then restore the quotient's sign. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    /* unsigned negation wraps modulo 2^64, giving |a| and |b| */
    uint64_t ua = (a >> 63) ? -a : a;
    uint64_t ub = (b >> 63) ? -b : b;
    uint64_t q = ua / ub;
    /* result is negative exactly when the operand signs differ */
    return ((a ^ b) >> 63) ? -q : q;
}
/* Signed "less than" on raw 64-bit patterns without signed compares:
   XOR-ing the sign bit maps signed order onto unsigned order. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
2570 /* handle integer constant optimizations and various machine
2572 static void gen_opic(int op
)
2574 SValue
*v1
= vtop
- 1;
2576 int t1
= v1
->type
.t
& VT_BTYPE
;
2577 int t2
= v2
->type
.t
& VT_BTYPE
;
2578 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2579 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2580 uint64_t l1
= c1
? v1
->c
.i
: 0;
2581 uint64_t l2
= c2
? v2
->c
.i
: 0;
2582 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2584 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2585 l1
= ((uint32_t)l1
|
2586 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2587 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2588 l2
= ((uint32_t)l2
|
2589 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2593 case '+': l1
+= l2
; break;
2594 case '-': l1
-= l2
; break;
2595 case '&': l1
&= l2
; break;
2596 case '^': l1
^= l2
; break;
2597 case '|': l1
|= l2
; break;
2598 case '*': l1
*= l2
; break;
2605 /* if division by zero, generate explicit division */
2607 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2608 tcc_error("division by zero in constant");
2612 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2613 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2614 case TOK_UDIV
: l1
= l1
/ l2
; break;
2615 case TOK_UMOD
: l1
= l1
% l2
; break;
2618 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2619 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2621 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2624 case TOK_ULT
: l1
= l1
< l2
; break;
2625 case TOK_UGE
: l1
= l1
>= l2
; break;
2626 case TOK_EQ
: l1
= l1
== l2
; break;
2627 case TOK_NE
: l1
= l1
!= l2
; break;
2628 case TOK_ULE
: l1
= l1
<= l2
; break;
2629 case TOK_UGT
: l1
= l1
> l2
; break;
2630 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2631 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2632 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2633 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2635 case TOK_LAND
: l1
= l1
&& l2
; break;
2636 case TOK_LOR
: l1
= l1
|| l2
; break;
2640 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2641 l1
= ((uint32_t)l1
|
2642 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2646 /* if commutative ops, put c2 as constant */
2647 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2648 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2650 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2651 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2653 if (!const_wanted
&&
2655 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2656 (l1
== -1 && op
== TOK_SAR
))) {
2657 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2659 } else if (!const_wanted
&&
2660 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2662 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2663 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2664 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2669 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2672 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2673 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2676 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2677 /* filter out NOP operations like x*1, x-0, x&-1... */
2679 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2680 /* try to use shifts instead of muls or divs */
2681 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2690 else if (op
== TOK_PDIV
)
2696 } else if (c2
&& (op
== '+' || op
== '-') &&
2697 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2698 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2699 /* symbol + constant case */
2703 /* The backends can't always deal with addends to symbols
2704 larger than +-1<<31. Don't construct such. */
2711 /* call low level op generator */
2712 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2713 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2721 /* generate a floating point operation with constant propagation */
2722 static void gen_opif(int op
)
2726 #if defined _MSC_VER && defined __x86_64__
2727 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2734 /* currently, we cannot do computations with forward symbols */
2735 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2736 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2738 if (v1
->type
.t
== VT_FLOAT
) {
2741 } else if (v1
->type
.t
== VT_DOUBLE
) {
2749 /* NOTE: we only do constant propagation if finite number (not
2750 NaN or infinity) (ANSI spec) */
2751 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2755 case '+': f1
+= f2
; break;
2756 case '-': f1
-= f2
; break;
2757 case '*': f1
*= f2
; break;
2760 /* If not in initializer we need to potentially generate
2761 FP exceptions at runtime, otherwise we want to fold. */
2767 /* XXX: also handles tests ? */
2771 /* XXX: overflow test ? */
2772 if (v1
->type
.t
== VT_FLOAT
) {
2774 } else if (v1
->type
.t
== VT_DOUBLE
) {
2786 /* print a type. If 'varstr' is not NULL, then the variable is also
2787 printed in the type */
2789 /* XXX: add array and function pointers */
2790 static void type_to_str(char *buf
, int buf_size
,
2791 CType
*type
, const char *varstr
)
2803 pstrcat(buf
, buf_size
, "extern ");
2805 pstrcat(buf
, buf_size
, "static ");
2807 pstrcat(buf
, buf_size
, "typedef ");
2809 pstrcat(buf
, buf_size
, "inline ");
2810 if (t
& VT_VOLATILE
)
2811 pstrcat(buf
, buf_size
, "volatile ");
2812 if (t
& VT_CONSTANT
)
2813 pstrcat(buf
, buf_size
, "const ");
2815 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2816 || ((t
& VT_UNSIGNED
)
2817 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2820 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2822 buf_size
-= strlen(buf
);
2858 tstr
= "long double";
2860 pstrcat(buf
, buf_size
, tstr
);
2867 pstrcat(buf
, buf_size
, tstr
);
2868 v
= type
->ref
->v
& ~SYM_STRUCT
;
2869 if (v
>= SYM_FIRST_ANOM
)
2870 pstrcat(buf
, buf_size
, "<anonymous>");
2872 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2877 if (varstr
&& '*' == *varstr
) {
2878 pstrcat(buf1
, sizeof(buf1
), "(");
2879 pstrcat(buf1
, sizeof(buf1
), varstr
);
2880 pstrcat(buf1
, sizeof(buf1
), ")");
2882 pstrcat(buf1
, buf_size
, "(");
2884 while (sa
!= NULL
) {
2886 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2887 pstrcat(buf1
, sizeof(buf1
), buf2
);
2890 pstrcat(buf1
, sizeof(buf1
), ", ");
2892 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2893 pstrcat(buf1
, sizeof(buf1
), ", ...");
2894 pstrcat(buf1
, sizeof(buf1
), ")");
2895 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2900 if (varstr
&& '*' == *varstr
)
2901 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2903 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2904 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2907 pstrcpy(buf1
, sizeof(buf1
), "*");
2908 if (t
& VT_CONSTANT
)
2909 pstrcat(buf1
, buf_size
, "const ");
2910 if (t
& VT_VOLATILE
)
2911 pstrcat(buf1
, buf_size
, "volatile ");
2913 pstrcat(buf1
, sizeof(buf1
), varstr
);
2914 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2918 pstrcat(buf
, buf_size
, " ");
2919 pstrcat(buf
, buf_size
, varstr
);
2924 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2926 char buf1
[256], buf2
[256];
2927 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2928 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2929 tcc_error(fmt
, buf1
, buf2
);
2932 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2934 char buf1
[256], buf2
[256];
2935 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2936 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2937 tcc_warning(fmt
, buf1
, buf2
);
2940 static int pointed_size(CType
*type
)
2943 return type_size(pointed_type(type
), &align
);
2946 static void vla_runtime_pointed_size(CType
*type
)
2949 vla_runtime_type_size(pointed_type(type
), &align
);
2952 static inline int is_null_pointer(SValue
*p
)
2954 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2956 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2957 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2958 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2959 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2960 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2961 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2965 /* compare function types. OLD functions match any new functions */
2966 static int is_compatible_func(CType
*type1
, CType
*type2
)
2972 if (s1
->f
.func_call
!= s2
->f
.func_call
)
2974 if (s1
->f
.func_type
!= s2
->f
.func_type
2975 && s1
->f
.func_type
!= FUNC_OLD
2976 && s2
->f
.func_type
!= FUNC_OLD
)
2978 /* we should check the function return type for FUNC_OLD too
2979 but that causes problems with the internally used support
2980 functions such as TOK_memmove */
2981 if (s1
->f
.func_type
== FUNC_OLD
&& !s1
->next
)
2983 if (s2
->f
.func_type
== FUNC_OLD
&& !s2
->next
)
2986 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
2997 /* return true if type1 and type2 are the same. If unqualified is
2998 true, qualifiers on the types are ignored.
3000 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3004 t1
= type1
->t
& VT_TYPE
;
3005 t2
= type2
->t
& VT_TYPE
;
3007 /* strip qualifiers before comparing */
3008 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3009 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3012 /* Default Vs explicit signedness only matters for char */
3013 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3017 /* XXX: bitfields ? */
3022 && !(type1
->ref
->c
< 0
3023 || type2
->ref
->c
< 0
3024 || type1
->ref
->c
== type2
->ref
->c
))
3027 /* test more complicated cases */
3028 bt1
= t1
& VT_BTYPE
;
3029 if (bt1
== VT_PTR
) {
3030 type1
= pointed_type(type1
);
3031 type2
= pointed_type(type2
);
3032 return is_compatible_types(type1
, type2
);
3033 } else if (bt1
== VT_STRUCT
) {
3034 return (type1
->ref
== type2
->ref
);
3035 } else if (bt1
== VT_FUNC
) {
3036 return is_compatible_func(type1
, type2
);
3037 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
3038 /* If both are enums then they must be the same, if only one is then
3039 t1 and t2 must be equal, which was checked above already. */
3040 return type1
->ref
== type2
->ref
;
3046 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3047 type is stored in DEST if non-null (except for pointer plus/minus) . */
3048 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
3050 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
3051 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
3057 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
3058 ret
= op
== '?' ? 1 : 0;
3059 /* NOTE: as an extension, we accept void on only one side */
3061 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3062 if (op
== '+') ; /* Handled in caller */
3063 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3064 /* If one is a null ptr constant the result type is the other. */
3065 else if (is_null_pointer (op2
)) type
= *type1
;
3066 else if (is_null_pointer (op1
)) type
= *type2
;
3067 else if (bt1
!= bt2
) {
3068 /* accept comparison or cond-expr between pointer and integer
3070 if ((op
== '?' || TOK_ISCOND(op
))
3071 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
3072 tcc_warning("pointer/integer mismatch in %s",
3073 op
== '?' ? "conditional expression" : "comparison");
3074 else if (op
!= '-' || !is_integer_btype(bt2
))
3076 type
= *(bt1
== VT_PTR
? type1
: type2
);
3078 CType
*pt1
= pointed_type(type1
);
3079 CType
*pt2
= pointed_type(type2
);
3080 int pbt1
= pt1
->t
& VT_BTYPE
;
3081 int pbt2
= pt2
->t
& VT_BTYPE
;
3082 int newquals
, copied
= 0;
3083 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
3084 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
3085 if (op
!= '?' && !TOK_ISCOND(op
))
3088 type_incompatibility_warning(type1
, type2
,
3090 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3091 : "pointer type mismatch in comparison('%s' and '%s')");
3094 /* pointers to void get preferred, otherwise the
3095 pointed to types minus qualifs should be compatible */
3096 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
3097 /* combine qualifs */
3098 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
3099 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3102 /* copy the pointer target symbol */
3103 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3106 pointed_type(&type
)->t
|= newquals
;
3108 /* pointers to incomplete arrays get converted to
3109 pointers to completed ones if possible */
3110 if (pt1
->t
& VT_ARRAY
3111 && pt2
->t
& VT_ARRAY
3112 && pointed_type(&type
)->ref
->c
< 0
3113 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
3116 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3118 pointed_type(&type
)->ref
=
3119 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
3120 0, pointed_type(&type
)->ref
->c
);
3121 pointed_type(&type
)->ref
->c
=
3122 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
3128 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
3129 if (op
!= '?' || !compare_types(type1
, type2
, 1))
3132 } else if (is_float(bt1
) || is_float(bt2
)) {
3133 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
3134 type
.t
= VT_LDOUBLE
;
3135 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
3140 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
3141 /* cast to biggest op */
3142 type
.t
= VT_LLONG
| VT_LONG
;
3143 if (bt1
== VT_LLONG
)
3145 if (bt2
== VT_LLONG
)
3147 /* convert to unsigned if it does not fit in a long long */
3148 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
3149 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
3150 type
.t
|= VT_UNSIGNED
;
3152 /* integer operations */
3153 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
3154 /* convert to unsigned if it does not fit in an integer */
3155 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
3156 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
3157 type
.t
|= VT_UNSIGNED
;
3164 /* generic gen_op: handles types problems */
3165 ST_FUNC
void gen_op(int op
)
3167 int u
, t1
, t2
, bt1
, bt2
, t
;
3168 CType type1
, combtype
;
3171 t1
= vtop
[-1].type
.t
;
3172 t2
= vtop
[0].type
.t
;
3173 bt1
= t1
& VT_BTYPE
;
3174 bt2
= t2
& VT_BTYPE
;
3176 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
3177 if (bt2
== VT_FUNC
) {
3178 mk_pointer(&vtop
->type
);
3181 if (bt1
== VT_FUNC
) {
3183 mk_pointer(&vtop
->type
);
3188 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
3189 tcc_error_noabort("invalid operand types for binary operation");
3191 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3192 /* at least one operand is a pointer */
3193 /* relational op: must be both pointers */
3196 /* if both pointers, then it must be the '-' op */
3197 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
3199 tcc_error("cannot use pointers here");
3200 if (vtop
[-1].type
.t
& VT_VLA
) {
3201 vla_runtime_pointed_size(&vtop
[-1].type
);
3203 vpushi(pointed_size(&vtop
[-1].type
));
3207 vtop
->type
.t
= VT_PTRDIFF_T
;
3211 /* exactly one pointer : must be '+' or '-'. */
3212 if (op
!= '-' && op
!= '+')
3213 tcc_error("cannot use pointers here");
3214 /* Put pointer as first operand */
3215 if (bt2
== VT_PTR
) {
3217 t
= t1
, t1
= t2
, t2
= t
;
3220 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
3221 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3224 type1
= vtop
[-1].type
;
3225 if (vtop
[-1].type
.t
& VT_VLA
)
3226 vla_runtime_pointed_size(&vtop
[-1].type
);
3228 u
= pointed_size(&vtop
[-1].type
);
3230 tcc_error("unknown array element size");
3234 /* XXX: cast to int ? (long long case) */
3239 #ifdef CONFIG_TCC_BCHECK
3240 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
3241 /* if bounded pointers, we generate a special code to
3248 gen_bounded_ptr_add();
3254 type1
.t
&= ~VT_ARRAY
;
3255 /* put again type if gen_opic() swaped operands */
3259 /* floats can only be used for a few operations */
3260 if (is_float(combtype
.t
)
3261 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3263 tcc_error("invalid operands for binary operation");
3264 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
3265 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
3266 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
3268 t
|= (VT_LONG
& t1
);
3272 t
= t2
= combtype
.t
;
3273 /* XXX: currently, some unsigned operations are explicit, so
3274 we modify them here */
3275 if (t
& VT_UNSIGNED
) {
3282 else if (op
== TOK_LT
)
3284 else if (op
== TOK_GT
)
3286 else if (op
== TOK_LE
)
3288 else if (op
== TOK_GE
)
3294 /* special case for shifts and long long: we keep the shift as
3296 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
3303 if (TOK_ISCOND(op
)) {
3304 /* relational op: the result is an int */
3305 vtop
->type
.t
= VT_INT
;
3310 // Make sure that we have converted to an rvalue:
3311 if (vtop
->r
& VT_LVAL
)
3312 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3315 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3316 #define gen_cvt_itof1 gen_cvt_itof
3318 /* generic itof for unsigned long long case */
3319 static void gen_cvt_itof1(int t
)
3321 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3322 (VT_LLONG
| VT_UNSIGNED
)) {
3325 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
3326 #if LDOUBLE_SIZE != 8
3327 else if (t
== VT_LDOUBLE
)
3328 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
3331 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
3342 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3343 #define gen_cvt_ftoi1 gen_cvt_ftoi
3345 /* generic ftoi for unsigned long long case */
3346 static void gen_cvt_ftoi1(int t
)
3349 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3350 /* not handled natively */
3351 st
= vtop
->type
.t
& VT_BTYPE
;
3353 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
3354 #if LDOUBLE_SIZE != 8
3355 else if (st
== VT_LDOUBLE
)
3356 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
3359 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
3370 /* special delayed cast for char/short */
3371 static void force_charshort_cast(void)
3373 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3374 int dbt
= vtop
->type
.t
;
3375 vtop
->r
&= ~VT_MUSTCAST
;
3377 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3381 static void gen_cast_s(int t
)
3389 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3390 static void gen_cast(CType
*type
)
3392 int sbt
, dbt
, sf
, df
, c
;
3393 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3395 /* special delayed cast for char/short */
3396 if (vtop
->r
& VT_MUSTCAST
)
3397 force_charshort_cast();
3399 /* bitfields first get cast to ints */
3400 if (vtop
->type
.t
& VT_BITFIELD
)
3403 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3404 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3412 dbt_bt
= dbt
& VT_BTYPE
;
3413 sbt_bt
= sbt
& VT_BTYPE
;
3415 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3416 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3417 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3420 /* constant case: we can do it now */
3421 /* XXX: in ISOC, cannot do it if error in convert */
3422 if (sbt
== VT_FLOAT
)
3423 vtop
->c
.ld
= vtop
->c
.f
;
3424 else if (sbt
== VT_DOUBLE
)
3425 vtop
->c
.ld
= vtop
->c
.d
;
3428 if (sbt_bt
== VT_LLONG
) {
3429 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3430 vtop
->c
.ld
= vtop
->c
.i
;
3432 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3434 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3435 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3437 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3440 if (dbt
== VT_FLOAT
)
3441 vtop
->c
.f
= (float)vtop
->c
.ld
;
3442 else if (dbt
== VT_DOUBLE
)
3443 vtop
->c
.d
= (double)vtop
->c
.ld
;
3444 } else if (sf
&& dbt
== VT_BOOL
) {
3445 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3448 vtop
->c
.i
= vtop
->c
.ld
;
3449 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3451 else if (sbt
& VT_UNSIGNED
)
3452 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3454 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3456 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3458 else if (dbt
== VT_BOOL
)
3459 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3461 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3462 dbt_bt
== VT_SHORT
? 0xffff :
3465 if (!(dbt
& VT_UNSIGNED
))
3466 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3471 } else if (dbt
== VT_BOOL
3472 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3473 == (VT_CONST
| VT_SYM
)) {
3474 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3480 /* cannot generate code for global or static initializers */
3481 if (STATIC_DATA_WANTED
)
3484 /* non constant case: generate code */
3485 if (dbt
== VT_BOOL
) {
3486 gen_test_zero(TOK_NE
);
3492 /* convert from fp to fp */
3495 /* convert int to fp */
3498 /* convert fp to int */
3500 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3503 goto again
; /* may need char/short cast */
3508 ds
= btype_size(dbt_bt
);
3509 ss
= btype_size(sbt_bt
);
3510 if (ds
== 0 || ss
== 0) {
3511 if (dbt_bt
== VT_VOID
)
3513 cast_error(&vtop
->type
, type
);
3515 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3516 tcc_error("cast to incomplete type");
3518 /* same size and no sign conversion needed */
3519 if (ds
== ss
&& ds
>= 4)
3521 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3522 tcc_warning("cast between pointer and integer of different size");
3523 if (sbt_bt
== VT_PTR
) {
3524 /* put integer type to allow logical operations below */
3525 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3529 /* processor allows { int a = 0, b = *(char*)&a; }
3530 That means that if we cast to less width, we can just
3531 change the type and read it still later. */
3532 #define ALLOW_SUBTYPE_ACCESS 1
3534 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3535 /* value still in memory */
3541 goto done
; /* no 64bit envolved */
3549 /* generate high word */
3550 if (sbt
& VT_UNSIGNED
) {
3559 } else if (ss
== 8) {
3560 /* from long long: just take low order word */
3568 /* need to convert from 32bit to 64bit */
3569 if (sbt
& VT_UNSIGNED
) {
3570 #if defined(TCC_TARGET_RISCV64)
3571 /* RISC-V keeps 32bit vals in registers sign-extended.
3572 So here we need a zero-extension. */
3581 ss
= ds
, ds
= 4, dbt
= sbt
;
3582 } else if (ss
== 8) {
3583 /* XXX some architectures (e.g. risc-v) would like it
3584 better for this merely being a 32-to-64 sign or zero-
3586 trunc
= 32; /* zero upper 32 bits */
3594 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3600 bits
= (ss
- ds
) * 8;
3601 /* for unsigned, gen_op will convert SAR to SHR */
3602 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3605 vpushi(bits
- trunc
);
3612 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3615 /* return type size as known at compile time. Put alignment at 'a' */
3616 ST_FUNC
int type_size(CType
*type
, int *a
)
3621 bt
= type
->t
& VT_BTYPE
;
3622 if (bt
== VT_STRUCT
) {
3627 } else if (bt
== VT_PTR
) {
3628 if (type
->t
& VT_ARRAY
) {
3632 ts
= type_size(&s
->type
, a
);
3634 if (ts
< 0 && s
->c
< 0)
3642 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3643 return -1; /* incomplete enum */
3644 } else if (bt
== VT_LDOUBLE
) {
3646 return LDOUBLE_SIZE
;
3647 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3648 #ifdef TCC_TARGET_I386
3649 #ifdef TCC_TARGET_PE
3654 #elif defined(TCC_TARGET_ARM)
3664 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3667 } else if (bt
== VT_SHORT
) {
3670 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3674 /* char, void, function, _Bool */
3680 /* push type size as known at runtime time on top of value stack. Put
3682 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
3684 if (type
->t
& VT_VLA
) {
3685 type_size(&type
->ref
->type
, a
);
3686 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3688 vpushi(type_size(type
, a
));
3692 /* return the pointed type of t */
3693 static inline CType
*pointed_type(CType
*type
)
3695 return &type
->ref
->type
;
3698 /* modify type so that its it is a pointer to type. */
3699 ST_FUNC
void mk_pointer(CType
*type
)
3702 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3703 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3707 /* return true if type1 and type2 are exactly the same (including
3710 static int is_compatible_types(CType
*type1
, CType
*type2
)
3712 return compare_types(type1
,type2
,0);
3715 /* return true if type1 and type2 are the same (ignoring qualifiers).
3717 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3719 return compare_types(type1
,type2
,1);
3722 static void cast_error(CType
*st
, CType
*dt
)
3724 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3727 /* verify type compatibility to store vtop in 'dt' type */
3728 static void verify_assign_cast(CType
*dt
)
3730 CType
*st
, *type1
, *type2
;
3731 int dbt
, sbt
, qualwarn
, lvl
;
3733 st
= &vtop
->type
; /* source type */
3734 dbt
= dt
->t
& VT_BTYPE
;
3735 sbt
= st
->t
& VT_BTYPE
;
3736 if (dt
->t
& VT_CONSTANT
)
3737 tcc_warning("assignment of read-only location");
3741 tcc_error("assignment to void expression");
3744 /* special cases for pointers */
3745 /* '0' can also be a pointer */
3746 if (is_null_pointer(vtop
))
3748 /* accept implicit pointer to integer cast with warning */
3749 if (is_integer_btype(sbt
)) {
3750 tcc_warning("assignment makes pointer from integer without a cast");
3753 type1
= pointed_type(dt
);
3755 type2
= pointed_type(st
);
3756 else if (sbt
== VT_FUNC
)
3757 type2
= st
; /* a function is implicitly a function pointer */
3760 if (is_compatible_types(type1
, type2
))
3762 for (qualwarn
= lvl
= 0;; ++lvl
) {
3763 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3764 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3766 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3767 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3768 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3770 type1
= pointed_type(type1
);
3771 type2
= pointed_type(type2
);
3773 if (!is_compatible_unqualified_types(type1
, type2
)) {
3774 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3775 /* void * can match anything */
3776 } else if (dbt
== sbt
3777 && is_integer_btype(sbt
& VT_BTYPE
)
3778 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3779 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3780 /* Like GCC don't warn by default for merely changes
3781 in pointer target signedness. Do warn for different
3782 base types, though, in particular for unsigned enums
3783 and signed int targets. */
3785 tcc_warning("assignment from incompatible pointer type");
3790 tcc_warning("assignment discards qualifiers from pointer target type");
3796 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3797 tcc_warning("assignment makes integer from pointer without a cast");
3798 } else if (sbt
== VT_STRUCT
) {
3799 goto case_VT_STRUCT
;
3801 /* XXX: more tests */
3805 if (!is_compatible_unqualified_types(dt
, st
)) {
3813 static void gen_assign_cast(CType
*dt
)
3815 verify_assign_cast(dt
);
3819 /* store vtop in lvalue pushed on stack */
3820 ST_FUNC
void vstore(void)
3822 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3824 ft
= vtop
[-1].type
.t
;
3825 sbt
= vtop
->type
.t
& VT_BTYPE
;
3826 dbt
= ft
& VT_BTYPE
;
3828 verify_assign_cast(&vtop
[-1].type
);
3830 if (sbt
== VT_STRUCT
) {
3831 /* if structure, only generate pointer */
3832 /* structure assignment : generate memcpy */
3833 /* XXX: optimize if small size */
3834 size
= type_size(&vtop
->type
, &align
);
3838 #ifdef CONFIG_TCC_BCHECK
3839 if (vtop
->r
& VT_MUSTBOUND
)
3840 gbound(); /* check would be wrong after gaddrof() */
3842 vtop
->type
.t
= VT_PTR
;
3845 /* address of memcpy() */
3848 vpush_global_sym(&func_old_type
, TOK_memmove8
);
3849 else if(!(align
& 3))
3850 vpush_global_sym(&func_old_type
, TOK_memmove4
);
3853 /* Use memmove, rather than memcpy, as dest and src may be same: */
3854 vpush_global_sym(&func_old_type
, TOK_memmove
);
3859 #ifdef CONFIG_TCC_BCHECK
3860 if (vtop
->r
& VT_MUSTBOUND
)
3863 vtop
->type
.t
= VT_PTR
;
3868 /* leave source on stack */
3870 } else if (ft
& VT_BITFIELD
) {
3871 /* bitfield store handling */
3873 /* save lvalue as expression result (example: s.b = s.a = n;) */
3874 vdup(), vtop
[-1] = vtop
[-2];
3876 bit_pos
= BIT_POS(ft
);
3877 bit_size
= BIT_SIZE(ft
);
3878 /* remove bit field info to avoid loops */
3879 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3881 if (dbt
== VT_BOOL
) {
3882 gen_cast(&vtop
[-1].type
);
3883 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3885 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3886 if (dbt
!= VT_BOOL
) {
3887 gen_cast(&vtop
[-1].type
);
3888 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3890 if (r
== VT_STRUCT
) {
3891 store_packed_bf(bit_pos
, bit_size
);
3893 unsigned long long mask
= (1ULL << bit_size
) - 1;
3894 if (dbt
!= VT_BOOL
) {
3896 if (dbt
== VT_LLONG
)
3899 vpushi((unsigned)mask
);
3906 /* duplicate destination */
3909 /* load destination, mask and or with source */
3910 if (dbt
== VT_LLONG
)
3911 vpushll(~(mask
<< bit_pos
));
3913 vpushi(~((unsigned)mask
<< bit_pos
));
3918 /* ... and discard */
3921 } else if (dbt
== VT_VOID
) {
3924 /* optimize char/short casts */
3926 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3927 && is_integer_btype(sbt
)
3929 if ((vtop
->r
& VT_MUSTCAST
)
3930 && btype_size(dbt
) > btype_size(sbt
)
3932 force_charshort_cast();
3935 gen_cast(&vtop
[-1].type
);
3938 #ifdef CONFIG_TCC_BCHECK
3939 /* bound check case */
3940 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3946 gv(RC_TYPE(dbt
)); /* generate value */
3949 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3950 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3951 vtop
->type
.t
= ft
& VT_TYPE
;
3954 /* if lvalue was saved on stack, must read it */
3955 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3957 r
= get_reg(RC_INT
);
3958 sv
.type
.t
= VT_PTRDIFF_T
;
3959 sv
.r
= VT_LOCAL
| VT_LVAL
;
3960 sv
.c
.i
= vtop
[-1].c
.i
;
3962 vtop
[-1].r
= r
| VT_LVAL
;
3965 r
= vtop
->r
& VT_VALMASK
;
3966 /* two word case handling :
3967 store second register at word + 4 (or +8 for x86-64) */
3968 if (USING_TWO_WORDS(dbt
)) {
3969 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
3970 vtop
[-1].type
.t
= load_type
;
3973 /* convert to int to increment easily */
3974 vtop
->type
.t
= VT_PTRDIFF_T
;
3980 vtop
[-1].type
.t
= load_type
;
3981 /* XXX: it works because r2 is spilled last ! */
3982 store(vtop
->r2
, vtop
- 1);
3988 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
3992 /* post defines POST/PRE add. c is the token ++ or -- */
3993 ST_FUNC
void inc(int post
, int c
)
3996 vdup(); /* save lvalue */
3998 gv_dup(); /* duplicate value */
4003 vpushi(c
- TOK_MID
);
4005 vstore(); /* store value */
4007 vpop(); /* if post op, return saved value */
4010 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
4012 /* read the string */
4016 while (tok
== TOK_STR
) {
4017 /* XXX: add \0 handling too ? */
4018 cstr_cat(astr
, tokc
.str
.data
, -1);
4021 cstr_ccat(astr
, '\0');
4024 /* If I is >= 1 and a power of two, returns log2(i)+1.
4025 If I is 0 returns 0. */
4026 static int exact_log2p1(int i
)
4031 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
4042 /* Parse __attribute__((...)) GNUC extension. */
4043 static void parse_attribute(AttributeDef
*ad
)
4049 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
4054 while (tok
!= ')') {
4055 if (tok
< TOK_IDENT
)
4056 expect("attribute name");
4068 tcc_warning("implicit declaration of function '%s'",
4069 get_tok_str(tok
, &tokc
));
4070 s
= external_global_sym(tok
, &func_old_type
);
4071 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4072 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
4073 ad
->cleanup_func
= s
;
4078 case TOK_CONSTRUCTOR1
:
4079 case TOK_CONSTRUCTOR2
:
4080 ad
->f
.func_ctor
= 1;
4082 case TOK_DESTRUCTOR1
:
4083 case TOK_DESTRUCTOR2
:
4084 ad
->f
.func_dtor
= 1;
4089 parse_mult_str(&astr
, "section name");
4090 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
4097 parse_mult_str(&astr
, "alias(\"target\")");
4098 ad
->alias_target
= /* save string as token, for later */
4099 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
;
4103 case TOK_VISIBILITY1
:
4104 case TOK_VISIBILITY2
:
4106 parse_mult_str(&astr
,
4107 "visibility(\"default|hidden|internal|protected\")");
4108 if (!strcmp (astr
.data
, "default"))
4109 ad
->a
.visibility
= STV_DEFAULT
;
4110 else if (!strcmp (astr
.data
, "hidden"))
4111 ad
->a
.visibility
= STV_HIDDEN
;
4112 else if (!strcmp (astr
.data
, "internal"))
4113 ad
->a
.visibility
= STV_INTERNAL
;
4114 else if (!strcmp (astr
.data
, "protected"))
4115 ad
->a
.visibility
= STV_PROTECTED
;
4117 expect("visibility(\"default|hidden|internal|protected\")");
4126 if (n
<= 0 || (n
& (n
- 1)) != 0)
4127 tcc_error("alignment must be a positive power of two");
4132 ad
->a
.aligned
= exact_log2p1(n
);
4133 if (n
!= 1 << (ad
->a
.aligned
- 1))
4134 tcc_error("alignment of %d is larger than implemented", n
);
4146 /* currently, no need to handle it because tcc does not
4147 track unused objects */
4151 ad
->f
.func_noreturn
= 1;
4156 ad
->f
.func_call
= FUNC_CDECL
;
4161 ad
->f
.func_call
= FUNC_STDCALL
;
4163 #ifdef TCC_TARGET_I386
4173 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
4179 ad
->f
.func_call
= FUNC_FASTCALLW
;
4186 ad
->attr_mode
= VT_LLONG
+ 1;
4189 ad
->attr_mode
= VT_BYTE
+ 1;
4192 ad
->attr_mode
= VT_SHORT
+ 1;
4196 ad
->attr_mode
= VT_INT
+ 1;
4199 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4206 ad
->a
.dllexport
= 1;
4208 case TOK_NODECORATE
:
4209 ad
->a
.nodecorate
= 1;
4212 ad
->a
.dllimport
= 1;
4215 if (tcc_state
->warn_unsupported
)
4216 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
4217 /* skip parameters */
4219 int parenthesis
= 0;
4223 else if (tok
== ')')
4226 } while (parenthesis
&& tok
!= -1);
4239 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
4243 while ((s
= s
->next
) != NULL
) {
4244 if ((s
->v
& SYM_FIELD
) &&
4245 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
4246 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
4247 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
4259 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4261 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4262 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4263 int pcc
= !tcc_state
->ms_bitfields
;
4264 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4271 prevbt
= VT_STRUCT
; /* make it never match */
4276 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4277 if (f
->type
.t
& VT_BITFIELD
)
4278 bit_size
= BIT_SIZE(f
->type
.t
);
4281 size
= type_size(&f
->type
, &align
);
4282 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4285 if (pcc
&& bit_size
== 0) {
4286 /* in pcc mode, packing does not affect zero-width bitfields */
4289 /* in pcc mode, attribute packed overrides if set. */
4290 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4293 /* pragma pack overrides align if lesser and packs bitfields always */
4296 if (pragma_pack
< align
)
4297 align
= pragma_pack
;
4298 /* in pcc mode pragma pack also overrides individual align */
4299 if (pcc
&& pragma_pack
< a
)
4303 /* some individual align was specified */
4307 if (type
->ref
->type
.t
== VT_UNION
) {
4308 if (pcc
&& bit_size
>= 0)
4309 size
= (bit_size
+ 7) >> 3;
4314 } else if (bit_size
< 0) {
4316 c
+= (bit_pos
+ 7) >> 3;
4317 c
= (c
+ align
- 1) & -align
;
4326 /* A bit-field. Layout is more complicated. There are two
4327 options: PCC (GCC) compatible and MS compatible */
4329 /* In PCC layout a bit-field is placed adjacent to the
4330 preceding bit-fields, except if:
4332 - an individual alignment was given
4333 - it would overflow its base type container and
4334 there is no packing */
4335 if (bit_size
== 0) {
4337 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4339 } else if (f
->a
.aligned
) {
4341 } else if (!packed
) {
4343 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4344 if (ofs
> size
/ align
)
4348 /* in pcc mode, long long bitfields have type int if they fit */
4349 if (size
== 8 && bit_size
<= 32)
4350 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4352 while (bit_pos
>= align
* 8)
4353 c
+= align
, bit_pos
-= align
* 8;
4356 /* In PCC layout named bit-fields influence the alignment
4357 of the containing struct using the base types alignment,
4358 except for packed fields (which here have correct align). */
4359 if (f
->v
& SYM_FIRST_ANOM
4360 // && bit_size // ??? gcc on ARM/rpi does that
4365 bt
= f
->type
.t
& VT_BTYPE
;
4366 if ((bit_pos
+ bit_size
> size
* 8)
4367 || (bit_size
> 0) == (bt
!= prevbt
)
4369 c
= (c
+ align
- 1) & -align
;
4372 /* In MS bitfield mode a bit-field run always uses
4373 at least as many bits as the underlying type.
4374 To start a new run it's also required that this
4375 or the last bit-field had non-zero width. */
4376 if (bit_size
|| prev_bit_size
)
4379 /* In MS layout the records alignment is normally
4380 influenced by the field, except for a zero-width
4381 field at the start of a run (but by further zero-width
4382 fields it is again). */
4383 if (bit_size
== 0 && prevbt
!= bt
)
4386 prev_bit_size
= bit_size
;
4389 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4390 | (bit_pos
<< VT_STRUCT_SHIFT
);
4391 bit_pos
+= bit_size
;
4393 if (align
> maxalign
)
4397 printf("set field %s offset %-2d size %-2d align %-2d",
4398 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4399 if (f
->type
.t
& VT_BITFIELD
) {
4400 printf(" pos %-2d bits %-2d",
4413 c
+= (bit_pos
+ 7) >> 3;
4415 /* store size and alignment */
4416 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4420 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4421 /* can happen if individual align for some member was given. In
4422 this case MSVC ignores maxalign when aligning the size */
4427 c
= (c
+ a
- 1) & -a
;
4431 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4434 /* check whether we can access bitfields by their type */
4435 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4439 if (0 == (f
->type
.t
& VT_BITFIELD
))
4443 bit_size
= BIT_SIZE(f
->type
.t
);
4446 bit_pos
= BIT_POS(f
->type
.t
);
4447 size
= type_size(&f
->type
, &align
);
4448 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
4451 /* try to access the field using a different type */
4452 c0
= -1, s
= align
= 1;
4455 px
= f
->c
* 8 + bit_pos
;
4456 cx
= (px
>> 3) & -align
;
4457 px
= px
- (cx
<< 3);
4460 s
= (px
+ bit_size
+ 7) >> 3;
4470 s
= type_size(&t
, &align
);
4474 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
4475 /* update offset and bit position */
4478 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4479 | (bit_pos
<< VT_STRUCT_SHIFT
);
4483 printf("FIX field %s offset %-2d size %-2d align %-2d "
4484 "pos %-2d bits %-2d\n",
4485 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4486 cx
, s
, align
, px
, bit_size
);
4489 /* fall back to load/store single-byte wise */
4490 f
->auxtype
= VT_STRUCT
;
4492 printf("FIX field %s : load byte-wise\n",
4493 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4499 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4500 static void struct_decl(CType
*type
, int u
)
4502 int v
, c
, size
, align
, flexible
;
4503 int bit_size
, bsize
, bt
;
4505 AttributeDef ad
, ad1
;
4508 memset(&ad
, 0, sizeof ad
);
4510 parse_attribute(&ad
);
4514 /* struct already defined ? return it */
4516 expect("struct/union/enum name");
4518 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4521 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4523 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4528 /* Record the original enum/struct/union token. */
4529 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4531 /* we put an undefined size for struct/union */
4532 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4533 s
->r
= 0; /* default alignment is zero as gcc */
4535 type
->t
= s
->type
.t
;
4541 tcc_error("struct/union/enum already defined");
4543 /* cannot be empty */
4544 /* non empty enums are not allowed */
4547 long long ll
= 0, pl
= 0, nl
= 0;
4550 /* enum symbols have static storage */
4551 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4555 expect("identifier");
4557 if (ss
&& !local_stack
)
4558 tcc_error("redefinition of enumerator '%s'",
4559 get_tok_str(v
, NULL
));
4563 ll
= expr_const64();
4565 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4567 *ps
= ss
, ps
= &ss
->next
;
4576 /* NOTE: we accept a trailing comma */
4581 /* set integral type of the enum */
4584 if (pl
!= (unsigned)pl
)
4585 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4587 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4588 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4589 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4591 /* set type for enum members */
4592 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4594 if (ll
== (int)ll
) /* default is int if it fits */
4596 if (t
.t
& VT_UNSIGNED
) {
4597 ss
->type
.t
|= VT_UNSIGNED
;
4598 if (ll
== (unsigned)ll
)
4601 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4602 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4607 while (tok
!= '}') {
4608 if (!parse_btype(&btype
, &ad1
)) {
4614 tcc_error("flexible array member '%s' not at the end of struct",
4615 get_tok_str(v
, NULL
));
4621 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4623 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4624 expect("identifier");
4626 int v
= btype
.ref
->v
;
4627 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4628 if (tcc_state
->ms_extensions
== 0)
4629 expect("identifier");
4633 if (type_size(&type1
, &align
) < 0) {
4634 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4637 tcc_error("field '%s' has incomplete type",
4638 get_tok_str(v
, NULL
));
4640 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4641 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4642 (type1
.t
& VT_STORAGE
))
4643 tcc_error("invalid type for '%s'",
4644 get_tok_str(v
, NULL
));
4648 bit_size
= expr_const();
4649 /* XXX: handle v = 0 case for messages */
4651 tcc_error("negative width in bit-field '%s'",
4652 get_tok_str(v
, NULL
));
4653 if (v
&& bit_size
== 0)
4654 tcc_error("zero width for bit-field '%s'",
4655 get_tok_str(v
, NULL
));
4656 parse_attribute(&ad1
);
4658 size
= type_size(&type1
, &align
);
4659 if (bit_size
>= 0) {
4660 bt
= type1
.t
& VT_BTYPE
;
4666 tcc_error("bitfields must have scalar type");
4668 if (bit_size
> bsize
) {
4669 tcc_error("width of '%s' exceeds its type",
4670 get_tok_str(v
, NULL
));
4671 } else if (bit_size
== bsize
4672 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4673 /* no need for bit fields */
4675 } else if (bit_size
== 64) {
4676 tcc_error("field width 64 not implemented");
4678 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4680 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4683 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4684 /* Remember we've seen a real field to check
4685 for placement of flexible array member. */
4688 /* If member is a struct or bit-field, enforce
4689 placing into the struct (as anonymous). */
4691 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4696 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4701 if (tok
== ';' || tok
== TOK_EOF
)
4708 parse_attribute(&ad
);
4709 if (ad
.cleanup_func
) {
4710 tcc_warning("attribute '__cleanup__' ignored on type");
4712 struct_layout(type
, &ad
);
4717 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4719 merge_symattr(&ad
->a
, &s
->a
);
4720 merge_funcattr(&ad
->f
, &s
->f
);
4723 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4724 are added to the element type, copied because it could be a typedef. */
4725 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4727 while (type
->t
& VT_ARRAY
) {
4728 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4729 type
= &type
->ref
->type
;
4731 type
->t
|= qualifiers
;
4734 /* return 0 if no type declaration. otherwise, return the basic type
4737 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4739 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4743 memset(ad
, 0, sizeof(AttributeDef
));
4753 /* currently, we really ignore extension */
4763 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4764 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4765 tmbt
: tcc_error("too many basic types");
4768 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4773 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4790 memset(&ad1
, 0, sizeof(AttributeDef
));
4791 if (parse_btype(&type1
, &ad1
)) {
4792 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4794 n
= 1 << (ad1
.a
.aligned
- 1);
4796 type_size(&type1
, &n
);
4799 if (n
<= 0 || (n
& (n
- 1)) != 0)
4800 tcc_error("alignment must be a positive power of two");
4803 ad
->a
.aligned
= exact_log2p1(n
);
4807 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4808 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4809 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4810 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4817 #ifdef TCC_TARGET_ARM64
4819 /* GCC's __uint128_t appears in some Linux header files. Make it a
4820 synonym for long double to get the size and alignment right. */
4831 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4832 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4840 struct_decl(&type1
, VT_ENUM
);
4843 type
->ref
= type1
.ref
;
4846 struct_decl(&type1
, VT_STRUCT
);
4849 struct_decl(&type1
, VT_UNION
);
4852 /* type modifiers */
4857 parse_btype_qualify(type
, VT_CONSTANT
);
4865 parse_btype_qualify(type
, VT_VOLATILE
);
4872 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4873 tcc_error("signed and unsigned modifier");
4886 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4887 tcc_error("signed and unsigned modifier");
4888 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4904 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4905 tcc_error("multiple storage classes");
4917 ad
->f
.func_noreturn
= 1;
4919 /* GNUC attribute */
4920 case TOK_ATTRIBUTE1
:
4921 case TOK_ATTRIBUTE2
:
4922 parse_attribute(ad
);
4923 if (ad
->attr_mode
) {
4924 u
= ad
->attr_mode
-1;
4925 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4933 parse_expr_type(&type1
);
4934 /* remove all storage modifiers except typedef */
4935 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4937 sym_to_attr(ad
, type1
.ref
);
4943 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4947 if (tok
== ':' && !in_generic
) {
4948 /* ignore if it's a label */
4953 t
&= ~(VT_BTYPE
|VT_LONG
);
4954 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4955 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4956 type
->ref
= s
->type
.ref
;
4958 parse_btype_qualify(type
, t
);
4960 /* get attributes from typedef */
4969 if (tcc_state
->char_is_unsigned
) {
4970 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
4973 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4974 bt
= t
& (VT_BTYPE
|VT_LONG
);
4976 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
4977 #ifdef TCC_TARGET_PE
4978 if (bt
== VT_LDOUBLE
)
4979 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
4985 /* convert a function parameter type (array to pointer and function to
4986 function pointer) */
4987 static inline void convert_parameter_type(CType
*pt
)
4989 /* remove const and volatile qualifiers (XXX: const could be used
4990 to indicate a const function parameter) */
4991 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
4992 /* array must be transformed to pointer according to ANSI C */
4994 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
4999 ST_FUNC
void parse_asm_str(CString
*astr
)
5002 parse_mult_str(astr
, "string constant");
5005 /* Parse an asm label and return the token */
5006 static int asm_label_instr(void)
5012 parse_asm_str(&astr
);
5015 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
5017 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
5022 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
5024 int n
, l
, t1
, arg_size
, align
, unused_align
;
5025 Sym
**plast
, *s
, *first
;
5030 /* function type, or recursive declarator (return if so) */
5032 if (td
&& !(td
& TYPE_ABSTRACT
))
5036 else if (parse_btype(&pt
, &ad1
))
5039 merge_attr (ad
, &ad1
);
5048 /* read param name and compute offset */
5049 if (l
!= FUNC_OLD
) {
5050 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
5052 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
5053 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
5054 tcc_error("parameter declared as void");
5058 expect("identifier");
5059 pt
.t
= VT_VOID
; /* invalid type */
5063 convert_parameter_type(&pt
);
5064 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
5065 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
5071 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
5076 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
5077 tcc_error("invalid type");
5080 /* if no parameters, then old type prototype */
5083 /* NOTE: const is ignored in returned type as it has a special
5084 meaning in gcc / C++ */
5085 type
->t
&= ~VT_CONSTANT
;
5086 /* some ancient pre-K&R C allows a function to return an array
5087 and the array brackets to be put after the arguments, such
5088 that "int c()[]" means something like "int[] c()" */
5091 skip(']'); /* only handle simple "[]" */
5094 /* we push a anonymous symbol which will contain the function prototype */
5095 ad
->f
.func_args
= arg_size
;
5096 ad
->f
.func_type
= l
;
5097 s
= sym_push(SYM_FIELD
, type
, 0, 0);
5103 } else if (tok
== '[') {
5104 int saved_nocode_wanted
= nocode_wanted
;
5105 /* array definition */
5108 /* XXX The optional type-quals and static should only be accepted
5109 in parameter decls. The '*' as well, and then even only
5110 in prototypes (not function defs). */
5112 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5127 if (!local_stack
|| (storage
& VT_STATIC
))
5128 vpushi(expr_const());
5130 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5131 length must always be evaluated, even under nocode_wanted,
5132 so that its size slot is initialized (e.g. under sizeof
5137 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5140 tcc_error("invalid array size");
5142 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5143 tcc_error("size of variable length array should be an integer");
5149 /* parse next post type */
5150 post_type(type
, ad
, storage
, 0);
5152 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5153 tcc_error("declaration of an array of functions");
5154 if ((type
->t
& VT_BTYPE
) == VT_VOID
5155 || type_size(type
, &unused_align
) < 0)
5156 tcc_error("declaration of an array of incomplete type elements");
5158 t1
|= type
->t
& VT_VLA
;
5162 tcc_error("need explicit inner array size in VLAs");
5163 loc
-= type_size(&int_type
, &align
);
5167 vla_runtime_type_size(type
, &align
);
5169 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5175 nocode_wanted
= saved_nocode_wanted
;
5177 /* we push an anonymous symbol which will contain the array
5179 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5180 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5186 /* Parse a type declarator (except basic type), and return the type
5187 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5188 expected. 'type' should contain the basic type. 'ad' is the
5189 attribute definition of the basic type. It can be modified by
5190 type_decl(). If this (possibly abstract) declarator is a pointer chain
5191 it returns the innermost pointed to type (equals *type, but is a different
5192 pointer), otherwise returns type itself, that's used for recursive calls. */
5193 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5196 int qualifiers
, storage
;
5198 /* recursive type, remove storage bits first, apply them later again */
5199 storage
= type
->t
& VT_STORAGE
;
5200 type
->t
&= ~VT_STORAGE
;
5203 while (tok
== '*') {
5211 qualifiers
|= VT_CONSTANT
;
5216 qualifiers
|= VT_VOLATILE
;
5222 /* XXX: clarify attribute handling */
5223 case TOK_ATTRIBUTE1
:
5224 case TOK_ATTRIBUTE2
:
5225 parse_attribute(ad
);
5229 type
->t
|= qualifiers
;
5231 /* innermost pointed to type is the one for the first derivation */
5232 ret
= pointed_type(type
);
5236 /* This is possibly a parameter type list for abstract declarators
5237 ('int ()'), use post_type for testing this. */
5238 if (!post_type(type
, ad
, 0, td
)) {
5239 /* It's not, so it's a nested declarator, and the post operations
5240 apply to the innermost pointed to type (if any). */
5241 /* XXX: this is not correct to modify 'ad' at this point, but
5242 the syntax is not clear */
5243 parse_attribute(ad
);
5244 post
= type_decl(type
, ad
, v
, td
);
5248 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5249 /* type identifier */
5254 if (!(td
& TYPE_ABSTRACT
))
5255 expect("identifier");
5258 post_type(post
, ad
, storage
, 0);
5259 parse_attribute(ad
);
5264 /* indirection with full error checking and bound check */
5265 ST_FUNC
void indir(void)
5267 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5268 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5272 if (vtop
->r
& VT_LVAL
)
5274 vtop
->type
= *pointed_type(&vtop
->type
);
5275 /* Arrays and functions are never lvalues */
5276 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5277 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5279 /* if bound checking, the referenced pointer must be checked */
5280 #ifdef CONFIG_TCC_BCHECK
5281 if (tcc_state
->do_bounds_check
)
5282 vtop
->r
|= VT_MUSTBOUND
;
5287 /* pass a parameter to a function and do type checking and casting */
5288 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5293 func_type
= func
->f
.func_type
;
5294 if (func_type
== FUNC_OLD
||
5295 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5296 /* default casting : only need to convert float to double */
5297 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5298 gen_cast_s(VT_DOUBLE
);
5299 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5300 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5301 type
.ref
= vtop
->type
.ref
;
5303 } else if (vtop
->r
& VT_MUSTCAST
) {
5304 force_charshort_cast();
5306 } else if (arg
== NULL
) {
5307 tcc_error("too many arguments to function");
5310 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5311 gen_assign_cast(&type
);
5315 /* parse an expression and return its type without any side effect. */
5316 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5325 /* parse an expression of the form '(type)' or '(expr)' and return its
5327 static void parse_expr_type(CType
*type
)
5333 if (parse_btype(type
, &ad
)) {
5334 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5336 expr_type(type
, gexpr
);
5341 static void parse_type(CType
*type
)
5346 if (!parse_btype(type
, &ad
)) {
5349 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5352 static void parse_builtin_params(int nc
, const char *args
)
5359 while ((c
= *args
++)) {
5363 case 'e': expr_eq(); continue;
5364 case 't': parse_type(&t
); vpush(&t
); continue;
5365 default: tcc_error("internal error"); break;
5373 ST_FUNC
void unary(void)
5375 int n
, t
, align
, size
, r
, sizeof_caller
;
5380 /* generate line number info */
5381 if (tcc_state
->do_debug
)
5382 tcc_debug_line(tcc_state
);
5384 sizeof_caller
= in_sizeof
;
5387 /* XXX: GCC 2.95.3 does not generate a table although it should be
5395 #ifdef TCC_TARGET_PE
5396 t
= VT_SHORT
|VT_UNSIGNED
;
5404 vsetc(&type
, VT_CONST
, &tokc
);
5408 t
= VT_INT
| VT_UNSIGNED
;
5414 t
= VT_LLONG
| VT_UNSIGNED
;
5426 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5429 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5431 case TOK___FUNCTION__
:
5433 goto tok_identifier
;
5439 /* special function name identifier */
5440 len
= strlen(funcname
) + 1;
5441 /* generate char[len] type */
5446 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
5447 if (!NODATA_WANTED
) {
5448 ptr
= section_ptr_add(data_section
, len
);
5449 memcpy(ptr
, funcname
, len
);
5455 #ifdef TCC_TARGET_PE
5456 t
= VT_SHORT
| VT_UNSIGNED
;
5462 /* string parsing */
5464 if (tcc_state
->char_is_unsigned
)
5465 t
= VT_BYTE
| VT_UNSIGNED
;
5467 if (tcc_state
->warn_write_strings
)
5472 memset(&ad
, 0, sizeof(AttributeDef
));
5473 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5478 if (parse_btype(&type
, &ad
)) {
5479 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5481 /* check ISOC99 compound literal */
5483 /* data is allocated locally by default */
5488 /* all except arrays are lvalues */
5489 if (!(type
.t
& VT_ARRAY
))
5491 memset(&ad
, 0, sizeof(AttributeDef
));
5492 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5494 if (sizeof_caller
) {
5501 } else if (tok
== '{') {
5502 int saved_nocode_wanted
= nocode_wanted
;
5503 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5504 tcc_error("expected constant");
5505 /* save all registers */
5507 /* statement expression : we do not accept break/continue
5508 inside as GCC does. We do retain the nocode_wanted state,
5509 as statement expressions can't ever be entered from the
5510 outside, so any reactivation of code emission (from labels
5511 or loop heads) can be disabled again after the end of it. */
5513 nocode_wanted
= saved_nocode_wanted
;
5528 /* functions names must be treated as function pointers,
5529 except for unary '&' and sizeof. Since we consider that
5530 functions are not lvalues, we only have to handle it
5531 there and in function calls. */
5532 /* arrays can also be used although they are not lvalues */
5533 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5534 !(vtop
->type
.t
& VT_ARRAY
))
5537 vtop
->sym
->a
.addrtaken
= 1;
5538 mk_pointer(&vtop
->type
);
5544 gen_test_zero(TOK_EQ
);
5555 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5556 tcc_error("pointer not accepted for unary plus");
5557 /* In order to force cast, we add zero, except for floating point
5558 where we really need a no-op (otherwise -0.0 will be transformed
5560 if (!is_float(vtop
->type
.t
)) {
5572 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5574 if (vtop
[1].r
& VT_SYM
)
5575 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5576 size
= type_size(&type
, &align
);
5577 if (s
&& s
->a
.aligned
)
5578 align
= 1 << (s
->a
.aligned
- 1);
5579 if (t
== TOK_SIZEOF
) {
5580 if (!(type
.t
& VT_VLA
)) {
5582 tcc_error("sizeof applied to an incomplete type");
5585 vla_runtime_type_size(&type
, &align
);
5590 vtop
->type
.t
|= VT_UNSIGNED
;
5593 case TOK_builtin_expect
:
5594 /* __builtin_expect is a no-op for now */
5595 parse_builtin_params(0, "ee");
5598 case TOK_builtin_types_compatible_p
:
5599 parse_builtin_params(0, "tt");
5600 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5601 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5602 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5606 case TOK_builtin_choose_expr
:
5633 case TOK_builtin_constant_p
:
5634 parse_builtin_params(1, "e");
5635 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
5639 case TOK_builtin_frame_address
:
5640 case TOK_builtin_return_address
:
5646 if (tok
!= TOK_CINT
) {
5647 tcc_error("%s only takes positive integers",
5648 tok1
== TOK_builtin_return_address
?
5649 "__builtin_return_address" :
5650 "__builtin_frame_address");
5652 level
= (uint32_t)tokc
.i
;
5657 vset(&type
, VT_LOCAL
, 0); /* local frame */
5659 #ifdef TCC_TARGET_RISCV64
5663 mk_pointer(&vtop
->type
);
5664 indir(); /* -> parent frame */
5666 if (tok1
== TOK_builtin_return_address
) {
5667 // assume return address is just above frame pointer on stack
5668 #ifdef TCC_TARGET_ARM
5671 #elif defined TCC_TARGET_RISCV64
5678 mk_pointer(&vtop
->type
);
5683 #ifdef TCC_TARGET_RISCV64
5684 case TOK_builtin_va_start
:
5685 parse_builtin_params(0, "ee");
5686 r
= vtop
->r
& VT_VALMASK
;
5690 tcc_error("__builtin_va_start expects a local variable");
5695 #ifdef TCC_TARGET_X86_64
5696 #ifdef TCC_TARGET_PE
5697 case TOK_builtin_va_start
:
5698 parse_builtin_params(0, "ee");
5699 r
= vtop
->r
& VT_VALMASK
;
5703 tcc_error("__builtin_va_start expects a local variable");
5705 vtop
->type
= char_pointer_type
;
5710 case TOK_builtin_va_arg_types
:
5711 parse_builtin_params(0, "t");
5712 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5719 #ifdef TCC_TARGET_ARM64
5720 case TOK_builtin_va_start
: {
5721 parse_builtin_params(0, "ee");
5725 vtop
->type
.t
= VT_VOID
;
5728 case TOK_builtin_va_arg
: {
5729 parse_builtin_params(0, "et");
5737 case TOK___arm64_clear_cache
: {
5738 parse_builtin_params(0, "ee");
5741 vtop
->type
.t
= VT_VOID
;
5745 /* pre operations */
5756 t
= vtop
->type
.t
& VT_BTYPE
;
5758 /* In IEEE negate(x) isn't subtract(0,x), but rather
5762 vtop
->c
.f
= -1.0 * 0.0;
5763 else if (t
== VT_DOUBLE
)
5764 vtop
->c
.d
= -1.0 * 0.0;
5766 vtop
->c
.ld
= -1.0 * 0.0;
5774 goto tok_identifier
;
5776 /* allow to take the address of a label */
5777 if (tok
< TOK_UIDENT
)
5778 expect("label identifier");
5779 s
= label_find(tok
);
5781 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5783 if (s
->r
== LABEL_DECLARED
)
5784 s
->r
= LABEL_FORWARD
;
5787 s
->type
.t
= VT_VOID
;
5788 mk_pointer(&s
->type
);
5789 s
->type
.t
|= VT_STATIC
;
5791 vpushsym(&s
->type
, s
);
5797 CType controlling_type
;
5798 int has_default
= 0;
5801 TokenString
*str
= NULL
;
5802 int saved_const_wanted
= const_wanted
;
5807 expr_type(&controlling_type
, expr_eq
);
5808 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5809 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5810 mk_pointer(&controlling_type
);
5811 const_wanted
= saved_const_wanted
;
5815 if (tok
== TOK_DEFAULT
) {
5817 tcc_error("too many 'default'");
5823 AttributeDef ad_tmp
;
5828 parse_btype(&cur_type
, &ad_tmp
);
5831 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5832 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5834 tcc_error("type match twice");
5844 skip_or_save_block(&str
);
5846 skip_or_save_block(NULL
);
5853 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5854 tcc_error("type '%s' does not match any association", buf
);
5856 begin_macro(str
, 1);
5865 // special qnan , snan and infinity values
5870 vtop
->type
.t
= VT_FLOAT
;
5875 goto special_math_val
;
5878 goto special_math_val
;
5885 expect("identifier");
5887 if (!s
|| IS_ASM_SYM(s
)) {
5888 const char *name
= get_tok_str(t
, NULL
);
5890 tcc_error("'%s' undeclared", name
);
5891 /* for simple function calls, we tolerate undeclared
5892 external reference to int() function */
5893 if (tcc_state
->warn_implicit_function_declaration
5894 #ifdef TCC_TARGET_PE
5895 /* people must be warned about using undeclared WINAPI functions
5896 (which usually start with uppercase letter) */
5897 || (name
[0] >= 'A' && name
[0] <= 'Z')
5900 tcc_warning("implicit declaration of function '%s'", name
);
5901 s
= external_global_sym(t
, &func_old_type
);
5905 /* A symbol that has a register is a local register variable,
5906 which starts out as VT_LOCAL value. */
5907 if ((r
& VT_VALMASK
) < VT_CONST
)
5908 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5910 vset(&s
->type
, r
, s
->c
);
5911 /* Point to s as backpointer (even without r&VT_SYM).
5912 Will be used by at least the x86 inline asm parser for
5918 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5919 vtop
->c
.i
= s
->enum_val
;
5924 /* post operations */
5926 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
5929 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
5930 int qualifiers
, cumofs
= 0;
5932 if (tok
== TOK_ARROW
)
5934 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
5937 /* expect pointer on structure */
5938 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
5939 expect("struct or union");
5940 if (tok
== TOK_CDOUBLE
)
5941 expect("field name");
5943 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
5944 expect("field name");
5945 s
= find_field(&vtop
->type
, tok
, &cumofs
);
5947 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
5948 /* add field offset to pointer */
5949 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
5950 vpushi(cumofs
+ s
->c
);
5952 /* change type to field type, and set to lvalue */
5953 vtop
->type
= s
->type
;
5954 vtop
->type
.t
|= qualifiers
;
5955 /* an array is never an lvalue */
5956 if (!(vtop
->type
.t
& VT_ARRAY
)) {
5958 #ifdef CONFIG_TCC_BCHECK
5959 /* if bound checking, the referenced pointer must be checked */
5960 if (tcc_state
->do_bounds_check
)
5961 vtop
->r
|= VT_MUSTBOUND
;
5965 } else if (tok
== '[') {
5971 } else if (tok
== '(') {
5974 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
5977 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5978 /* pointer test (no array accepted) */
5979 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
5980 vtop
->type
= *pointed_type(&vtop
->type
);
5981 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
5985 expect("function pointer");
5988 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
5990 /* get return type */
5993 sa
= s
->next
; /* first parameter */
5994 nb_args
= regsize
= 0;
5996 /* compute first implicit argument if a structure is returned */
5997 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
5998 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
5999 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6000 &ret_align
, ®size
);
6001 if (ret_nregs
<= 0) {
6002 /* get some space for the returned structure */
6003 size
= type_size(&s
->type
, &align
);
6004 #ifdef TCC_TARGET_ARM64
6005 /* On arm64, a small struct is return in registers.
6006 It is much easier to write it to memory if we know
6007 that we are allowed to write some extra bytes, so
6008 round the allocated space up to a power of 2: */
6010 while (size
& (size
- 1))
6011 size
= (size
| (size
- 1)) + 1;
6013 loc
= (loc
- size
) & -align
;
6015 ret
.r
= VT_LOCAL
| VT_LVAL
;
6016 /* pass it as 'int' to avoid structure arg passing
6018 vseti(VT_LOCAL
, loc
);
6030 if (ret_nregs
> 0) {
6031 /* return in register */
6033 PUT_R_RET(&ret
, ret
.type
.t
);
6038 gfunc_param_typed(s
, sa
);
6048 tcc_error("too few arguments to function");
6050 #ifdef CONFIG_TCC_BCHECK
6051 if (tcc_state
->do_bounds_check
&&
6052 (nb_args
== 1 || nb_args
== 2) &&
6053 (vtop
[-nb_args
].r
& VT_SYM
) &&
6054 (vtop
[-nb_args
].sym
->v
== TOK_setjmp
||
6055 vtop
[-nb_args
].sym
->v
== TOK__setjmp
6056 #ifndef TCC_TARGET_PE
6057 || vtop
[-nb_args
].sym
->v
== TOK_sigsetjmp
6058 || vtop
[-nb_args
].sym
->v
== TOK___sigsetjmp
6061 vpush_global_sym(&func_old_type
, TOK___bound_setjmp
);
6062 vpushv(vtop
- nb_args
);
6064 vpushv(vtop
- nb_args
);
6065 gfunc_call(nb_args
);
6068 gfunc_call(nb_args
);
6070 if (ret_nregs
< 0) {
6071 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6072 #ifdef TCC_TARGET_RISCV64
6073 arch_transfer_ret_regs(1);
6077 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6078 vsetc(&ret
.type
, r
, &ret
.c
);
6079 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6082 /* handle packed struct return */
6083 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6086 size
= type_size(&s
->type
, &align
);
6087 /* We're writing whole regs often, make sure there's enough
6088 space. Assume register size is power of 2. */
6089 if (regsize
> align
)
6091 loc
= (loc
- size
) & -align
;
6095 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6099 if (--ret_nregs
== 0)
6103 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6106 /* Promote char/short return values. This matters only
6107 for calling function that were not compiled by TCC and
6108 only on some architectures. For those where it doesn't
6109 matter we expect things to be already promoted to int,
6111 t
= s
->type
.t
& VT_BTYPE
;
6112 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6114 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6116 vtop
->type
.t
= VT_INT
;
6120 if (s
->f
.func_noreturn
)
6128 #ifndef precedence_parser /* original top-down parser */
6130 static void expr_prod(void)
6135 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6142 static void expr_sum(void)
6147 while ((t
= tok
) == '+' || t
== '-') {
6154 static void expr_shift(void)
6159 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6166 static void expr_cmp(void)
6171 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6172 t
== TOK_ULT
|| t
== TOK_UGE
) {
6179 static void expr_cmpeq(void)
6184 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6191 static void expr_and(void)
6194 while (tok
== '&') {
6201 static void expr_xor(void)
6204 while (tok
== '^') {
6211 static void expr_or(void)
6214 while (tok
== '|') {
6221 static void expr_landor(int op
);
6223 static void expr_land(void)
6226 if (tok
== TOK_LAND
)
6230 static void expr_lor(void)
6237 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6238 #else /* defined precedence_parser */
6239 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6240 # define expr_lor() unary(), expr_infix(1)
6242 static int precedence(int tok
)
6245 case TOK_LOR
: return 1;
6246 case TOK_LAND
: return 2;
6250 case TOK_EQ
: case TOK_NE
: return 6;
6251 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6252 case TOK_SHL
: case TOK_SAR
: return 8;
6253 case '+': case '-': return 9;
6254 case '*': case '/': case '%': return 10;
6256 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6261 static unsigned char prec
[256];
6262 static void init_prec(void)
6265 for (i
= 0; i
< 256; i
++)
6266 prec
[i
] = precedence(i
);
6268 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6270 static void expr_landor(int op
);
6272 static void expr_infix(int p
)
6275 while ((p2
= precedence(t
)) >= p
) {
6276 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6281 if (precedence(tok
) > p2
)
6290 /* Assuming vtop is a value used in a conditional context
6291 (i.e. compared with zero) return 0 if it's false, 1 if
6292 true and -1 if it can't be statically determined. */
6293 static int condition_3way(void)
6296 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6297 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6299 gen_cast_s(VT_BOOL
);
6306 static void expr_landor(int op
)
6308 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6310 c
= f
? i
: condition_3way();
6312 save_regs(1), cc
= 0;
6314 nocode_wanted
++, f
= 1;
6322 expr_landor_next(op
);
6334 static int is_cond_bool(SValue
*sv
)
6336 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6337 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6338 return (unsigned)sv
->c
.i
< 2;
6339 if (sv
->r
== VT_CMP
)
6344 static void expr_cond(void)
6346 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6354 c
= condition_3way();
6355 g
= (tok
== ':' && gnu_ext
);
6365 /* needed to avoid having different registers saved in
6372 ncw_prev
= nocode_wanted
;
6378 if (c
< 0 && vtop
->r
== VT_CMP
) {
6385 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6386 mk_pointer(&vtop
->type
);
6387 sv
= *vtop
; /* save value to handle it later */
6388 vtop
--; /* no vpop so that FP stack is not flushed */
6398 nocode_wanted
= ncw_prev
;
6404 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6405 if (sv
.r
== VT_CMP
) {
6416 nocode_wanted
= ncw_prev
;
6417 // tcc_warning("two conditions expr_cond");
6421 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6422 mk_pointer(&vtop
->type
);
6424 /* cast operands to correct type according to ISOC rules */
6425 if (!combine_types(&type
, &sv
, vtop
, '?'))
6426 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6427 "type mismatch in conditional expression (have '%s' and '%s')");
6428 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6429 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6430 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6432 /* now we convert second operand */
6436 mk_pointer(&vtop
->type
);
6438 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6442 rc
= RC_TYPE(type
.t
);
6443 /* for long longs, we use fixed registers to avoid having
6444 to handle a complicated move */
6445 if (USING_TWO_WORDS(type
.t
))
6446 rc
= RC_RET(type
.t
);
6454 nocode_wanted
= ncw_prev
;
6456 /* this is horrible, but we must also convert first
6462 mk_pointer(&vtop
->type
);
6464 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6470 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6480 static void expr_eq(void)
6485 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6493 gen_op(TOK_ASSIGN_OP(t
));
6499 ST_FUNC
void gexpr(void)
6510 /* parse a constant expression and return value in vtop. */
6511 static void expr_const1(void)
6514 nocode_wanted
+= unevalmask
+ 1;
6516 nocode_wanted
-= unevalmask
+ 1;
6520 /* parse an integer constant and return its value. */
6521 static inline int64_t expr_const64(void)
6525 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6526 expect("constant expression");
6532 /* parse an integer constant and return its value.
6533 Complain if it doesn't fit 32bit (signed or unsigned). */
6534 ST_FUNC
int expr_const(void)
6537 int64_t wc
= expr_const64();
6539 if (c
!= wc
&& (unsigned)c
!= wc
)
6540 tcc_error("constant exceeds 32 bit");
6544 /* ------------------------------------------------------------------------- */
6545 /* return from function */
6547 #ifndef TCC_TARGET_ARM64
6548 static void gfunc_return(CType
*func_type
)
6550 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6551 CType type
, ret_type
;
6552 int ret_align
, ret_nregs
, regsize
;
6553 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6554 &ret_align
, ®size
);
6555 if (ret_nregs
< 0) {
6556 #ifdef TCC_TARGET_RISCV64
6557 arch_transfer_ret_regs(0);
6559 } else if (0 == ret_nregs
) {
6560 /* if returning structure, must copy it to implicit
6561 first pointer arg location */
6564 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6567 /* copy structure value to pointer */
6570 /* returning structure packed into registers */
6571 int size
, addr
, align
, rc
;
6572 size
= type_size(func_type
,&align
);
6573 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6574 (vtop
->c
.i
& (ret_align
-1)))
6575 && (align
& (ret_align
-1))) {
6576 loc
= (loc
- size
) & -ret_align
;
6579 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6583 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6585 vtop
->type
= ret_type
;
6586 rc
= RC_RET(ret_type
.t
);
6594 if (--ret_nregs
== 0)
6596 /* We assume that when a structure is returned in multiple
6597 registers, their classes are consecutive values of the
6600 vtop
->c
.i
+= regsize
;
6605 gv(RC_RET(func_type
->t
));
6607 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6611 static void check_func_return(void)
6613 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6615 if (!strcmp (funcname
, "main")
6616 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6617 /* main returns 0 by default */
6619 gen_assign_cast(&func_vt
);
6620 gfunc_return(&func_vt
);
6622 tcc_warning("function might return no value: '%s'", funcname
);
6626 /* ------------------------------------------------------------------------- */
6629 static int case_cmp(const void *pa
, const void *pb
)
6631 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6632 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6633 return a
< b
? -1 : a
> b
;
6636 static void gtst_addr(int t
, int a
)
6638 gsym_addr(gvtst(0, t
), a
);
6641 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6645 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6662 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6664 gcase(base
, len
/2, bsym
);
6668 base
+= e
; len
-= e
;
6678 if (p
->v1
== p
->v2
) {
6680 gtst_addr(0, p
->sym
);
6690 gtst_addr(0, p
->sym
);
6694 *bsym
= gjmp(*bsym
);
6697 /* ------------------------------------------------------------------------- */
6698 /* __attribute__((cleanup(fn))) */
6700 static void try_call_scope_cleanup(Sym
*stop
)
6702 Sym
*cls
= cur_scope
->cl
.s
;
6704 for (; cls
!= stop
; cls
= cls
->ncl
) {
6705 Sym
*fs
= cls
->next
;
6706 Sym
*vs
= cls
->prev_tok
;
6708 vpushsym(&fs
->type
, fs
);
6709 vset(&vs
->type
, vs
->r
, vs
->c
);
6711 mk_pointer(&vtop
->type
);
6717 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6722 if (!cur_scope
->cl
.s
)
6725 /* search NCA of both cleanup chains given parents and initial depth */
6726 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6727 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6729 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6731 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6734 try_call_scope_cleanup(cc
);
6737 /* call 'func' for each __attribute__((cleanup(func))) */
6738 static void block_cleanup(struct scope
*o
)
6742 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6743 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6748 try_call_scope_cleanup(o
->cl
.s
);
6749 pcl
->jnext
= gjmp(0);
6751 goto remove_pending
;
6761 try_call_scope_cleanup(o
->cl
.s
);
6764 /* ------------------------------------------------------------------------- */
6767 static void vla_restore(int loc
)
6770 gen_vla_sp_restore(loc
);
6773 static void vla_leave(struct scope
*o
)
6775 if (o
->vla
.num
< cur_scope
->vla
.num
)
6776 vla_restore(o
->vla
.loc
);
6779 /* ------------------------------------------------------------------------- */
6782 void new_scope(struct scope
*o
)
6784 /* copy and link previous scope */
6786 o
->prev
= cur_scope
;
6789 /* record local declaration stack position */
6790 o
->lstk
= local_stack
;
6791 o
->llstk
= local_label_stack
;
6795 if (tcc_state
->do_debug
)
6796 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
6799 void prev_scope(struct scope
*o
, int is_expr
)
6803 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6804 block_cleanup(o
->prev
);
6806 /* pop locally defined labels */
6807 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6809 /* In the is_expr case (a statement expression is finished here),
6810 vtop might refer to symbols on the local_stack. Either via the
6811 type or via vtop->sym. We can't pop those nor any that in turn
6812 might be referred to. To make it easier we don't roll back
6813 any symbols in that case; some upper level call to block() will
6814 do that. We do have to remove such symbols from the lookup
6815 tables, though. sym_pop will do that. */
6817 /* pop locally defined symbols */
6818 pop_local_syms(&local_stack
, o
->lstk
, is_expr
, 0);
6819 cur_scope
= o
->prev
;
6822 if (tcc_state
->do_debug
)
6823 tcc_debug_stabn(N_RBRAC
, ind
- func_ind
);
6826 /* leave a scope via break/continue(/goto) */
6827 void leave_scope(struct scope
*o
)
6831 try_call_scope_cleanup(o
->cl
.s
);
6835 /* ------------------------------------------------------------------------- */
6836 /* call block from 'for do while' loops */
6838 static void lblock(int *bsym
, int *csym
)
6840 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6841 int *b
= co
->bsym
, *c
= co
->csym
;
6855 static void block(int is_expr
)
6857 int a
, b
, c
, d
, e
, t
;
6862 /* default return value is (void) */
6864 vtop
->type
.t
= VT_VOID
;
6876 if (tok
== TOK_ELSE
) {
6881 gsym(d
); /* patch else jmp */
6886 } else if (t
== TOK_WHILE
) {
6898 } else if (t
== '{') {
6901 /* handle local labels declarations */
6902 while (tok
== TOK_LABEL
) {
6905 if (tok
< TOK_UIDENT
)
6906 expect("label identifier");
6907 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6909 } while (tok
== ',');
6913 while (tok
!= '}') {
6922 prev_scope(&o
, is_expr
);
6925 else if (!nocode_wanted
)
6926 check_func_return();
6928 } else if (t
== TOK_RETURN
) {
6929 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6933 gen_assign_cast(&func_vt
);
6935 if (vtop
->type
.t
!= VT_VOID
)
6936 tcc_warning("void function returns a value");
6940 tcc_warning("'return' with no value");
6943 leave_scope(root_scope
);
6945 gfunc_return(&func_vt
);
6947 /* jump unless last stmt in top-level block */
6948 if (tok
!= '}' || local_scope
!= 1)
6952 } else if (t
== TOK_BREAK
) {
6954 if (!cur_scope
->bsym
)
6955 tcc_error("cannot break");
6956 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
6957 leave_scope(cur_switch
->scope
);
6959 leave_scope(loop_scope
);
6960 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
6963 } else if (t
== TOK_CONTINUE
) {
6965 if (!cur_scope
->csym
)
6966 tcc_error("cannot continue");
6967 leave_scope(loop_scope
);
6968 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
6971 } else if (t
== TOK_FOR
) {
6976 /* c99 for-loop init decl? */
6977 if (!decl0(VT_LOCAL
, 1, NULL
)) {
6978 /* no, regular for-loop init expr */
7006 } else if (t
== TOK_DO
) {
7020 } else if (t
== TOK_SWITCH
) {
7021 struct switch_t
*sw
;
7023 sw
= tcc_mallocz(sizeof *sw
);
7025 sw
->scope
= cur_scope
;
7026 sw
->prev
= cur_switch
;
7032 sw
->sv
= *vtop
--; /* save switch value */
7035 b
= gjmp(0); /* jump to first case */
7037 a
= gjmp(a
); /* add implicit break */
7041 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmp
);
7042 for (b
= 1; b
< sw
->n
; b
++)
7043 if (sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7044 tcc_error("duplicate case value");
7046 /* Our switch table sorting is signed, so the compared
7047 value needs to be as well when it's 64bit. */
7049 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
7050 vtop
->type
.t
&= ~VT_UNSIGNED
;
7052 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7055 gsym_addr(d
, sw
->def_sym
);
7061 dynarray_reset(&sw
->p
, &sw
->n
);
7062 cur_switch
= sw
->prev
;
7065 } else if (t
== TOK_CASE
) {
7066 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7069 cr
->v1
= cr
->v2
= expr_const64();
7070 if (gnu_ext
&& tok
== TOK_DOTS
) {
7072 cr
->v2
= expr_const64();
7073 if (cr
->v2
< cr
->v1
)
7074 tcc_warning("empty case range");
7077 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7080 goto block_after_label
;
7082 } else if (t
== TOK_DEFAULT
) {
7085 if (cur_switch
->def_sym
)
7086 tcc_error("too many 'default'");
7087 cur_switch
->def_sym
= gind();
7090 goto block_after_label
;
7092 } else if (t
== TOK_GOTO
) {
7093 vla_restore(root_scope
->vla
.loc
);
7094 if (tok
== '*' && gnu_ext
) {
7098 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7102 } else if (tok
>= TOK_UIDENT
) {
7103 s
= label_find(tok
);
7104 /* put forward definition if needed */
7106 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7107 else if (s
->r
== LABEL_DECLARED
)
7108 s
->r
= LABEL_FORWARD
;
7110 if (s
->r
& LABEL_FORWARD
) {
7111 /* start new goto chain for cleanups, linked via label->next */
7112 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7113 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7114 pending_gotos
->prev_tok
= s
;
7115 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7116 pending_gotos
->next
= s
;
7118 s
->jnext
= gjmp(s
->jnext
);
7120 try_call_cleanup_goto(s
->cleanupstate
);
7121 gjmp_addr(s
->jnext
);
7126 expect("label identifier");
7130 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7134 if (tok
== ':' && t
>= TOK_UIDENT
) {
7139 if (s
->r
== LABEL_DEFINED
)
7140 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7141 s
->r
= LABEL_DEFINED
;
7143 Sym
*pcl
; /* pending cleanup goto */
7144 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7146 sym_pop(&s
->next
, NULL
, 0);
7150 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7153 s
->cleanupstate
= cur_scope
->cl
.s
;
7156 vla_restore(cur_scope
->vla
.loc
);
7157 /* we accept this, but it is a mistake */
7159 tcc_warning("deprecated use of label at end of compound statement");
7165 /* expression case */
7181 /* This skips over a stream of tokens containing balanced {} and ()
7182 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7183 with a '{'). If STR then allocates and stores the skipped tokens
7184 in *STR. This doesn't check if () and {} are nested correctly,
7185 i.e. "({)}" is accepted. */
7186 static void skip_or_save_block(TokenString
**str
)
7188 int braces
= tok
== '{';
7191 *str
= tok_str_alloc();
7193 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
7195 if (tok
== TOK_EOF
) {
7196 if (str
|| level
> 0)
7197 tcc_error("unexpected end of file");
7202 tok_str_add_tok(*str
);
7205 if (t
== '{' || t
== '(') {
7207 } else if (t
== '}' || t
== ')') {
7209 if (level
== 0 && braces
&& t
== '}')
7214 tok_str_add(*str
, -1);
7215 tok_str_add(*str
, 0);
7219 #define EXPR_CONST 1
7222 static void parse_init_elem(int expr_type
)
7224 int saved_global_expr
;
7227 /* compound literals must be allocated globally in this case */
7228 saved_global_expr
= global_expr
;
7231 global_expr
= saved_global_expr
;
7232 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7233 (compound literals). */
7234 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7235 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7236 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7237 #ifdef TCC_TARGET_PE
7238 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7241 tcc_error("initializer element is not constant");
7249 /* put zeros for variable based init */
7250 static void init_putz(Section
*sec
, unsigned long c
, int size
)
7253 /* nothing to do because globals are already set to zero */
7255 vpush_global_sym(&func_old_type
, TOK_memset
);
7257 #ifdef TCC_TARGET_ARM
7269 #define DIF_SIZE_ONLY 2
7270 #define DIF_HAVE_ELEM 4
7272 /* t is the array or struct type. c is the array or struct
7273 address. cur_field is the pointer to the current
7274 field, for arrays the 'c' member contains the current start
7275 index. 'flags' is as in decl_initializer.
7276 'al' contains the already initialized length of the
7277 current container (starting at c). This returns the new length of that. */
7278 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
7279 Sym
**cur_field
, int flags
, int al
)
7282 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7283 unsigned long corig
= c
;
7288 if (flags
& DIF_HAVE_ELEM
)
7291 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7298 /* NOTE: we only support ranges for last designator */
7299 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7301 if (!(type
->t
& VT_ARRAY
))
7302 expect("array type");
7304 index
= index_last
= expr_const();
7305 if (tok
== TOK_DOTS
&& gnu_ext
) {
7307 index_last
= expr_const();
7311 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
7313 tcc_error("invalid index");
7315 (*cur_field
)->c
= index_last
;
7316 type
= pointed_type(type
);
7317 elem_size
= type_size(type
, &align
);
7318 c
+= index
* elem_size
;
7319 nb_elems
= index_last
- index
+ 1;
7326 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7327 expect("struct/union type");
7329 f
= find_field(type
, l
, &cumofs
);
7342 } else if (!gnu_ext
) {
7347 if (type
->t
& VT_ARRAY
) {
7348 index
= (*cur_field
)->c
;
7349 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
7350 tcc_error("index too large");
7351 type
= pointed_type(type
);
7352 c
+= index
* type_size(type
, &align
);
7355 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7356 *cur_field
= f
= f
->next
;
7358 tcc_error("too many field init");
7363 /* must put zero in holes (note that doing it that way
7364 ensures that it even works with designators) */
7365 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
7366 init_putz(sec
, corig
+ al
, c
- corig
- al
);
7367 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
7369 /* XXX: make it more general */
7370 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7371 unsigned long c_end
;
7376 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7377 for (i
= 1; i
< nb_elems
; i
++) {
7378 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
7383 } else if (!NODATA_WANTED
) {
7384 c_end
= c
+ nb_elems
* elem_size
;
7385 if (c_end
> sec
->data_allocated
)
7386 section_realloc(sec
, c_end
);
7387 src
= sec
->data
+ c
;
7389 for(i
= 1; i
< nb_elems
; i
++) {
7391 memcpy(dst
, src
, elem_size
);
7395 c
+= nb_elems
* type_size(type
, &align
);
7401 /* store a value or an expression directly in global data or in local array */
7402 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
7409 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7413 /* XXX: not portable */
7414 /* XXX: generate error if incorrect relocation */
7415 gen_assign_cast(&dtype
);
7416 bt
= type
->t
& VT_BTYPE
;
7418 if ((vtop
->r
& VT_SYM
)
7421 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7422 || (type
->t
& VT_BITFIELD
))
7423 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7425 tcc_error("initializer element is not computable at load time");
7427 if (NODATA_WANTED
) {
7432 size
= type_size(type
, &align
);
7433 section_reserve(sec
, c
+ size
);
7434 ptr
= sec
->data
+ c
;
7436 /* XXX: make code faster ? */
7437 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7438 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7439 /* XXX This rejects compound literals like
7440 '(void *){ptr}'. The problem is that '&sym' is
7441 represented the same way, which would be ruled out
7442 by the SYM_FIRST_ANOM check above, but also '"string"'
7443 in 'char *p = "string"' is represented the same
7444 with the type being VT_PTR and the symbol being an
7445 anonymous one. That is, there's no difference in vtop
7446 between '(void *){x}' and '&(void *){x}'. Ignore
7447 pointer typed entities here. Hopefully no real code
7448 will ever use compound literals with scalar type. */
7449 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7450 /* These come from compound literals, memcpy stuff over. */
7454 esym
= elfsym(vtop
->sym
);
7455 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7456 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7458 /* We need to copy over all memory contents, and that
7459 includes relocations. Use the fact that relocs are
7460 created it order, so look from the end of relocs
7461 until we hit one before the copied region. */
7462 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
7463 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
7464 while (num_relocs
--) {
7466 if (rel
->r_offset
>= esym
->st_value
+ size
)
7468 if (rel
->r_offset
< esym
->st_value
)
7470 /* Note: if the same fields are initialized multiple
7471 times (possible with designators) then we possibly
7472 add multiple relocations for the same offset here.
7473 That would lead to wrong code, the last reloc needs
7474 to win. We clean this up later after the whole
7475 initializer is parsed. */
7476 put_elf_reloca(symtab_section
, sec
,
7477 c
+ rel
->r_offset
- esym
->st_value
,
7478 ELFW(R_TYPE
)(rel
->r_info
),
7479 ELFW(R_SYM
)(rel
->r_info
),
7489 if (type
->t
& VT_BITFIELD
) {
7490 int bit_pos
, bit_size
, bits
, n
;
7491 unsigned char *p
, v
, m
;
7492 bit_pos
= BIT_POS(vtop
->type
.t
);
7493 bit_size
= BIT_SIZE(vtop
->type
.t
);
7494 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7495 bit_pos
&= 7, bits
= 0;
7500 v
= vtop
->c
.i
>> bits
<< bit_pos
;
7501 m
= ((1 << n
) - 1) << bit_pos
;
7502 *p
= (*p
& ~m
) | (v
& m
);
7503 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7507 /* XXX: when cross-compiling we assume that each type has the
7508 same representation on host and target, which is likely to
7509 be wrong in the case of long double */
7511 vtop
->c
.i
= vtop
->c
.i
!= 0;
7513 *(char *)ptr
|= vtop
->c
.i
;
7516 *(short *)ptr
|= vtop
->c
.i
;
7519 *(float*)ptr
= vtop
->c
.f
;
7522 *(double *)ptr
= vtop
->c
.d
;
7525 #if defined TCC_IS_NATIVE_387
7526 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7527 memcpy(ptr
, &vtop
->c
.ld
, 10);
7529 else if (sizeof (long double) == sizeof (double))
7530 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7532 else if (vtop
->c
.ld
== 0.0)
7536 if (sizeof(long double) == LDOUBLE_SIZE
)
7537 *(long double*)ptr
= vtop
->c
.ld
;
7538 else if (sizeof(double) == LDOUBLE_SIZE
)
7539 *(double *)ptr
= (double)vtop
->c
.ld
;
7541 tcc_error("can't cross compile long double constants");
7545 *(long long *)ptr
|= vtop
->c
.i
;
7552 addr_t val
= vtop
->c
.i
;
7554 if (vtop
->r
& VT_SYM
)
7555 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7557 *(addr_t
*)ptr
|= val
;
7559 if (vtop
->r
& VT_SYM
)
7560 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7561 *(addr_t
*)ptr
|= val
;
7567 int val
= vtop
->c
.i
;
7569 if (vtop
->r
& VT_SYM
)
7570 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7574 if (vtop
->r
& VT_SYM
)
7575 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7584 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7591 /* 't' contains the type and storage info. 'c' is the offset of the
7592 object in section 'sec'. If 'sec' is NULL, it means stack based
7593 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7594 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7595 size only evaluation is wanted (only for arrays). */
7596 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
7599 int len
, n
, no_oblock
, i
;
7605 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7606 /* In case of strings we have special handling for arrays, so
7607 don't consume them as initializer value (which would commit them
7608 to some anonymous symbol). */
7609 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7610 !(flags
& DIF_SIZE_ONLY
)) {
7611 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7612 flags
|= DIF_HAVE_ELEM
;
7615 if ((flags
& DIF_HAVE_ELEM
) &&
7616 !(type
->t
& VT_ARRAY
) &&
7617 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7618 The source type might have VT_CONSTANT set, which is
7619 of course assignable to non-const elements. */
7620 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7621 init_putv(type
, sec
, c
);
7622 } else if (type
->t
& VT_ARRAY
) {
7625 t1
= pointed_type(type
);
7626 size1
= type_size(t1
, &align1
);
7629 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7632 tcc_error("character array initializer must be a literal,"
7633 " optionally enclosed in braces");
7638 /* only parse strings here if correct type (otherwise: handle
7639 them as ((w)char *) expressions */
7640 if ((tok
== TOK_LSTR
&&
7641 #ifdef TCC_TARGET_PE
7642 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7644 (t1
->t
& VT_BTYPE
) == VT_INT
7646 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7649 cstr_reset(&initstr
);
7650 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7651 tcc_error("unhandled string literal merging");
7652 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7654 initstr
.size
-= size1
;
7656 len
+= tokc
.str
.size
;
7658 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7660 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7663 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7664 && tok
!= TOK_EOF
) {
7665 /* Not a lone literal but part of a bigger expression. */
7666 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7667 tokc
.str
.size
= initstr
.size
;
7668 tokc
.str
.data
= initstr
.data
;
7674 if (n
>= 0 && len
> n
)
7676 if (!(flags
& DIF_SIZE_ONLY
)) {
7678 tcc_warning("initializer-string for array is too long");
7679 /* in order to go faster for common case (char
7680 string in global variable, we handle it
7682 if (sec
&& size1
== 1) {
7684 memcpy(sec
->data
+ c
, initstr
.data
, nb
);
7688 ch
= ((unsigned char *)initstr
.data
)[i
];
7690 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7692 init_putv(t1
, sec
, c
+ i
* size1
);
7696 /* only add trailing zero if enough storage (no
7697 warning in this case since it is standard) */
7698 if (n
< 0 || len
< n
) {
7699 if (!(flags
& DIF_SIZE_ONLY
)) {
7701 init_putv(t1
, sec
, c
+ (len
* size1
));
7712 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7713 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7714 flags
&= ~DIF_HAVE_ELEM
;
7715 if (type
->t
& VT_ARRAY
) {
7717 /* special test for multi dimensional arrays (may not
7718 be strictly correct if designators are used at the
7720 if (no_oblock
&& len
>= n
*size1
)
7723 if (s
->type
.t
== VT_UNION
)
7727 if (no_oblock
&& f
== NULL
)
7736 /* put zeros at the end */
7737 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7738 init_putz(sec
, c
+ len
, n
*size1
- len
);
7741 /* patch type size if needed, which happens only for array types */
7743 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7744 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7747 if ((flags
& DIF_FIRST
) || tok
== '{') {
7755 } else if (tok
== '{') {
7756 if (flags
& DIF_HAVE_ELEM
)
7759 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7761 } else if ((flags
& DIF_SIZE_ONLY
)) {
7762 /* If we supported only ISO C we wouldn't have to accept calling
7763 this on anything than an array if DIF_SIZE_ONLY (and even then
7764 only on the outermost level, so no recursion would be needed),
7765 because initializing a flex array member isn't supported.
7766 But GNU C supports it, so we need to recurse even into
7767 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7768 /* just skip expression */
7769 skip_or_save_block(NULL
);
7771 if (!(flags
& DIF_HAVE_ELEM
)) {
7772 /* This should happen only when we haven't parsed
7773 the init element above for fear of committing a
7774 string constant to memory too early. */
7775 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7776 expect("string constant");
7777 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7779 init_putv(type
, sec
, c
);
7783 /* parse an initializer for type 't' if 'has_init' is non zero, and
7784 allocate space in local or global data space ('r' is either
7785 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7786 variable 'v' of scope 'scope' is declared before initializers
7787 are parsed. If 'v' is zero, then a reference to the new object
7788 is put in the value stack. If 'has_init' is 2, a special parsing
7789 is done to handle string constants. */
7790 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7791 int has_init
, int v
, int scope
)
7793 int size
, align
, addr
;
7794 TokenString
*init_str
= NULL
;
7797 Sym
*flexible_array
;
7799 int saved_nocode_wanted
= nocode_wanted
;
7800 #ifdef CONFIG_TCC_BCHECK
7801 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7804 /* Always allocate static or global variables */
7805 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7806 nocode_wanted
|= 0x80000000;
7808 flexible_array
= NULL
;
7809 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7810 Sym
*field
= type
->ref
->next
;
7813 field
= field
->next
;
7814 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7815 flexible_array
= field
;
7819 size
= type_size(type
, &align
);
7820 /* If unknown size, we must evaluate it before
7821 evaluating initializers because
7822 initializers can generate global data too
7823 (e.g. string pointers or ISOC99 compound
7824 literals). It also simplifies local
7825 initializers handling */
7826 if (size
< 0 || (flexible_array
&& has_init
)) {
7828 tcc_error("unknown type size");
7829 /* get all init string */
7830 if (has_init
== 2) {
7831 init_str
= tok_str_alloc();
7832 /* only get strings */
7833 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7834 tok_str_add_tok(init_str
);
7837 tok_str_add(init_str
, -1);
7838 tok_str_add(init_str
, 0);
7840 skip_or_save_block(&init_str
);
7845 begin_macro(init_str
, 1);
7847 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7848 /* prepare second initializer parsing */
7849 macro_ptr
= init_str
->str
;
7852 /* if still unknown size, error */
7853 size
= type_size(type
, &align
);
7855 tcc_error("unknown type size");
7857 /* If there's a flex member and it was used in the initializer
7859 if (flexible_array
&&
7860 flexible_array
->type
.ref
->c
> 0)
7861 size
+= flexible_array
->type
.ref
->c
7862 * pointed_size(&flexible_array
->type
);
7863 /* take into account specified alignment if bigger */
7864 if (ad
->a
.aligned
) {
7865 int speca
= 1 << (ad
->a
.aligned
- 1);
7868 } else if (ad
->a
.packed
) {
7872 if (!v
&& NODATA_WANTED
)
7873 size
= 0, align
= 1;
7875 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7877 #ifdef CONFIG_TCC_BCHECK
7879 /* add padding between stack variables for bound checking */
7883 loc
= (loc
- size
) & -align
;
7885 #ifdef CONFIG_TCC_BCHECK
7887 /* add padding between stack variables for bound checking */
7892 /* local variable */
7893 #ifdef CONFIG_TCC_ASM
7894 if (ad
->asm_label
) {
7895 int reg
= asm_parse_regvar(ad
->asm_label
);
7897 r
= (r
& ~VT_VALMASK
) | reg
;
7900 sym
= sym_push(v
, type
, r
, addr
);
7901 if (ad
->cleanup_func
) {
7902 Sym
*cls
= sym_push2(&all_cleanups
,
7903 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7904 cls
->prev_tok
= sym
;
7905 cls
->next
= ad
->cleanup_func
;
7906 cls
->ncl
= cur_scope
->cl
.s
;
7907 cur_scope
->cl
.s
= cls
;
7912 /* push local reference */
7913 vset(type
, r
, addr
);
7916 if (v
&& scope
== VT_CONST
) {
7917 /* see if the symbol was already defined */
7920 patch_storage(sym
, ad
, type
);
7921 /* we accept several definitions of the same global variable. */
7922 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7927 /* allocate symbol in corresponding section */
7932 else if (tcc_state
->nocommon
)
7937 addr
= section_add(sec
, size
, align
);
7938 #ifdef CONFIG_TCC_BCHECK
7939 /* add padding if bound check */
7941 section_add(sec
, 1, 1);
7944 addr
= align
; /* SHN_COMMON is special, symbol value is align */
7945 sec
= common_section
;
7950 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
7951 patch_storage(sym
, ad
, NULL
);
7953 /* update symbol definition */
7954 put_extern_sym(sym
, sec
, addr
, size
);
7956 /* push global reference */
7957 vpush_ref(type
, sec
, addr
, size
);
7962 #ifdef CONFIG_TCC_BCHECK
7963 /* handles bounds now because the symbol must be defined
7964 before for the relocation */
7968 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
7969 /* then add global bound info */
7970 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
7971 bounds_ptr
[0] = 0; /* relocated */
7972 bounds_ptr
[1] = size
;
7977 if (type
->t
& VT_VLA
) {
7983 /* save current stack pointer */
7984 if (root_scope
->vla
.loc
== 0) {
7985 struct scope
*v
= cur_scope
;
7986 gen_vla_sp_save(loc
-= PTR_SIZE
);
7987 do v
->vla
.loc
= loc
; while ((v
= v
->prev
));
7990 vla_runtime_type_size(type
, &a
);
7991 gen_vla_alloc(type
, a
);
7992 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7993 /* on _WIN64, because of the function args scratch area, the
7994 result of alloca differs from RSP and is returned in RAX. */
7995 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
7997 gen_vla_sp_save(addr
);
7998 cur_scope
->vla
.loc
= addr
;
7999 cur_scope
->vla
.num
++;
8000 } else if (has_init
) {
8001 size_t oldreloc_offset
= 0;
8002 if (sec
&& sec
->reloc
)
8003 oldreloc_offset
= sec
->reloc
->data_offset
;
8004 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
8005 if (sec
&& sec
->reloc
)
8006 squeeze_multi_relocs(sec
, oldreloc_offset
);
8007 /* patch flexible array member size back to -1, */
8008 /* for possible subsequent similar declarations */
8010 flexible_array
->type
.ref
->c
= -1;
8014 /* restore parse state if needed */
8020 nocode_wanted
= saved_nocode_wanted
;
8023 /* parse a function defined by symbol 'sym' and generate its code in
8024 'cur_text_section' */
8025 static void gen_function(Sym
*sym
)
8027 /* Initialize VLA state */
8028 struct scope f
= { 0 };
8029 cur_scope
= root_scope
= &f
;
8032 ind
= cur_text_section
->data_offset
;
8033 if (sym
->a
.aligned
) {
8034 size_t newoff
= section_add(cur_text_section
, 0,
8035 1 << (sym
->a
.aligned
- 1));
8036 gen_fill_nops(newoff
- ind
);
8038 /* NOTE: we patch the symbol size later */
8039 put_extern_sym(sym
, cur_text_section
, ind
, 0);
8040 if (sym
->type
.ref
->f
.func_ctor
)
8041 add_array (tcc_state
, ".init_array", sym
->c
);
8042 if (sym
->type
.ref
->f
.func_dtor
)
8043 add_array (tcc_state
, ".fini_array", sym
->c
);
8045 funcname
= get_tok_str(sym
->v
, NULL
);
8047 func_vt
= sym
->type
.ref
->type
;
8048 func_var
= sym
->type
.ref
->f
.func_type
== FUNC_ELLIPSIS
;
8050 /* put debug symbol */
8051 tcc_debug_funcstart(tcc_state
, sym
);
8052 /* push a dummy symbol to enable local sym storage */
8053 sym_push2(&local_stack
, SYM_FIELD
, 0, 0);
8054 local_scope
= 1; /* for function parameters */
8058 clear_temp_local_var_list();
8062 /* reset local stack */
8063 pop_local_syms(&local_stack
, NULL
, 0, func_var
);
8065 cur_text_section
->data_offset
= ind
;
8067 label_pop(&global_label_stack
, NULL
, 0);
8068 sym_pop(&all_cleanups
, NULL
, 0);
8069 /* patch symbol size */
8070 elfsym(sym
)->st_size
= ind
- func_ind
;
8071 /* end of function */
8072 tcc_debug_funcend(tcc_state
, ind
- func_ind
);
8073 /* It's better to crash than to generate wrong code */
8074 cur_text_section
= NULL
;
8075 funcname
= ""; /* for safety */
8076 func_vt
.t
= VT_VOID
; /* for safety */
8077 func_var
= 0; /* for safety */
8078 ind
= 0; /* for safety */
8079 nocode_wanted
= 0x80000000;
8081 /* do this after funcend debug info */
/* Emit code for all inline functions that ended up being referenced.
   NOTE(review): this is a mangled/partial extraction -- the function's
   opening brace, the 'do {' that pairs with the trailing 'while', and the
   assignment of 'sym' (original line 8097, presumably from 'fn') are
   missing from this view; only comments are added, no code is altered. */
8085 static void gen_inline_functions(TCCState
*s
)
8088 int inline_generated
, i
;
8089 struct InlineFunc
*fn
;
/* A pseudo buffer-file named ":inline:" provides parse context for the
   saved token streams. */
8091 tcc_open_bf(s
, ":inline:", 0);
8092 /* iterate while inline function are referenced */
/* Fixed point iteration: generating one inline function may reference
   further inline functions, so loop until a pass generates nothing. */
8094 inline_generated
= 0;
8095 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8096 fn
= s
->inline_fns
[i
];
/* NOTE(review): 'sym' is tested below but its defining assignment
   (original line 8097) is absent from this extraction -- presumably it
   is looked up from 'fn'; verify against the full source. */
8098 if (sym
&& (sym
->c
|| !(sym
->type
.t
& VT_INLINE
))) {
8099 /* the function was used or forced (and then not internal):
8100 generate its code and convert it to a normal function */
8102 tcc_debug_putfile(s
, fn
->filename
);
/* Replay the saved token string of the function body as macro input. */
8103 begin_macro(fn
->func_str
, 1);
8105 cur_text_section
= text_section
;
8109 inline_generated
= 1;
8112 } while (inline_generated
);
/* Release the token streams of inline functions that were never emitted,
   then drop the whole inline-function array.
   NOTE(review): mangled/partial extraction -- the declaration of 'i'
   (original lines 8117-8118) and the conditional guarding tok_str_free
   (original line 8122) are missing from this view; no code is altered. */
8116 static void free_inline_functions(TCCState
*s
)
8119 /* free tokens of unused inline functions */
8120 for (i
= 0; i
< s
->nb_inline_fns
; ++i
) {
8121 struct InlineFunc
*fn
= s
->inline_fns
[i
];
8123 tok_str_free(fn
->func_str
);
/* dynarray_reset frees both the element pointers and the array itself. */
8125 dynarray_reset(&s
->inline_fns
, &s
->nb_inline_fns
);
/* Top-level declaration parser: handles _Static_assert, typedefs, function
   definitions/prototypes, and variable declarations with initializers.
   NOTE(review): mangled/partial extraction -- many original lines are
   elided (gaps in the fused 8xxx numbering, e.g. 8131-8137, 8139-8147,
   8158-8162, 8385-8395), so braces and several statements are missing from
   this view.  Only comments are added; no code token is altered. */
8128 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8129 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8130 static int decl0(int l
, int is_for_loop_init
, Sym
*func_sym
)
8135 AttributeDef ad
, adbase
;
/* --- C11 _Static_assert handling (condition evaluation elided here) --- */
8138 if (tok
== TOK_STATIC_ASSERT
) {
8148 tcc_error("_Static_assert fail");
8150 goto static_assert_out
;
/* The assertion message is one or more concatenated string literals. */
8154 parse_mult_str(&error_str
, "string constant");
8156 tcc_error("%s", (char *)error_str
.data
);
8157 cstr_free(&error_str
);
/* --- No base type could be parsed: K&R / error / empty-decl cases --- */
8163 if (!parse_btype(&btype
, &adbase
)) {
8164 if (is_for_loop_init
)
8166 /* skip redundant ';' if not in old parameter decl scope */
8167 if (tok
== ';' && l
!= VT_CMP
) {
8173 if (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
) {
8174 /* global asm block */
8178 if (tok
>= TOK_UIDENT
) {
8179 /* special test for old K&R protos without explicit int
8180 type. Only accepted when defining global data */
8184 expect("declaration");
/* Warn on 'struct { ... };' with an anonymous tag and no declarator. */
8189 if ((btype
.t
& VT_BTYPE
) == VT_STRUCT
) {
8190 int v
= btype
.ref
->v
;
8191 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
)
8192 tcc_warning("unnamed struct/union that defines no instances");
8196 if (IS_ENUM(btype
.t
)) {
8201 while (1) { /* iterate thru each declaration */
8203 /* If the base type itself was an array type of unspecified
8204 size (like in 'typedef int arr[]; arr x = {1};') then
8205 we will overwrite the unknown size by the real one for
8206 this decl. We need to unshare the ref symbol holding
8208 if ((type
.t
& VT_ARRAY
) && type
.ref
->c
< 0) {
8209 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
, 0, type
.ref
->c
);
8212 type_decl(&type
, &ad
, &v
, TYPE_DIRECT
);
/* Debug dump of the parsed declarator (likely behind a verbosity
   check elided from this view -- confirm against full source). */
8216 type_to_str(buf
, sizeof(buf
), &type
, get_tok_str(v
, NULL
));
8217 printf("type = '%s'\n", buf
);
/* --- Declarator has function type --- */
8220 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8221 if ((type
.t
& VT_STATIC
) && (l
== VT_LOCAL
))
8222 tcc_error("function without file scope cannot be static");
8223 /* if old style function prototype, we accept a
/* Recurse with VT_CMP to parse old-style (K&R) parameter decls. */
8226 if (sym
->f
.func_type
== FUNC_OLD
&& l
== VT_CONST
)
8227 decl0(VT_CMP
, 0, sym
);
8228 /* always compile 'extern inline' */
8229 if (type
.t
& VT_EXTERN
)
8230 type
.t
&= ~VT_INLINE
;
/* GNU extension: __asm__("label") after the declarator. */
8233 if (gnu_ext
&& (tok
== TOK_ASM1
|| tok
== TOK_ASM2
|| tok
== TOK_ASM3
)) {
8234 ad
.asm_label
= asm_label_instr();
8235 /* parse one last attribute list, after asm label */
8236 parse_attribute(&ad
);
8238 /* gcc does not allow __asm__("label") with function definition,
/* --- PE (Windows) dllimport/dllexport handling --- */
8245 #ifdef TCC_TARGET_PE
8246 if (ad
.a
.dllimport
|| ad
.a
.dllexport
) {
8247 if (type
.t
& VT_STATIC
)
8248 tcc_error("cannot have dll linkage with static");
8249 if (type
.t
& VT_TYPEDEF
) {
8250 tcc_warning("'%s' attribute ignored for typedef",
8251 ad
.a
.dllimport
? (ad
.a
.dllimport
= 0, "dllimport") :
8252 (ad
.a
.dllexport
= 0, "dllexport"));
8253 } else if (ad
.a
.dllimport
) {
8254 if ((type
.t
& VT_BTYPE
) == VT_FUNC
)
8257 type
.t
|= VT_EXTERN
;
/* --- Function definition (body follows) --- */
8263 tcc_error("cannot use local functions");
8264 if ((type
.t
& VT_BTYPE
) != VT_FUNC
)
8265 expect("function definition");
8267 /* reject abstract declarators in function definition
8268 make old style params without decl have int type */
8270 while ((sym
= sym
->next
) != NULL
) {
8271 if (!(sym
->v
& ~SYM_FIELD
))
8272 expect("identifier");
8273 if (sym
->type
.t
== VT_VOID
)
8274 sym
->type
= int_type
;
8277 /* apply post-declaraton attributes */
8278 merge_funcattr(&type
.ref
->f
, &ad
.f
);
8280 /* put function symbol */
/* Drop VT_EXTERN here: a definition always makes the symbol defined. */
8281 type
.t
&= ~VT_EXTERN
;
8282 sym
= external_sym(v
, &type
, 0, &ad
);
8284 /* static inline functions are just recorded as a kind
8285 of macro. Their code will be emitted at the end of
8286 the compilation unit only if they are used */
8287 if (sym
->type
.t
& VT_INLINE
) {
8288 struct InlineFunc
*fn
;
/* 'fn' uses a trailing flexible-ish filename buffer: allocation is
   sizeof *fn + strlen(filename) (the struct presumably reserves one
   byte for the NUL -- confirm struct InlineFunc's declaration). */
8289 fn
= tcc_malloc(sizeof *fn
+ strlen(file
->filename
));
8290 strcpy(fn
->filename
, file
->filename
);
/* Save (or skip) the raw token stream of the body for later replay. */
8292 skip_or_save_block(&fn
->func_str
);
8293 dynarray_add(&tcc_state
->inline_fns
,
8294 &tcc_state
->nb_inline_fns
, fn
);
8296 /* compute text section */
8297 cur_text_section
= ad
.section
;
8298 if (!cur_text_section
)
8299 cur_text_section
= text_section
;
/* --- Old-style (K&R) parameter declaration (l == VT_CMP path) --- */
8305 /* find parameter in function parameter list */
8306 for (sym
= func_sym
->next
; sym
; sym
= sym
->next
)
8307 if ((sym
->v
& ~SYM_FIELD
) == v
)
8309 tcc_error("declaration for parameter '%s' but no such parameter",
8310 get_tok_str(v
, NULL
));
8312 if (type
.t
& VT_STORAGE
) /* 'register' is okay */
8313 tcc_error("storage class specified for '%s'",
8314 get_tok_str(v
, NULL
));
/* A parameter still typed VT_VOID has not been declared yet; anything
   else means it was declared twice in the old-style decl list. */
8315 if (sym
->type
.t
!= VT_VOID
)
8316 tcc_error("redefinition of parameter '%s'",
8317 get_tok_str(v
, NULL
));
8318 convert_parameter_type(&type
);
/* --- typedef declaration --- */
8320 } else if (type
.t
& VT_TYPEDEF
) {
8321 /* save typedefed type */
8322 /* XXX: test storage specifiers ? */
/* Redefinition in the same scope is only legal if it names a
   compatible type and the previous definition was also a typedef. */
8324 if (sym
&& sym
->sym_scope
== local_scope
) {
8325 if (!is_compatible_types(&sym
->type
, &type
)
8326 || !(sym
->type
.t
& VT_TYPEDEF
))
8327 tcc_error("incompatible redefinition of '%s'",
8328 get_tok_str(v
, NULL
));
8331 sym
= sym_push(v
, &type
, 0, 0);
/* --- plain object declarations --- */
8335 } else if ((type
.t
& VT_BTYPE
) == VT_VOID
8336 && !(type
.t
& VT_EXTERN
)) {
8337 tcc_error("declaration of void object");
8340 if ((type
.t
& VT_BTYPE
) == VT_FUNC
) {
8341 /* external function definition */
8342 /* specific case for func_call attribute */
8344 } else if (!(type
.t
& VT_ARRAY
)) {
8345 /* not lvalue if array */
8348 has_init
= (tok
== '=');
8349 if (has_init
&& (type
.t
& VT_VLA
))
8350 tcc_error("variable length array cannot be initialized");
/* extern without init, function prototypes, and unsized global arrays
   without init are all treated as external references. */
8351 if (((type
.t
& VT_EXTERN
) && (!has_init
|| l
!= VT_CONST
))
8352 || (type
.t
& VT_BTYPE
) == VT_FUNC
8353 /* as with GCC, uninitialized global arrays with no size
8354 are considered extern: */
8355 || ((type
.t
& VT_ARRAY
) && !has_init
8356 && l
== VT_CONST
&& type
.ref
->c
< 0)
8358 /* external variable or function */
8359 type
.t
|= VT_EXTERN
;
8360 sym
= external_sym(v
, &type
, r
, &ad
);
/* __attribute__((alias("target"))): clone the target's ELF symbol
   location/size onto this symbol. */
8361 if (ad
.alias_target
) {
8364 alias_target
= sym_find(ad
.alias_target
);
8365 esym
= elfsym(alias_target
);
8367 tcc_error("unsupported forward __alias__ attribute");
8368 put_extern_sym2(sym
, esym
->st_shndx
, esym
->st_value
, esym
->st_size
, 0);
/* Defined variable: pick storage class, then parse the initializer. */
8371 if (type
.t
& VT_STATIC
)
8377 else if (l
== VT_CONST
)
8378 /* uninitialized global variables may be overridden */
8379 type
.t
|= VT_EXTERN
;
8380 decl_initializer_alloc(&type
, &ad
, r
, has_init
, v
, l
);
/* In 'for (int i = ...)' context only a single declaration is parsed. */
8384 if (is_for_loop_init
)
/* Thin entry point over decl0 -- presumably decl0(l, 0, NULL); the body
   (original lines 8397+) is not visible in this extraction, so only the
   signature is documented here. */
8396 static void decl(int l
)
8401 /* ------------------------------------------------------------------------- */
8404 /* ------------------------------------------------------------------------- */