 * TCC - Tiny C Compiler
 *
 * Copyright (c) 2001-2004 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
30 anon_sym: anonymous symbol index
/* rsym: return symbol, anon_sym: anonymous symbol index,
   ind: output code index, loc: local variable index */
ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym
*global_stack
;
35 ST_DATA Sym
*local_stack
;
36 ST_DATA Sym
*define_stack
;
37 ST_DATA Sym
*global_label_stack
;
38 ST_DATA Sym
*local_label_stack
;
40 static Sym
*sym_free_first
;
41 static void **sym_pools
;
42 static int nb_sym_pools
;
44 static Sym
*all_cleanups
, *pending_gotos
;
45 static int local_scope
;
47 static int in_generic
;
48 static int section_sym
;
51 static SValue _vstack
[1 + VSTACK_SIZE
];
52 #define vstack (_vstack + 1)
54 ST_DATA
int const_wanted
; /* true if constant wanted */
55 ST_DATA
int nocode_wanted
; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
60 /* Automagical code suppression ----> */
61 #define CODE_OFF() (nocode_wanted |= 0x20000000)
62 #define CODE_ON() (nocode_wanted &= ~0x20000000)
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC
void gsym(int t
) { if (t
) { gsym_addr(t
, ind
); CODE_ON(); }}
/* re-enable code output and return the current output index (a label target) */
static int gind(void) { CODE_ON(); return ind; }
68 /* Set 'nocode_wanted' after unconditional jumps */
/* Emit an unconditional jump to address 't', then suppress code generation:
   code following an unconditional jump is unreachable. */
static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
/* Emit a forward jump chained onto 't' and suppress code generation
   (the code that follows is unreachable). Returns the new head of the
   jump chain. */
static int gjmp_acs(int t)
{
    int chain = gjmp(t);
    CODE_OFF();
    return chain;
}
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
77 ST_DATA
int global_expr
; /* true if compound literals must be allocated globally (used during initializers parsing */
78 ST_DATA CType func_vt
; /* current function return type (used by return instruction) */
79 ST_DATA
int func_var
; /* true if current function is variadic (used by return instruction) */
81 static int last_line_num
, new_file
, func_ind
; /* debug info control */
82 ST_DATA
const char *funcname
;
83 ST_DATA CType int_type
, func_old_type
, char_type
, char_pointer_type
;
84 static CString initstr
;
87 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
88 #define VT_PTRDIFF_T VT_INT
90 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_LLONG
93 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
97 ST_DATA
struct switch_t
{
101 } **p
; int n
; /* list of case ranges */
102 int def_sym
; /* default symbol */
105 struct switch_t
*prev
;
107 } *cur_switch
; /* current switch */
109 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
110 /*list of temporary local variables on the stack in current function. */
111 ST_DATA
struct temp_local_variable
{
112 int location
; //offset on stack. Svalue.c.i
115 } arr_temp_local_vars
[MAX_TEMP_LOCAL_VARIABLE_NUMBER
];
116 short nb_temp_local_vars
;
118 static struct scope
{
120 struct { int loc
, num
; } vla
;
121 struct { Sym
*s
; int n
; } cl
;
124 } *cur_scope
, *loop_scope
, *root_scope
;
126 /********************************************************/
127 /* stab debug support */
129 static const struct {
132 } default_debug
[] = {
133 { VT_INT
, "int:t1=r1;-2147483648;2147483647;" },
134 { VT_BYTE
, "char:t2=r2;0;127;" },
136 { VT_LONG
| VT_INT
, "long int:t3=r3;-2147483648;2147483647;" },
138 { VT_LLONG
| VT_LONG
, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
140 { VT_INT
| VT_UNSIGNED
, "unsigned int:t4=r4;0;037777777777;" },
142 { VT_LONG
| VT_INT
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;037777777777;" },
144 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
145 { VT_LLONG
| VT_LONG
| VT_UNSIGNED
, "long unsigned int:t5=r5;0;01777777777777777777777;" },
147 { VT_QLONG
, "__int128:t6=r6;0;-1;" },
148 { VT_QLONG
| VT_UNSIGNED
, "__int128 unsigned:t7=r7;0;-1;" },
149 { VT_LLONG
, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
150 { VT_LLONG
| VT_UNSIGNED
, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
151 { VT_SHORT
, "short int:t10=r10;-32768;32767;" },
152 { VT_SHORT
| VT_UNSIGNED
, "short unsigned int:t11=r11;0;65535;" },
153 { VT_BYTE
| VT_DEFSIGN
, "signed char:t12=r12;-128;127;" },
154 { VT_BYTE
| VT_DEFSIGN
| VT_UNSIGNED
, "unsigned char:t13=r13;0;255;" },
155 { VT_FLOAT
, "float:t14=r1;4;0;" },
156 { VT_DOUBLE
, "double:t15=r1;8;0;" },
157 { VT_LDOUBLE
, "long double:t16=r1;16;0;" },
158 { -1, "_Float32:t17=r1;4;0;" },
159 { -1, "_Float64:t18=r1;8;0;" },
160 { -1, "_Float128:t19=r1;16;0;" },
161 { -1, "_Float32x:t20=r1;8;0;" },
162 { -1, "_Float64x:t21=r1;16;0;" },
163 { -1, "_Decimal32:t22=r1;4;0;" },
164 { -1, "_Decimal64:t23=r1;8;0;" },
165 { -1, "_Decimal128:t24=r1;16;0;" },
166 /* if default char is unsigned */
167 { VT_BYTE
| VT_UNSIGNED
, "unsigned char:t25=r25;0;255;" },
169 { VT_BOOL
, "bool:t26=r26;0;255;" },
170 { VT_VOID
, "void:t27=27" },
173 static int debug_next_type
;
175 static struct debug_hash
{
180 static int n_debug_hash
;
182 static struct debug_info
{
193 struct debug_info
*child
, *next
, *last
, *parent
;
194 } *debug_info
, *debug_info_root
;
196 /********************************************************/
198 #define precedence_parser
199 static void init_prec(void);
201 /********************************************************/
202 #ifndef CONFIG_TCC_ASM
203 ST_FUNC
void asm_instr(void)
205 tcc_error("inline asm() not supported");
207 ST_FUNC
void asm_global_instr(void)
209 tcc_error("inline asm() not supported");
213 /* ------------------------------------------------------------------------- */
214 static void gen_cast(CType
*type
);
215 static void gen_cast_s(int t
);
216 static inline CType
*pointed_type(CType
*type
);
217 static int is_compatible_types(CType
*type1
, CType
*type2
);
218 static int parse_btype(CType
*type
, AttributeDef
*ad
);
219 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
);
220 static void parse_expr_type(CType
*type
);
221 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
);
222 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
, int flags
);
223 static void block(int is_expr
);
224 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
, int has_init
, int v
, int scope
);
225 static void decl(int l
);
226 static int decl0(int l
, int is_for_loop_init
, Sym
*);
227 static void expr_eq(void);
228 static void vla_runtime_type_size(CType
*type
, int *a
);
229 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
);
230 static inline int64_t expr_const64(void);
231 static void vpush64(int ty
, unsigned long long v
);
232 static void vpush(CType
*type
);
233 static int gvtst(int inv
, int t
);
234 static void gen_inline_functions(TCCState
*s
);
235 static void free_inline_functions(TCCState
*s
);
236 static void skip_or_save_block(TokenString
**str
);
237 static void gv_dup(void);
238 static int get_temp_local_var(int size
,int align
);
239 static void clear_temp_local_var_list();
240 static void cast_error(CType
*st
, CType
*dt
);
242 ST_INLN
int is_float(int t
)
244 int bt
= t
& VT_BTYPE
;
245 return bt
== VT_LDOUBLE
251 static inline int is_integer_btype(int bt
)
260 static int btype_size(int bt
)
262 return bt
== VT_BYTE
|| bt
== VT_BOOL
? 1 :
266 bt
== VT_PTR
? PTR_SIZE
: 0;
269 /* returns function return register from type */
270 static int R_RET(int t
)
274 #ifdef TCC_TARGET_X86_64
275 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
277 #elif defined TCC_TARGET_RISCV64
278 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
284 /* returns 2nd function return register, if any */
285 static int R2_RET(int t
)
291 #elif defined TCC_TARGET_X86_64
296 #elif defined TCC_TARGET_RISCV64
303 /* returns true for two-word types */
304 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
306 /* put function return registers to stack value */
307 static void PUT_R_RET(SValue
*sv
, int t
)
309 sv
->r
= R_RET(t
), sv
->r2
= R2_RET(t
);
312 /* returns function return register class for type t */
313 static int RC_RET(int t
)
315 return reg_classes
[R_RET(t
)] & ~(RC_FLOAT
| RC_INT
);
318 /* returns generic register class for type t */
319 static int RC_TYPE(int t
)
323 #ifdef TCC_TARGET_X86_64
324 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
326 if ((t
& VT_BTYPE
) == VT_QFLOAT
)
328 #elif defined TCC_TARGET_RISCV64
329 if ((t
& VT_BTYPE
) == VT_LDOUBLE
)
335 /* returns 2nd register class corresponding to t and rc */
336 static int RC2_TYPE(int t
, int rc
)
338 if (!USING_TWO_WORDS(t
))
353 /* we use our own 'finite' function to avoid potential problems with
354 non standard math libs */
355 /* XXX: endianness dependent */
356 ST_FUNC
int ieee_finite(double d
)
359 memcpy(p
, &d
, sizeof(double));
360 return ((unsigned)((p
[1] | 0x800fffff) + 1)) >> 31;
363 /* compiling intel long double natively */
364 #if (defined __i386__ || defined __x86_64__) \
365 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
366 # define TCC_IS_NATIVE_387
369 ST_FUNC
void test_lvalue(void)
371 if (!(vtop
->r
& VT_LVAL
))
375 ST_FUNC
void check_vstack(void)
377 if (vtop
!= vstack
- 1)
378 tcc_error("internal compiler error: vstack leak (%d)",
379 (int)(vtop
- vstack
+ 1));
382 /* ------------------------------------------------------------------------- */
383 /* vstack debugging aid */
386 void pv (const char *lbl
, int a
, int b
)
389 for (i
= a
; i
< a
+ b
; ++i
) {
390 SValue
*p
= &vtop
[-i
];
391 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
392 lbl
, i
, p
->type
.t
, p
->r
, p
->r2
, (int)p
->c
.i
);
397 /* ------------------------------------------------------------------------- */
398 /* start of translation unit info */
399 ST_FUNC
void tcc_debug_start(TCCState
*s1
)
405 /* file info: full path + filename */
406 section_sym
= put_elf_sym(symtab_section
, 0, 0,
407 ELFW(ST_INFO
)(STB_LOCAL
, STT_SECTION
), 0,
408 text_section
->sh_num
, NULL
);
409 getcwd(buf
, sizeof(buf
));
411 normalize_slashes(buf
);
413 pstrcat(buf
, sizeof(buf
), "/");
414 put_stabs_r(s1
, buf
, N_SO
, 0, 0,
415 text_section
->data_offset
, text_section
, section_sym
);
416 put_stabs_r(s1
, file
->prev
->filename
, N_SO
, 0, 0,
417 text_section
->data_offset
, text_section
, section_sym
);
418 for (i
= 0; i
< sizeof (default_debug
) / sizeof (default_debug
[0]); i
++)
419 put_stabs(s1
, default_debug
[i
].name
, N_LSYM
, 0, 0, 0);
421 new_file
= last_line_num
= 0;
423 debug_next_type
= sizeof(default_debug
) / sizeof(default_debug
[0]);
427 /* we're currently 'including' the <command line> */
431 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
432 symbols can be safely used */
433 put_elf_sym(symtab_section
, 0, 0,
434 ELFW(ST_INFO
)(STB_LOCAL
, STT_FILE
), 0,
435 SHN_ABS
, file
->filename
);
438 static void tcc_debug_stabs (TCCState
*s1
, const char *str
, int type
, unsigned long value
,
439 Section
*sec
, int sym_index
)
445 (struct debug_sym
*)tcc_realloc (debug_info
->sym
,
446 sizeof(struct debug_sym
) *
447 (debug_info
->n_sym
+ 1));
448 s
= debug_info
->sym
+ debug_info
->n_sym
++;
451 s
->str
= tcc_strdup(str
);
453 s
->sym_index
= sym_index
;
456 put_stabs_r (s1
, str
, type
, 0, 0, value
, sec
, sym_index
);
458 put_stabs (s1
, str
, type
, 0, 0, value
);
461 static void tcc_debug_stabn(int type
, int value
)
463 if (type
== N_LBRAC
) {
464 struct debug_info
*info
=
465 (struct debug_info
*) tcc_mallocz(sizeof (*info
));
468 info
->parent
= debug_info
;
470 if (debug_info
->child
) {
471 if (debug_info
->child
->last
)
472 debug_info
->child
->last
->next
= info
;
474 debug_info
->child
->next
= info
;
475 debug_info
->child
->last
= info
;
478 debug_info
->child
= info
;
481 debug_info_root
= info
;
485 debug_info
->end
= value
;
486 debug_info
= debug_info
->parent
;
490 static void tcc_get_debug_info(TCCState
*s1
, Sym
*s
, CString
*result
)
499 type
= t
->type
.t
& ~(VT_EXTERN
| VT_STATIC
| VT_CONSTANT
| VT_VOLATILE
);
500 if ((type
& VT_BTYPE
) != VT_BYTE
)
502 if (type
== VT_PTR
|| type
== (VT_PTR
| VT_ARRAY
))
503 n
++, t
= t
->type
.ref
;
507 if ((type
& VT_BTYPE
) == VT_STRUCT
) {
511 for (i
= 0; i
< n_debug_hash
; i
++) {
512 if (t
== debug_hash
[i
].type
) {
513 debug_type
= debug_hash
[i
].debug_type
;
517 if (debug_type
== -1) {
518 debug_type
= ++debug_next_type
;
519 debug_hash
= (struct debug_hash
*)
520 tcc_realloc (debug_hash
,
521 (n_debug_hash
+ 1) * sizeof(*debug_hash
));
522 debug_hash
[n_debug_hash
].debug_type
= debug_type
;
523 debug_hash
[n_debug_hash
++].type
= t
;
525 cstr_printf (&str
, "%s:T%d=%c%d",
526 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
527 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
529 IS_UNION (t
->type
.t
) ? 'u' : 's',
532 int pos
, size
, align
;
535 cstr_printf (&str
, "%s:",
536 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
537 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
538 tcc_get_debug_info (s1
, t
, &str
);
539 if (t
->type
.t
& VT_BITFIELD
) {
540 pos
= t
->c
* 8 + BIT_POS(t
->type
.t
);
541 size
= BIT_SIZE(t
->type
.t
);
545 size
= type_size(&t
->type
, &align
) * 8;
547 cstr_printf (&str
, ",%d,%d;", pos
, size
);
549 cstr_printf (&str
, ";");
550 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
554 else if (IS_ENUM(type
)) {
555 Sym
*e
= t
= t
->type
.ref
;
557 debug_type
= ++debug_next_type
;
559 cstr_printf (&str
, "%s:T%d=e",
560 (t
->v
& ~SYM_STRUCT
) >= SYM_FIRST_ANOM
561 ? "" : get_tok_str(t
->v
& ~SYM_STRUCT
, NULL
),
565 cstr_printf (&str
, "%s:",
566 (t
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
567 ? "" : get_tok_str(t
->v
& ~SYM_FIELD
, NULL
));
568 cstr_printf (&str
, e
->type
.t
& VT_UNSIGNED
? "%u," : "%d,",
571 cstr_printf (&str
, ";");
572 tcc_debug_stabs(s1
, str
.data
, N_LSYM
, 0, NULL
, 0);
575 else if ((type
& VT_BTYPE
) != VT_FUNC
) {
576 type
&= ~VT_STRUCT_MASK
;
578 debug_type
<= sizeof(default_debug
) / sizeof(default_debug
[0]);
580 if (default_debug
[debug_type
- 1].type
== type
)
582 if (debug_type
> sizeof(default_debug
) / sizeof(default_debug
[0]))
586 cstr_printf (result
, "%d=", ++debug_next_type
);
589 type
= t
->type
.t
& ~(VT_EXTERN
| VT_STATIC
| VT_CONSTANT
| VT_VOLATILE
);
590 if ((type
& VT_BTYPE
) != VT_BYTE
)
593 cstr_printf (result
, "%d=*", ++debug_next_type
);
594 else if (type
== (VT_PTR
| VT_ARRAY
))
595 cstr_printf (result
, "%d=ar1;0;%d;",
596 ++debug_next_type
, t
->type
.ref
->c
- 1);
597 else if (type
== VT_FUNC
) {
598 cstr_printf (result
, "%d=f", ++debug_next_type
);
599 tcc_get_debug_info (s1
, t
->type
.ref
, result
);
606 cstr_printf (result
, "%d", debug_type
);
609 static void tcc_debug_finish (TCCState
*s1
, struct debug_info
*cur
)
613 struct debug_info
*next
= cur
->next
;
615 for (i
= 0; i
< cur
->n_sym
; i
++) {
616 struct debug_sym
*s
= &cur
->sym
[i
];
619 put_stabs_r(s1
, s
->str
, s
->type
, 0, 0, s
->value
,
620 s
->sec
, s
->sym_index
);
622 put_stabs(s1
, s
->str
, s
->type
, 0, 0, s
->value
);
626 put_stabn(s1
, N_LBRAC
, 0, 0, cur
->start
);
627 tcc_debug_finish (s1
, cur
->child
);
628 put_stabn(s1
, N_RBRAC
, 0, 0, cur
->end
);
634 static void tcc_add_debug_info(TCCState
*s1
, int param
, Sym
*s
, Sym
*e
)
637 cstr_new (&debug_str
);
638 for (; s
!= e
; s
= s
->prev
) {
639 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
641 cstr_reset (&debug_str
);
642 cstr_printf (&debug_str
, "%s:%s", get_tok_str(s
->v
, NULL
), param
? "p" : "");
643 tcc_get_debug_info(s1
, s
, &debug_str
);
644 tcc_debug_stabs(s1
, debug_str
.data
, param
? N_PSYM
: N_LSYM
, s
->c
, NULL
, 0);
646 cstr_free (&debug_str
);
649 static void tcc_debug_extern_sym(TCCState
*s1
, Sym
*sym
, int sh_num
, int sym_bind
)
651 Section
*s
= s1
->sections
[sh_num
];
655 cstr_printf (&str
, "%s:%c",
656 get_tok_str(sym
->v
, NULL
),
657 sym_bind
== STB_GLOBAL
? 'G' : local_scope
? 'V' : 'S'
659 tcc_get_debug_info(s1
, sym
, &str
);
660 if (sym_bind
== STB_GLOBAL
)
661 tcc_debug_stabs(s1
, str
.data
, N_GSYM
, 0, NULL
, 0);
663 tcc_debug_stabs(s1
, str
.data
,
664 (sym
->type
.t
& VT_STATIC
) && data_section
== s
665 ? N_STSYM
: N_LCSYM
, 0, s
, sym
->c
);
669 /* put end of translation unit info */
670 ST_FUNC
void tcc_debug_end(TCCState
*s1
)
674 put_stabs_r(s1
, NULL
, N_SO
, 0, 0,
675 text_section
->data_offset
, text_section
, section_sym
);
676 tcc_free(debug_hash
);
679 static BufferedFile
* put_new_file(TCCState
*s1
)
681 BufferedFile
*f
= file
;
682 /* use upper file if from inline ":asm:" */
683 if (f
->filename
[0] == ':')
686 put_stabs_r(s1
, f
->filename
, N_SOL
, 0, 0, ind
, text_section
, section_sym
);
687 new_file
= last_line_num
= 0;
692 /* generate line number info */
693 ST_FUNC
void tcc_debug_line(TCCState
*s1
)
697 || cur_text_section
!= text_section
698 || !(f
= put_new_file(s1
))
699 || last_line_num
== f
->line_num
)
701 if (func_ind
!= -1) {
702 put_stabn(s1
, N_SLINE
, 0, f
->line_num
, ind
- func_ind
);
704 /* from tcc_assemble */
705 put_stabs_r(s1
, NULL
, N_SLINE
, 0, f
->line_num
, ind
, text_section
, section_sym
);
707 last_line_num
= f
->line_num
;
710 /* put function symbol */
711 ST_FUNC
void tcc_debug_funcstart(TCCState
*s1
, Sym
*sym
)
717 debug_info_root
= NULL
;
719 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
720 if (!(f
= put_new_file(s1
)))
722 cstr_new (&debug_str
);
723 cstr_printf(&debug_str
, "%s:%c", funcname
, sym
->type
.t
& VT_STATIC
? 'f' : 'F');
724 tcc_get_debug_info(s1
, sym
->type
.ref
, &debug_str
);
725 put_stabs_r(s1
, debug_str
.data
, N_FUN
, 0, f
->line_num
, 0, cur_text_section
, sym
->c
);
726 cstr_free (&debug_str
);
731 /* put function size */
732 ST_FUNC
void tcc_debug_funcend(TCCState
*s1
, int size
)
736 tcc_debug_stabn(N_RBRAC
, size
);
737 tcc_debug_finish (s1
, debug_info_root
);
740 /* put alternative filename */
741 ST_FUNC
void tcc_debug_putfile(TCCState
*s1
, const char *filename
)
743 if (0 == strcmp(file
->filename
, filename
))
745 pstrcpy(file
->filename
, sizeof(file
->filename
), filename
);
749 /* begin of #include */
750 ST_FUNC
void tcc_debug_bincl(TCCState
*s1
)
754 put_stabs(s1
, file
->filename
, N_BINCL
, 0, 0, 0);
758 /* end of #include */
759 ST_FUNC
void tcc_debug_eincl(TCCState
*s1
)
763 put_stabn(s1
, N_EINCL
, 0, 0, 0);
767 /* ------------------------------------------------------------------------- */
768 /* initialize vstack and types. This must be done also for tcc -E */
769 ST_FUNC
void tccgen_init(TCCState
*s1
)
772 memset(vtop
, 0, sizeof *vtop
);
774 /* define some often used types */
777 char_type
.t
= VT_BYTE
;
778 if (s1
->char_is_unsigned
)
779 char_type
.t
|= VT_UNSIGNED
;
780 char_pointer_type
= char_type
;
781 mk_pointer(&char_pointer_type
);
783 func_old_type
.t
= VT_FUNC
;
784 func_old_type
.ref
= sym_push(SYM_FIELD
, &int_type
, 0, 0);
785 func_old_type
.ref
->f
.func_call
= FUNC_CDECL
;
786 func_old_type
.ref
->f
.func_type
= FUNC_OLD
;
787 #ifdef precedence_parser
793 ST_FUNC
int tccgen_compile(TCCState
*s1
)
795 cur_text_section
= NULL
;
797 anon_sym
= SYM_FIRST_ANOM
;
800 nocode_wanted
= 0x80000000;
804 #ifdef TCC_TARGET_ARM
808 printf("%s: **** new file\n", file
->filename
);
810 parse_flags
= PARSE_FLAG_PREPROCESS
| PARSE_FLAG_TOK_NUM
| PARSE_FLAG_TOK_STR
;
813 gen_inline_functions(s1
);
815 /* end of translation unit info */
820 ST_FUNC
void tccgen_finish(TCCState
*s1
)
823 free_inline_functions(s1
);
824 sym_pop(&global_stack
, NULL
, 0);
825 sym_pop(&local_stack
, NULL
, 0);
826 /* free preprocessor macros */
829 dynarray_reset(&sym_pools
, &nb_sym_pools
);
830 sym_free_first
= NULL
;
833 /* ------------------------------------------------------------------------- */
834 ST_FUNC ElfSym
*elfsym(Sym
*s
)
838 return &((ElfSym
*)symtab_section
->data
)[s
->c
];
841 /* apply storage attributes to Elf symbol */
842 ST_FUNC
void update_storage(Sym
*sym
)
845 int sym_bind
, old_sym_bind
;
851 if (sym
->a
.visibility
)
852 esym
->st_other
= (esym
->st_other
& ~ELFW(ST_VISIBILITY
)(-1))
855 if (sym
->type
.t
& (VT_STATIC
| VT_INLINE
))
856 sym_bind
= STB_LOCAL
;
857 else if (sym
->a
.weak
)
860 sym_bind
= STB_GLOBAL
;
861 old_sym_bind
= ELFW(ST_BIND
)(esym
->st_info
);
862 if (sym_bind
!= old_sym_bind
) {
863 esym
->st_info
= ELFW(ST_INFO
)(sym_bind
, ELFW(ST_TYPE
)(esym
->st_info
));
867 if (sym
->a
.dllimport
)
868 esym
->st_other
|= ST_PE_IMPORT
;
869 if (sym
->a
.dllexport
)
870 esym
->st_other
|= ST_PE_EXPORT
;
874 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
875 get_tok_str(sym
->v
, NULL
),
876 sym_bind
== STB_WEAK
? 'w' : sym_bind
== STB_LOCAL
? 'l' : 'g',
884 /* ------------------------------------------------------------------------- */
885 /* update sym->c so that it points to an external symbol in section
886 'section' with value 'value' */
888 ST_FUNC
void put_extern_sym2(Sym
*sym
, int sh_num
,
889 addr_t value
, unsigned long size
,
890 int can_add_underscore
)
892 int sym_type
, sym_bind
, info
, other
, t
;
898 name
= get_tok_str(sym
->v
, NULL
);
900 if ((t
& VT_BTYPE
) == VT_FUNC
) {
902 } else if ((t
& VT_BTYPE
) == VT_VOID
) {
903 sym_type
= STT_NOTYPE
;
905 sym_type
= STT_OBJECT
;
907 if (t
& (VT_STATIC
| VT_INLINE
))
908 sym_bind
= STB_LOCAL
;
910 sym_bind
= STB_GLOBAL
;
914 if (sym_type
== STT_FUNC
&& sym
->type
.ref
) {
915 Sym
*ref
= sym
->type
.ref
;
916 if (ref
->a
.nodecorate
) {
917 can_add_underscore
= 0;
919 if (ref
->f
.func_call
== FUNC_STDCALL
&& can_add_underscore
) {
920 sprintf(buf1
, "_%s@%d", name
, ref
->f
.func_args
* PTR_SIZE
);
922 other
|= ST_PE_STDCALL
;
923 can_add_underscore
= 0;
928 if (sym
->asm_label
) {
929 name
= get_tok_str(sym
->asm_label
& ~SYM_FIELD
, NULL
);
930 /* with SYM_FIELD it was __attribute__((alias("..."))) actually */
931 if (!(sym
->asm_label
& SYM_FIELD
))
932 can_add_underscore
= 0;
935 if (tcc_state
->leading_underscore
&& can_add_underscore
) {
937 pstrcpy(buf1
+ 1, sizeof(buf1
) - 1, name
);
941 info
= ELFW(ST_INFO
)(sym_bind
, sym_type
);
942 sym
->c
= put_elf_sym(symtab_section
, value
, size
, info
, other
, sh_num
, name
);
944 if (tcc_state
->do_debug
945 && sym_type
!= STT_FUNC
946 && sym
->v
< SYM_FIRST_ANOM
)
947 tcc_debug_extern_sym(tcc_state
, sym
, sh_num
, sym_bind
);
951 esym
->st_value
= value
;
952 esym
->st_size
= size
;
953 esym
->st_shndx
= sh_num
;
958 ST_FUNC
void put_extern_sym(Sym
*sym
, Section
*section
,
959 addr_t value
, unsigned long size
)
961 int sh_num
= section
? section
->sh_num
: SHN_UNDEF
;
962 put_extern_sym2(sym
, sh_num
, value
, size
, 1);
965 /* add a new relocation entry to symbol 'sym' in section 's' */
966 ST_FUNC
void greloca(Section
*s
, Sym
*sym
, unsigned long offset
, int type
,
971 if (nocode_wanted
&& s
== cur_text_section
)
976 put_extern_sym(sym
, NULL
, 0, 0);
980 /* now we can add ELF relocation info */
981 put_elf_reloca(symtab_section
, s
, offset
, type
, c
, addend
);
985 ST_FUNC
void greloc(Section
*s
, Sym
*sym
, unsigned long offset
, int type
)
987 greloca(s
, sym
, offset
, type
, 0);
991 /* ------------------------------------------------------------------------- */
992 /* symbol allocator */
993 static Sym
*__sym_malloc(void)
995 Sym
*sym_pool
, *sym
, *last_sym
;
998 sym_pool
= tcc_malloc(SYM_POOL_NB
* sizeof(Sym
));
999 dynarray_add(&sym_pools
, &nb_sym_pools
, sym_pool
);
1001 last_sym
= sym_free_first
;
1003 for(i
= 0; i
< SYM_POOL_NB
; i
++) {
1004 sym
->next
= last_sym
;
1008 sym_free_first
= last_sym
;
1012 static inline Sym
*sym_malloc(void)
1016 sym
= sym_free_first
;
1018 sym
= __sym_malloc();
1019 sym_free_first
= sym
->next
;
1022 sym
= tcc_malloc(sizeof(Sym
));
1027 ST_INLN
void sym_free(Sym
*sym
)
1030 sym
->next
= sym_free_first
;
1031 sym_free_first
= sym
;
1037 /* push, without hashing */
1038 ST_FUNC Sym
*sym_push2(Sym
**ps
, int v
, int t
, int c
)
1043 memset(s
, 0, sizeof *s
);
1053 /* find a symbol and return its associated structure. 's' is the top
1054 of the symbol stack */
1055 ST_FUNC Sym
*sym_find2(Sym
*s
, int v
)
1060 else if (s
->v
== -1)
1067 /* structure lookup */
1068 ST_INLN Sym
*struct_find(int v
)
1071 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1073 return table_ident
[v
]->sym_struct
;
1076 /* find an identifier */
1077 ST_INLN Sym
*sym_find(int v
)
1080 if ((unsigned)v
>= (unsigned)(tok_ident
- TOK_IDENT
))
1082 return table_ident
[v
]->sym_identifier
;
1085 static int sym_scope(Sym
*s
)
1087 if (IS_ENUM_VAL (s
->type
.t
))
1088 return s
->type
.ref
->sym_scope
;
1090 return s
->sym_scope
;
1093 /* push a given symbol on the symbol stack */
1094 ST_FUNC Sym
*sym_push(int v
, CType
*type
, int r
, int c
)
1103 s
= sym_push2(ps
, v
, type
->t
, c
);
1104 s
->type
.ref
= type
->ref
;
1106 /* don't record fields or anonymous symbols */
1108 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1109 /* record symbol in token array */
1110 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1112 ps
= &ts
->sym_struct
;
1114 ps
= &ts
->sym_identifier
;
1117 s
->sym_scope
= local_scope
;
1118 if (s
->prev_tok
&& sym_scope(s
->prev_tok
) == s
->sym_scope
)
1119 tcc_error("redeclaration of '%s'",
1120 get_tok_str(v
& ~SYM_STRUCT
, NULL
));
1125 /* push a global identifier */
1126 ST_FUNC Sym
*global_identifier_push(int v
, int t
, int c
)
1129 s
= sym_push2(&global_stack
, v
, t
, c
);
1130 s
->r
= VT_CONST
| VT_SYM
;
1131 /* don't record anonymous symbol */
1132 if (v
< SYM_FIRST_ANOM
) {
1133 ps
= &table_ident
[v
- TOK_IDENT
]->sym_identifier
;
1134 /* modify the top most local identifier, so that sym_identifier will
1135 point to 's' when popped; happens when called from inline asm */
1136 while (*ps
!= NULL
&& (*ps
)->sym_scope
)
1137 ps
= &(*ps
)->prev_tok
;
1144 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1145 pop them yet from the list, but do remove them from the token array. */
1146 ST_FUNC
void sym_pop(Sym
**ptop
, Sym
*b
, int keep
)
1156 /* remove symbol in token array */
1158 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
1159 ts
= table_ident
[(v
& ~SYM_STRUCT
) - TOK_IDENT
];
1161 ps
= &ts
->sym_struct
;
1163 ps
= &ts
->sym_identifier
;
1174 /* ------------------------------------------------------------------------- */
1175 static void vcheck_cmp(void)
1177 /* cannot let cpu flags if other instruction are generated. Also
1178 avoid leaving VT_JMP anywhere except on the top of the stack
1179 because it would complicate the code generator.
1181 Don't do this when nocode_wanted. vtop might come from
1182 !nocode_wanted regions (see 88_codeopt.c) and transforming
1183 it to a register without actually generating code is wrong
1184 as their value might still be used for real. All values
1185 we push under nocode_wanted will eventually be popped
1186 again, so that the VT_CMP/VT_JMP value will be in vtop
1187 when code is unsuppressed again. */
1189 if (vtop
->r
== VT_CMP
&& !nocode_wanted
)
1193 static void vsetc(CType
*type
, int r
, CValue
*vc
)
1195 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1196 tcc_error("memory full (vstack)");
1201 vtop
->r2
= VT_CONST
;
1206 ST_FUNC
void vswap(void)
1216 /* pop stack value */
1217 ST_FUNC
void vpop(void)
1220 v
= vtop
->r
& VT_VALMASK
;
1221 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1222 /* for x86, we need to pop the FP stack */
1223 if (v
== TREG_ST0
) {
1224 o(0xd8dd); /* fstp %st(0) */
1228 /* need to put correct jump if && or || without test */
1235 /* push constant of type "type" with useless value */
/* Push a constant of type "type" with a useless value (0) onto the
   value stack. */
static void vpush(CType *type)
{
    vset(type, VT_CONST, 0);
}
1241 /* push arbitrary 64bit constant */
1242 static void vpush64(int ty
, unsigned long long v
)
1249 vsetc(&ctype
, VT_CONST
, &cval
);
1252 /* push integer constant */
1253 ST_FUNC
void vpushi(int v
)
1258 /* push a pointer sized constant */
/* Push a pointer-sized (VT_SIZE_T) constant 'v' onto the value stack. */
static void vpushs(addr_t v)
{
    vpush64(VT_SIZE_T, v);
}
1264 /* push long long constant */
/* Push a 64-bit (long long) constant 'v' onto the value stack. */
static inline void vpushll(long long v)
{
    vpush64(VT_LLONG, v);
}
1270 ST_FUNC
void vset(CType
*type
, int r
, int v
)
1274 vsetc(type
, r
, &cval
);
1277 static void vseti(int r
, int v
)
1285 ST_FUNC
void vpushv(SValue
*v
)
1287 if (vtop
>= vstack
+ (VSTACK_SIZE
- 1))
1288 tcc_error("memory full (vstack)");
1293 static void vdup(void)
1298 /* rotate n first stack elements to the bottom
1299 I1 ... In -> I2 ... In I1 [top is right]
1301 ST_FUNC
void vrotb(int n
)
1308 for(i
=-n
+1;i
!=0;i
++)
1309 vtop
[i
] = vtop
[i
+1];
1313 /* rotate the n elements before entry e towards the top
1314 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1316 ST_FUNC
void vrote(SValue
*e
, int n
)
1323 for(i
= 0;i
< n
- 1; i
++)
1328 /* rotate n first stack elements to the top
1329 I1 ... In -> In I1 ... I(n-1) [top is right]
1331 ST_FUNC
void vrott(int n
)
1336 /* ------------------------------------------------------------------------- */
1337 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1339 /* called from generators to set the result from relational ops */
1340 ST_FUNC
void vset_VT_CMP(int op
)
1348 /* called once before asking generators to load VT_CMP to a register */
1349 static void vset_VT_JMP(void)
1351 int op
= vtop
->cmp_op
;
1353 if (vtop
->jtrue
|| vtop
->jfalse
) {
1354 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1355 int inv
= op
& (op
< 2); /* small optimization */
1356 vseti(VT_JMP
+inv
, gvtst(inv
, 0));
1358 /* otherwise convert flags (rsp. 0/1) to register */
1360 if (op
< 2) /* doesn't seem to happen */
1365 /* Set CPU Flags, doesn't yet jump */
1366 static void gvtst_set(int inv
, int t
)
1370 if (vtop
->r
!= VT_CMP
) {
1373 if (vtop
->r
!= VT_CMP
) /* must be VT_CONST then */
1374 vset_VT_CMP(vtop
->c
.i
!= 0);
1377 p
= inv
? &vtop
->jfalse
: &vtop
->jtrue
;
1378 *p
= gjmp_append(*p
, t
);
1381 /* Generate value test
1383 * Generate a test for any value (jump, comparison and integers) */
1384 static int gvtst(int inv
, int t
)
1389 t
= vtop
->jtrue
, u
= vtop
->jfalse
;
1391 x
= u
, u
= t
, t
= x
;
1394 /* jump to the wanted target */
1396 t
= gjmp_cond(op
^ inv
, t
);
1399 /* resolve complementary jumps to here */
1406 /* generate a zero or nozero test */
1407 static void gen_test_zero(int op
)
1409 if (vtop
->r
== VT_CMP
) {
1413 vtop
->jfalse
= vtop
->jtrue
;
1423 /* ------------------------------------------------------------------------- */
1424 /* push a symbol value of TYPE */
1425 static inline void vpushsym(CType
*type
, Sym
*sym
)
1429 vsetc(type
, VT_CONST
| VT_SYM
, &cval
);
1433 /* Return a static symbol pointing to a section */
1434 ST_FUNC Sym
*get_sym_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1440 sym
= sym_push(v
, type
, VT_CONST
| VT_SYM
, 0);
1441 sym
->type
.t
|= VT_STATIC
;
1442 put_extern_sym(sym
, sec
, offset
, size
);
1446 /* push a reference to a section offset by adding a dummy symbol */
1447 static void vpush_ref(CType
*type
, Section
*sec
, unsigned long offset
, unsigned long size
)
1449 vpushsym(type
, get_sym_ref(type
, sec
, offset
, size
));
1452 /* define a new external reference to a symbol 'v' of type 'u' */
1453 ST_FUNC Sym
*external_global_sym(int v
, CType
*type
)
1459 /* push forward reference */
1460 s
= global_identifier_push(v
, type
->t
| VT_EXTERN
, 0);
1461 s
->type
.ref
= type
->ref
;
1462 } else if (IS_ASM_SYM(s
)) {
1463 s
->type
.t
= type
->t
| (s
->type
.t
& VT_EXTERN
);
1464 s
->type
.ref
= type
->ref
;
1470 /* Merge symbol attributes. */
1471 static void merge_symattr(struct SymAttr
*sa
, struct SymAttr
*sa1
)
1473 if (sa1
->aligned
&& !sa
->aligned
)
1474 sa
->aligned
= sa1
->aligned
;
1475 sa
->packed
|= sa1
->packed
;
1476 sa
->weak
|= sa1
->weak
;
1477 if (sa1
->visibility
!= STV_DEFAULT
) {
1478 int vis
= sa
->visibility
;
1479 if (vis
== STV_DEFAULT
1480 || vis
> sa1
->visibility
)
1481 vis
= sa1
->visibility
;
1482 sa
->visibility
= vis
;
1484 sa
->dllexport
|= sa1
->dllexport
;
1485 sa
->nodecorate
|= sa1
->nodecorate
;
1486 sa
->dllimport
|= sa1
->dllimport
;
1489 /* Merge function attributes. */
1490 static void merge_funcattr(struct FuncAttr
*fa
, struct FuncAttr
*fa1
)
1492 if (fa1
->func_call
&& !fa
->func_call
)
1493 fa
->func_call
= fa1
->func_call
;
1494 if (fa1
->func_type
&& !fa
->func_type
)
1495 fa
->func_type
= fa1
->func_type
;
1496 if (fa1
->func_args
&& !fa
->func_args
)
1497 fa
->func_args
= fa1
->func_args
;
1498 if (fa1
->func_noreturn
)
1499 fa
->func_noreturn
= 1;
1506 /* Merge attributes. */
1507 static void merge_attr(AttributeDef
*ad
, AttributeDef
*ad1
)
1509 merge_symattr(&ad
->a
, &ad1
->a
);
1510 merge_funcattr(&ad
->f
, &ad1
->f
);
1513 ad
->section
= ad1
->section
;
1515 ad
->asm_label
= ad1
->asm_label
;
1517 ad
->attr_mode
= ad1
->attr_mode
;
1520 /* Merge some type attributes. */
1521 static void patch_type(Sym
*sym
, CType
*type
)
1523 if (!(type
->t
& VT_EXTERN
) || IS_ENUM_VAL(sym
->type
.t
)) {
1524 if (!(sym
->type
.t
& VT_EXTERN
))
1525 tcc_error("redefinition of '%s'", get_tok_str(sym
->v
, NULL
));
1526 sym
->type
.t
&= ~VT_EXTERN
;
1529 if (IS_ASM_SYM(sym
)) {
1530 /* stay static if both are static */
1531 sym
->type
.t
= type
->t
& (sym
->type
.t
| ~VT_STATIC
);
1532 sym
->type
.ref
= type
->ref
;
1535 if (!is_compatible_types(&sym
->type
, type
)) {
1536 tcc_error("incompatible types for redefinition of '%s'",
1537 get_tok_str(sym
->v
, NULL
));
1539 } else if ((sym
->type
.t
& VT_BTYPE
) == VT_FUNC
) {
1540 int static_proto
= sym
->type
.t
& VT_STATIC
;
1541 /* warn if static follows non-static function declaration */
1542 if ((type
->t
& VT_STATIC
) && !static_proto
1543 /* XXX this test for inline shouldn't be here. Until we
1544 implement gnu-inline mode again it silences a warning for
1545 mingw caused by our workarounds. */
1546 && !((type
->t
| sym
->type
.t
) & VT_INLINE
))
1547 tcc_warning("static storage ignored for redefinition of '%s'",
1548 get_tok_str(sym
->v
, NULL
));
1550 /* set 'inline' if both agree or if one has static */
1551 if ((type
->t
| sym
->type
.t
) & VT_INLINE
) {
1552 if (!((type
->t
^ sym
->type
.t
) & VT_INLINE
)
1553 || ((type
->t
| sym
->type
.t
) & VT_STATIC
))
1554 static_proto
|= VT_INLINE
;
1557 if (0 == (type
->t
& VT_EXTERN
)) {
1558 struct FuncAttr f
= sym
->type
.ref
->f
;
1559 /* put complete type, use static from prototype */
1560 sym
->type
.t
= (type
->t
& ~(VT_STATIC
|VT_INLINE
)) | static_proto
;
1561 sym
->type
.ref
= type
->ref
;
1562 merge_funcattr(&sym
->type
.ref
->f
, &f
);
1564 sym
->type
.t
&= ~VT_INLINE
| static_proto
;
1567 if (sym
->type
.ref
->f
.func_type
== FUNC_OLD
1568 && type
->ref
->f
.func_type
!= FUNC_OLD
) {
1569 sym
->type
.ref
= type
->ref
;
1573 if ((sym
->type
.t
& VT_ARRAY
) && type
->ref
->c
>= 0) {
1574 /* set array size if it was omitted in extern declaration */
1575 sym
->type
.ref
->c
= type
->ref
->c
;
1577 if ((type
->t
^ sym
->type
.t
) & VT_STATIC
)
1578 tcc_warning("storage mismatch for redefinition of '%s'",
1579 get_tok_str(sym
->v
, NULL
));
1583 /* Merge some storage attributes. */
1584 static void patch_storage(Sym
*sym
, AttributeDef
*ad
, CType
*type
)
1587 patch_type(sym
, type
);
1589 #ifdef TCC_TARGET_PE
1590 if (sym
->a
.dllimport
!= ad
->a
.dllimport
)
1591 tcc_error("incompatible dll linkage for redefinition of '%s'",
1592 get_tok_str(sym
->v
, NULL
));
1594 merge_symattr(&sym
->a
, &ad
->a
);
1596 sym
->asm_label
= ad
->asm_label
;
1597 update_storage(sym
);
1600 /* copy sym to other stack */
1601 static Sym
*sym_copy(Sym
*s0
, Sym
**ps
)
1604 s
= sym_malloc(), *s
= *s0
;
1605 s
->prev
= *ps
, *ps
= s
;
1606 if (s
->v
< SYM_FIRST_ANOM
) {
1607 ps
= &table_ident
[s
->v
- TOK_IDENT
]->sym_identifier
;
1608 s
->prev_tok
= *ps
, *ps
= s
;
1613 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1614 static void sym_copy_ref(Sym
*s
, Sym
**ps
)
1616 int bt
= s
->type
.t
& VT_BTYPE
;
1617 if (bt
== VT_FUNC
|| bt
== VT_PTR
) {
1618 Sym
**sp
= &s
->type
.ref
;
1619 for (s
= *sp
, *sp
= NULL
; s
; s
= s
->next
) {
1620 Sym
*s2
= sym_copy(s
, ps
);
1621 sp
= &(*sp
= s2
)->next
;
1622 sym_copy_ref(s2
, ps
);
1627 /* define a new external reference to a symbol 'v' */
1628 static Sym
*external_sym(int v
, CType
*type
, int r
, AttributeDef
*ad
)
1632 /* look for global symbol */
1634 while (s
&& s
->sym_scope
)
1638 /* push forward reference */
1639 s
= global_identifier_push(v
, type
->t
, 0);
1642 s
->asm_label
= ad
->asm_label
;
1643 s
->type
.ref
= type
->ref
;
1644 /* copy type to the global stack */
1646 sym_copy_ref(s
, &global_stack
);
1648 patch_storage(s
, ad
, type
);
1650 /* push variables on local_stack if any */
1651 if (local_stack
&& (s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
1652 s
= sym_copy(s
, &local_stack
);
1656 /* push a reference to global symbol v */
1657 ST_FUNC
void vpush_global_sym(CType
*type
, int v
)
1659 vpushsym(type
, external_global_sym(v
, type
));
1662 /* save registers up to (vtop - n) stack entry */
1663 ST_FUNC
void save_regs(int n
)
1666 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++)
1670 /* save r to the memory stack, and mark it as being free */
1671 ST_FUNC
void save_reg(int r
)
1673 save_reg_upstack(r
, 0);
1676 /* save r to the memory stack, and mark it as being free,
1677 if seen up to (vtop - n) stack entry */
1678 ST_FUNC
void save_reg_upstack(int r
, int n
)
1680 int l
, size
, align
, bt
;
1683 if ((r
&= VT_VALMASK
) >= VT_CONST
)
1688 for(p
= vstack
, p1
= vtop
- n
; p
<= p1
; p
++) {
1689 if ((p
->r
& VT_VALMASK
) == r
|| p
->r2
== r
) {
1690 /* must save value on stack if not already done */
1692 bt
= p
->type
.t
& VT_BTYPE
;
1695 if ((p
->r
& VT_LVAL
) || bt
== VT_FUNC
)
1698 size
= type_size(&sv
.type
, &align
);
1699 l
= get_temp_local_var(size
,align
);
1700 sv
.r
= VT_LOCAL
| VT_LVAL
;
1702 store(p
->r
& VT_VALMASK
, &sv
);
1703 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1704 /* x86 specific: need to pop fp register ST0 if saved */
1705 if (r
== TREG_ST0
) {
1706 o(0xd8dd); /* fstp %st(0) */
1709 /* special long long case */
1710 if (p
->r2
< VT_CONST
&& USING_TWO_WORDS(bt
)) {
1715 /* mark that stack entry as being saved on the stack */
1716 if (p
->r
& VT_LVAL
) {
1717 /* also clear the bounded flag because the
1718 relocation address of the function was stored in
1720 p
->r
= (p
->r
& ~(VT_VALMASK
| VT_BOUNDED
)) | VT_LLOCAL
;
1722 p
->r
= VT_LVAL
| VT_LOCAL
;
1730 #ifdef TCC_TARGET_ARM
1731 /* find a register of class 'rc2' with at most one reference on stack.
1732 * If none, call get_reg(rc) */
1733 ST_FUNC
int get_reg_ex(int rc
, int rc2
)
1738 for(r
=0;r
<NB_REGS
;r
++) {
1739 if (reg_classes
[r
] & rc2
) {
1742 for(p
= vstack
; p
<= vtop
; p
++) {
1743 if ((p
->r
& VT_VALMASK
) == r
||
1755 /* find a free register of class 'rc'. If none, save one register */
1756 ST_FUNC
int get_reg(int rc
)
1761 /* find a free register */
1762 for(r
=0;r
<NB_REGS
;r
++) {
1763 if (reg_classes
[r
] & rc
) {
1766 for(p
=vstack
;p
<=vtop
;p
++) {
1767 if ((p
->r
& VT_VALMASK
) == r
||
1776 /* no register left : free the first one on the stack (VERY
1777 IMPORTANT to start from the bottom to ensure that we don't
1778 spill registers used in gen_opi()) */
1779 for(p
=vstack
;p
<=vtop
;p
++) {
1780 /* look at second register (if long long) */
1782 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
))
1784 r
= p
->r
& VT_VALMASK
;
1785 if (r
< VT_CONST
&& (reg_classes
[r
] & rc
)) {
1791 /* Should never come here */
1795 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1796 static int get_temp_local_var(int size
,int align
){
1798 struct temp_local_variable
*temp_var
;
1805 for(i
=0;i
<nb_temp_local_vars
;i
++){
1806 temp_var
=&arr_temp_local_vars
[i
];
1807 if(temp_var
->size
<size
||align
!=temp_var
->align
){
1810 /*check if temp_var is free*/
1812 for(p
=vstack
;p
<=vtop
;p
++) {
1814 if(r
==VT_LOCAL
||r
==VT_LLOCAL
){
1815 if(p
->c
.i
==temp_var
->location
){
1822 found_var
=temp_var
->location
;
1828 loc
= (loc
- size
) & -align
;
1829 if(nb_temp_local_vars
<MAX_TEMP_LOCAL_VARIABLE_NUMBER
){
1830 temp_var
=&arr_temp_local_vars
[i
];
1831 temp_var
->location
=loc
;
1832 temp_var
->size
=size
;
1833 temp_var
->align
=align
;
1834 nb_temp_local_vars
++;
1841 static void clear_temp_local_var_list(){
1842 nb_temp_local_vars
=0;
1845 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1847 static void move_reg(int r
, int s
, int t
)
1861 /* get address of vtop (vtop MUST BE an lvalue) */
1862 ST_FUNC
void gaddrof(void)
1864 vtop
->r
&= ~VT_LVAL
;
1865 /* tricky: if saved lvalue, then we can go back to lvalue */
1866 if ((vtop
->r
& VT_VALMASK
) == VT_LLOCAL
)
1867 vtop
->r
= (vtop
->r
& ~VT_VALMASK
) | VT_LOCAL
| VT_LVAL
;
1870 #ifdef CONFIG_TCC_BCHECK
1871 /* generate a bounded pointer addition */
1872 static void gen_bounded_ptr_add(void)
1874 int save
= (vtop
[-1].r
& VT_VALMASK
) == VT_LOCAL
;
1879 vpush_global_sym(&func_old_type
, TOK___bound_ptr_add
);
1884 /* returned pointer is in REG_IRET */
1885 vtop
->r
= REG_IRET
| VT_BOUNDED
;
1888 /* relocation offset of the bounding function call point */
1889 vtop
->c
.i
= (cur_text_section
->reloc
->data_offset
- sizeof(ElfW_Rel
));
1892 /* patch pointer addition in vtop so that pointer dereferencing is
1894 static void gen_bounded_ptr_deref(void)
1904 size
= type_size(&vtop
->type
, &align
);
1906 case 1: func
= TOK___bound_ptr_indir1
; break;
1907 case 2: func
= TOK___bound_ptr_indir2
; break;
1908 case 4: func
= TOK___bound_ptr_indir4
; break;
1909 case 8: func
= TOK___bound_ptr_indir8
; break;
1910 case 12: func
= TOK___bound_ptr_indir12
; break;
1911 case 16: func
= TOK___bound_ptr_indir16
; break;
1913 /* may happen with struct member access */
1916 sym
= external_global_sym(func
, &func_old_type
);
1918 put_extern_sym(sym
, NULL
, 0, 0);
1919 /* patch relocation */
1920 /* XXX: find a better solution ? */
1921 rel
= (ElfW_Rel
*)(cur_text_section
->reloc
->data
+ vtop
->c
.i
);
1922 rel
->r_info
= ELFW(R_INFO
)(sym
->c
, ELFW(R_TYPE
)(rel
->r_info
));
1925 /* generate lvalue bound code */
1926 static void gbound(void)
1930 vtop
->r
&= ~VT_MUSTBOUND
;
1931 /* if lvalue, then use checking code before dereferencing */
1932 if (vtop
->r
& VT_LVAL
) {
1933 /* if not VT_BOUNDED value, then make one */
1934 if (!(vtop
->r
& VT_BOUNDED
)) {
1935 /* must save type because we must set it to int to get pointer */
1937 vtop
->type
.t
= VT_PTR
;
1940 gen_bounded_ptr_add();
1944 /* then check for dereferencing */
1945 gen_bounded_ptr_deref();
1949 /* we need to call __bound_ptr_add before we start to load function
1950 args into registers */
1951 ST_FUNC
void gbound_args(int nb_args
)
1956 for (i
= 1; i
<= nb_args
; ++i
)
1957 if (vtop
[1 - i
].r
& VT_MUSTBOUND
) {
1963 sv
= vtop
- nb_args
;
1964 if (sv
->r
& VT_SYM
) {
1968 #ifndef TCC_TARGET_PE
1969 || v
== TOK_sigsetjmp
1970 || v
== TOK___sigsetjmp
1973 vpush_global_sym(&func_old_type
, TOK___bound_setjmp
);
1976 func_bound_add_epilog
= 1;
1978 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
1979 if (v
== TOK_alloca
)
1980 func_bound_add_epilog
= 1;
1985 /* Add bounds for local symbols from S to E (via ->prev) */
1986 static void add_local_bounds(Sym
*s
, Sym
*e
)
1988 for (; s
!= e
; s
= s
->prev
) {
1989 if (!s
->v
|| (s
->r
& VT_VALMASK
) != VT_LOCAL
)
1991 /* Add arrays/structs/unions because we always take address */
1992 if ((s
->type
.t
& VT_ARRAY
)
1993 || (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
1994 || s
->a
.addrtaken
) {
1995 /* add local bound info */
1996 int align
, size
= type_size(&s
->type
, &align
);
1997 addr_t
*bounds_ptr
= section_ptr_add(lbounds_section
,
1998 2 * sizeof(addr_t
));
1999 bounds_ptr
[0] = s
->c
;
2000 bounds_ptr
[1] = size
;
2006 /* Wrapper around sym_pop, that potentially also registers local bounds. */
2007 static void pop_local_syms(Sym
**ptop
, Sym
*b
, int keep
, int ellipsis
)
2009 #ifdef CONFIG_TCC_BCHECK
2010 if (tcc_state
->do_bounds_check
&& !ellipsis
&& !keep
)
2011 add_local_bounds(*ptop
, b
);
2013 if (tcc_state
->do_debug
)
2014 tcc_add_debug_info (tcc_state
, !local_scope
, *ptop
, b
);
2015 sym_pop(ptop
, b
, keep
);
2018 static void incr_bf_adr(int o
)
2020 vtop
->type
= char_pointer_type
;
2024 vtop
->type
.t
= VT_BYTE
| VT_UNSIGNED
;
2028 /* single-byte load mode for packed or otherwise unaligned bitfields */
2029 static void load_packed_bf(CType
*type
, int bit_pos
, int bit_size
)
2032 save_reg_upstack(vtop
->r
, 1);
2033 vpush64(type
->t
& VT_BTYPE
, 0); // B X
2034 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2043 vpushi(bit_pos
), gen_op(TOK_SHR
), bit_pos
= 0; // X B Y
2045 vpushi((1 << n
) - 1), gen_op('&');
2048 vpushi(bits
), gen_op(TOK_SHL
);
2051 bits
+= n
, bit_size
-= n
, o
= 1;
2054 if (!(type
->t
& VT_UNSIGNED
)) {
2055 n
= ((type
->t
& VT_BTYPE
) == VT_LLONG
? 64 : 32) - bits
;
2056 vpushi(n
), gen_op(TOK_SHL
);
2057 vpushi(n
), gen_op(TOK_SAR
);
2061 /* single-byte store mode for packed or otherwise unaligned bitfields */
2062 static void store_packed_bf(int bit_pos
, int bit_size
)
2064 int bits
, n
, o
, m
, c
;
2066 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2068 save_reg_upstack(vtop
->r
, 1);
2069 bits
= 0, o
= bit_pos
>> 3, bit_pos
&= 7;
2071 incr_bf_adr(o
); // X B
2073 c
? vdup() : gv_dup(); // B V X
2076 vpushi(bits
), gen_op(TOK_SHR
);
2078 vpushi(bit_pos
), gen_op(TOK_SHL
);
2083 m
= ((1 << n
) - 1) << bit_pos
;
2084 vpushi(m
), gen_op('&'); // X B V1
2085 vpushv(vtop
-1); // X B V1 B
2086 vpushi(m
& 0x80 ? ~m
& 0x7f : ~m
);
2087 gen_op('&'); // X B V1 B1
2088 gen_op('|'); // X B V2
2090 vdup(), vtop
[-1] = vtop
[-2]; // X B B V2
2091 vstore(), vpop(); // X B
2092 bits
+= n
, bit_size
-= n
, bit_pos
= 0, o
= 1;
2097 static int adjust_bf(SValue
*sv
, int bit_pos
, int bit_size
)
2100 if (0 == sv
->type
.ref
)
2102 t
= sv
->type
.ref
->auxtype
;
2103 if (t
!= -1 && t
!= VT_STRUCT
) {
2104 sv
->type
.t
= (sv
->type
.t
& ~(VT_BTYPE
| VT_LONG
)) | t
;
2110 /* store vtop a register belonging to class 'rc'. lvalues are
2111 converted to values. Cannot be used if cannot be converted to
2112 register value (such as structures). */
2113 ST_FUNC
int gv(int rc
)
2115 int r
, r2
, r_ok
, r2_ok
, rc2
, bt
;
2116 int bit_pos
, bit_size
, size
, align
;
2118 /* NOTE: get_reg can modify vstack[] */
2119 if (vtop
->type
.t
& VT_BITFIELD
) {
2122 bit_pos
= BIT_POS(vtop
->type
.t
);
2123 bit_size
= BIT_SIZE(vtop
->type
.t
);
2124 /* remove bit field info to avoid loops */
2125 vtop
->type
.t
&= ~VT_STRUCT_MASK
;
2128 type
.t
= vtop
->type
.t
& VT_UNSIGNED
;
2129 if ((vtop
->type
.t
& VT_BTYPE
) == VT_BOOL
)
2130 type
.t
|= VT_UNSIGNED
;
2132 r
= adjust_bf(vtop
, bit_pos
, bit_size
);
2134 if ((vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
)
2139 if (r
== VT_STRUCT
) {
2140 load_packed_bf(&type
, bit_pos
, bit_size
);
2142 int bits
= (type
.t
& VT_BTYPE
) == VT_LLONG
? 64 : 32;
2143 /* cast to int to propagate signedness in following ops */
2145 /* generate shifts */
2146 vpushi(bits
- (bit_pos
+ bit_size
));
2148 vpushi(bits
- bit_size
);
2149 /* NOTE: transformed to SHR if unsigned */
2154 if (is_float(vtop
->type
.t
) &&
2155 (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2156 unsigned long offset
;
2157 /* CPUs usually cannot use float constants, so we store them
2158 generically in data segment */
2159 size
= type_size(&vtop
->type
, &align
);
2161 size
= 0, align
= 1;
2162 offset
= section_add(data_section
, size
, align
);
2163 vpush_ref(&vtop
->type
, data_section
, offset
, size
);
2165 init_putv(&vtop
->type
, data_section
, offset
);
2168 #ifdef CONFIG_TCC_BCHECK
2169 if (vtop
->r
& VT_MUSTBOUND
)
2173 bt
= vtop
->type
.t
& VT_BTYPE
;
2175 #ifdef TCC_TARGET_RISCV64
2177 if (bt
== VT_LDOUBLE
&& rc
== RC_FLOAT
)
2180 rc2
= RC2_TYPE(bt
, rc
);
2182 /* need to reload if:
2184 - lvalue (need to dereference pointer)
2185 - already a register, but not in the right class */
2186 r
= vtop
->r
& VT_VALMASK
;
2187 r_ok
= !(vtop
->r
& VT_LVAL
) && (r
< VT_CONST
) && (reg_classes
[r
] & rc
);
2188 r2_ok
= !rc2
|| ((vtop
->r2
< VT_CONST
) && (reg_classes
[vtop
->r2
] & rc2
));
2190 if (!r_ok
|| !r2_ok
) {
2194 int load_type
= (bt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
2195 int original_type
= vtop
->type
.t
;
2197 /* two register type load :
2198 expand to two words temporarily */
2199 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
) {
2201 unsigned long long ll
= vtop
->c
.i
;
2202 vtop
->c
.i
= ll
; /* first word */
2204 vtop
->r
= r
; /* save register value */
2205 vpushi(ll
>> 32); /* second word */
2206 } else if (vtop
->r
& VT_LVAL
) {
2207 /* We do not want to modify the long long pointer here.
2208 So we save any other instances down the stack */
2209 save_reg_upstack(vtop
->r
, 1);
2210 /* load from memory */
2211 vtop
->type
.t
= load_type
;
2214 vtop
[-1].r
= r
; /* save register value */
2215 /* increment pointer to get second word */
2216 vtop
->type
.t
= VT_PTRDIFF_T
;
2221 vtop
->type
.t
= load_type
;
2223 /* move registers */
2226 if (r2_ok
&& vtop
->r2
< VT_CONST
)
2229 vtop
[-1].r
= r
; /* save register value */
2230 vtop
->r
= vtop
[-1].r2
;
2232 /* Allocate second register. Here we rely on the fact that
2233 get_reg() tries first to free r2 of an SValue. */
2237 /* write second register */
2240 vtop
->type
.t
= original_type
;
2242 if (vtop
->r
== VT_CMP
)
2244 /* one register type load */
2249 #ifdef TCC_TARGET_C67
2250 /* uses register pairs for doubles */
2251 if (bt
== VT_DOUBLE
)
2258 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2259 ST_FUNC
void gv2(int rc1
, int rc2
)
2261 /* generate more generic register first. But VT_JMP or VT_CMP
2262 values must be generated first in all cases to avoid possible
2264 if (vtop
->r
!= VT_CMP
&& rc1
<= rc2
) {
2269 /* test if reload is needed for first register */
2270 if ((vtop
[-1].r
& VT_VALMASK
) >= VT_CONST
) {
2280 /* test if reload is needed for first register */
2281 if ((vtop
[0].r
& VT_VALMASK
) >= VT_CONST
) {
2288 /* expand 64bit on stack in two ints */
2289 ST_FUNC
void lexpand(void)
2292 u
= vtop
->type
.t
& (VT_DEFSIGN
| VT_UNSIGNED
);
2293 v
= vtop
->r
& (VT_VALMASK
| VT_LVAL
);
2294 if (v
== VT_CONST
) {
2297 } else if (v
== (VT_LVAL
|VT_CONST
) || v
== (VT_LVAL
|VT_LOCAL
)) {
2303 vtop
[0].r
= vtop
[-1].r2
;
2304 vtop
[0].r2
= vtop
[-1].r2
= VT_CONST
;
2306 vtop
[0].type
.t
= vtop
[-1].type
.t
= VT_INT
| u
;
2311 /* build a long long from two ints */
2312 static void lbuild(int t
)
2314 gv2(RC_INT
, RC_INT
);
2315 vtop
[-1].r2
= vtop
[0].r
;
2316 vtop
[-1].type
.t
= t
;
2321 /* convert stack entry to register and duplicate its value in another
2323 static void gv_dup(void)
2329 if ((t
& VT_BTYPE
) == VT_LLONG
) {
2330 if (t
& VT_BITFIELD
) {
2340 /* stack: H L L1 H1 */
2350 /* duplicate value */
2360 /* generate CPU independent (unsigned) long long operations */
2361 static void gen_opl(int op
)
2363 int t
, a
, b
, op1
, c
, i
;
2365 unsigned short reg_iret
= REG_IRET
;
2366 unsigned short reg_lret
= REG_IRE2
;
2372 func
= TOK___divdi3
;
2375 func
= TOK___udivdi3
;
2378 func
= TOK___moddi3
;
2381 func
= TOK___umoddi3
;
2388 /* call generic long long function */
2389 vpush_global_sym(&func_old_type
, func
);
2394 vtop
->r2
= reg_lret
;
2402 //pv("gen_opl A",0,2);
2408 /* stack: L1 H1 L2 H2 */
2413 vtop
[-2] = vtop
[-3];
2416 /* stack: H1 H2 L1 L2 */
2417 //pv("gen_opl B",0,4);
2423 /* stack: H1 H2 L1 L2 ML MH */
2426 /* stack: ML MH H1 H2 L1 L2 */
2430 /* stack: ML MH H1 L2 H2 L1 */
2435 /* stack: ML MH M1 M2 */
2438 } else if (op
== '+' || op
== '-') {
2439 /* XXX: add non carry method too (for MIPS or alpha) */
2445 /* stack: H1 H2 (L1 op L2) */
2448 gen_op(op1
+ 1); /* TOK_xxxC2 */
2451 /* stack: H1 H2 (L1 op L2) */
2454 /* stack: (L1 op L2) H1 H2 */
2456 /* stack: (L1 op L2) (H1 op H2) */
2464 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
2465 t
= vtop
[-1].type
.t
;
2469 /* stack: L H shift */
2471 /* constant: simpler */
2472 /* NOTE: all comments are for SHL. the other cases are
2473 done by swapping words */
2484 if (op
!= TOK_SAR
) {
2517 /* XXX: should provide a faster fallback on x86 ? */
2520 func
= TOK___ashrdi3
;
2523 func
= TOK___lshrdi3
;
2526 func
= TOK___ashldi3
;
2532 /* compare operations */
2538 /* stack: L1 H1 L2 H2 */
2540 vtop
[-1] = vtop
[-2];
2542 /* stack: L1 L2 H1 H2 */
2546 /* when values are equal, we need to compare low words. since
2547 the jump is inverted, we invert the test too. */
2550 else if (op1
== TOK_GT
)
2552 else if (op1
== TOK_ULT
)
2554 else if (op1
== TOK_UGT
)
2564 /* generate non equal test */
2566 vset_VT_CMP(TOK_NE
);
2570 /* compare low. Always unsigned */
2574 else if (op1
== TOK_LE
)
2576 else if (op1
== TOK_GT
)
2578 else if (op1
== TOK_GE
)
2581 #if 0//def TCC_TARGET_I386
2582 if (op
== TOK_NE
) { gsym(b
); break; }
2583 if (op
== TOK_EQ
) { gsym(a
); break; }
/* signed 64-bit division on values carried in uint64_t: divide the
   magnitudes, then reapply the sign; working unsigned avoids signed
   overflow UB (e.g. INT64_MIN / -1 needs no special case here) */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ma = (a >> 63) ? 0 - a : a;   /* |a| */
    uint64_t mb = (b >> 63) ? 0 - b : b;   /* |b| */
    uint64_t q = ma / mb;
    /* the quotient is negative exactly when the operand signs differ */
    return ((a ^ b) >> 63) ? 0 - q : q;
}
/* signed '<' on 64-bit values carried in uint64_t: flipping the sign
   bit maps signed order onto unsigned order, so a plain unsigned
   compare gives the signed result without UB */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
2603 /* handle integer constant optimizations and various machine
2605 static void gen_opic(int op
)
2607 SValue
*v1
= vtop
- 1;
2609 int t1
= v1
->type
.t
& VT_BTYPE
;
2610 int t2
= v2
->type
.t
& VT_BTYPE
;
2611 int c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2612 int c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2613 uint64_t l1
= c1
? v1
->c
.i
: 0;
2614 uint64_t l2
= c2
? v2
->c
.i
: 0;
2615 int shm
= (t1
== VT_LLONG
) ? 63 : 31;
2617 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2618 l1
= ((uint32_t)l1
|
2619 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2620 if (t2
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t2
!= VT_PTR
))
2621 l2
= ((uint32_t)l2
|
2622 (v2
->type
.t
& VT_UNSIGNED
? 0 : -(l2
& 0x80000000)));
2626 case '+': l1
+= l2
; break;
2627 case '-': l1
-= l2
; break;
2628 case '&': l1
&= l2
; break;
2629 case '^': l1
^= l2
; break;
2630 case '|': l1
|= l2
; break;
2631 case '*': l1
*= l2
; break;
2638 /* if division by zero, generate explicit division */
2640 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
2641 tcc_error("division by zero in constant");
2645 default: l1
= gen_opic_sdiv(l1
, l2
); break;
2646 case '%': l1
= l1
- l2
* gen_opic_sdiv(l1
, l2
); break;
2647 case TOK_UDIV
: l1
= l1
/ l2
; break;
2648 case TOK_UMOD
: l1
= l1
% l2
; break;
2651 case TOK_SHL
: l1
<<= (l2
& shm
); break;
2652 case TOK_SHR
: l1
>>= (l2
& shm
); break;
2654 l1
= (l1
>> 63) ? ~(~l1
>> (l2
& shm
)) : l1
>> (l2
& shm
);
2657 case TOK_ULT
: l1
= l1
< l2
; break;
2658 case TOK_UGE
: l1
= l1
>= l2
; break;
2659 case TOK_EQ
: l1
= l1
== l2
; break;
2660 case TOK_NE
: l1
= l1
!= l2
; break;
2661 case TOK_ULE
: l1
= l1
<= l2
; break;
2662 case TOK_UGT
: l1
= l1
> l2
; break;
2663 case TOK_LT
: l1
= gen_opic_lt(l1
, l2
); break;
2664 case TOK_GE
: l1
= !gen_opic_lt(l1
, l2
); break;
2665 case TOK_LE
: l1
= !gen_opic_lt(l2
, l1
); break;
2666 case TOK_GT
: l1
= gen_opic_lt(l2
, l1
); break;
2668 case TOK_LAND
: l1
= l1
&& l2
; break;
2669 case TOK_LOR
: l1
= l1
|| l2
; break;
2673 if (t1
!= VT_LLONG
&& (PTR_SIZE
!= 8 || t1
!= VT_PTR
))
2674 l1
= ((uint32_t)l1
|
2675 (v1
->type
.t
& VT_UNSIGNED
? 0 : -(l1
& 0x80000000)));
2679 /* if commutative ops, put c2 as constant */
2680 if (c1
&& (op
== '+' || op
== '&' || op
== '^' ||
2681 op
== '|' || op
== '*' || op
== TOK_EQ
|| op
== TOK_NE
)) {
2683 c2
= c1
; //c = c1, c1 = c2, c2 = c;
2684 l2
= l1
; //l = l1, l1 = l2, l2 = l;
2686 if (!const_wanted
&&
2688 (op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
)) ||
2689 (l1
== -1 && op
== TOK_SAR
))) {
2690 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2692 } else if (!const_wanted
&&
2693 c2
&& ((l2
== 0 && (op
== '&' || op
== '*')) ||
2695 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))) ||
2696 (l2
== 1 && (op
== '%' || op
== TOK_UMOD
)))) {
2697 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2702 } else if (c2
&& (((op
== '*' || op
== '/' || op
== TOK_UDIV
||
2705 ((op
== '+' || op
== '-' || op
== '|' || op
== '^' ||
2706 op
== TOK_SHL
|| op
== TOK_SHR
|| op
== TOK_SAR
) &&
2709 (l2
== -1 || (l2
== 0xFFFFFFFF && t2
!= VT_LLONG
))))) {
2710 /* filter out NOP operations like x*1, x-0, x&-1... */
2712 } else if (c2
&& (op
== '*' || op
== TOK_PDIV
|| op
== TOK_UDIV
)) {
2713 /* try to use shifts instead of muls or divs */
2714 if (l2
> 0 && (l2
& (l2
- 1)) == 0) {
2723 else if (op
== TOK_PDIV
)
2729 } else if (c2
&& (op
== '+' || op
== '-') &&
2730 (((vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == (VT_CONST
| VT_SYM
))
2731 || (vtop
[-1].r
& (VT_VALMASK
| VT_LVAL
)) == VT_LOCAL
)) {
2732 /* symbol + constant case */
2736 /* The backends can't always deal with addends to symbols
2737 larger than +-1<<31. Don't construct such. */
2744 /* call low level op generator */
2745 if (t1
== VT_LLONG
|| t2
== VT_LLONG
||
2746 (PTR_SIZE
== 8 && (t1
== VT_PTR
|| t2
== VT_PTR
)))
2754 /* generate a floating point operation with constant propagation */
2755 static void gen_opif(int op
)
2759 #if defined _MSC_VER && defined __x86_64__
2760 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2767 /* currently, we cannot do computations with forward symbols */
2768 c1
= (v1
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2769 c2
= (v2
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
2771 if (v1
->type
.t
== VT_FLOAT
) {
2774 } else if (v1
->type
.t
== VT_DOUBLE
) {
2782 /* NOTE: we only do constant propagation if finite number (not
2783 NaN or infinity) (ANSI spec) */
2784 if (!ieee_finite(f1
) || !ieee_finite(f2
))
2788 case '+': f1
+= f2
; break;
2789 case '-': f1
-= f2
; break;
2790 case '*': f1
*= f2
; break;
2793 /* If not in initializer we need to potentially generate
2794 FP exceptions at runtime, otherwise we want to fold. */
2800 /* XXX: also handles tests ? */
2804 /* XXX: overflow test ? */
2805 if (v1
->type
.t
== VT_FLOAT
) {
2807 } else if (v1
->type
.t
== VT_DOUBLE
) {
2819 /* print a type. If 'varstr' is not NULL, then the variable is also
2820 printed in the type */
2822 /* XXX: add array and function pointers */
2823 static void type_to_str(char *buf
, int buf_size
,
2824 CType
*type
, const char *varstr
)
2836 pstrcat(buf
, buf_size
, "extern ");
2838 pstrcat(buf
, buf_size
, "static ");
2840 pstrcat(buf
, buf_size
, "typedef ");
2842 pstrcat(buf
, buf_size
, "inline ");
2843 if (t
& VT_VOLATILE
)
2844 pstrcat(buf
, buf_size
, "volatile ");
2845 if (t
& VT_CONSTANT
)
2846 pstrcat(buf
, buf_size
, "const ");
2848 if (((t
& VT_DEFSIGN
) && bt
== VT_BYTE
)
2849 || ((t
& VT_UNSIGNED
)
2850 && (bt
== VT_SHORT
|| bt
== VT_INT
|| bt
== VT_LLONG
)
2853 pstrcat(buf
, buf_size
, (t
& VT_UNSIGNED
) ? "unsigned " : "signed ");
2855 buf_size
-= strlen(buf
);
2891 tstr
= "long double";
2893 pstrcat(buf
, buf_size
, tstr
);
2900 pstrcat(buf
, buf_size
, tstr
);
2901 v
= type
->ref
->v
& ~SYM_STRUCT
;
2902 if (v
>= SYM_FIRST_ANOM
)
2903 pstrcat(buf
, buf_size
, "<anonymous>");
2905 pstrcat(buf
, buf_size
, get_tok_str(v
, NULL
));
2910 if (varstr
&& '*' == *varstr
) {
2911 pstrcat(buf1
, sizeof(buf1
), "(");
2912 pstrcat(buf1
, sizeof(buf1
), varstr
);
2913 pstrcat(buf1
, sizeof(buf1
), ")");
2915 pstrcat(buf1
, buf_size
, "(");
2917 while (sa
!= NULL
) {
2919 type_to_str(buf2
, sizeof(buf2
), &sa
->type
, NULL
);
2920 pstrcat(buf1
, sizeof(buf1
), buf2
);
2923 pstrcat(buf1
, sizeof(buf1
), ", ");
2925 if (s
->f
.func_type
== FUNC_ELLIPSIS
)
2926 pstrcat(buf1
, sizeof(buf1
), ", ...");
2927 pstrcat(buf1
, sizeof(buf1
), ")");
2928 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2933 if (varstr
&& '*' == *varstr
)
2934 snprintf(buf1
, sizeof(buf1
), "(%s)[%d]", varstr
, s
->c
);
2936 snprintf(buf1
, sizeof(buf1
), "%s[%d]", varstr
? varstr
: "", s
->c
);
2937 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2940 pstrcpy(buf1
, sizeof(buf1
), "*");
2941 if (t
& VT_CONSTANT
)
2942 pstrcat(buf1
, buf_size
, "const ");
2943 if (t
& VT_VOLATILE
)
2944 pstrcat(buf1
, buf_size
, "volatile ");
2946 pstrcat(buf1
, sizeof(buf1
), varstr
);
2947 type_to_str(buf
, buf_size
, &s
->type
, buf1
);
2951 pstrcat(buf
, buf_size
, " ");
2952 pstrcat(buf
, buf_size
, varstr
);
2957 static void type_incompatibility_error(CType
* st
, CType
* dt
, const char* fmt
)
2959 char buf1
[256], buf2
[256];
2960 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2961 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2962 tcc_error(fmt
, buf1
, buf2
);
2965 static void type_incompatibility_warning(CType
* st
, CType
* dt
, const char* fmt
)
2967 char buf1
[256], buf2
[256];
2968 type_to_str(buf1
, sizeof(buf1
), st
, NULL
);
2969 type_to_str(buf2
, sizeof(buf2
), dt
, NULL
);
2970 tcc_warning(fmt
, buf1
, buf2
);
2973 static int pointed_size(CType
*type
)
2976 return type_size(pointed_type(type
), &align
);
2979 static void vla_runtime_pointed_size(CType
*type
)
2982 vla_runtime_type_size(pointed_type(type
), &align
);
2985 static inline int is_null_pointer(SValue
*p
)
2987 if ((p
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
2989 return ((p
->type
.t
& VT_BTYPE
) == VT_INT
&& (uint32_t)p
->c
.i
== 0) ||
2990 ((p
->type
.t
& VT_BTYPE
) == VT_LLONG
&& p
->c
.i
== 0) ||
2991 ((p
->type
.t
& VT_BTYPE
) == VT_PTR
&&
2992 (PTR_SIZE
== 4 ? (uint32_t)p
->c
.i
== 0 : p
->c
.i
== 0) &&
2993 ((pointed_type(&p
->type
)->t
& VT_BTYPE
) == VT_VOID
) &&
2994 0 == (pointed_type(&p
->type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
2998 /* compare function types. OLD functions match any new functions */
2999 static int is_compatible_func(CType
*type1
, CType
*type2
)
3005 if (s1
->f
.func_call
!= s2
->f
.func_call
)
3007 if (s1
->f
.func_type
!= s2
->f
.func_type
3008 && s1
->f
.func_type
!= FUNC_OLD
3009 && s2
->f
.func_type
!= FUNC_OLD
)
3011 /* we should check the function return type for FUNC_OLD too
3012 but that causes problems with the internally used support
3013 functions such as TOK_memmove */
3014 if (s1
->f
.func_type
== FUNC_OLD
&& !s1
->next
)
3016 if (s2
->f
.func_type
== FUNC_OLD
&& !s2
->next
)
3019 if (!is_compatible_unqualified_types(&s1
->type
, &s2
->type
))
3030 /* return true if type1 and type2 are the same. If unqualified is
3031 true, qualifiers on the types are ignored.
3033 static int compare_types(CType
*type1
, CType
*type2
, int unqualified
)
3037 t1
= type1
->t
& VT_TYPE
;
3038 t2
= type2
->t
& VT_TYPE
;
3040 /* strip qualifiers before comparing */
3041 t1
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3042 t2
&= ~(VT_CONSTANT
| VT_VOLATILE
);
3045 /* Default Vs explicit signedness only matters for char */
3046 if ((t1
& VT_BTYPE
) != VT_BYTE
) {
3050 /* XXX: bitfields ? */
3055 && !(type1
->ref
->c
< 0
3056 || type2
->ref
->c
< 0
3057 || type1
->ref
->c
== type2
->ref
->c
))
3060 /* test more complicated cases */
3061 bt1
= t1
& VT_BTYPE
;
3062 if (bt1
== VT_PTR
) {
3063 type1
= pointed_type(type1
);
3064 type2
= pointed_type(type2
);
3065 return is_compatible_types(type1
, type2
);
3066 } else if (bt1
== VT_STRUCT
) {
3067 return (type1
->ref
== type2
->ref
);
3068 } else if (bt1
== VT_FUNC
) {
3069 return is_compatible_func(type1
, type2
);
3070 } else if (IS_ENUM(type1
->t
) && IS_ENUM(type2
->t
)) {
3071 /* If both are enums then they must be the same, if only one is then
3072 t1 and t2 must be equal, which was checked above already. */
3073 return type1
->ref
== type2
->ref
;
3079 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3080 type is stored in DEST if non-null (except for pointer plus/minus) . */
3081 static int combine_types(CType
*dest
, SValue
*op1
, SValue
*op2
, int op
)
3083 CType
*type1
= &op1
->type
, *type2
= &op2
->type
, type
;
3084 int t1
= type1
->t
, t2
= type2
->t
, bt1
= t1
& VT_BTYPE
, bt2
= t2
& VT_BTYPE
;
3090 if (bt1
== VT_VOID
|| bt2
== VT_VOID
) {
3091 ret
= op
== '?' ? 1 : 0;
3092 /* NOTE: as an extension, we accept void on only one side */
3094 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3095 if (op
== '+') ; /* Handled in caller */
3096 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3097 /* If one is a null ptr constant the result type is the other. */
3098 else if (is_null_pointer (op2
)) type
= *type1
;
3099 else if (is_null_pointer (op1
)) type
= *type2
;
3100 else if (bt1
!= bt2
) {
3101 /* accept comparison or cond-expr between pointer and integer
3103 if ((op
== '?' || TOK_ISCOND(op
))
3104 && (is_integer_btype(bt1
) || is_integer_btype(bt2
)))
3105 tcc_warning("pointer/integer mismatch in %s",
3106 op
== '?' ? "conditional expression" : "comparison");
3107 else if (op
!= '-' || !is_integer_btype(bt2
))
3109 type
= *(bt1
== VT_PTR
? type1
: type2
);
3111 CType
*pt1
= pointed_type(type1
);
3112 CType
*pt2
= pointed_type(type2
);
3113 int pbt1
= pt1
->t
& VT_BTYPE
;
3114 int pbt2
= pt2
->t
& VT_BTYPE
;
3115 int newquals
, copied
= 0;
3116 if (pbt1
!= VT_VOID
&& pbt2
!= VT_VOID
3117 && !compare_types(pt1
, pt2
, 1/*unqualif*/)) {
3118 if (op
!= '?' && !TOK_ISCOND(op
))
3121 type_incompatibility_warning(type1
, type2
,
3123 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3124 : "pointer type mismatch in comparison('%s' and '%s')");
3127 /* pointers to void get preferred, otherwise the
3128 pointed to types minus qualifs should be compatible */
3129 type
= *((pbt1
== VT_VOID
) ? type1
: type2
);
3130 /* combine qualifs */
3131 newquals
= ((pt1
->t
| pt2
->t
) & (VT_CONSTANT
| VT_VOLATILE
));
3132 if ((~pointed_type(&type
)->t
& (VT_CONSTANT
| VT_VOLATILE
))
3135 /* copy the pointer target symbol */
3136 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3139 pointed_type(&type
)->t
|= newquals
;
3141 /* pointers to incomplete arrays get converted to
3142 pointers to completed ones if possible */
3143 if (pt1
->t
& VT_ARRAY
3144 && pt2
->t
& VT_ARRAY
3145 && pointed_type(&type
)->ref
->c
< 0
3146 && (pt1
->ref
->c
> 0 || pt2
->ref
->c
> 0))
3149 type
.ref
= sym_push(SYM_FIELD
, &type
.ref
->type
,
3151 pointed_type(&type
)->ref
=
3152 sym_push(SYM_FIELD
, &pointed_type(&type
)->ref
->type
,
3153 0, pointed_type(&type
)->ref
->c
);
3154 pointed_type(&type
)->ref
->c
=
3155 0 < pt1
->ref
->c
? pt1
->ref
->c
: pt2
->ref
->c
;
3161 } else if (bt1
== VT_STRUCT
|| bt2
== VT_STRUCT
) {
3162 if (op
!= '?' || !compare_types(type1
, type2
, 1))
3165 } else if (is_float(bt1
) || is_float(bt2
)) {
3166 if (bt1
== VT_LDOUBLE
|| bt2
== VT_LDOUBLE
) {
3167 type
.t
= VT_LDOUBLE
;
3168 } else if (bt1
== VT_DOUBLE
|| bt2
== VT_DOUBLE
) {
3173 } else if (bt1
== VT_LLONG
|| bt2
== VT_LLONG
) {
3174 /* cast to biggest op */
3175 type
.t
= VT_LLONG
| VT_LONG
;
3176 if (bt1
== VT_LLONG
)
3178 if (bt2
== VT_LLONG
)
3180 /* convert to unsigned if it does not fit in a long long */
3181 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
) ||
3182 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_LLONG
| VT_UNSIGNED
))
3183 type
.t
|= VT_UNSIGNED
;
3185 /* integer operations */
3186 type
.t
= VT_INT
| (VT_LONG
& (t1
| t2
));
3187 /* convert to unsigned if it does not fit in an integer */
3188 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
) ||
3189 (t2
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (VT_INT
| VT_UNSIGNED
))
3190 type
.t
|= VT_UNSIGNED
;
3197 /* generic gen_op: handles types problems */
3198 ST_FUNC
void gen_op(int op
)
3200 int u
, t1
, t2
, bt1
, bt2
, t
;
3201 CType type1
, combtype
;
3204 t1
= vtop
[-1].type
.t
;
3205 t2
= vtop
[0].type
.t
;
3206 bt1
= t1
& VT_BTYPE
;
3207 bt2
= t2
& VT_BTYPE
;
3209 if (bt1
== VT_FUNC
|| bt2
== VT_FUNC
) {
3210 if (bt2
== VT_FUNC
) {
3211 mk_pointer(&vtop
->type
);
3214 if (bt1
== VT_FUNC
) {
3216 mk_pointer(&vtop
->type
);
3221 } else if (!combine_types(&combtype
, vtop
- 1, vtop
, op
)) {
3222 tcc_error_noabort("invalid operand types for binary operation");
3224 } else if (bt1
== VT_PTR
|| bt2
== VT_PTR
) {
3225 /* at least one operand is a pointer */
3226 /* relational op: must be both pointers */
3229 /* if both pointers, then it must be the '-' op */
3230 if (bt1
== VT_PTR
&& bt2
== VT_PTR
) {
3232 tcc_error("cannot use pointers here");
3233 if (vtop
[-1].type
.t
& VT_VLA
) {
3234 vla_runtime_pointed_size(&vtop
[-1].type
);
3236 vpushi(pointed_size(&vtop
[-1].type
));
3240 vtop
->type
.t
= VT_PTRDIFF_T
;
3244 /* exactly one pointer : must be '+' or '-'. */
3245 if (op
!= '-' && op
!= '+')
3246 tcc_error("cannot use pointers here");
3247 /* Put pointer as first operand */
3248 if (bt2
== VT_PTR
) {
3250 t
= t1
, t1
= t2
, t2
= t
;
3253 if ((vtop
[0].type
.t
& VT_BTYPE
) == VT_LLONG
)
3254 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3257 type1
= vtop
[-1].type
;
3258 if (vtop
[-1].type
.t
& VT_VLA
)
3259 vla_runtime_pointed_size(&vtop
[-1].type
);
3261 u
= pointed_size(&vtop
[-1].type
);
3263 tcc_error("unknown array element size");
3267 /* XXX: cast to int ? (long long case) */
3272 #ifdef CONFIG_TCC_BCHECK
3273 if (tcc_state
->do_bounds_check
&& !const_wanted
) {
3274 /* if bounded pointers, we generate a special code to
3281 gen_bounded_ptr_add();
3287 type1
.t
&= ~VT_ARRAY
;
3288 /* put again type if gen_opic() swaped operands */
3292 /* floats can only be used for a few operations */
3293 if (is_float(combtype
.t
)
3294 && op
!= '+' && op
!= '-' && op
!= '*' && op
!= '/'
3296 tcc_error("invalid operands for binary operation");
3297 else if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
) {
3298 t
= bt1
== VT_LLONG
? VT_LLONG
: VT_INT
;
3299 if ((t1
& (VT_BTYPE
| VT_UNSIGNED
| VT_BITFIELD
)) == (t
| VT_UNSIGNED
))
3301 t
|= (VT_LONG
& t1
);
3305 t
= t2
= combtype
.t
;
3306 /* XXX: currently, some unsigned operations are explicit, so
3307 we modify them here */
3308 if (t
& VT_UNSIGNED
) {
3315 else if (op
== TOK_LT
)
3317 else if (op
== TOK_GT
)
3319 else if (op
== TOK_LE
)
3321 else if (op
== TOK_GE
)
3327 /* special case for shifts and long long: we keep the shift as
3329 if (op
== TOK_SHR
|| op
== TOK_SAR
|| op
== TOK_SHL
)
3336 if (TOK_ISCOND(op
)) {
3337 /* relational op: the result is an int */
3338 vtop
->type
.t
= VT_INT
;
3343 // Make sure that we have converted to an rvalue:
3344 if (vtop
->r
& VT_LVAL
)
3345 gv(is_float(vtop
->type
.t
& VT_BTYPE
) ? RC_FLOAT
: RC_INT
);
3348 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3349 #define gen_cvt_itof1 gen_cvt_itof
3351 /* generic itof for unsigned long long case */
3352 static void gen_cvt_itof1(int t
)
3354 if ((vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
)) ==
3355 (VT_LLONG
| VT_UNSIGNED
)) {
3358 vpush_global_sym(&func_old_type
, TOK___floatundisf
);
3359 #if LDOUBLE_SIZE != 8
3360 else if (t
== VT_LDOUBLE
)
3361 vpush_global_sym(&func_old_type
, TOK___floatundixf
);
3364 vpush_global_sym(&func_old_type
, TOK___floatundidf
);
3375 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3376 #define gen_cvt_ftoi1 gen_cvt_ftoi
3378 /* generic ftoi for unsigned long long case */
3379 static void gen_cvt_ftoi1(int t
)
3382 if (t
== (VT_LLONG
| VT_UNSIGNED
)) {
3383 /* not handled natively */
3384 st
= vtop
->type
.t
& VT_BTYPE
;
3386 vpush_global_sym(&func_old_type
, TOK___fixunssfdi
);
3387 #if LDOUBLE_SIZE != 8
3388 else if (st
== VT_LDOUBLE
)
3389 vpush_global_sym(&func_old_type
, TOK___fixunsxfdi
);
3392 vpush_global_sym(&func_old_type
, TOK___fixunsdfdi
);
3403 /* special delayed cast for char/short */
3404 static void force_charshort_cast(void)
3406 int sbt
= BFGET(vtop
->r
, VT_MUSTCAST
) == 2 ? VT_LLONG
: VT_INT
;
3407 int dbt
= vtop
->type
.t
;
3408 vtop
->r
&= ~VT_MUSTCAST
;
3410 gen_cast_s(dbt
== VT_BOOL
? VT_BYTE
|VT_UNSIGNED
: dbt
);
3414 static void gen_cast_s(int t
)
3422 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3423 static void gen_cast(CType
*type
)
3425 int sbt
, dbt
, sf
, df
, c
;
3426 int dbt_bt
, sbt_bt
, ds
, ss
, bits
, trunc
;
3428 /* special delayed cast for char/short */
3429 if (vtop
->r
& VT_MUSTCAST
)
3430 force_charshort_cast();
3432 /* bitfields first get cast to ints */
3433 if (vtop
->type
.t
& VT_BITFIELD
)
3436 dbt
= type
->t
& (VT_BTYPE
| VT_UNSIGNED
);
3437 sbt
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
3445 dbt_bt
= dbt
& VT_BTYPE
;
3446 sbt_bt
= sbt
& VT_BTYPE
;
3448 c
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
;
3449 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3450 c
&= (dbt
!= VT_LDOUBLE
) | !!nocode_wanted
;
3453 /* constant case: we can do it now */
3454 /* XXX: in ISOC, cannot do it if error in convert */
3455 if (sbt
== VT_FLOAT
)
3456 vtop
->c
.ld
= vtop
->c
.f
;
3457 else if (sbt
== VT_DOUBLE
)
3458 vtop
->c
.ld
= vtop
->c
.d
;
3461 if (sbt_bt
== VT_LLONG
) {
3462 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 63))
3463 vtop
->c
.ld
= vtop
->c
.i
;
3465 vtop
->c
.ld
= -(long double)-vtop
->c
.i
;
3467 if ((sbt
& VT_UNSIGNED
) || !(vtop
->c
.i
>> 31))
3468 vtop
->c
.ld
= (uint32_t)vtop
->c
.i
;
3470 vtop
->c
.ld
= -(long double)-(uint32_t)vtop
->c
.i
;
3473 if (dbt
== VT_FLOAT
)
3474 vtop
->c
.f
= (float)vtop
->c
.ld
;
3475 else if (dbt
== VT_DOUBLE
)
3476 vtop
->c
.d
= (double)vtop
->c
.ld
;
3477 } else if (sf
&& dbt
== VT_BOOL
) {
3478 vtop
->c
.i
= (vtop
->c
.ld
!= 0);
3481 vtop
->c
.i
= vtop
->c
.ld
;
3482 else if (sbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && sbt
== VT_PTR
))
3484 else if (sbt
& VT_UNSIGNED
)
3485 vtop
->c
.i
= (uint32_t)vtop
->c
.i
;
3487 vtop
->c
.i
= ((uint32_t)vtop
->c
.i
| -(vtop
->c
.i
& 0x80000000));
3489 if (dbt_bt
== VT_LLONG
|| (PTR_SIZE
== 8 && dbt
== VT_PTR
))
3491 else if (dbt
== VT_BOOL
)
3492 vtop
->c
.i
= (vtop
->c
.i
!= 0);
3494 uint32_t m
= dbt_bt
== VT_BYTE
? 0xff :
3495 dbt_bt
== VT_SHORT
? 0xffff :
3498 if (!(dbt
& VT_UNSIGNED
))
3499 vtop
->c
.i
|= -(vtop
->c
.i
& ((m
>> 1) + 1));
3504 } else if (dbt
== VT_BOOL
3505 && (vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
))
3506 == (VT_CONST
| VT_SYM
)) {
3507 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3513 /* cannot generate code for global or static initializers */
3514 if (STATIC_DATA_WANTED
)
3517 /* non constant case: generate code */
3518 if (dbt
== VT_BOOL
) {
3519 gen_test_zero(TOK_NE
);
3525 /* convert from fp to fp */
3528 /* convert int to fp */
3531 /* convert fp to int */
3533 if (dbt_bt
!= VT_LLONG
&& dbt_bt
!= VT_INT
)
3536 goto again
; /* may need char/short cast */
3541 ds
= btype_size(dbt_bt
);
3542 ss
= btype_size(sbt_bt
);
3543 if (ds
== 0 || ss
== 0) {
3544 if (dbt_bt
== VT_VOID
)
3546 cast_error(&vtop
->type
, type
);
3548 if (IS_ENUM(type
->t
) && type
->ref
->c
< 0)
3549 tcc_error("cast to incomplete type");
3551 /* same size and no sign conversion needed */
3552 if (ds
== ss
&& ds
>= 4)
3554 if (dbt_bt
== VT_PTR
|| sbt_bt
== VT_PTR
) {
3555 tcc_warning("cast between pointer and integer of different size");
3556 if (sbt_bt
== VT_PTR
) {
3557 /* put integer type to allow logical operations below */
3558 vtop
->type
.t
= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
);
3562 /* processor allows { int a = 0, b = *(char*)&a; }
3563 That means that if we cast to less width, we can just
3564 change the type and read it still later. */
3565 #define ALLOW_SUBTYPE_ACCESS 1
3567 if (ALLOW_SUBTYPE_ACCESS
&& (vtop
->r
& VT_LVAL
)) {
3568 /* value still in memory */
3572 if (ds
<= 4 && !(dbt
== (VT_SHORT
| VT_UNSIGNED
) && sbt
== VT_BYTE
)) {
3574 goto done
; /* no 64bit envolved */
3582 /* generate high word */
3583 if (sbt
& VT_UNSIGNED
) {
3592 } else if (ss
== 8) {
3593 /* from long long: just take low order word */
3601 /* need to convert from 32bit to 64bit */
3602 if (sbt
& VT_UNSIGNED
) {
3603 #if defined(TCC_TARGET_RISCV64)
3604 /* RISC-V keeps 32bit vals in registers sign-extended.
3605 So here we need a zero-extension. */
3614 ss
= ds
, ds
= 4, dbt
= sbt
;
3615 } else if (ss
== 8) {
3616 /* RISC-V keeps 32bit vals in registers sign-extended.
3617 So here we need a sign-extension for signed types and
3618 zero-extension. for unsigned types. */
3619 #if !defined(TCC_TARGET_RISCV64)
3620 trunc
= 32; /* zero upper 32 bits for non RISC-V targets */
3629 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3635 bits
= (ss
- ds
) * 8;
3636 /* for unsigned, gen_op will convert SAR to SHR */
3637 vtop
->type
.t
= (ss
== 8 ? VT_LLONG
: VT_INT
) | (dbt
& VT_UNSIGNED
);
3640 vpushi(bits
- trunc
);
3647 vtop
->type
.t
&= ~ ( VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
3650 /* return type size as known at compile time. Put alignment at 'a' */
3651 ST_FUNC
int type_size(CType
*type
, int *a
)
3656 bt
= type
->t
& VT_BTYPE
;
3657 if (bt
== VT_STRUCT
) {
3662 } else if (bt
== VT_PTR
) {
3663 if (type
->t
& VT_ARRAY
) {
3667 ts
= type_size(&s
->type
, a
);
3669 if (ts
< 0 && s
->c
< 0)
3677 } else if (IS_ENUM(type
->t
) && type
->ref
->c
< 0) {
3678 return -1; /* incomplete enum */
3679 } else if (bt
== VT_LDOUBLE
) {
3681 return LDOUBLE_SIZE
;
3682 } else if (bt
== VT_DOUBLE
|| bt
== VT_LLONG
) {
3683 #ifdef TCC_TARGET_I386
3684 #ifdef TCC_TARGET_PE
3689 #elif defined(TCC_TARGET_ARM)
3699 } else if (bt
== VT_INT
|| bt
== VT_FLOAT
) {
3702 } else if (bt
== VT_SHORT
) {
3705 } else if (bt
== VT_QLONG
|| bt
== VT_QFLOAT
) {
3709 /* char, void, function, _Bool */
3715 /* push type size as known at runtime time on top of value stack. Put
3717 ST_FUNC
void vla_runtime_type_size(CType
*type
, int *a
)
3719 if (type
->t
& VT_VLA
) {
3720 type_size(&type
->ref
->type
, a
);
3721 vset(&int_type
, VT_LOCAL
|VT_LVAL
, type
->ref
->c
);
3723 vpushi(type_size(type
, a
));
3727 /* return the pointed type of t */
3728 static inline CType
*pointed_type(CType
*type
)
3730 return &type
->ref
->type
;
3733 /* modify type so that its it is a pointer to type. */
3734 ST_FUNC
void mk_pointer(CType
*type
)
3737 s
= sym_push(SYM_FIELD
, type
, 0, -1);
3738 type
->t
= VT_PTR
| (type
->t
& VT_STORAGE
);
3742 /* return true if type1 and type2 are exactly the same (including
3745 static int is_compatible_types(CType
*type1
, CType
*type2
)
3747 return compare_types(type1
,type2
,0);
3750 /* return true if type1 and type2 are the same (ignoring qualifiers).
3752 static int is_compatible_unqualified_types(CType
*type1
, CType
*type2
)
3754 return compare_types(type1
,type2
,1);
3757 static void cast_error(CType
*st
, CType
*dt
)
3759 type_incompatibility_error(st
, dt
, "cannot convert '%s' to '%s'");
3762 /* verify type compatibility to store vtop in 'dt' type */
3763 static void verify_assign_cast(CType
*dt
)
3765 CType
*st
, *type1
, *type2
;
3766 int dbt
, sbt
, qualwarn
, lvl
;
3768 st
= &vtop
->type
; /* source type */
3769 dbt
= dt
->t
& VT_BTYPE
;
3770 sbt
= st
->t
& VT_BTYPE
;
3771 if (dt
->t
& VT_CONSTANT
)
3772 tcc_warning("assignment of read-only location");
3776 tcc_error("assignment to void expression");
3779 /* special cases for pointers */
3780 /* '0' can also be a pointer */
3781 if (is_null_pointer(vtop
))
3783 /* accept implicit pointer to integer cast with warning */
3784 if (is_integer_btype(sbt
)) {
3785 tcc_warning("assignment makes pointer from integer without a cast");
3788 type1
= pointed_type(dt
);
3790 type2
= pointed_type(st
);
3791 else if (sbt
== VT_FUNC
)
3792 type2
= st
; /* a function is implicitly a function pointer */
3795 if (is_compatible_types(type1
, type2
))
3797 for (qualwarn
= lvl
= 0;; ++lvl
) {
3798 if (((type2
->t
& VT_CONSTANT
) && !(type1
->t
& VT_CONSTANT
)) ||
3799 ((type2
->t
& VT_VOLATILE
) && !(type1
->t
& VT_VOLATILE
)))
3801 dbt
= type1
->t
& (VT_BTYPE
|VT_LONG
);
3802 sbt
= type2
->t
& (VT_BTYPE
|VT_LONG
);
3803 if (dbt
!= VT_PTR
|| sbt
!= VT_PTR
)
3805 type1
= pointed_type(type1
);
3806 type2
= pointed_type(type2
);
3808 if (!is_compatible_unqualified_types(type1
, type2
)) {
3809 if ((dbt
== VT_VOID
|| sbt
== VT_VOID
) && lvl
== 0) {
3810 /* void * can match anything */
3811 } else if (dbt
== sbt
3812 && is_integer_btype(sbt
& VT_BTYPE
)
3813 && IS_ENUM(type1
->t
) + IS_ENUM(type2
->t
)
3814 + !!((type1
->t
^ type2
->t
) & VT_UNSIGNED
) < 2) {
3815 /* Like GCC don't warn by default for merely changes
3816 in pointer target signedness. Do warn for different
3817 base types, though, in particular for unsigned enums
3818 and signed int targets. */
3820 tcc_warning("assignment from incompatible pointer type");
3825 tcc_warning("assignment discards qualifiers from pointer target type");
3831 if (sbt
== VT_PTR
|| sbt
== VT_FUNC
) {
3832 tcc_warning("assignment makes integer from pointer without a cast");
3833 } else if (sbt
== VT_STRUCT
) {
3834 goto case_VT_STRUCT
;
3836 /* XXX: more tests */
3840 if (!is_compatible_unqualified_types(dt
, st
)) {
3848 static void gen_assign_cast(CType
*dt
)
3850 verify_assign_cast(dt
);
3854 /* store vtop in lvalue pushed on stack */
3855 ST_FUNC
void vstore(void)
3857 int sbt
, dbt
, ft
, r
, size
, align
, bit_size
, bit_pos
, delayed_cast
;
3859 ft
= vtop
[-1].type
.t
;
3860 sbt
= vtop
->type
.t
& VT_BTYPE
;
3861 dbt
= ft
& VT_BTYPE
;
3863 verify_assign_cast(&vtop
[-1].type
);
3865 if (sbt
== VT_STRUCT
) {
3866 /* if structure, only generate pointer */
3867 /* structure assignment : generate memcpy */
3868 /* XXX: optimize if small size */
3869 size
= type_size(&vtop
->type
, &align
);
3873 #ifdef CONFIG_TCC_BCHECK
3874 if (vtop
->r
& VT_MUSTBOUND
)
3875 gbound(); /* check would be wrong after gaddrof() */
3877 vtop
->type
.t
= VT_PTR
;
3880 /* address of memcpy() */
3883 vpush_global_sym(&func_old_type
, TOK_memmove8
);
3884 else if(!(align
& 3))
3885 vpush_global_sym(&func_old_type
, TOK_memmove4
);
3888 /* Use memmove, rather than memcpy, as dest and src may be same: */
3889 vpush_global_sym(&func_old_type
, TOK_memmove
);
3894 #ifdef CONFIG_TCC_BCHECK
3895 if (vtop
->r
& VT_MUSTBOUND
)
3898 vtop
->type
.t
= VT_PTR
;
3903 /* leave source on stack */
3905 } else if (ft
& VT_BITFIELD
) {
3906 /* bitfield store handling */
3908 /* save lvalue as expression result (example: s.b = s.a = n;) */
3909 vdup(), vtop
[-1] = vtop
[-2];
3911 bit_pos
= BIT_POS(ft
);
3912 bit_size
= BIT_SIZE(ft
);
3913 /* remove bit field info to avoid loops */
3914 vtop
[-1].type
.t
= ft
& ~VT_STRUCT_MASK
;
3916 if (dbt
== VT_BOOL
) {
3917 gen_cast(&vtop
[-1].type
);
3918 vtop
[-1].type
.t
= (vtop
[-1].type
.t
& ~VT_BTYPE
) | (VT_BYTE
| VT_UNSIGNED
);
3920 r
= adjust_bf(vtop
- 1, bit_pos
, bit_size
);
3921 if (dbt
!= VT_BOOL
) {
3922 gen_cast(&vtop
[-1].type
);
3923 dbt
= vtop
[-1].type
.t
& VT_BTYPE
;
3925 if (r
== VT_STRUCT
) {
3926 store_packed_bf(bit_pos
, bit_size
);
3928 unsigned long long mask
= (1ULL << bit_size
) - 1;
3929 if (dbt
!= VT_BOOL
) {
3931 if (dbt
== VT_LLONG
)
3934 vpushi((unsigned)mask
);
3941 /* duplicate destination */
3944 /* load destination, mask and or with source */
3945 if (dbt
== VT_LLONG
)
3946 vpushll(~(mask
<< bit_pos
));
3948 vpushi(~((unsigned)mask
<< bit_pos
));
3953 /* ... and discard */
3956 } else if (dbt
== VT_VOID
) {
3959 /* optimize char/short casts */
3961 if ((dbt
== VT_BYTE
|| dbt
== VT_SHORT
)
3962 && is_integer_btype(sbt
)
3964 if ((vtop
->r
& VT_MUSTCAST
)
3965 && btype_size(dbt
) > btype_size(sbt
)
3967 force_charshort_cast();
3970 gen_cast(&vtop
[-1].type
);
3973 #ifdef CONFIG_TCC_BCHECK
3974 /* bound check case */
3975 if (vtop
[-1].r
& VT_MUSTBOUND
) {
3981 gv(RC_TYPE(dbt
)); /* generate value */
3984 vtop
->r
|= BFVAL(VT_MUSTCAST
, (sbt
== VT_LLONG
) + 1);
3985 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3986 vtop
->type
.t
= ft
& VT_TYPE
;
3989 /* if lvalue was saved on stack, must read it */
3990 if ((vtop
[-1].r
& VT_VALMASK
) == VT_LLOCAL
) {
3992 r
= get_reg(RC_INT
);
3993 sv
.type
.t
= VT_PTRDIFF_T
;
3994 sv
.r
= VT_LOCAL
| VT_LVAL
;
3995 sv
.c
.i
= vtop
[-1].c
.i
;
3997 vtop
[-1].r
= r
| VT_LVAL
;
4000 r
= vtop
->r
& VT_VALMASK
;
4001 /* two word case handling :
4002 store second register at word + 4 (or +8 for x86-64) */
4003 if (USING_TWO_WORDS(dbt
)) {
4004 int load_type
= (dbt
== VT_QFLOAT
) ? VT_DOUBLE
: VT_PTRDIFF_T
;
4005 vtop
[-1].type
.t
= load_type
;
4008 /* convert to int to increment easily */
4009 vtop
->type
.t
= VT_PTRDIFF_T
;
4015 vtop
[-1].type
.t
= load_type
;
4016 /* XXX: it works because r2 is spilled last ! */
4017 store(vtop
->r2
, vtop
- 1);
4023 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
4027 /* post defines POST/PRE add. c is the token ++ or -- */
4028 ST_FUNC
void inc(int post
, int c
)
4031 vdup(); /* save lvalue */
4033 gv_dup(); /* duplicate value */
4038 vpushi(c
- TOK_MID
);
4040 vstore(); /* store value */
4042 vpop(); /* if post op, return saved value */
4045 ST_FUNC
void parse_mult_str (CString
*astr
, const char *msg
)
4047 /* read the string */
4051 while (tok
== TOK_STR
) {
4052 /* XXX: add \0 handling too ? */
4053 cstr_cat(astr
, tokc
.str
.data
, -1);
4056 cstr_ccat(astr
, '\0');
4059 /* If I is >= 1 and a power of two, returns log2(i)+1.
4060 If I is 0 returns 0. */
4061 ST_FUNC
int exact_log2p1(int i
)
4066 for (ret
= 1; i
>= 1 << 8; ret
+= 8)
4077 /* Parse __attribute__((...)) GNUC extension. */
4078 static void parse_attribute(AttributeDef
*ad
)
4084 if (tok
!= TOK_ATTRIBUTE1
&& tok
!= TOK_ATTRIBUTE2
)
4089 while (tok
!= ')') {
4090 if (tok
< TOK_IDENT
)
4091 expect("attribute name");
4103 tcc_warning("implicit declaration of function '%s'",
4104 get_tok_str(tok
, &tokc
));
4105 s
= external_global_sym(tok
, &func_old_type
);
4106 } else if ((s
->type
.t
& VT_BTYPE
) != VT_FUNC
)
4107 tcc_error("'%s' is not declared as function", get_tok_str(tok
, &tokc
));
4108 ad
->cleanup_func
= s
;
4113 case TOK_CONSTRUCTOR1
:
4114 case TOK_CONSTRUCTOR2
:
4115 ad
->f
.func_ctor
= 1;
4117 case TOK_DESTRUCTOR1
:
4118 case TOK_DESTRUCTOR2
:
4119 ad
->f
.func_dtor
= 1;
4121 case TOK_ALWAYS_INLINE1
:
4122 case TOK_ALWAYS_INLINE2
:
4123 ad
->f
.func_alwinl
= 1;
4128 parse_mult_str(&astr
, "section name");
4129 ad
->section
= find_section(tcc_state
, (char *)astr
.data
);
4136 parse_mult_str(&astr
, "alias(\"target\")");
4137 ad
->asm_label
= /* save string as token, for later */
4138 tok_alloc((char*)astr
.data
, astr
.size
-1)->tok
| SYM_FIELD
;
4142 case TOK_VISIBILITY1
:
4143 case TOK_VISIBILITY2
:
4145 parse_mult_str(&astr
,
4146 "visibility(\"default|hidden|internal|protected\")");
4147 if (!strcmp (astr
.data
, "default"))
4148 ad
->a
.visibility
= STV_DEFAULT
;
4149 else if (!strcmp (astr
.data
, "hidden"))
4150 ad
->a
.visibility
= STV_HIDDEN
;
4151 else if (!strcmp (astr
.data
, "internal"))
4152 ad
->a
.visibility
= STV_INTERNAL
;
4153 else if (!strcmp (astr
.data
, "protected"))
4154 ad
->a
.visibility
= STV_PROTECTED
;
4156 expect("visibility(\"default|hidden|internal|protected\")");
4165 if (n
<= 0 || (n
& (n
- 1)) != 0)
4166 tcc_error("alignment must be a positive power of two");
4171 ad
->a
.aligned
= exact_log2p1(n
);
4172 if (n
!= 1 << (ad
->a
.aligned
- 1))
4173 tcc_error("alignment of %d is larger than implemented", n
);
4185 /* currently, no need to handle it because tcc does not
4186 track unused objects */
4190 ad
->f
.func_noreturn
= 1;
4195 ad
->f
.func_call
= FUNC_CDECL
;
4200 ad
->f
.func_call
= FUNC_STDCALL
;
4202 #ifdef TCC_TARGET_I386
4212 ad
->f
.func_call
= FUNC_FASTCALL1
+ n
- 1;
4218 ad
->f
.func_call
= FUNC_FASTCALLW
;
4225 ad
->attr_mode
= VT_LLONG
+ 1;
4228 ad
->attr_mode
= VT_BYTE
+ 1;
4231 ad
->attr_mode
= VT_SHORT
+ 1;
4235 ad
->attr_mode
= VT_INT
+ 1;
4238 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok
, NULL
));
4245 ad
->a
.dllexport
= 1;
4247 case TOK_NODECORATE
:
4248 ad
->a
.nodecorate
= 1;
4251 ad
->a
.dllimport
= 1;
4254 if (tcc_state
->warn_unsupported
)
4255 tcc_warning("'%s' attribute ignored", get_tok_str(t
, NULL
));
4256 /* skip parameters */
4258 int parenthesis
= 0;
4262 else if (tok
== ')')
4265 } while (parenthesis
&& tok
!= -1);
4278 static Sym
* find_field (CType
*type
, int v
, int *cumofs
)
4282 while ((s
= s
->next
) != NULL
) {
4283 if ((s
->v
& SYM_FIELD
) &&
4284 (s
->type
.t
& VT_BTYPE
) == VT_STRUCT
&&
4285 (s
->v
& ~SYM_FIELD
) >= SYM_FIRST_ANOM
) {
4286 Sym
*ret
= find_field (&s
->type
, v
, cumofs
);
4298 static void struct_layout(CType
*type
, AttributeDef
*ad
)
4300 int size
, align
, maxalign
, offset
, c
, bit_pos
, bit_size
;
4301 int packed
, a
, bt
, prevbt
, prev_bit_size
;
4302 int pcc
= !tcc_state
->ms_bitfields
;
4303 int pragma_pack
= *tcc_state
->pack_stack_ptr
;
4310 prevbt
= VT_STRUCT
; /* make it never match */
4315 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4316 if (f
->type
.t
& VT_BITFIELD
)
4317 bit_size
= BIT_SIZE(f
->type
.t
);
4320 size
= type_size(&f
->type
, &align
);
4321 a
= f
->a
.aligned
? 1 << (f
->a
.aligned
- 1) : 0;
4324 if (pcc
&& bit_size
== 0) {
4325 /* in pcc mode, packing does not affect zero-width bitfields */
4328 /* in pcc mode, attribute packed overrides if set. */
4329 if (pcc
&& (f
->a
.packed
|| ad
->a
.packed
))
4332 /* pragma pack overrides align if lesser and packs bitfields always */
4335 if (pragma_pack
< align
)
4336 align
= pragma_pack
;
4337 /* in pcc mode pragma pack also overrides individual align */
4338 if (pcc
&& pragma_pack
< a
)
4342 /* some individual align was specified */
4346 if (type
->ref
->type
.t
== VT_UNION
) {
4347 if (pcc
&& bit_size
>= 0)
4348 size
= (bit_size
+ 7) >> 3;
4353 } else if (bit_size
< 0) {
4355 c
+= (bit_pos
+ 7) >> 3;
4356 c
= (c
+ align
- 1) & -align
;
4365 /* A bit-field. Layout is more complicated. There are two
4366 options: PCC (GCC) compatible and MS compatible */
4368 /* In PCC layout a bit-field is placed adjacent to the
4369 preceding bit-fields, except if:
4371 - an individual alignment was given
4372 - it would overflow its base type container and
4373 there is no packing */
4374 if (bit_size
== 0) {
4376 c
= (c
+ ((bit_pos
+ 7) >> 3) + align
- 1) & -align
;
4378 } else if (f
->a
.aligned
) {
4380 } else if (!packed
) {
4382 int ofs
= ((c
* 8 + bit_pos
) % a8
+ bit_size
+ a8
- 1) / a8
;
4383 if (ofs
> size
/ align
)
4387 /* in pcc mode, long long bitfields have type int if they fit */
4388 if (size
== 8 && bit_size
<= 32)
4389 f
->type
.t
= (f
->type
.t
& ~VT_BTYPE
) | VT_INT
, size
= 4;
4391 while (bit_pos
>= align
* 8)
4392 c
+= align
, bit_pos
-= align
* 8;
4395 /* In PCC layout named bit-fields influence the alignment
4396 of the containing struct using the base types alignment,
4397 except for packed fields (which here have correct align). */
4398 if (f
->v
& SYM_FIRST_ANOM
4399 // && bit_size // ??? gcc on ARM/rpi does that
4404 bt
= f
->type
.t
& VT_BTYPE
;
4405 if ((bit_pos
+ bit_size
> size
* 8)
4406 || (bit_size
> 0) == (bt
!= prevbt
)
4408 c
= (c
+ align
- 1) & -align
;
4411 /* In MS bitfield mode a bit-field run always uses
4412 at least as many bits as the underlying type.
4413 To start a new run it's also required that this
4414 or the last bit-field had non-zero width. */
4415 if (bit_size
|| prev_bit_size
)
4418 /* In MS layout the records alignment is normally
4419 influenced by the field, except for a zero-width
4420 field at the start of a run (but by further zero-width
4421 fields it is again). */
4422 if (bit_size
== 0 && prevbt
!= bt
)
4425 prev_bit_size
= bit_size
;
4428 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4429 | (bit_pos
<< VT_STRUCT_SHIFT
);
4430 bit_pos
+= bit_size
;
4432 if (align
> maxalign
)
4436 printf("set field %s offset %-2d size %-2d align %-2d",
4437 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
), offset
, size
, align
);
4438 if (f
->type
.t
& VT_BITFIELD
) {
4439 printf(" pos %-2d bits %-2d",
4452 c
+= (bit_pos
+ 7) >> 3;
4454 /* store size and alignment */
4455 a
= bt
= ad
->a
.aligned
? 1 << (ad
->a
.aligned
- 1) : 1;
4459 if (pragma_pack
&& pragma_pack
< maxalign
&& 0 == pcc
) {
4460 /* can happen if individual align for some member was given. In
4461 this case MSVC ignores maxalign when aligning the size */
4466 c
= (c
+ a
- 1) & -a
;
4470 printf("struct size %-2d align %-2d\n\n", c
, a
), fflush(stdout
);
4473 /* check whether we can access bitfields by their type */
4474 for (f
= type
->ref
->next
; f
; f
= f
->next
) {
4478 if (0 == (f
->type
.t
& VT_BITFIELD
))
4482 bit_size
= BIT_SIZE(f
->type
.t
);
4485 bit_pos
= BIT_POS(f
->type
.t
);
4486 size
= type_size(&f
->type
, &align
);
4487 if (bit_pos
+ bit_size
<= size
* 8 && f
->c
+ size
<= c
)
4490 /* try to access the field using a different type */
4491 c0
= -1, s
= align
= 1;
4494 px
= f
->c
* 8 + bit_pos
;
4495 cx
= (px
>> 3) & -align
;
4496 px
= px
- (cx
<< 3);
4499 s
= (px
+ bit_size
+ 7) >> 3;
4509 s
= type_size(&t
, &align
);
4513 if (px
+ bit_size
<= s
* 8 && cx
+ s
<= c
) {
4514 /* update offset and bit position */
4517 f
->type
.t
= (f
->type
.t
& ~(0x3f << VT_STRUCT_SHIFT
))
4518 | (bit_pos
<< VT_STRUCT_SHIFT
);
4522 printf("FIX field %s offset %-2d size %-2d align %-2d "
4523 "pos %-2d bits %-2d\n",
4524 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
),
4525 cx
, s
, align
, px
, bit_size
);
4528 /* fall back to load/store single-byte wise */
4529 f
->auxtype
= VT_STRUCT
;
4531 printf("FIX field %s : load byte-wise\n",
4532 get_tok_str(f
->v
& ~SYM_FIELD
, NULL
));
4538 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4539 static void struct_decl(CType
*type
, int u
)
4541 int v
, c
, size
, align
, flexible
;
4542 int bit_size
, bsize
, bt
;
4544 AttributeDef ad
, ad1
;
4547 memset(&ad
, 0, sizeof ad
);
4549 parse_attribute(&ad
);
4553 /* struct already defined ? return it */
4555 expect("struct/union/enum name");
4557 if (s
&& (s
->sym_scope
== local_scope
|| tok
!= '{')) {
4560 if (u
== VT_ENUM
&& IS_ENUM(s
->type
.t
))
4562 tcc_error("redefinition of '%s'", get_tok_str(v
, NULL
));
4567 /* Record the original enum/struct/union token. */
4568 type1
.t
= u
== VT_ENUM
? u
| VT_INT
| VT_UNSIGNED
: u
;
4570 /* we put an undefined size for struct/union */
4571 s
= sym_push(v
| SYM_STRUCT
, &type1
, 0, -1);
4572 s
->r
= 0; /* default alignment is zero as gcc */
4574 type
->t
= s
->type
.t
;
4580 tcc_error("struct/union/enum already defined");
4582 /* cannot be empty */
4583 /* non empty enums are not allowed */
4586 long long ll
= 0, pl
= 0, nl
= 0;
4589 /* enum symbols have static storage */
4590 t
.t
= VT_INT
|VT_STATIC
|VT_ENUM_VAL
;
4594 expect("identifier");
4596 if (ss
&& !local_stack
)
4597 tcc_error("redefinition of enumerator '%s'",
4598 get_tok_str(v
, NULL
));
4602 ll
= expr_const64();
4604 ss
= sym_push(v
, &t
, VT_CONST
, 0);
4606 *ps
= ss
, ps
= &ss
->next
;
4615 /* NOTE: we accept a trailing comma */
4620 /* set integral type of the enum */
4623 if (pl
!= (unsigned)pl
)
4624 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4626 } else if (pl
!= (int)pl
|| nl
!= (int)nl
)
4627 t
.t
= (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4628 s
->type
.t
= type
->t
= t
.t
| VT_ENUM
;
4630 /* set type for enum members */
4631 for (ss
= s
->next
; ss
; ss
= ss
->next
) {
4633 if (ll
== (int)ll
) /* default is int if it fits */
4635 if (t
.t
& VT_UNSIGNED
) {
4636 ss
->type
.t
|= VT_UNSIGNED
;
4637 if (ll
== (unsigned)ll
)
4640 ss
->type
.t
= (ss
->type
.t
& ~VT_BTYPE
)
4641 | (LONG_SIZE
==8 ? VT_LLONG
|VT_LONG
: VT_LLONG
);
4646 while (tok
!= '}') {
4647 if (!parse_btype(&btype
, &ad1
)) {
4653 tcc_error("flexible array member '%s' not at the end of struct",
4654 get_tok_str(v
, NULL
));
4660 type_decl(&type1
, &ad1
, &v
, TYPE_DIRECT
);
4662 if ((type1
.t
& VT_BTYPE
) != VT_STRUCT
)
4663 expect("identifier");
4665 int v
= btype
.ref
->v
;
4666 if (!(v
& SYM_FIELD
) && (v
& ~SYM_STRUCT
) < SYM_FIRST_ANOM
) {
4667 if (tcc_state
->ms_extensions
== 0)
4668 expect("identifier");
4672 if (type_size(&type1
, &align
) < 0) {
4673 if ((u
== VT_STRUCT
) && (type1
.t
& VT_ARRAY
) && c
)
4676 tcc_error("field '%s' has incomplete type",
4677 get_tok_str(v
, NULL
));
4679 if ((type1
.t
& VT_BTYPE
) == VT_FUNC
||
4680 (type1
.t
& VT_BTYPE
) == VT_VOID
||
4681 (type1
.t
& VT_STORAGE
))
4682 tcc_error("invalid type for '%s'",
4683 get_tok_str(v
, NULL
));
4687 bit_size
= expr_const();
4688 /* XXX: handle v = 0 case for messages */
4690 tcc_error("negative width in bit-field '%s'",
4691 get_tok_str(v
, NULL
));
4692 if (v
&& bit_size
== 0)
4693 tcc_error("zero width for bit-field '%s'",
4694 get_tok_str(v
, NULL
));
4695 parse_attribute(&ad1
);
4697 size
= type_size(&type1
, &align
);
4698 if (bit_size
>= 0) {
4699 bt
= type1
.t
& VT_BTYPE
;
4705 tcc_error("bitfields must have scalar type");
4707 if (bit_size
> bsize
) {
4708 tcc_error("width of '%s' exceeds its type",
4709 get_tok_str(v
, NULL
));
4710 } else if (bit_size
== bsize
4711 && !ad
.a
.packed
&& !ad1
.a
.packed
) {
4712 /* no need for bit fields */
4714 } else if (bit_size
== 64) {
4715 tcc_error("field width 64 not implemented");
4717 type1
.t
= (type1
.t
& ~VT_STRUCT_MASK
)
4719 | (bit_size
<< (VT_STRUCT_SHIFT
+ 6));
4722 if (v
!= 0 || (type1
.t
& VT_BTYPE
) == VT_STRUCT
) {
4723 /* Remember we've seen a real field to check
4724 for placement of flexible array member. */
4727 /* If member is a struct or bit-field, enforce
4728 placing into the struct (as anonymous). */
4730 ((type1
.t
& VT_BTYPE
) == VT_STRUCT
||
4735 ss
= sym_push(v
| SYM_FIELD
, &type1
, 0, 0);
4740 if (tok
== ';' || tok
== TOK_EOF
)
4747 parse_attribute(&ad
);
4748 if (ad
.cleanup_func
) {
4749 tcc_warning("attribute '__cleanup__' ignored on type");
4751 struct_layout(type
, &ad
);
4756 static void sym_to_attr(AttributeDef
*ad
, Sym
*s
)
4758 merge_symattr(&ad
->a
, &s
->a
);
4759 merge_funcattr(&ad
->f
, &s
->f
);
4762 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4763 are added to the element type, copied because it could be a typedef. */
4764 static void parse_btype_qualify(CType
*type
, int qualifiers
)
4766 while (type
->t
& VT_ARRAY
) {
4767 type
->ref
= sym_push(SYM_FIELD
, &type
->ref
->type
, 0, type
->ref
->c
);
4768 type
= &type
->ref
->type
;
4770 type
->t
|= qualifiers
;
4773 /* return 0 if no type declaration. otherwise, return the basic type
4776 static int parse_btype(CType
*type
, AttributeDef
*ad
)
4778 int t
, u
, bt
, st
, type_found
, typespec_found
, g
, n
;
4782 memset(ad
, 0, sizeof(AttributeDef
));
4792 /* currently, we really ignore extension */
4802 if (u
== VT_SHORT
|| u
== VT_LONG
) {
4803 if (st
!= -1 || (bt
!= -1 && bt
!= VT_INT
))
4804 tmbt
: tcc_error("too many basic types");
4807 if (bt
!= -1 || (st
!= -1 && u
!= VT_INT
))
4812 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4829 memset(&ad1
, 0, sizeof(AttributeDef
));
4830 if (parse_btype(&type1
, &ad1
)) {
4831 type_decl(&type1
, &ad1
, &n
, TYPE_ABSTRACT
);
4833 n
= 1 << (ad1
.a
.aligned
- 1);
4835 type_size(&type1
, &n
);
4838 if (n
<= 0 || (n
& (n
- 1)) != 0)
4839 tcc_error("alignment must be a positive power of two");
4842 ad
->a
.aligned
= exact_log2p1(n
);
4846 if ((t
& VT_BTYPE
) == VT_DOUBLE
) {
4847 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4848 } else if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4849 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LLONG
;
4856 #ifdef TCC_TARGET_ARM64
4858 /* GCC's __uint128_t appears in some Linux header files. Make it a
4859 synonym for long double to get the size and alignment right. */
4870 if ((t
& (VT_BTYPE
|VT_LONG
)) == VT_LONG
) {
4871 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | VT_LDOUBLE
;
4879 struct_decl(&type1
, VT_ENUM
);
4882 type
->ref
= type1
.ref
;
4885 struct_decl(&type1
, VT_STRUCT
);
4888 struct_decl(&type1
, VT_UNION
);
4891 /* type modifiers */
4896 parse_btype_qualify(type
, VT_CONSTANT
);
4904 parse_btype_qualify(type
, VT_VOLATILE
);
4911 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == (VT_DEFSIGN
|VT_UNSIGNED
))
4912 tcc_error("signed and unsigned modifier");
4925 if ((t
& (VT_DEFSIGN
|VT_UNSIGNED
)) == VT_DEFSIGN
)
4926 tcc_error("signed and unsigned modifier");
4927 t
|= VT_DEFSIGN
| VT_UNSIGNED
;
4943 if (t
& (VT_EXTERN
|VT_STATIC
|VT_TYPEDEF
) & ~g
)
4944 tcc_error("multiple storage classes");
4956 ad
->f
.func_noreturn
= 1;
4958 /* GNUC attribute */
4959 case TOK_ATTRIBUTE1
:
4960 case TOK_ATTRIBUTE2
:
4961 parse_attribute(ad
);
4962 if (ad
->attr_mode
) {
4963 u
= ad
->attr_mode
-1;
4964 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | u
;
4972 parse_expr_type(&type1
);
4973 /* remove all storage modifiers except typedef */
4974 type1
.t
&= ~(VT_STORAGE
&~VT_TYPEDEF
);
4976 sym_to_attr(ad
, type1
.ref
);
4982 if (!s
|| !(s
->type
.t
& VT_TYPEDEF
))
4986 if (tok
== ':' && !in_generic
) {
4987 /* ignore if it's a label */
4992 t
&= ~(VT_BTYPE
|VT_LONG
);
4993 u
= t
& ~(VT_CONSTANT
| VT_VOLATILE
), t
^= u
;
4994 type
->t
= (s
->type
.t
& ~VT_TYPEDEF
) | u
;
4995 type
->ref
= s
->type
.ref
;
4997 parse_btype_qualify(type
, t
);
4999 /* get attributes from typedef */
5008 if (tcc_state
->char_is_unsigned
) {
5009 if ((t
& (VT_DEFSIGN
|VT_BTYPE
)) == VT_BYTE
)
5012 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
5013 bt
= t
& (VT_BTYPE
|VT_LONG
);
5015 t
|= LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
;
5016 #if defined TCC_TARGET_PE || (defined _WIN32 && defined _MSC_VER)
5017 if (bt
== VT_LDOUBLE
)
5018 t
= (t
& ~(VT_BTYPE
|VT_LONG
)) | (VT_DOUBLE
|VT_LONG
);
5024 /* convert a function parameter type (array to pointer and function to
5025 function pointer) */
5026 static inline void convert_parameter_type(CType
*pt
)
5028 /* remove const and volatile qualifiers (XXX: const could be used
5029 to indicate a const function parameter */
5030 pt
->t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5031 /* array must be transformed to pointer according to ANSI C */
5033 if ((pt
->t
& VT_BTYPE
) == VT_FUNC
) {
5038 ST_FUNC
void parse_asm_str(CString
*astr
)
5041 parse_mult_str(astr
, "string constant");
5044 /* Parse an asm label and return the token */
5045 static int asm_label_instr(void)
5051 parse_asm_str(&astr
);
5054 printf("asm_alias: \"%s\"\n", (char *)astr
.data
);
5056 v
= tok_alloc(astr
.data
, astr
.size
- 1)->tok
;
5061 static int post_type(CType
*type
, AttributeDef
*ad
, int storage
, int td
)
5063 int n
, l
, t1
, arg_size
, align
, unused_align
;
5064 Sym
**plast
, *s
, *first
;
5069 /* function type, or recursive declarator (return if so) */
5071 if (td
&& !(td
& TYPE_ABSTRACT
))
5075 else if (parse_btype(&pt
, &ad1
))
5078 merge_attr (ad
, &ad1
);
5087 /* read param name and compute offset */
5088 if (l
!= FUNC_OLD
) {
5089 if ((pt
.t
& VT_BTYPE
) == VT_VOID
&& tok
== ')')
5091 type_decl(&pt
, &ad1
, &n
, TYPE_DIRECT
| TYPE_ABSTRACT
);
5092 if ((pt
.t
& VT_BTYPE
) == VT_VOID
)
5093 tcc_error("parameter declared as void");
5097 expect("identifier");
5098 pt
.t
= VT_VOID
; /* invalid type */
5102 convert_parameter_type(&pt
);
5103 arg_size
+= (type_size(&pt
, &align
) + PTR_SIZE
- 1) / PTR_SIZE
;
5104 s
= sym_push(n
| SYM_FIELD
, &pt
, 0, 0);
5110 if (l
== FUNC_NEW
&& tok
== TOK_DOTS
) {
5115 if (l
== FUNC_NEW
&& !parse_btype(&pt
, &ad1
))
5116 tcc_error("invalid type");
5119 /* if no parameters, then old type prototype */
5122 /* NOTE: const is ignored in returned type as it has a special
5123 meaning in gcc / C++ */
5124 type
->t
&= ~VT_CONSTANT
;
5125 /* some ancient pre-K&R C allows a function to return an array
5126 and the array brackets to be put after the arguments, such
5127 that "int c()[]" means something like "int[] c()" */
5130 skip(']'); /* only handle simple "[]" */
5133 /* we push a anonymous symbol which will contain the function prototype */
5134 ad
->f
.func_args
= arg_size
;
5135 ad
->f
.func_type
= l
;
5136 s
= sym_push(SYM_FIELD
, type
, 0, 0);
5142 } else if (tok
== '[') {
5143 int saved_nocode_wanted
= nocode_wanted
;
5144 /* array definition */
5147 /* XXX The optional type-quals and static should only be accepted
5148 in parameter decls. The '*' as well, and then even only
5149 in prototypes (not function defs). */
5151 case TOK_RESTRICT1
: case TOK_RESTRICT2
: case TOK_RESTRICT3
:
5166 if (!local_stack
|| (storage
& VT_STATIC
))
5167 vpushi(expr_const());
5169 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5170 length must always be evaluated, even under nocode_wanted,
5171 so that its size slot is initialized (e.g. under sizeof
5176 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
) {
5179 tcc_error("invalid array size");
5181 if (!is_integer_btype(vtop
->type
.t
& VT_BTYPE
))
5182 tcc_error("size of variable length array should be an integer");
5188 /* parse next post type */
5189 post_type(type
, ad
, storage
, 0);
5191 if ((type
->t
& VT_BTYPE
) == VT_FUNC
)
5192 tcc_error("declaration of an array of functions");
5193 if ((type
->t
& VT_BTYPE
) == VT_VOID
5194 || type_size(type
, &unused_align
) < 0)
5195 tcc_error("declaration of an array of incomplete type elements");
5197 t1
|= type
->t
& VT_VLA
;
5201 tcc_error("need explicit inner array size in VLAs");
5202 loc
-= type_size(&int_type
, &align
);
5206 vla_runtime_type_size(type
, &align
);
5208 vset(&int_type
, VT_LOCAL
|VT_LVAL
, n
);
5214 nocode_wanted
= saved_nocode_wanted
;
5216 /* we push an anonymous symbol which will contain the array
5218 s
= sym_push(SYM_FIELD
, type
, 0, n
);
5219 type
->t
= (t1
? VT_VLA
: VT_ARRAY
) | VT_PTR
;
5225 /* Parse a type declarator (except basic type), and return the type
5226 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5227 expected. 'type' should contain the basic type. 'ad' is the
5228 attribute definition of the basic type. It can be modified by
5229 type_decl(). If this (possibly abstract) declarator is a pointer chain
5230 it returns the innermost pointed to type (equals *type, but is a different
5231 pointer), otherwise returns type itself, that's used for recursive calls. */
5232 static CType
*type_decl(CType
*type
, AttributeDef
*ad
, int *v
, int td
)
5235 int qualifiers
, storage
;
5237 /* recursive type, remove storage bits first, apply them later again */
5238 storage
= type
->t
& VT_STORAGE
;
5239 type
->t
&= ~VT_STORAGE
;
5242 while (tok
== '*') {
5250 qualifiers
|= VT_CONSTANT
;
5255 qualifiers
|= VT_VOLATILE
;
5261 /* XXX: clarify attribute handling */
5262 case TOK_ATTRIBUTE1
:
5263 case TOK_ATTRIBUTE2
:
5264 parse_attribute(ad
);
5268 type
->t
|= qualifiers
;
5270 /* innermost pointed to type is the one for the first derivation */
5271 ret
= pointed_type(type
);
5275 /* This is possibly a parameter type list for abstract declarators
5276 ('int ()'), use post_type for testing this. */
5277 if (!post_type(type
, ad
, 0, td
)) {
5278 /* It's not, so it's a nested declarator, and the post operations
5279 apply to the innermost pointed to type (if any). */
5280 /* XXX: this is not correct to modify 'ad' at this point, but
5281 the syntax is not clear */
5282 parse_attribute(ad
);
5283 post
= type_decl(type
, ad
, v
, td
);
5287 } else if (tok
>= TOK_IDENT
&& (td
& TYPE_DIRECT
)) {
5288 /* type identifier */
5293 if (!(td
& TYPE_ABSTRACT
))
5294 expect("identifier");
5297 post_type(post
, ad
, storage
, 0);
5298 parse_attribute(ad
);
5303 /* indirection with full error checking and bound check */
5304 ST_FUNC
void indir(void)
5306 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
5307 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
5311 if (vtop
->r
& VT_LVAL
)
5313 vtop
->type
= *pointed_type(&vtop
->type
);
5314 /* Arrays and functions are never lvalues */
5315 if (!(vtop
->type
.t
& (VT_ARRAY
| VT_VLA
))
5316 && (vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
5318 /* if bound checking, the referenced pointer must be checked */
5319 #ifdef CONFIG_TCC_BCHECK
5320 if (tcc_state
->do_bounds_check
)
5321 vtop
->r
|= VT_MUSTBOUND
;
5326 /* pass a parameter to a function and do type checking and casting */
5327 static void gfunc_param_typed(Sym
*func
, Sym
*arg
)
5332 func_type
= func
->f
.func_type
;
5333 if (func_type
== FUNC_OLD
||
5334 (func_type
== FUNC_ELLIPSIS
&& arg
== NULL
)) {
5335 /* default casting : only need to convert float to double */
5336 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FLOAT
) {
5337 gen_cast_s(VT_DOUBLE
);
5338 } else if (vtop
->type
.t
& VT_BITFIELD
) {
5339 type
.t
= vtop
->type
.t
& (VT_BTYPE
| VT_UNSIGNED
);
5340 type
.ref
= vtop
->type
.ref
;
5342 } else if (vtop
->r
& VT_MUSTCAST
) {
5343 force_charshort_cast();
5345 } else if (arg
== NULL
) {
5346 tcc_error("too many arguments to function");
5349 type
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
5350 gen_assign_cast(&type
);
5354 /* parse an expression and return its type without any side effect. */
5355 static void expr_type(CType
*type
, void (*expr_fn
)(void))
5364 /* parse an expression of the form '(type)' or '(expr)' and return its
5366 static void parse_expr_type(CType
*type
)
5372 if (parse_btype(type
, &ad
)) {
5373 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5375 expr_type(type
, gexpr
);
5380 static void parse_type(CType
*type
)
5385 if (!parse_btype(type
, &ad
)) {
5388 type_decl(type
, &ad
, &n
, TYPE_ABSTRACT
);
5391 static void parse_builtin_params(int nc
, const char *args
)
5400 while ((c
= *args
++)) {
5415 type
.t
= VT_CONSTANT
;
5421 type
.t
= VT_CONSTANT
;
5423 type
.t
|= char_type
.t
;
5433 tcc_error("internal error");
5435 gen_assign_cast(&type
);
5442 ST_FUNC
void unary(void)
5444 int n
, t
, align
, size
, r
, sizeof_caller
;
5449 /* generate line number info */
5450 if (tcc_state
->do_debug
)
5451 tcc_debug_line(tcc_state
);
5453 sizeof_caller
= in_sizeof
;
5456 /* XXX: GCC 2.95.3 does not generate a table although it should be
5464 #ifdef TCC_TARGET_PE
5465 t
= VT_SHORT
|VT_UNSIGNED
;
5473 vsetc(&type
, VT_CONST
, &tokc
);
5477 t
= VT_INT
| VT_UNSIGNED
;
5483 t
= VT_LLONG
| VT_UNSIGNED
;
5495 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
;
5498 t
= (LONG_SIZE
== 8 ? VT_LLONG
: VT_INT
) | VT_LONG
| VT_UNSIGNED
;
5500 case TOK___FUNCTION__
:
5502 goto tok_identifier
;
5508 /* special function name identifier */
5509 len
= strlen(funcname
) + 1;
5510 /* generate char[len] type */
5515 vpush_ref(&type
, data_section
, data_section
->data_offset
, len
);
5516 if (!NODATA_WANTED
) {
5517 ptr
= section_ptr_add(data_section
, len
);
5518 memcpy(ptr
, funcname
, len
);
5524 #ifdef TCC_TARGET_PE
5525 t
= VT_SHORT
| VT_UNSIGNED
;
5531 /* string parsing */
5533 if (tcc_state
->char_is_unsigned
)
5534 t
= VT_BYTE
| VT_UNSIGNED
;
5536 if (tcc_state
->warn_write_strings
)
5541 memset(&ad
, 0, sizeof(AttributeDef
));
5542 decl_initializer_alloc(&type
, &ad
, VT_CONST
, 2, 0, 0);
5547 if (parse_btype(&type
, &ad
)) {
5548 type_decl(&type
, &ad
, &n
, TYPE_ABSTRACT
);
5550 /* check ISOC99 compound literal */
5552 /* data is allocated locally by default */
5557 /* all except arrays are lvalues */
5558 if (!(type
.t
& VT_ARRAY
))
5560 memset(&ad
, 0, sizeof(AttributeDef
));
5561 decl_initializer_alloc(&type
, &ad
, r
, 1, 0, 0);
5563 if (sizeof_caller
) {
5570 } else if (tok
== '{') {
5571 int saved_nocode_wanted
= nocode_wanted
;
5572 if (const_wanted
&& !(nocode_wanted
& unevalmask
))
5573 tcc_error("expected constant");
5574 /* save all registers */
5576 /* statement expression : we do not accept break/continue
5577 inside as GCC does. We do retain the nocode_wanted state,
5578 as statement expressions can't ever be entered from the
5579 outside, so any reactivation of code emission (from labels
5580 or loop heads) can be disabled again after the end of it. */
5582 nocode_wanted
= saved_nocode_wanted
;
5597 /* functions names must be treated as function pointers,
5598 except for unary '&' and sizeof. Since we consider that
5599 functions are not lvalues, we only have to handle it
5600 there and in function calls. */
5601 /* arrays can also be used although they are not lvalues */
5602 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
&&
5603 !(vtop
->type
.t
& VT_ARRAY
))
5606 vtop
->sym
->a
.addrtaken
= 1;
5607 mk_pointer(&vtop
->type
);
5613 gen_test_zero(TOK_EQ
);
5624 if ((vtop
->type
.t
& VT_BTYPE
) == VT_PTR
)
5625 tcc_error("pointer not accepted for unary plus");
5626 /* In order to force cast, we add zero, except for floating point
5627 where we really need an noop (otherwise -0.0 will be transformed
5629 if (!is_float(vtop
->type
.t
)) {
5641 expr_type(&type
, unary
); /* Perform a in_sizeof = 0; */
5643 if (vtop
[1].r
& VT_SYM
)
5644 s
= vtop
[1].sym
; /* hack: accessing previous vtop */
5645 size
= type_size(&type
, &align
);
5646 if (s
&& s
->a
.aligned
)
5647 align
= 1 << (s
->a
.aligned
- 1);
5648 if (t
== TOK_SIZEOF
) {
5649 if (!(type
.t
& VT_VLA
)) {
5651 tcc_error("sizeof applied to an incomplete type");
5654 vla_runtime_type_size(&type
, &align
);
5659 vtop
->type
.t
|= VT_UNSIGNED
;
5662 case TOK_builtin_expect
:
5663 /* __builtin_expect is a no-op for now */
5664 parse_builtin_params(0, "ee");
5667 case TOK_builtin_types_compatible_p
:
5668 parse_builtin_params(0, "tt");
5669 vtop
[-1].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5670 vtop
[0].type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
);
5671 n
= is_compatible_types(&vtop
[-1].type
, &vtop
[0].type
);
5675 case TOK_builtin_choose_expr
:
5702 case TOK_builtin_constant_p
:
5703 parse_builtin_params(1, "e");
5704 n
= (vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
5705 !((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.addrtaken
);
5709 case TOK_builtin_frame_address
:
5710 case TOK_builtin_return_address
:
5716 if (tok
!= TOK_CINT
) {
5717 tcc_error("%s only takes positive integers",
5718 tok1
== TOK_builtin_return_address
?
5719 "__builtin_return_address" :
5720 "__builtin_frame_address");
5722 level
= (uint32_t)tokc
.i
;
5727 vset(&type
, VT_LOCAL
, 0); /* local frame */
5729 #ifdef TCC_TARGET_RISCV64
5733 mk_pointer(&vtop
->type
);
5734 indir(); /* -> parent frame */
5736 if (tok1
== TOK_builtin_return_address
) {
5737 // assume return address is just above frame pointer on stack
5738 #ifdef TCC_TARGET_ARM
5741 #elif defined TCC_TARGET_RISCV64
5748 mk_pointer(&vtop
->type
);
5753 #ifdef TCC_TARGET_RISCV64
5754 case TOK_builtin_va_start
:
5755 parse_builtin_params(0, "ee");
5756 r
= vtop
->r
& VT_VALMASK
;
5760 tcc_error("__builtin_va_start expects a local variable");
5765 #ifdef TCC_TARGET_X86_64
5766 #ifdef TCC_TARGET_PE
5767 case TOK_builtin_va_start
:
5768 parse_builtin_params(0, "ee");
5769 r
= vtop
->r
& VT_VALMASK
;
5773 tcc_error("__builtin_va_start expects a local variable");
5775 vtop
->type
= char_pointer_type
;
5780 case TOK_builtin_va_arg_types
:
5781 parse_builtin_params(0, "t");
5782 vpushi(classify_x86_64_va_arg(&vtop
->type
));
5789 #ifdef TCC_TARGET_ARM64
5790 case TOK_builtin_va_start
: {
5791 parse_builtin_params(0, "ee");
5795 vtop
->type
.t
= VT_VOID
;
5798 case TOK_builtin_va_arg
: {
5799 parse_builtin_params(0, "et");
5807 case TOK___arm64_clear_cache
: {
5808 parse_builtin_params(0, "ee");
5811 vtop
->type
.t
= VT_VOID
;
5816 /* pre operations */
5827 t
= vtop
->type
.t
& VT_BTYPE
;
5829 /* In IEEE negate(x) isn't subtract(0,x), but rather
5833 vtop
->c
.f
= -1.0 * 0.0;
5834 else if (t
== VT_DOUBLE
)
5835 vtop
->c
.d
= -1.0 * 0.0;
5837 vtop
->c
.ld
= -1.0 * 0.0;
5845 goto tok_identifier
;
5847 /* allow to take the address of a label */
5848 if (tok
< TOK_UIDENT
)
5849 expect("label identifier");
5850 s
= label_find(tok
);
5852 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
5854 if (s
->r
== LABEL_DECLARED
)
5855 s
->r
= LABEL_FORWARD
;
5858 s
->type
.t
= VT_VOID
;
5859 mk_pointer(&s
->type
);
5860 s
->type
.t
|= VT_STATIC
;
5862 vpushsym(&s
->type
, s
);
5868 CType controlling_type
;
5869 int has_default
= 0;
5872 TokenString
*str
= NULL
;
5873 int saved_const_wanted
= const_wanted
;
5878 expr_type(&controlling_type
, expr_eq
);
5879 controlling_type
.t
&= ~(VT_CONSTANT
| VT_VOLATILE
| VT_ARRAY
);
5880 if ((controlling_type
.t
& VT_BTYPE
) == VT_FUNC
)
5881 mk_pointer(&controlling_type
);
5882 const_wanted
= saved_const_wanted
;
5886 if (tok
== TOK_DEFAULT
) {
5888 tcc_error("too many 'default'");
5894 AttributeDef ad_tmp
;
5899 parse_btype(&cur_type
, &ad_tmp
);
5902 type_decl(&cur_type
, &ad_tmp
, &itmp
, TYPE_ABSTRACT
);
5903 if (compare_types(&controlling_type
, &cur_type
, 0)) {
5905 tcc_error("type match twice");
5915 skip_or_save_block(&str
);
5917 skip_or_save_block(NULL
);
5924 type_to_str(buf
, sizeof buf
, &controlling_type
, NULL
);
5925 tcc_error("type '%s' does not match any association", buf
);
5927 begin_macro(str
, 1);
5936 // special qnan , snan and infinity values
5941 vtop
->type
.t
= VT_FLOAT
;
5946 goto special_math_val
;
5949 goto special_math_val
;
5956 expect("identifier");
5958 if (!s
|| IS_ASM_SYM(s
)) {
5959 const char *name
= get_tok_str(t
, NULL
);
5961 tcc_error("'%s' undeclared", name
);
5962 /* for simple function calls, we tolerate undeclared
5963 external reference to int() function */
5964 if (tcc_state
->warn_implicit_function_declaration
5965 #ifdef TCC_TARGET_PE
5966 /* people must be warned about using undeclared WINAPI functions
5967 (which usually start with uppercase letter) */
5968 || (name
[0] >= 'A' && name
[0] <= 'Z')
5971 tcc_warning("implicit declaration of function '%s'", name
);
5972 s
= external_global_sym(t
, &func_old_type
);
5976 /* A symbol that has a register is a local register variable,
5977 which starts out as VT_LOCAL value. */
5978 if ((r
& VT_VALMASK
) < VT_CONST
)
5979 r
= (r
& ~VT_VALMASK
) | VT_LOCAL
;
5981 vset(&s
->type
, r
, s
->c
);
5982 /* Point to s as backpointer (even without r&VT_SYM).
5983 Will be used by at least the x86 inline asm parser for
5989 } else if (r
== VT_CONST
&& IS_ENUM_VAL(s
->type
.t
)) {
5990 vtop
->c
.i
= s
->enum_val
;
5995 /* post operations */
5997 if (tok
== TOK_INC
|| tok
== TOK_DEC
) {
6000 } else if (tok
== '.' || tok
== TOK_ARROW
|| tok
== TOK_CDOUBLE
) {
6001 int qualifiers
, cumofs
= 0;
6003 if (tok
== TOK_ARROW
)
6005 qualifiers
= vtop
->type
.t
& (VT_CONSTANT
| VT_VOLATILE
);
6008 /* expect pointer on structure */
6009 if ((vtop
->type
.t
& VT_BTYPE
) != VT_STRUCT
)
6010 expect("struct or union");
6011 if (tok
== TOK_CDOUBLE
)
6012 expect("field name");
6014 if (tok
== TOK_CINT
|| tok
== TOK_CUINT
)
6015 expect("field name");
6016 s
= find_field(&vtop
->type
, tok
, &cumofs
);
6018 tcc_error("field not found: %s", get_tok_str(tok
& ~SYM_FIELD
, &tokc
));
6019 /* add field offset to pointer */
6020 vtop
->type
= char_pointer_type
; /* change type to 'char *' */
6021 vpushi(cumofs
+ s
->c
);
6023 /* change type to field type, and set to lvalue */
6024 vtop
->type
= s
->type
;
6025 vtop
->type
.t
|= qualifiers
;
6026 /* an array is never an lvalue */
6027 if (!(vtop
->type
.t
& VT_ARRAY
)) {
6029 #ifdef CONFIG_TCC_BCHECK
6030 /* if bound checking, the referenced pointer must be checked */
6031 if (tcc_state
->do_bounds_check
)
6032 vtop
->r
|= VT_MUSTBOUND
;
6036 } else if (tok
== '[') {
6042 } else if (tok
== '(') {
6045 int nb_args
, ret_nregs
, ret_align
, regsize
, variadic
;
6048 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
) {
6049 /* pointer test (no array accepted) */
6050 if ((vtop
->type
.t
& (VT_BTYPE
| VT_ARRAY
)) == VT_PTR
) {
6051 vtop
->type
= *pointed_type(&vtop
->type
);
6052 if ((vtop
->type
.t
& VT_BTYPE
) != VT_FUNC
)
6056 expect("function pointer");
6059 vtop
->r
&= ~VT_LVAL
; /* no lvalue */
6061 /* get return type */
6064 sa
= s
->next
; /* first parameter */
6065 nb_args
= regsize
= 0;
6067 /* compute first implicit argument if a structure is returned */
6068 if ((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) {
6069 variadic
= (s
->f
.func_type
== FUNC_ELLIPSIS
);
6070 ret_nregs
= gfunc_sret(&s
->type
, variadic
, &ret
.type
,
6071 &ret_align
, ®size
);
6072 if (ret_nregs
<= 0) {
6073 /* get some space for the returned structure */
6074 size
= type_size(&s
->type
, &align
);
6075 #ifdef TCC_TARGET_ARM64
6076 /* On arm64, a small struct is return in registers.
6077 It is much easier to write it to memory if we know
6078 that we are allowed to write some extra bytes, so
6079 round the allocated space up to a power of 2: */
6081 while (size
& (size
- 1))
6082 size
= (size
| (size
- 1)) + 1;
6084 loc
= (loc
- size
) & -align
;
6086 ret
.r
= VT_LOCAL
| VT_LVAL
;
6087 /* pass it as 'int' to avoid structure arg passing
6089 vseti(VT_LOCAL
, loc
);
6101 if (ret_nregs
> 0) {
6102 /* return in register */
6104 PUT_R_RET(&ret
, ret
.type
.t
);
6109 gfunc_param_typed(s
, sa
);
6119 tcc_error("too few arguments to function");
6121 gfunc_call(nb_args
);
6123 if (ret_nregs
< 0) {
6124 vsetc(&ret
.type
, ret
.r
, &ret
.c
);
6125 #ifdef TCC_TARGET_RISCV64
6126 arch_transfer_ret_regs(1);
6130 for (r
= ret
.r
+ ret_nregs
+ !ret_nregs
; r
-- > ret
.r
;) {
6131 vsetc(&ret
.type
, r
, &ret
.c
);
6132 vtop
->r2
= ret
.r2
; /* Loop only happens when r2 is VT_CONST */
6135 /* handle packed struct return */
6136 if (((s
->type
.t
& VT_BTYPE
) == VT_STRUCT
) && ret_nregs
) {
6139 size
= type_size(&s
->type
, &align
);
6140 /* We're writing whole regs often, make sure there's enough
6141 space. Assume register size is power of 2. */
6142 if (regsize
> align
)
6144 loc
= (loc
- size
) & -align
;
6148 vset(&ret
.type
, VT_LOCAL
| VT_LVAL
, addr
+ offset
);
6152 if (--ret_nregs
== 0)
6156 vset(&s
->type
, VT_LOCAL
| VT_LVAL
, addr
);
6159 /* Promote char/short return values. This is matters only
6160 for calling function that were not compiled by TCC and
6161 only on some architectures. For those where it doesn't
6162 matter we expect things to be already promoted to int,
6164 t
= s
->type
.t
& VT_BTYPE
;
6165 if (t
== VT_BYTE
|| t
== VT_SHORT
|| t
== VT_BOOL
) {
6167 vtop
->r
|= BFVAL(VT_MUSTCAST
, 1);
6169 vtop
->type
.t
= VT_INT
;
6173 if (s
->f
.func_noreturn
)
6181 #ifndef precedence_parser /* original top-down parser */
6183 static void expr_prod(void)
6188 while ((t
= tok
) == '*' || t
== '/' || t
== '%') {
6195 static void expr_sum(void)
6200 while ((t
= tok
) == '+' || t
== '-') {
6207 static void expr_shift(void)
6212 while ((t
= tok
) == TOK_SHL
|| t
== TOK_SAR
) {
6219 static void expr_cmp(void)
6224 while (((t
= tok
) >= TOK_ULE
&& t
<= TOK_GT
) ||
6225 t
== TOK_ULT
|| t
== TOK_UGE
) {
6232 static void expr_cmpeq(void)
6237 while ((t
= tok
) == TOK_EQ
|| t
== TOK_NE
) {
6244 static void expr_and(void)
6247 while (tok
== '&') {
6254 static void expr_xor(void)
6257 while (tok
== '^') {
6264 static void expr_or(void)
6267 while (tok
== '|') {
6274 static void expr_landor(int op
);
6276 static void expr_land(void)
6279 if (tok
== TOK_LAND
)
6283 static void expr_lor(void)
6290 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6291 #else /* defined precedence_parser */
6292 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6293 # define expr_lor() unary(), expr_infix(1)
6295 static int precedence(int tok
)
6298 case TOK_LOR
: return 1;
6299 case TOK_LAND
: return 2;
6303 case TOK_EQ
: case TOK_NE
: return 6;
6304 relat
: case TOK_ULT
: case TOK_UGE
: return 7;
6305 case TOK_SHL
: case TOK_SAR
: return 8;
6306 case '+': case '-': return 9;
6307 case '*': case '/': case '%': return 10;
6309 if (tok
>= TOK_ULE
&& tok
<= TOK_GT
)
6314 static unsigned char prec
[256];
6315 static void init_prec(void)
6318 for (i
= 0; i
< 256; i
++)
6319 prec
[i
] = precedence(i
);
6321 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6323 static void expr_landor(int op
);
6325 static void expr_infix(int p
)
6328 while ((p2
= precedence(t
)) >= p
) {
6329 if (t
== TOK_LOR
|| t
== TOK_LAND
) {
6334 if (precedence(tok
) > p2
)
6343 /* Assuming vtop is a value used in a conditional context
6344 (i.e. compared with zero) return 0 if it's false, 1 if
6345 true and -1 if it can't be statically determined. */
6346 static int condition_3way(void)
6349 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) == VT_CONST
&&
6350 (!(vtop
->r
& VT_SYM
) || !vtop
->sym
->a
.weak
)) {
6352 gen_cast_s(VT_BOOL
);
6359 static void expr_landor(int op
)
6361 int t
= 0, cc
= 1, f
= 0, i
= op
== TOK_LAND
, c
;
6363 c
= f
? i
: condition_3way();
6365 save_regs(1), cc
= 0;
6367 nocode_wanted
++, f
= 1;
6375 expr_landor_next(op
);
6387 static int is_cond_bool(SValue
*sv
)
6389 if ((sv
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) == VT_CONST
6390 && (sv
->type
.t
& VT_BTYPE
) == VT_INT
)
6391 return (unsigned)sv
->c
.i
< 2;
6392 if (sv
->r
== VT_CMP
)
6397 static void expr_cond(void)
6399 int tt
, u
, r1
, r2
, rc
, t1
, t2
, islv
, c
, g
;
6407 c
= condition_3way();
6408 g
= (tok
== ':' && gnu_ext
);
6418 /* needed to avoid having different registers saved in
6425 ncw_prev
= nocode_wanted
;
6431 if (c
< 0 && vtop
->r
== VT_CMP
) {
6438 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6439 mk_pointer(&vtop
->type
);
6440 sv
= *vtop
; /* save value to handle it later */
6441 vtop
--; /* no vpop so that FP stack is not flushed */
6451 nocode_wanted
= ncw_prev
;
6457 if (c
< 0 && is_cond_bool(vtop
) && is_cond_bool(&sv
)) {
6458 if (sv
.r
== VT_CMP
) {
6469 nocode_wanted
= ncw_prev
;
6470 // tcc_warning("two conditions expr_cond");
6474 if ((vtop
->type
.t
& VT_BTYPE
) == VT_FUNC
)
6475 mk_pointer(&vtop
->type
);
6477 /* cast operands to correct type according to ISOC rules */
6478 if (!combine_types(&type
, &sv
, vtop
, '?'))
6479 type_incompatibility_error(&sv
.type
, &vtop
->type
,
6480 "type mismatch in conditional expression (have '%s' and '%s')");
6481 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6482 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6483 islv
= (vtop
->r
& VT_LVAL
) && (sv
.r
& VT_LVAL
) && VT_STRUCT
== (type
.t
& VT_BTYPE
);
6485 /* now we convert second operand */
6489 mk_pointer(&vtop
->type
);
6491 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6495 rc
= RC_TYPE(type
.t
);
6496 /* for long longs, we use fixed registers to avoid having
6497 to handle a complicated move */
6498 if (USING_TWO_WORDS(type
.t
))
6499 rc
= RC_RET(type
.t
);
6507 nocode_wanted
= ncw_prev
;
6509 /* this is horrible, but we must also convert first
6515 mk_pointer(&vtop
->type
);
6517 } else if (VT_STRUCT
== (vtop
->type
.t
& VT_BTYPE
))
6523 move_reg(r2
, r1
, islv
? VT_PTR
: type
.t
);
6533 static void expr_eq(void)
6538 if ((t
= tok
) == '=' || TOK_ASSIGN(t
)) {
6546 gen_op(TOK_ASSIGN_OP(t
));
6552 ST_FUNC
void gexpr(void)
6563 /* parse a constant expression and return value in vtop. */
6564 static void expr_const1(void)
6567 nocode_wanted
+= unevalmask
+ 1;
6569 nocode_wanted
-= unevalmask
+ 1;
6573 /* parse an integer constant and return its value. */
6574 static inline int64_t expr_const64(void)
6578 if ((vtop
->r
& (VT_VALMASK
| VT_LVAL
| VT_SYM
)) != VT_CONST
)
6579 expect("constant expression");
6585 /* parse an integer constant and return its value.
6586 Complain if it doesn't fit 32bit (signed or unsigned). */
/* Parse an integer constant expression and return it as a 32-bit 'int'.
   Errors out when the parsed 64-bit value fits in neither signed nor
   unsigned 32-bit range.
   NOTE(review): extraction fragment — the declaration of 'c', the
   assignment 'c = wc' and the final 'return' (original lines 6588-6594)
   are missing from this view; confirm against the full file. */
6587 ST_FUNC
int expr_const(void)
/* full-precision value of the constant expression */
6590 int64_t wc
= expr_const64();
/* value fits neither as signed nor as unsigned 32-bit -> reject */
6592 if (c
!= wc
&& (unsigned)c
!= wc
)
6593 tcc_error("constant exceeds 32 bit");
6597 /* ------------------------------------------------------------------------- */
6598 /* return from function */
6600 #ifndef TCC_TARGET_ARM64
6601 static void gfunc_return(CType
*func_type
)
6603 if ((func_type
->t
& VT_BTYPE
) == VT_STRUCT
) {
6604 CType type
, ret_type
;
6605 int ret_align
, ret_nregs
, regsize
;
6606 ret_nregs
= gfunc_sret(func_type
, func_var
, &ret_type
,
6607 &ret_align
, ®size
);
6608 if (ret_nregs
< 0) {
6609 #ifdef TCC_TARGET_RISCV64
6610 arch_transfer_ret_regs(0);
6612 } else if (0 == ret_nregs
) {
6613 /* if returning structure, must copy it to implicit
6614 first pointer arg location */
6617 vset(&type
, VT_LOCAL
| VT_LVAL
, func_vc
);
6620 /* copy structure value to pointer */
6623 /* returning structure packed into registers */
6624 int size
, addr
, align
, rc
;
6625 size
= type_size(func_type
,&align
);
6626 if ((vtop
->r
!= (VT_LOCAL
| VT_LVAL
) ||
6627 (vtop
->c
.i
& (ret_align
-1)))
6628 && (align
& (ret_align
-1))) {
6629 loc
= (loc
- size
) & -ret_align
;
6632 vset(&type
, VT_LOCAL
| VT_LVAL
, addr
);
6636 vset(&ret_type
, VT_LOCAL
| VT_LVAL
, addr
);
6638 vtop
->type
= ret_type
;
6639 rc
= RC_RET(ret_type
.t
);
6647 if (--ret_nregs
== 0)
6649 /* We assume that when a structure is returned in multiple
6650 registers, their classes are consecutive values of the
6653 vtop
->c
.i
+= regsize
;
6658 gv(RC_RET(func_type
->t
));
6660 vtop
--; /* NOT vpop() because on x86 it would flush the fp stack */
6664 static void check_func_return(void)
6666 if ((func_vt
.t
& VT_BTYPE
) == VT_VOID
)
6668 if (!strcmp (funcname
, "main")
6669 && (func_vt
.t
& VT_BTYPE
) == VT_INT
) {
6670 /* main returns 0 by default */
6672 gen_assign_cast(&func_vt
);
6673 gfunc_return(&func_vt
);
6675 tcc_warning("function might return no value: '%s'", funcname
);
6679 /* ------------------------------------------------------------------------- */
6682 static int case_cmpi(const void *pa
, const void *pb
)
6684 int64_t a
= (*(struct case_t
**) pa
)->v1
;
6685 int64_t b
= (*(struct case_t
**) pb
)->v1
;
6686 return a
< b
? -1 : a
> b
;
6689 static int case_cmpu(const void *pa
, const void *pb
)
6691 uint64_t a
= (uint64_t)(*(struct case_t
**) pa
)->v1
;
6692 uint64_t b
= (uint64_t)(*(struct case_t
**) pb
)->v1
;
6693 return a
< b
? -1 : a
> b
;
/* Emit a test jump for the value on top of the value stack and bind the
   resulting jump to address 'a' ('t' is the chained jump target passed
   through to gvtst). */
static void gtst_addr(int t, int a)
{
    int jmp = gvtst(0, t);
    gsym_addr(jmp, a);
}
6701 static void gcase(struct case_t
**base
, int len
, int *bsym
)
6705 int ll
= (vtop
->type
.t
& VT_BTYPE
) == VT_LLONG
;
6722 gtst_addr(0, p
->sym
); /* v1 <= x <= v2 */
6724 gcase(base
, len
/2, bsym
);
6728 base
+= e
; len
-= e
;
6738 if (p
->v1
== p
->v2
) {
6740 gtst_addr(0, p
->sym
);
6750 gtst_addr(0, p
->sym
);
6754 *bsym
= gjmp(*bsym
);
6757 /* ------------------------------------------------------------------------- */
6758 /* __attribute__((cleanup(fn))) */
/* Emit calls for all __attribute__((cleanup(fn))) handlers registered on
   the current scope's cleanup chain, walking it until the caller-supplied
   'stop' marker (exclusive).
   NOTE(review): extraction fragment — the original lines following
   mk_pointer() (presumably gaddrof()/gfunc_call() and the loop close,
   originals 6772-6774) are missing from this view; confirm in full file. */
6760 static void try_call_scope_cleanup(Sym
*stop
)
/* head of the current scope's cleanup chain */
6762 Sym
*cls
= cur_scope
->cl
.s
;
/* walk the chain entries until 'stop' */
6764 for (; cls
!= stop
; cls
= cls
->ncl
) {
/* fs: the cleanup function symbol; vs: the symbol it was attached to */
6765 Sym
*fs
= cls
->next
;
6766 Sym
*vs
= cls
->prev_tok
;
/* push the cleanup function, then the variable's value slot */
6768 vpushsym(&fs
->type
, fs
);
6769 vset(&vs
->type
, vs
->r
, vs
->c
);
/* the cleanup function receives a pointer to the variable */
6771 mk_pointer(&vtop
->type
);
6777 static void try_call_cleanup_goto(Sym
*cleanupstate
)
6782 if (!cur_scope
->cl
.s
)
6785 /* search NCA of both cleanup chains given parents and initial depth */
6786 ocd
= cleanupstate
? cleanupstate
->v
& ~SYM_FIELD
: 0;
6787 for (ccd
= cur_scope
->cl
.n
, oc
= cleanupstate
; ocd
> ccd
; --ocd
, oc
= oc
->ncl
)
6789 for (cc
= cur_scope
->cl
.s
; ccd
> ocd
; --ccd
, cc
= cc
->ncl
)
6791 for (; cc
!= oc
; cc
= cc
->ncl
, oc
= oc
->ncl
, --ccd
)
6794 try_call_scope_cleanup(cc
);
6797 /* call 'func' for each __attribute__((cleanup(func))) */
6798 static void block_cleanup(struct scope
*o
)
6802 for (pg
= &pending_gotos
; (g
= *pg
) && g
->c
> o
->cl
.n
;) {
6803 if (g
->prev_tok
->r
& LABEL_FORWARD
) {
6808 try_call_scope_cleanup(o
->cl
.s
);
6809 pcl
->jnext
= gjmp(0);
6811 goto remove_pending
;
6821 try_call_scope_cleanup(o
->cl
.s
);
6824 /* ------------------------------------------------------------------------- */
/* Restore the stack pointer from the saved VLA location 'loc'.
   NOTE(review): extraction fragment — an intervening line between the
   signature and the call (originals 6828-6829, presumably a guard such
   as 'if (loc)') is missing from this view; confirm in full file. */
6827 static void vla_restore(int loc
)
6830 gen_vla_sp_restore(loc
);
6833 static void vla_leave(struct scope
*o
)
6835 if (o
->vla
.num
< cur_scope
->vla
.num
)
6836 vla_restore(o
->vla
.loc
);
6839 /* ------------------------------------------------------------------------- */
6842 void new_scope(struct scope
*o
)
6844 /* copy and link previous scope */
6846 o
->prev
= cur_scope
;
6849 /* record local declaration stack position */
6850 o
->lstk
= local_stack
;
6851 o
->llstk
= local_label_stack
;
6855 if (tcc_state
->do_debug
)
6856 tcc_debug_stabn(N_LBRAC
, ind
- func_ind
);
6859 void prev_scope(struct scope
*o
, int is_expr
)
6863 if (o
->cl
.s
!= o
->prev
->cl
.s
)
6864 block_cleanup(o
->prev
);
6866 /* pop locally defined labels */
6867 label_pop(&local_label_stack
, o
->llstk
, is_expr
);
6869 /* In the is_expr case (a statement expression is finished here),
6870 vtop might refer to symbols on the local_stack. Either via the
6871 type or via vtop->sym. We can't pop those nor any that in turn
6872 might be referred to. To make it easier we don't roll back
6873 any symbols in that case; some upper level call to block() will
6874 do that. We do have to remove such symbols from the lookup
6875 tables, though. sym_pop will do that. */
6877 /* pop locally defined symbols */
6878 pop_local_syms(&local_stack
, o
->lstk
, is_expr
, 0);
6879 cur_scope
= o
->prev
;
6882 if (tcc_state
->do_debug
)
6883 tcc_debug_stabn(N_RBRAC
, ind
- func_ind
);
6886 /* leave a scope via break/continue(/goto) */
6887 void leave_scope(struct scope
*o
)
6891 try_call_scope_cleanup(o
->cl
.s
);
6895 /* ------------------------------------------------------------------------- */
6896 /* call block from 'for do while' loops */
6898 static void lblock(int *bsym
, int *csym
)
6900 struct scope
*lo
= loop_scope
, *co
= cur_scope
;
6901 int *b
= co
->bsym
, *c
= co
->csym
;
6915 static void block(int is_expr
)
6917 int a
, b
, c
, d
, e
, t
;
6922 /* default return value is (void) */
6924 vtop
->type
.t
= VT_VOID
;
6936 if (tok
== TOK_ELSE
) {
6941 gsym(d
); /* patch else jmp */
6946 } else if (t
== TOK_WHILE
) {
6958 } else if (t
== '{') {
6961 /* handle local labels declarations */
6962 while (tok
== TOK_LABEL
) {
6965 if (tok
< TOK_UIDENT
)
6966 expect("label identifier");
6967 label_push(&local_label_stack
, tok
, LABEL_DECLARED
);
6969 } while (tok
== ',');
6973 while (tok
!= '}') {
6982 prev_scope(&o
, is_expr
);
6985 else if (!nocode_wanted
)
6986 check_func_return();
6988 } else if (t
== TOK_RETURN
) {
6989 b
= (func_vt
.t
& VT_BTYPE
) != VT_VOID
;
6993 gen_assign_cast(&func_vt
);
6995 if (vtop
->type
.t
!= VT_VOID
)
6996 tcc_warning("void function returns a value");
7000 tcc_warning("'return' with no value");
7003 leave_scope(root_scope
);
7005 gfunc_return(&func_vt
);
7007 /* jump unless last stmt in top-level block */
7008 if (tok
!= '}' || local_scope
!= 1)
7012 } else if (t
== TOK_BREAK
) {
7014 if (!cur_scope
->bsym
)
7015 tcc_error("cannot break");
7016 if (cur_switch
&& cur_scope
->bsym
== cur_switch
->bsym
)
7017 leave_scope(cur_switch
->scope
);
7019 leave_scope(loop_scope
);
7020 *cur_scope
->bsym
= gjmp(*cur_scope
->bsym
);
7023 } else if (t
== TOK_CONTINUE
) {
7025 if (!cur_scope
->csym
)
7026 tcc_error("cannot continue");
7027 leave_scope(loop_scope
);
7028 *cur_scope
->csym
= gjmp(*cur_scope
->csym
);
7031 } else if (t
== TOK_FOR
) {
7036 /* c99 for-loop init decl? */
7037 if (!decl0(VT_LOCAL
, 1, NULL
)) {
7038 /* no, regular for-loop init expr */
7066 } else if (t
== TOK_DO
) {
7080 } else if (t
== TOK_SWITCH
) {
7081 struct switch_t
*sw
;
7083 sw
= tcc_mallocz(sizeof *sw
);
7085 sw
->scope
= cur_scope
;
7086 sw
->prev
= cur_switch
;
7092 sw
->sv
= *vtop
--; /* save switch value */
7095 b
= gjmp(0); /* jump to first case */
7097 a
= gjmp(a
); /* add implicit break */
7101 if (sw
->sv
.type
.t
& VT_UNSIGNED
)
7102 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpu
);
7104 qsort(sw
->p
, sw
->n
, sizeof(void*), case_cmpi
);
7106 for (b
= 1; b
< sw
->n
; b
++)
7107 if (sw
->sv
.type
.t
& VT_UNSIGNED
7108 ? (uint64_t)sw
->p
[b
- 1]->v2
>= (uint64_t)sw
->p
[b
]->v1
7109 : sw
->p
[b
- 1]->v2
>= sw
->p
[b
]->v1
)
7110 tcc_error("duplicate case value");
7114 d
= 0, gcase(sw
->p
, sw
->n
, &d
);
7117 gsym_addr(d
, sw
->def_sym
);
7123 dynarray_reset(&sw
->p
, &sw
->n
);
7124 cur_switch
= sw
->prev
;
7127 } else if (t
== TOK_CASE
) {
7128 struct case_t
*cr
= tcc_malloc(sizeof(struct case_t
));
7131 cr
->v1
= cr
->v2
= expr_const64();
7132 if (gnu_ext
&& tok
== TOK_DOTS
) {
7134 cr
->v2
= expr_const64();
7135 if ((!(cur_switch
->sv
.type
.t
& VT_UNSIGNED
) && cr
->v2
< cr
->v1
)
7136 || (cur_switch
->sv
.type
.t
& VT_UNSIGNED
&& (uint64_t)cr
->v2
< (uint64_t)cr
->v1
))
7137 tcc_warning("empty case range");
7140 dynarray_add(&cur_switch
->p
, &cur_switch
->n
, cr
);
7143 goto block_after_label
;
7145 } else if (t
== TOK_DEFAULT
) {
7148 if (cur_switch
->def_sym
)
7149 tcc_error("too many 'default'");
7150 cur_switch
->def_sym
= gind();
7153 goto block_after_label
;
7155 } else if (t
== TOK_GOTO
) {
7156 vla_restore(root_scope
->vla
.loc
);
7157 if (tok
== '*' && gnu_ext
) {
7161 if ((vtop
->type
.t
& VT_BTYPE
) != VT_PTR
)
7165 } else if (tok
>= TOK_UIDENT
) {
7166 s
= label_find(tok
);
7167 /* put forward definition if needed */
7169 s
= label_push(&global_label_stack
, tok
, LABEL_FORWARD
);
7170 else if (s
->r
== LABEL_DECLARED
)
7171 s
->r
= LABEL_FORWARD
;
7173 if (s
->r
& LABEL_FORWARD
) {
7174 /* start new goto chain for cleanups, linked via label->next */
7175 if (cur_scope
->cl
.s
&& !nocode_wanted
) {
7176 sym_push2(&pending_gotos
, SYM_FIELD
, 0, cur_scope
->cl
.n
);
7177 pending_gotos
->prev_tok
= s
;
7178 s
= sym_push2(&s
->next
, SYM_FIELD
, 0, 0);
7179 pending_gotos
->next
= s
;
7181 s
->jnext
= gjmp(s
->jnext
);
7183 try_call_cleanup_goto(s
->cleanupstate
);
7184 gjmp_addr(s
->jnext
);
7189 expect("label identifier");
7193 } else if (t
== TOK_ASM1
|| t
== TOK_ASM2
|| t
== TOK_ASM3
) {
7197 if (tok
== ':' && t
>= TOK_UIDENT
) {
7202 if (s
->r
== LABEL_DEFINED
)
7203 tcc_error("duplicate label '%s'", get_tok_str(s
->v
, NULL
));
7204 s
->r
= LABEL_DEFINED
;
7206 Sym
*pcl
; /* pending cleanup goto */
7207 for (pcl
= s
->next
; pcl
; pcl
= pcl
->prev
)
7209 sym_pop(&s
->next
, NULL
, 0);
7213 s
= label_push(&global_label_stack
, t
, LABEL_DEFINED
);
7216 s
->cleanupstate
= cur_scope
->cl
.s
;
7219 vla_restore(cur_scope
->vla
.loc
);
7220 /* we accept this, but it is a mistake */
7222 tcc_warning("deprecated use of label at end of compound statement");
7228 /* expression case */
7244 /* This skips over a stream of tokens containing balanced {} and ()
7245 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7246 with a '{'). If STR then allocates and stores the skipped tokens
7247 in *STR. This doesn't check if () and {} are nested correctly,
7248 i.e. "({)}" is accepted. */
7249 static void skip_or_save_block(TokenString
**str
)
7251 int braces
= tok
== '{';
7254 *str
= tok_str_alloc();
7256 while ((level
> 0 || (tok
!= '}' && tok
!= ',' && tok
!= ';' && tok
!= ')'))) {
7258 if (tok
== TOK_EOF
) {
7259 if (str
|| level
> 0)
7260 tcc_error("unexpected end of file");
7265 tok_str_add_tok(*str
);
7268 if (t
== '{' || t
== '(') {
7270 } else if (t
== '}' || t
== ')') {
7272 if (level
== 0 && braces
&& t
== '}')
7277 tok_str_add(*str
, -1);
7278 tok_str_add(*str
, 0);
7282 #define EXPR_CONST 1
7285 static void parse_init_elem(int expr_type
)
7287 int saved_global_expr
;
7290 /* compound literals must be allocated globally in this case */
7291 saved_global_expr
= global_expr
;
7294 global_expr
= saved_global_expr
;
7295 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7296 (compound literals). */
7297 if (((vtop
->r
& (VT_VALMASK
| VT_LVAL
)) != VT_CONST
7298 && ((vtop
->r
& (VT_SYM
|VT_LVAL
)) != (VT_SYM
|VT_LVAL
)
7299 || vtop
->sym
->v
< SYM_FIRST_ANOM
))
7300 #ifdef TCC_TARGET_PE
7301 || ((vtop
->r
& VT_SYM
) && vtop
->sym
->a
.dllimport
)
7304 tcc_error("initializer element is not constant");
7312 /* put zeros for variable based init */
7313 static void init_putz(Section
*sec
, unsigned long c
, int size
)
7316 /* nothing to do because globals are already set to zero */
7318 vpush_global_sym(&func_old_type
, TOK_memset
);
7320 #ifdef TCC_TARGET_ARM
7332 #define DIF_SIZE_ONLY 2
7333 #define DIF_HAVE_ELEM 4
7335 /* t is the array or struct type. c is the array or struct
7336 address. cur_field is the pointer to the current
7337 field, for arrays the 'c' member contains the current start
7338 index. 'flags' is as in decl_initializer.
7339 'al' contains the already initialized length of the
7340 current container (starting at c). This returns the new length of that. */
7341 static int decl_designator(CType
*type
, Section
*sec
, unsigned long c
,
7342 Sym
**cur_field
, int flags
, int al
)
7345 int index
, index_last
, align
, l
, nb_elems
, elem_size
;
7346 unsigned long corig
= c
;
7351 if (flags
& DIF_HAVE_ELEM
)
7354 if (gnu_ext
&& tok
>= TOK_UIDENT
) {
7361 /* NOTE: we only support ranges for last designator */
7362 while (nb_elems
== 1 && (tok
== '[' || tok
== '.')) {
7364 if (!(type
->t
& VT_ARRAY
))
7365 expect("array type");
7367 index
= index_last
= expr_const();
7368 if (tok
== TOK_DOTS
&& gnu_ext
) {
7370 index_last
= expr_const();
7374 if (index
< 0 || (s
->c
>= 0 && index_last
>= s
->c
) ||
7376 tcc_error("invalid index");
7378 (*cur_field
)->c
= index_last
;
7379 type
= pointed_type(type
);
7380 elem_size
= type_size(type
, &align
);
7381 c
+= index
* elem_size
;
7382 nb_elems
= index_last
- index
+ 1;
7389 if ((type
->t
& VT_BTYPE
) != VT_STRUCT
)
7390 expect("struct/union type");
7392 f
= find_field(type
, l
, &cumofs
);
7405 } else if (!gnu_ext
) {
7410 if (type
->t
& VT_ARRAY
) {
7411 index
= (*cur_field
)->c
;
7412 if (type
->ref
->c
>= 0 && index
>= type
->ref
->c
)
7413 tcc_error("index too large");
7414 type
= pointed_type(type
);
7415 c
+= index
* type_size(type
, &align
);
7418 while (f
&& (f
->v
& SYM_FIRST_ANOM
) && (f
->type
.t
& VT_BITFIELD
))
7419 *cur_field
= f
= f
->next
;
7421 tcc_error("too many field init");
7426 /* must put zero in holes (note that doing it that way
7427 ensures that it even works with designators) */
7428 if (!(flags
& DIF_SIZE_ONLY
) && c
- corig
> al
)
7429 init_putz(sec
, corig
+ al
, c
- corig
- al
);
7430 decl_initializer(type
, sec
, c
, flags
& ~DIF_FIRST
);
7432 /* XXX: make it more general */
7433 if (!(flags
& DIF_SIZE_ONLY
) && nb_elems
> 1) {
7434 unsigned long c_end
;
7439 vset(type
, VT_LOCAL
|VT_LVAL
, c
);
7440 for (i
= 1; i
< nb_elems
; i
++) {
7441 vset(type
, VT_LOCAL
|VT_LVAL
, c
+ elem_size
* i
);
7446 } else if (!NODATA_WANTED
) {
7447 c_end
= c
+ nb_elems
* elem_size
;
7448 if (c_end
> sec
->data_allocated
)
7449 section_realloc(sec
, c_end
);
7450 src
= sec
->data
+ c
;
7452 for(i
= 1; i
< nb_elems
; i
++) {
7454 memcpy(dst
, src
, elem_size
);
7458 c
+= nb_elems
* type_size(type
, &align
);
7464 /* store a value or an expression directly in global data or in local array */
7465 static void init_putv(CType
*type
, Section
*sec
, unsigned long c
)
7472 dtype
.t
&= ~VT_CONSTANT
; /* need to do that to avoid false warning */
7476 /* XXX: not portable */
7477 /* XXX: generate error if incorrect relocation */
7478 gen_assign_cast(&dtype
);
7479 bt
= type
->t
& VT_BTYPE
;
7481 if ((vtop
->r
& VT_SYM
)
7484 && (bt
!= (PTR_SIZE
== 8 ? VT_LLONG
: VT_INT
)
7485 || (type
->t
& VT_BITFIELD
))
7486 && !((vtop
->r
& VT_CONST
) && vtop
->sym
->v
>= SYM_FIRST_ANOM
)
7488 tcc_error("initializer element is not computable at load time");
7490 if (NODATA_WANTED
) {
7495 size
= type_size(type
, &align
);
7496 section_reserve(sec
, c
+ size
);
7497 ptr
= sec
->data
+ c
;
7499 /* XXX: make code faster ? */
7500 if ((vtop
->r
& (VT_SYM
|VT_CONST
)) == (VT_SYM
|VT_CONST
) &&
7501 vtop
->sym
->v
>= SYM_FIRST_ANOM
&&
7502 /* XXX This rejects compound literals like
7503 '(void *){ptr}'. The problem is that '&sym' is
7504 represented the same way, which would be ruled out
7505 by the SYM_FIRST_ANOM check above, but also '"string"'
7506 in 'char *p = "string"' is represented the same
7507 with the type being VT_PTR and the symbol being an
7508 anonymous one. That is, there's no difference in vtop
7509 between '(void *){x}' and '&(void *){x}'. Ignore
7510 pointer typed entities here. Hopefully no real code
7511 will ever use compound literals with scalar type. */
7512 (vtop
->type
.t
& VT_BTYPE
) != VT_PTR
) {
7513 /* These come from compound literals, memcpy stuff over. */
7517 esym
= elfsym(vtop
->sym
);
7518 ssec
= tcc_state
->sections
[esym
->st_shndx
];
7519 memmove (ptr
, ssec
->data
+ esym
->st_value
+ (int)vtop
->c
.i
, size
);
7521 /* We need to copy over all memory contents, and that
7522 includes relocations. Use the fact that relocs are
7523 created it order, so look from the end of relocs
7524 until we hit one before the copied region. */
7525 int num_relocs
= ssec
->reloc
->data_offset
/ sizeof(*rel
);
7526 rel
= (ElfW_Rel
*)(ssec
->reloc
->data
+ ssec
->reloc
->data_offset
);
7527 while (num_relocs
--) {
7529 if (rel
->r_offset
>= esym
->st_value
+ size
)
7531 if (rel
->r_offset
< esym
->st_value
)
7533 /* Note: if the same fields are initialized multiple
7534 times (possible with designators) then we possibly
7535 add multiple relocations for the same offset here.
7536 That would lead to wrong code, the last reloc needs
7537 to win. We clean this up later after the whole
7538 initializer is parsed. */
7539 put_elf_reloca(symtab_section
, sec
,
7540 c
+ rel
->r_offset
- esym
->st_value
,
7541 ELFW(R_TYPE
)(rel
->r_info
),
7542 ELFW(R_SYM
)(rel
->r_info
),
7552 if (type
->t
& VT_BITFIELD
) {
7553 int bit_pos
, bit_size
, bits
, n
;
7554 unsigned char *p
, v
, m
;
7555 bit_pos
= BIT_POS(vtop
->type
.t
);
7556 bit_size
= BIT_SIZE(vtop
->type
.t
);
7557 p
= (unsigned char*)ptr
+ (bit_pos
>> 3);
7558 bit_pos
&= 7, bits
= 0;
7563 v
= vtop
->c
.i
>> bits
<< bit_pos
;
7564 m
= ((1 << n
) - 1) << bit_pos
;
7565 *p
= (*p
& ~m
) | (v
& m
);
7566 bits
+= n
, bit_size
-= n
, bit_pos
= 0, ++p
;
7570 /* XXX: when cross-compiling we assume that each type has the
7571 same representation on host and target, which is likely to
7572 be wrong in the case of long double */
7574 vtop
->c
.i
= vtop
->c
.i
!= 0;
7576 *(char *)ptr
|= vtop
->c
.i
;
7579 *(short *)ptr
|= vtop
->c
.i
;
7582 *(float*)ptr
= vtop
->c
.f
;
7585 *(double *)ptr
= vtop
->c
.d
;
7588 #if defined TCC_IS_NATIVE_387
7589 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7590 memcpy(ptr
, &vtop
->c
.ld
, 10);
7592 else if (sizeof (long double) == sizeof (double))
7593 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr
) : "m" (vtop
->c
.ld
));
7595 else if (vtop
->c
.ld
== 0.0)
7599 if (sizeof(long double) == LDOUBLE_SIZE
)
7600 *(long double*)ptr
= vtop
->c
.ld
;
7601 else if (sizeof(double) == LDOUBLE_SIZE
)
7602 *(double *)ptr
= (double)vtop
->c
.ld
;
7604 tcc_error("can't cross compile long double constants");
7608 *(long long *)ptr
|= vtop
->c
.i
;
7615 addr_t val
= vtop
->c
.i
;
7617 if (vtop
->r
& VT_SYM
)
7618 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7620 *(addr_t
*)ptr
|= val
;
7622 if (vtop
->r
& VT_SYM
)
7623 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7624 *(addr_t
*)ptr
|= val
;
7630 int val
= vtop
->c
.i
;
7632 if (vtop
->r
& VT_SYM
)
7633 greloca(sec
, vtop
->sym
, c
, R_DATA_PTR
, val
);
7637 if (vtop
->r
& VT_SYM
)
7638 greloc(sec
, vtop
->sym
, c
, R_DATA_PTR
);
7647 vset(&dtype
, VT_LOCAL
|VT_LVAL
, c
);
7654 /* 't' contains the type and storage info. 'c' is the offset of the
7655 object in section 'sec'. If 'sec' is NULL, it means stack based
7656 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7657 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7658 size only evaluation is wanted (only for arrays). */
7659 static void decl_initializer(CType
*type
, Section
*sec
, unsigned long c
,
7662 int len
, n
, no_oblock
, i
;
7668 if (!(flags
& DIF_HAVE_ELEM
) && tok
!= '{' &&
7669 /* In case of strings we have special handling for arrays, so
7670 don't consume them as initializer value (which would commit them
7671 to some anonymous symbol). */
7672 tok
!= TOK_LSTR
&& tok
!= TOK_STR
&&
7673 !(flags
& DIF_SIZE_ONLY
)) {
7674 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7675 flags
|= DIF_HAVE_ELEM
;
7678 if ((flags
& DIF_HAVE_ELEM
) &&
7679 !(type
->t
& VT_ARRAY
) &&
7680 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7681 The source type might have VT_CONSTANT set, which is
7682 of course assignable to non-const elements. */
7683 is_compatible_unqualified_types(type
, &vtop
->type
)) {
7684 init_putv(type
, sec
, c
);
7685 } else if (type
->t
& VT_ARRAY
) {
7688 t1
= pointed_type(type
);
7689 size1
= type_size(t1
, &align1
);
7692 if (((flags
& DIF_FIRST
) && tok
!= TOK_LSTR
&& tok
!= TOK_STR
) ||
7695 tcc_error("character array initializer must be a literal,"
7696 " optionally enclosed in braces");
7701 /* only parse strings here if correct type (otherwise: handle
7702 them as ((w)char *) expressions */
7703 if ((tok
== TOK_LSTR
&&
7704 #ifdef TCC_TARGET_PE
7705 (t1
->t
& VT_BTYPE
) == VT_SHORT
&& (t1
->t
& VT_UNSIGNED
)
7707 (t1
->t
& VT_BTYPE
) == VT_INT
7709 ) || (tok
== TOK_STR
&& (t1
->t
& VT_BTYPE
) == VT_BYTE
)) {
7712 cstr_reset(&initstr
);
7713 if (size1
!= (tok
== TOK_STR
? 1 : sizeof(nwchar_t
)))
7714 tcc_error("unhandled string literal merging");
7715 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7717 initstr
.size
-= size1
;
7719 len
+= tokc
.str
.size
;
7721 len
+= tokc
.str
.size
/ sizeof(nwchar_t
);
7723 cstr_cat(&initstr
, tokc
.str
.data
, tokc
.str
.size
);
7726 if (tok
!= ')' && tok
!= '}' && tok
!= ',' && tok
!= ';'
7727 && tok
!= TOK_EOF
) {
7728 /* Not a lone literal but part of a bigger expression. */
7729 unget_tok(size1
== 1 ? TOK_STR
: TOK_LSTR
);
7730 tokc
.str
.size
= initstr
.size
;
7731 tokc
.str
.data
= initstr
.data
;
7737 if (n
>= 0 && len
> n
)
7739 if (!(flags
& DIF_SIZE_ONLY
)) {
7740 if (sec
&& !NODATA_WANTED
&&
7741 (c
+ nb
> sec
->data_allocated
))
7742 nb
= sec
->data_allocated
- c
;
7744 tcc_warning("initializer-string for array is too long");
7745 /* in order to go faster for common case (char
7746 string in global variable, we handle it
7748 if (sec
&& size1
== 1) {
7750 memcpy(sec
->data
+ c
, initstr
.data
, nb
);
7754 ch
= ((unsigned char *)initstr
.data
)[i
];
7756 ch
= ((nwchar_t
*)initstr
.data
)[i
];
7758 init_putv(t1
, sec
, c
+ i
* size1
);
7762 /* only add trailing zero if enough storage (no
7763 warning in this case since it is standard) */
7764 if (n
< 0 || len
< n
) {
7765 if (!(flags
& DIF_SIZE_ONLY
)) {
7767 init_putv(t1
, sec
, c
+ (len
* size1
));
7778 while (tok
!= '}' || (flags
& DIF_HAVE_ELEM
)) {
7779 len
= decl_designator(type
, sec
, c
, &f
, flags
, len
);
7780 flags
&= ~DIF_HAVE_ELEM
;
7781 if (type
->t
& VT_ARRAY
) {
7783 /* special test for multi dimensional arrays (may not
7784 be strictly correct if designators are used at the
7786 if (no_oblock
&& len
>= n
*size1
)
7789 if (s
->type
.t
== VT_UNION
)
7793 if (no_oblock
&& f
== NULL
)
7802 /* put zeros at the end */
7803 if (!(flags
& DIF_SIZE_ONLY
) && len
< n
*size1
)
7804 init_putz(sec
, c
+ len
, n
*size1
- len
);
7807 /* patch type size if needed, which happens only for array types */
7809 s
->c
= size1
== 1 ? len
: ((len
+ size1
- 1)/size1
);
7810 } else if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7813 if ((flags
& DIF_FIRST
) || tok
== '{') {
7821 } else if (tok
== '{') {
7822 if (flags
& DIF_HAVE_ELEM
)
7825 decl_initializer(type
, sec
, c
, flags
& ~DIF_HAVE_ELEM
);
7827 } else if ((flags
& DIF_SIZE_ONLY
)) {
7828 /* If we supported only ISO C we wouldn't have to accept calling
7829 this on anything than an array if DIF_SIZE_ONLY (and even then
7830 only on the outermost level, so no recursion would be needed),
7831 because initializing a flex array member isn't supported.
7832 But GNU C supports it, so we need to recurse even into
7833 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7834 /* just skip expression */
7835 skip_or_save_block(NULL
);
7837 if (!(flags
& DIF_HAVE_ELEM
)) {
7838 /* This should happen only when we haven't parsed
7839 the init element above for fear of committing a
7840 string constant to memory too early. */
7841 if (tok
!= TOK_STR
&& tok
!= TOK_LSTR
)
7842 expect("string constant");
7843 parse_init_elem(!sec
? EXPR_ANY
: EXPR_CONST
);
7845 init_putv(type
, sec
, c
);
7849 /* parse an initializer for type 't' if 'has_init' is non zero, and
7850 allocate space in local or global data space ('r' is either
7851 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7852 variable 'v' of scope 'scope' is declared before initializers
7853 are parsed. If 'v' is zero, then a reference to the new object
7854 is put in the value stack. If 'has_init' is 2, a special parsing
7855 is done to handle string constants. */
7856 static void decl_initializer_alloc(CType
*type
, AttributeDef
*ad
, int r
,
7857 int has_init
, int v
, int scope
)
7859 int size
, align
, addr
;
7860 TokenString
*init_str
= NULL
;
7863 Sym
*flexible_array
;
7865 int saved_nocode_wanted
= nocode_wanted
;
7866 #ifdef CONFIG_TCC_BCHECK
7867 int bcheck
= tcc_state
->do_bounds_check
&& !NODATA_WANTED
;
7870 /* Always allocate static or global variables */
7871 if (v
&& (r
& VT_VALMASK
) == VT_CONST
)
7872 nocode_wanted
|= 0x80000000;
7874 flexible_array
= NULL
;
7875 if ((type
->t
& VT_BTYPE
) == VT_STRUCT
) {
7876 Sym
*field
= type
->ref
->next
;
7879 field
= field
->next
;
7880 if (field
->type
.t
& VT_ARRAY
&& field
->type
.ref
->c
< 0)
7881 flexible_array
= field
;
7885 size
= type_size(type
, &align
);
7886 /* If unknown size, we must evaluate it before
7887 evaluating initializers because
7888 initializers can generate global data too
7889 (e.g. string pointers or ISOC99 compound
7890 literals). It also simplifies local
7891 initializers handling */
7892 if (size
< 0 || (flexible_array
&& has_init
)) {
7894 tcc_error("unknown type size");
7895 /* get all init string */
7896 if (has_init
== 2) {
7897 init_str
= tok_str_alloc();
7898 /* only get strings */
7899 while (tok
== TOK_STR
|| tok
== TOK_LSTR
) {
7900 tok_str_add_tok(init_str
);
7903 tok_str_add(init_str
, -1);
7904 tok_str_add(init_str
, 0);
7906 skip_or_save_block(&init_str
);
7911 begin_macro(init_str
, 1);
7913 decl_initializer(type
, NULL
, 0, DIF_FIRST
| DIF_SIZE_ONLY
);
7914 /* prepare second initializer parsing */
7915 macro_ptr
= init_str
->str
;
7918 /* if still unknown size, error */
7919 size
= type_size(type
, &align
);
7921 tcc_error("unknown type size");
7923 /* If there's a flex member and it was used in the initializer
7925 if (flexible_array
&&
7926 flexible_array
->type
.ref
->c
> 0)
7927 size
+= flexible_array
->type
.ref
->c
7928 * pointed_size(&flexible_array
->type
);
7929 /* take into account specified alignment if bigger */
7930 if (ad
->a
.aligned
) {
7931 int speca
= 1 << (ad
->a
.aligned
- 1);
7934 } else if (ad
->a
.packed
) {
7938 if (!v
&& NODATA_WANTED
)
7939 size
= 0, align
= 1;
7941 if ((r
& VT_VALMASK
) == VT_LOCAL
) {
7943 #ifdef CONFIG_TCC_BCHECK
7945 /* add padding between stack variables for bound checking */
7949 loc
= (loc
- size
) & -align
;
7951 #ifdef CONFIG_TCC_BCHECK
7953 /* add padding between stack variables for bound checking */
7958 /* local variable */
7959 #ifdef CONFIG_TCC_ASM
7960 if (ad
->asm_label
) {
7961 int reg
= asm_parse_regvar(ad
->asm_label
);
7963 r
= (r
& ~VT_VALMASK
) | reg
;
7966 sym
= sym_push(v
, type
, r
, addr
);
7967 if (ad
->cleanup_func
) {
7968 Sym
*cls
= sym_push2(&all_cleanups
,
7969 SYM_FIELD
| ++cur_scope
->cl
.n
, 0, 0);
7970 cls
->prev_tok
= sym
;
7971 cls
->next
= ad
->cleanup_func
;
7972 cls
->ncl
= cur_scope
->cl
.s
;
7973 cur_scope
->cl
.s
= cls
;
7978 /* push local reference */
7979 vset(type
, r
, addr
);
7982 if (v
&& scope
== VT_CONST
) {
7983 /* see if the symbol was already defined */
7986 patch_storage(sym
, ad
, type
);
7987 /* we accept several definitions of the same global variable. */
7988 if (!has_init
&& sym
->c
&& elfsym(sym
)->st_shndx
!= SHN_UNDEF
)
7993 /* allocate symbol in corresponding section */
7998 else if (tcc_state
->nocommon
)
8003 addr
= section_add(sec
, size
, align
);
8004 #ifdef CONFIG_TCC_BCHECK
8005 /* add padding if bound check */
8007 section_add(sec
, 1, 1);
8010 addr
= align
; /* SHN_COMMON is special, symbol value is align */
8011 sec
= common_section
;
8016 sym
= sym_push(v
, type
, r
| VT_SYM
, 0);
8017 patch_storage(sym
, ad
, NULL
);
8019 /* update symbol definition */
8020 put_extern_sym(sym
, sec
, addr
, size
);
8022 /* push global reference */
8023 vpush_ref(type
, sec
, addr
, size
);
8028 #ifdef CONFIG_TCC_BCHECK
8029 /* handles bounds now because the symbol must be defined
8030 before for the relocation */
8034 greloca(bounds_section
, sym
, bounds_section
->data_offset
, R_DATA_PTR
, 0);
8035 /* then add global bound info */
8036 bounds_ptr
= section_ptr_add(bounds_section
, 2 * sizeof(addr_t
));
8037 bounds_ptr
[0] = 0; /* relocated */
8038 bounds_ptr
[1] = size
;
8043 if (type
->t
& VT_VLA
) {
8049 /* save current stack pointer */
8050 if (root_scope
->vla
.loc
== 0) {
8051 struct scope
*v
= cur_scope
;
8052 gen_vla_sp_save(loc
-= PTR_SIZE
);
8053 do v
->vla
.loc
= loc
; while ((v
= v
->prev
));
8056 vla_runtime_type_size(type
, &a
);
8057 gen_vla_alloc(type
, a
);
8058 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8059 /* on _WIN64, because of the function args scratch area, the
8060 result of alloca differs from RSP and is returned in RAX. */
8061 gen_vla_result(addr
), addr
= (loc
-= PTR_SIZE
);
8063 gen_vla_sp_save(addr
);
8064 cur_scope
->vla
.loc
= addr
;
8065 cur_scope
->vla
.num
++;
8066 } else if (has_init
) {
8067 size_t oldreloc_offset
= 0;
8068 if (sec
&& sec
->reloc
)
8069 oldreloc_offset
= sec
->reloc
->data_offset
;
8070 decl_initializer(type
, sec
, addr
, DIF_FIRST
);
8071 if (sec
&& sec
->reloc
)
8072 squeeze_multi_relocs(sec
, oldreloc_offset
);
8073 /* patch flexible array member size back to -1, */
8074 /* for possible subsequent similar declarations */
8076 flexible_array
->type
.ref
->c
= -1;
8080 /* restore parse state if needed */
8086 nocode_wanted
= saved_nocode_wanted
;
8089 /* parse a function defined by symbol 'sym' and generate its code in
8090 'cur_text_section' */
/* Parse a function defined by symbol 'sym' and generate its code in
   'cur_text_section', then reset all per-function compiler state.
   NOTE(review): this extracted view elides several original lines
   (including some braces); every gap is marked with an "elided"
   comment — consult the full source before editing. */
static void gen_function(Sym *sym)
/* ... elided: opening brace of function body ... */
    /* per-function scope record; cur_scope/root_scope reference it for
       the duration of this function's compilation */
    struct scope f = { 0 };
    cur_scope = root_scope = &f;
    /* emit at the current end of the text section */
    ind = cur_text_section->data_offset;
    if (sym->a.aligned) {
        /* pad with nops up to the requested 2^(aligned-1) boundary */
        size_t newoff = section_add(cur_text_section, 0,
                                    1 << (sym->a.aligned - 1));
        gen_fill_nops(newoff - ind);
    /* ... elided: remainder of the alignment branch ... */
    /* NOTE: we patch the symbol size later */
    put_extern_sym(sym, cur_text_section, ind, 0);
    /* record constructor/destructor functions so they are run at
       program start/exit */
    if (sym->type.ref->f.func_ctor)
        add_array (tcc_state, ".init_array", sym->c);
    if (sym->type.ref->f.func_dtor)
        add_array (tcc_state, ".fini_array", sym->c);
    funcname = get_tok_str(sym->v, NULL);
    /* ... elided ... */
    /* return type and variadic-ness of the function being compiled */
    func_vt = sym->type.ref->type;
    func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
    /* put debug symbol */
    tcc_debug_funcstart(tcc_state, sym);
    /* push a dummy symbol to enable local sym storage */
    sym_push2(&local_stack, SYM_FIELD, 0, 0);
    local_scope = 1; /* for function parameters */
    /* ... elided: prologue generation and parsing of the body ... */
    clear_temp_local_var_list();
    /* ... elided ... */
    /* reset local stack */
    pop_local_syms(&local_stack, NULL, 0, func_var);
    /* ... elided ... */
    cur_text_section->data_offset = ind;
    /* ... elided ... */
    label_pop(&global_label_stack, NULL, 0);
    sym_pop(&all_cleanups, NULL, 0);
    /* patch symbol size */
    elfsym(sym)->st_size = ind - func_ind;
    /* end of function */
    tcc_debug_funcend(tcc_state, ind - func_ind);
    /* It's better to crash than to generate wrong code */
    cur_text_section = NULL;
    funcname = ""; /* for safety */
    func_vt.t = VT_VOID; /* for safety */
    func_var = 0; /* for safety */
    ind = 0; /* for safety */
    /* suppress any further code/static-data emission until the next
       function starts (see NODATA_WANTED / CODE_OFF machinery) */
    nocode_wanted = 0x80000000;
    /* do this after funcend debug info */
/* ... elided: trailing statements and closing brace ... */
/* Repeatedly scan the recorded inline functions and generate code for
   every one that was referenced (sym->c set) or forced non-internal
   (not VT_INLINE anymore), iterating until a pass generates nothing —
   generating one inline function may reference another.
   NOTE(review): this extracted view elides several original lines
   (opening brace, 'do {', the fetch of 'sym', macro teardown, ...);
   gaps are marked with "elided" comments. */
static void gen_inline_functions(TCCState *s)
/* ... elided: opening brace and declaration of 'sym' ... */
    int inline_generated, i;
    struct InlineFunc *fn;

    /* dummy buffer so the token-string replay has a file context */
    tcc_open_bf(s, ":inline:", 0);
    /* iterate while inline functions are referenced */
    /* ... elided: 'do {' ... */
        inline_generated = 0;
        for (i = 0; i < s->nb_inline_fns; ++i) {
            fn = s->inline_fns[i];
            /* ... elided: assignment of fn's symbol to 'sym' ... */
            if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
                /* the function was used or forced (and then not internal):
                   generate its code and convert it to a normal function */
                /* ... elided ... */
                tcc_debug_putfile(s, fn->filename);
                /* replay the saved token string of the function body */
                begin_macro(fn->func_str, 1);
                /* ... elided ... */
                cur_text_section = text_section;
                /* ... elided: code generation and macro teardown ... */
                inline_generated = 1;
            /* ... elided: closing braces of if and for ... */
    } while (inline_generated);
/* ... elided: buffer close and closing brace ... */
/* Release the saved token strings of inline functions that were never
   generated, then free the inline-function list itself.
   NOTE(review): the opening brace, the declaration of 'i', a guard
   inside the loop and the closing braces are elided in this extracted
   view; gaps are marked with "elided" comments. */
static void free_inline_functions(TCCState *s)
/* ... elided: opening brace and 'int i;' ... */
    /* free tokens of unused inline functions */
    for (i = 0; i < s->nb_inline_fns; ++i) {
        struct InlineFunc *fn = s->inline_fns[i];
        /* ... elided: condition guarding the free ... */
            tok_str_free(fn->func_str);
    /* ... elided: closing brace of loop ... */
    /* frees both the array of pointers and resets the count */
    dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
/* ... elided: closing brace ... */
8192 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8193 if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* Parse and process a sequence of declarations/definitions.
   'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
   if parsing an old-style parameter decl list (func_sym is set then).
   NOTE(review): this extracted view elides a large number of original
   lines — control-flow braces, whole branches, local declarations and
   the return statements.  Every gap is marked with an "elided" comment;
   consult the full source before editing. */
static int decl0(int l, int is_for_loop_init, Sym *func_sym)
/* ... elided: opening brace and local declarations (v, r, type, btype,
   sym, has_init, buf, error_str, ...) ... */
    AttributeDef ad, adbase;

    /* ... elided: top of the outer declaration loop ... */
    if (tok == TOK_STATIC_ASSERT) {
        /* C11 _Static_assert: evaluate the constant expression and,
           if false, report the optional string-literal message */
        /* ... elided ... */
            tcc_error("_Static_assert fail");
        /* ... elided ... */
            goto static_assert_out;
        /* ... elided ... */
        parse_mult_str(&error_str, "string constant");
        /* ... elided ... */
        tcc_error("%s", (char *)error_str.data);
        cstr_free(&error_str);
        /* ... elided: static_assert_out label and token skipping ... */
    if (!parse_btype(&btype, &adbase)) {
        /* no type specifier found at this point */
        if (is_for_loop_init)
        /* ... elided ... */
        /* skip redundant ';' if not in old parameter decl scope */
        if (tok == ';' && l != VT_CMP) {
        /* ... elided ... */
        if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
            /* global asm block */
            /* ... elided ... */
        if (tok >= TOK_UIDENT) {
            /* special test for old K&R protos without explicit int
               type. Only accepted when defining global data */
            /* ... elided ... */
        /* ... elided ... */
            expect("declaration");
    /* ... elided ... */
    if ((btype.t & VT_BTYPE) == VT_STRUCT) {
        int v = btype.ref->v;
        /* a bare struct/union specifier declares nothing unless it
           introduced a named tag; warn on anonymous no-instance defs */
        if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
            tcc_warning("unnamed struct/union that defines no instances");
        /* ... elided ... */
    if (IS_ENUM(btype.t)) {
        /* ... elided ... */
    while (1) { /* iterate thru each declaration */
        /* If the base type itself was an array type of unspecified
           size (like in 'typedef int arr[]; arr x = {1};') then
           we will overwrite the unknown size by the real one for
           this decl. We need to unshare the ref symbol holding
           [continuation of comment elided] */
        if ((type.t & VT_ARRAY) && type.ref->c < 0) {
            /* unshare: push a fresh SYM_FIELD carrying the same
               element type and (still unknown) size */
            type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
        /* ... elided ... */
        type_decl(&type, &ad, &v, TYPE_DIRECT);
        /* ... elided: guard around the debug trace below (presumably
           compiled out; confirm against full source) ... */
        type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
        printf("type = '%s'\n", buf);
        /* ... elided ... */
        if ((type.t & VT_BTYPE) == VT_FUNC) {
            if ((type.t & VT_STATIC) && (l == VT_LOCAL))
                tcc_error("function without file scope cannot be static");
            /* if old style function prototype, we accept a
               [continuation of comment elided] */
            if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
                /* recurse to parse the K&R parameter declaration list */
                decl0(VT_CMP, 0, sym);
#ifdef TCC_TARGET_MACHO
            if (sym->f.func_alwinl
                && ((type.t & (VT_EXTERN | VT_INLINE))
                    == (VT_EXTERN | VT_INLINE))) {
                /* always_inline functions must be handled as if they
                   don't generate multiple global defs, even if extern
                   inline, i.e. GNU inline semantics for those. Rewrite
                   them into static inline. */
                type.t &= ~VT_EXTERN;
                type.t |= VT_STATIC;
            /* ... elided: closing brace and #endif ... */
            /* always compile 'extern inline' */
            if (type.t & VT_EXTERN)
                type.t &= ~VT_INLINE;
        /* ... elided ... */
        if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
            /* GNU asm label: __asm__("name") after the declarator */
            ad.asm_label = asm_label_instr();
            /* parse one last attribute list, after asm label */
            parse_attribute(&ad);
            /* gcc does not allow __asm__("label") with function definition,
               [continuation of comment elided] */
        /* ... elided ... */
#ifdef TCC_TARGET_PE
        if (ad.a.dllimport || ad.a.dllexport) {
            if (type.t & VT_STATIC)
                tcc_error("cannot have dll linkage with static");
            if (type.t & VT_TYPEDEF) {
                /* comma operator clears the flag while selecting the
                   attribute name for the warning message */
                tcc_warning("'%s' attribute ignored for typedef",
                            ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
                            (ad.a.dllexport = 0, "dllexport"));
            } else if (ad.a.dllimport) {
                if ((type.t & VT_BTYPE) == VT_FUNC)
                /* ... elided ... */
                    type.t |= VT_EXTERN;
        /* ... elided: closing braces, #endif, and the start of the
           function-definition branch ... */
                tcc_error("cannot use local functions");
            if ((type.t & VT_BTYPE) != VT_FUNC)
                expect("function definition");
            /* reject abstract declarators in function definition
               make old style params without decl have int type */
            /* ... elided ... */
            while ((sym = sym->next) != NULL) {
                if (!(sym->v & ~SYM_FIELD))
                    expect("identifier");
                if (sym->type.t == VT_VOID)
                    sym->type = int_type;
            /* ... elided ... */
            /* apply post-declaration attributes */
            merge_funcattr(&type.ref->f, &ad.f);
            /* ... elided ... */
            /* put function symbol */
            type.t &= ~VT_EXTERN;
            sym = external_sym(v, &type, 0, &ad);
            /* ... elided ... */
            /* static inline functions are just recorded as a kind
               of macro. Their code will be emitted at the end of
               the compilation unit only if they are used */
            if (sym->type.t & VT_INLINE) {
                struct InlineFunc *fn;
                /* flexible trailing buffer holds the filename copy */
                fn = tcc_malloc(sizeof *fn + strlen(file->filename));
                strcpy(fn->filename, file->filename);
                /* ... elided ... */
                /* save the body as a token string instead of compiling */
                skip_or_save_block(&fn->func_str);
                dynarray_add(&tcc_state->inline_fns,
                             &tcc_state->nb_inline_fns, fn);
            /* ... elided: else branch — compile the body now ... */
                /* compute text section */
                cur_text_section = ad.section;
                if (!cur_text_section)
                    cur_text_section = text_section;
            /* ... elided ... */
        /* ... elided: old-style (VT_CMP) parameter-declaration path ... */
            /* find parameter in function parameter list */
            for (sym = func_sym->next; sym; sym = sym->next)
                if ((sym->v & ~SYM_FIELD) == v)
                /* ... elided: jump out on match ... */
            tcc_error("declaration for parameter '%s' but no such parameter",
                      get_tok_str(v, NULL));
            /* ... elided: label targeted by the match above ... */
            if (type.t & VT_STORAGE) /* 'register' is okay */
                tcc_error("storage class specified for '%s'",
                          get_tok_str(v, NULL));
            /* params start out VT_VOID (see caller); non-void means a
               previous declaration already typed this parameter */
            if (sym->type.t != VT_VOID)
                tcc_error("redefinition of parameter '%s'",
                          get_tok_str(v, NULL));
            convert_parameter_type(&type);
            /* ... elided ... */
        } else if (type.t & VT_TYPEDEF) {
            /* save typedefed type */
            /* XXX: test storage specifiers ? */
            /* ... elided: lookup of any existing symbol for 'v' ... */
            if (sym && sym->sym_scope == local_scope) {
                /* same-scope redefinition must be an identical typedef */
                if (!is_compatible_types(&sym->type, &type)
                    || !(sym->type.t & VT_TYPEDEF))
                    tcc_error("incompatible redefinition of '%s'",
                              get_tok_str(v, NULL));
            /* ... elided ... */
                sym = sym_push(v, &type, 0, 0);
            /* ... elided ... */
        } else if ((type.t & VT_BTYPE) == VT_VOID
                   && !(type.t & VT_EXTERN)) {
            tcc_error("declaration of void object");
        /* ... elided: start of the variable/extern-declaration branch ... */
            if ((type.t & VT_BTYPE) == VT_FUNC) {
                /* external function definition */
                /* specific case for func_call attribute */
                /* ... elided ... */
            } else if (!(type.t & VT_ARRAY)) {
                /* not lvalue if array */
                /* ... elided ... */
            has_init = (tok == '=');
            if (has_init && (type.t & VT_VLA))
                tcc_error("variable length array cannot be initialized");
            if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
                || (type.t & VT_BTYPE) == VT_FUNC
                /* as with GCC, uninitialized global arrays with no size
                   are considered extern: */
                || ((type.t & VT_ARRAY) && !has_init
                    && l == VT_CONST && type.ref->c < 0)
            /* ... elided: end of condition and branch opening ... */
                /* external variable or function */
                type.t |= VT_EXTERN;
                sym = external_sym(v, &type, r, &ad);
            /* ... elided ... */
                if (type.t & VT_STATIC)
                /* ... elided ... */
                else if (l == VT_CONST)
                    /* uninitialized global variables may be overridden */
                    type.t |= VT_EXTERN;
                /* allocate storage and parse any initializer */
                decl_initializer_alloc(&type, &ad, r, has_init, v, l);
            /* ... elided: end of declaration loop ... */
    if (is_for_loop_init)
/* ... elided: return statements and closing braces ... */
/* Entry point for parsing a declaration sequence at scope level 'l'
   (VT_CONST for file scope, VT_LOCAL for block scope).
   NOTE(review): the function body is elided in this extracted view;
   presumably it forwards to decl0() — confirm against the full file. */
static void decl(int l)
8468 /* ------------------------------------------------------------------------- */
8471 /* ------------------------------------------------------------------------- */