Clear vtop.sym if saving on stack
[tinycc.git] / tccgen.c
blobb7f9e139d1aeb50db183abd05cdd183f325706a4
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
/* symbol stacks for identifiers, macros and labels, at global and local scope */
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
/* pool allocator state for Sym objects (see __sym_malloc / sym_free below) */
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int in_generic;
48 static int section_sym;
/* bit 0: stabs debug info, bit 1: test coverage (set in tccgen_compile) */
49 ST_DATA char debug_modes;
/* value stack: vtop points to the current top; one guard slot sits below vstack */
51 ST_DATA SValue *vtop;
52 static SValue _vstack[1 + VSTACK_SIZE];
53 #define vstack (_vstack + 1)
55 ST_DATA int const_wanted; /* true if constant wanted */
56 ST_DATA int nocode_wanted; /* no code generation wanted */
57 #define unevalmask 0xffff /* unevaluated subexpression */
58 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
59 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
61 /* Automagical code suppression ----> */
62 #define CODE_OFF() (nocode_wanted |= 0x20000000)
63 #define CODE_ON() (nocode_wanted &= ~0x20000000)
65 static void tcc_tcov_block_begin(void);
67 /* Clear 'nocode_wanted' at label if it was used */
68 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
/* return current output index and re-enable code generation (a label target) */
69 static int gind(void) { int t = ind; CODE_ON(); if (debug_modes) tcc_tcov_block_begin(); return t; }
71 /* Set 'nocode_wanted' after unconditional jumps */
72 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
73 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
75 /* These are #undef'd at the end of this file */
/* redirect every jump emission in this file through the _acs wrappers above */
76 #define gjmp_addr gjmp_addr_acs
77 #define gjmp gjmp_acs
78 /* <---- */
80 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
81 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
82 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
83 ST_DATA int func_vc;
84 static int last_line_num, new_file, func_ind; /* debug info control */
85 ST_DATA const char *funcname;
86 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
87 static CString initstr;
/* size_t / ptrdiff_t representation depends on pointer and long width */
89 #if PTR_SIZE == 4
90 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_INT
92 #elif LONG_SIZE == 4
93 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T VT_LLONG
95 #else
96 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
97 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
98 #endif
/* state of the innermost switch statement being compiled */
100 ST_DATA struct switch_t {
101 struct case_t {
102 int64_t v1, v2;
103 int sym;
104 } **p; int n; /* list of case ranges */
105 int def_sym; /* default symbol */
106 int *bsym;
107 struct scope *scope;
108 struct switch_t *prev;
109 SValue sv;
110 } *cur_switch; /* current switch */
112 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
113 /*list of temporary local variables on the stack in current function. */
114 ST_DATA struct temp_local_variable {
115 int location; //offset on stack. Svalue.c.i
116 short size;
117 short align;
118 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
119 short nb_temp_local_vars;
/* lexical scope chain: VLA bookkeeping, cleanups, break/continue targets,
   and the local symbol/label stack marks to pop on scope exit */
121 static struct scope {
122 struct scope *prev;
123 struct { int loc, locorig, num; } vla;
124 struct { Sym *s; int n; } cl;
125 int *bsym, *csym;
126 Sym *lstk, *llstk;
127 } *cur_scope, *loop_scope, *root_scope;
/* parameters threaded through the initializer emission routines */
129 typedef struct {
130 Section *sec;
131 int local_offset;
132 Sym *flex_array_ref;
133 } init_params;
135 /********************************************************/
136 /* stab debug support */
/* predefined stabs type entries; index+1 is the stabs type number used by
   tcc_get_debug_info for basic types */
138 static const struct {
139 int type;
140 const char *name;
141 } default_debug[] = {
142 { VT_INT, "int:t1=r1;-2147483648;2147483647;" },
143 { VT_BYTE, "char:t2=r2;0;127;" },
144 #if LONG_SIZE == 4
145 { VT_LONG | VT_INT, "long int:t3=r3;-2147483648;2147483647;" },
146 #else
147 { VT_LLONG | VT_LONG, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
148 #endif
149 { VT_INT | VT_UNSIGNED, "unsigned int:t4=r4;0;037777777777;" },
150 #if LONG_SIZE == 4
151 { VT_LONG | VT_INT | VT_UNSIGNED, "long unsigned int:t5=r5;0;037777777777;" },
152 #else
153 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
154 { VT_LLONG | VT_LONG | VT_UNSIGNED, "long unsigned int:t5=r5;0;01777777777777777777777;" },
155 #endif
156 { VT_QLONG, "__int128:t6=r6;0;-1;" },
157 { VT_QLONG | VT_UNSIGNED, "__int128 unsigned:t7=r7;0;-1;" },
158 { VT_LLONG, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
159 { VT_LLONG | VT_UNSIGNED, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
160 { VT_SHORT, "short int:t10=r10;-32768;32767;" },
161 { VT_SHORT | VT_UNSIGNED, "short unsigned int:t11=r11;0;65535;" },
162 { VT_BYTE | VT_DEFSIGN, "signed char:t12=r12;-128;127;" },
163 { VT_BYTE | VT_DEFSIGN | VT_UNSIGNED, "unsigned char:t13=r13;0;255;" },
164 { VT_FLOAT, "float:t14=r1;4;0;" },
165 { VT_DOUBLE, "double:t15=r1;8;0;" },
166 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
167 { VT_DOUBLE | VT_LONG, "long double:t16=r1;8;0;" },
168 #else
169 { VT_LDOUBLE, "long double:t16=r1;16;0;" },
170 #endif
171 { -1, "_Float32:t17=r1;4;0;" },
172 { -1, "_Float64:t18=r1;8;0;" },
173 { -1, "_Float128:t19=r1;16;0;" },
174 { -1, "_Float32x:t20=r1;8;0;" },
175 { -1, "_Float64x:t21=r1;16;0;" },
176 { -1, "_Decimal32:t22=r1;4;0;" },
177 { -1, "_Decimal64:t23=r1;8;0;" },
178 { -1, "_Decimal128:t24=r1;16;0;" },
179 /* if default char is unsigned */
180 { VT_BYTE | VT_UNSIGNED, "unsigned char:t25=r25;0;255;" },
181 /* boolean type */
182 { VT_BOOL, "bool:t26=r26;0;255;" },
183 { VT_VOID, "void:t27=27" },
/* next free stabs type number (starts past the default_debug entries) */
186 static int debug_next_type;
/* map from struct Sym to its assigned stabs type number, so a struct type
   string is only emitted once */
188 static struct debug_hash {
189 int debug_type;
190 Sym *type;
191 } *debug_hash;
193 static int n_debug_hash;
/* per-lexical-block debug info tree built during a function, flushed by
   tcc_debug_finish at function end */
195 static struct debug_info {
196 int start;
197 int end;
198 int n_sym;
199 struct debug_sym {
200 int type;
201 unsigned long value;
202 char *str;
203 Section *sec;
204 int sym_index;
205 } *sym;
206 struct debug_info *child, *next, *last, *parent;
207 } *debug_info, *debug_info_root;
/* state of the test-coverage section writer (see tcc_tcov_* below) */
209 static struct {
210 unsigned long offset;
211 unsigned long last_file_name;
212 unsigned long last_func_name;
213 int ind;
214 int line;
215 } tcov_data;
217 /********************************************************/
218 #if 1
219 #define precedence_parser
220 static void init_prec(void);
221 #endif
222 /********************************************************/
/* stubs reporting an error when tcc was built without assembler support */
223 #ifndef CONFIG_TCC_ASM
224 ST_FUNC void asm_instr(void)
226 tcc_error("inline asm() not supported");
228 ST_FUNC void asm_global_instr(void)
230 tcc_error("inline asm() not supported");
232 #endif
234 /* ------------------------------------------------------------------------- */
/* forward declarations for the parser/code generator defined later in this file */
235 static void gen_cast(CType *type);
236 static void gen_cast_s(int t);
237 static inline CType *pointed_type(CType *type);
238 static int is_compatible_types(CType *type1, CType *type2);
239 static int parse_btype(CType *type, AttributeDef *ad);
240 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
241 static void parse_expr_type(CType *type);
242 static void init_putv(init_params *p, CType *type, unsigned long c);
243 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
244 static void block(int is_expr);
245 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
246 static void decl(int l);
247 static int decl0(int l, int is_for_loop_init, Sym *);
248 static void expr_eq(void);
249 static void vla_runtime_type_size(CType *type, int *a);
250 static int is_compatible_unqualified_types(CType *type1, CType *type2);
251 static inline int64_t expr_const64(void);
252 static void vpush64(int ty, unsigned long long v);
253 static void vpush(CType *type);
254 static int gvtst(int inv, int t);
255 static void gen_inline_functions(TCCState *s);
256 static void free_inline_functions(TCCState *s);
257 static void skip_or_save_block(TokenString **str);
258 static void gv_dup(void);
259 static int get_temp_local_var(int size,int align);
260 static void clear_temp_local_var_list();
261 static void cast_error(CType *st, CType *dt);
263 ST_INLN int is_float(int t)
265 int bt = t & VT_BTYPE;
266 return bt == VT_LDOUBLE
267 || bt == VT_DOUBLE
268 || bt == VT_FLOAT
269 || bt == VT_QFLOAT;
272 static inline int is_integer_btype(int bt)
274 return bt == VT_BYTE
275 || bt == VT_BOOL
276 || bt == VT_SHORT
277 || bt == VT_INT
278 || bt == VT_LLONG;
281 static int btype_size(int bt)
283 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
284 bt == VT_SHORT ? 2 :
285 bt == VT_INT ? 4 :
286 bt == VT_LLONG ? 8 :
287 bt == VT_PTR ? PTR_SIZE : 0;
290 /* returns function return register from type */
291 static int R_RET(int t)
/* integer-class values come back in REG_IRET; long double is target specific */
293 if (!is_float(t))
294 return REG_IRET;
295 #ifdef TCC_TARGET_X86_64
296 if ((t & VT_BTYPE) == VT_LDOUBLE)
297 return TREG_ST0;
298 #elif defined TCC_TARGET_RISCV64
299 if ((t & VT_BTYPE) == VT_LDOUBLE)
300 return REG_IRET;
301 #endif
302 return REG_FRET;
305 /* returns 2nd function return register, if any */
/* VT_CONST is returned as the "no second register" sentinel (see
   USING_TWO_WORDS below) */
306 static int R2_RET(int t)
308 t &= VT_BTYPE;
309 #if PTR_SIZE == 4
310 if (t == VT_LLONG)
311 return REG_IRE2;
312 #elif defined TCC_TARGET_X86_64
313 if (t == VT_QLONG)
314 return REG_IRE2;
315 if (t == VT_QFLOAT)
316 return REG_FRE2;
317 #elif defined TCC_TARGET_RISCV64
318 if (t == VT_LDOUBLE)
319 return REG_IRE2;
320 #endif
321 return VT_CONST;
324 /* returns true for two-word types */
325 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
327 /* put function return registers to stack value */
328 static void PUT_R_RET(SValue *sv, int t)
330 sv->r = R_RET(t), sv->r2 = R2_RET(t);
333 /* returns function return register class for type t */
334 static int RC_RET(int t)
/* strip the generic float/int bits, keeping only the specific register class */
336 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
339 /* returns generic register class for type t */
340 static int RC_TYPE(int t)
342 if (!is_float(t))
343 return RC_INT;
344 #ifdef TCC_TARGET_X86_64
/* x86-64: long double lives on the x87 stack, __float128 in the return pair */
345 if ((t & VT_BTYPE) == VT_LDOUBLE)
346 return RC_ST0;
347 if ((t & VT_BTYPE) == VT_QFLOAT)
348 return RC_FRET;
349 #elif defined TCC_TARGET_RISCV64
350 if ((t & VT_BTYPE) == VT_LDOUBLE)
351 return RC_INT;
352 #endif
353 return RC_FLOAT;
356 /* returns 2nd register class corresponding to t and rc */
/* 0 means no second register is needed for this type */
357 static int RC2_TYPE(int t, int rc)
359 if (!USING_TWO_WORDS(t))
360 return 0;
361 #ifdef RC_IRE2
362 if (rc == RC_IRET)
363 return RC_IRE2;
364 #endif
365 #ifdef RC_FRE2
366 if (rc == RC_FRET)
367 return RC_FRE2;
368 #endif
369 if (rc & RC_FLOAT)
370 return RC_FLOAT;
371 return RC_INT;
374 /* we use our own 'finite' function to avoid potential problems with
375 non standard math libs */
376 /* XXX: endianness dependent */
377 ST_FUNC int ieee_finite(double d)
379 int p[4];
380 memcpy(p, &d, sizeof(double));
381 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
384 /* compiling intel long double natively */
/* defined when host and target are both x86, so host 387 arithmetic can be
   used directly for target long double constants */
385 #if (defined __i386__ || defined __x86_64__) \
386 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
387 # define TCC_IS_NATIVE_387
388 #endif
390 ST_FUNC void test_lvalue(void)
392 if (!(vtop->r & VT_LVAL))
393 expect("lvalue");
396 ST_FUNC void check_vstack(void)
398 if (vtop != vstack - 1)
399 tcc_error("internal compiler error: vstack leak (%d)",
400 (int)(vtop - vstack + 1));
403 /* ------------------------------------------------------------------------- */
404 /* vstack debugging aid */
/* disabled helper: dump 'b' stack entries starting at vtop[-a] */
406 #if 0
407 void pv (const char *lbl, int a, int b)
409 int i;
410 for (i = a; i < a + b; ++i) {
411 SValue *p = &vtop[-i];
412 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
413 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
416 #endif
418 /* ------------------------------------------------------------------------- */
419 /* start of translation unit info */
/* Emit the stabs preamble: N_SO entries for cwd and main file, the table of
   predefined basic types, and reset the per-unit debug state. Also emits the
   mandatory STT_FILE ELF symbol. */
420 ST_FUNC void tcc_debug_start(TCCState *s1)
422 if (s1->do_debug) {
423 int i;
424 char buf[512];
426 /* file info: full path + filename */
427 section_sym = put_elf_sym(symtab_section, 0, 0,
428 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
429 text_section->sh_num, NULL);
/* NOTE(review): getcwd() result is unchecked; buf may stay unset on failure
   — confirm this is acceptable here */
430 getcwd(buf, sizeof(buf));
431 #ifdef _WIN32
432 normalize_slashes(buf);
433 #endif
434 pstrcat(buf, sizeof(buf), "/");
435 put_stabs_r(s1, buf, N_SO, 0, 0,
436 text_section->data_offset, text_section, section_sym);
437 put_stabs_r(s1, file->prev ? file->prev->filename : file->filename,
438 N_SO, 0, 0,
439 text_section->data_offset, text_section, section_sym);
440 for (i = 0; i < sizeof (default_debug) / sizeof (default_debug[0]); i++)
441 put_stabs(s1, default_debug[i].name, N_LSYM, 0, 0, 0);
443 new_file = last_line_num = 0;
444 func_ind = -1;
445 debug_next_type = sizeof(default_debug) / sizeof(default_debug[0]);
446 debug_hash = NULL;
447 n_debug_hash = 0;
449 /* we're currently 'including' the <command line> */
450 tcc_debug_bincl(s1);
453 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
454 symbols can be safely used */
455 put_elf_sym(symtab_section, 0, 0,
456 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
457 SHN_ABS, file->filename);
460 /* put end of translation unit info */
461 ST_FUNC void tcc_debug_end(TCCState *s1)
463 if (!s1->do_debug)
464 return;
465 put_stabs_r(s1, NULL, N_SO, 0, 0,
466 text_section->data_offset, text_section, section_sym);
467 tcc_free(debug_hash);
470 static BufferedFile* put_new_file(TCCState *s1)
472 BufferedFile *f = file;
473 /* use upper file if from inline ":asm:" */
474 if (f->filename[0] == ':')
475 f = f->prev;
476 if (f && new_file) {
477 put_stabs_r(s1, f->filename, N_SOL, 0, 0, ind, text_section, section_sym);
478 new_file = last_line_num = 0;
480 return f;
483 /* put alternative filename */
484 ST_FUNC void tcc_debug_putfile(TCCState *s1, const char *filename)
486 if (0 == strcmp(file->filename, filename))
487 return;
488 pstrcpy(file->filename, sizeof(file->filename), filename);
489 new_file = 1;
492 /* begin of #include */
493 ST_FUNC void tcc_debug_bincl(TCCState *s1)
495 if (!s1->do_debug)
496 return;
497 put_stabs(s1, file->filename, N_BINCL, 0, 0, 0);
498 new_file = 1;
501 /* end of #include */
502 ST_FUNC void tcc_debug_eincl(TCCState *s1)
504 if (!s1->do_debug)
505 return;
506 put_stabn(s1, N_EINCL, 0, 0, 0);
507 new_file = 1;
510 /* generate line number info */
/* Emit an N_SLINE stab for the current source line; skipped when debug is
   off, when not emitting into the main text section, or when the line did
   not change since the last emission. */
511 static void tcc_debug_line(TCCState *s1)
513 BufferedFile *f;
514 if (!s1->do_debug
515 || cur_text_section != text_section
516 || !(f = put_new_file(s1))
517 || last_line_num == f->line_num)
518 return;
/* inside a function addresses are relative to the function start */
519 if (func_ind != -1) {
520 put_stabn(s1, N_SLINE, 0, f->line_num, ind - func_ind);
521 } else {
522 /* from tcc_assemble */
523 put_stabs_r(s1, NULL, N_SLINE, 0, f->line_num, ind, text_section, section_sym);
525 last_line_num = f->line_num;
/* Emit or buffer one stabs symbol entry. While inside a lexical block
   (debug_info set) entries are buffered on the current debug_info node and
   flushed later by tcc_debug_finish; otherwise they are written directly. */
528 static void tcc_debug_stabs (TCCState *s1, const char *str, int type, unsigned long value,
529 Section *sec, int sym_index)
531 struct debug_sym *s;
533 if (debug_info) {
534 debug_info->sym =
535 (struct debug_sym *)tcc_realloc (debug_info->sym,
536 sizeof(struct debug_sym) *
537 (debug_info->n_sym + 1));
538 s = debug_info->sym + debug_info->n_sym++;
539 s->type = type;
540 s->value = value;
541 s->str = tcc_strdup(str);
542 s->sec = sec;
543 s->sym_index = sym_index;
545 else if (sec)
546 put_stabs_r (s1, str, type, 0, 0, value, sec, sym_index);
547 else
548 put_stabs (s1, str, type, 0, 0, value);
/* Open (N_LBRAC) or close (N_RBRAC) a lexical block in the debug_info tree.
   N_LBRAC allocates a new node and links it as the last child of the current
   node; N_RBRAC records the end address and pops back to the parent. */
551 static void tcc_debug_stabn(TCCState *s1, int type, int value)
553 if (!s1->do_debug)
554 return;
555 if (type == N_LBRAC) {
556 struct debug_info *info =
557 (struct debug_info *) tcc_mallocz(sizeof (*info));
559 info->start = value;
560 info->parent = debug_info;
561 if (debug_info) {
/* append to the sibling list; 'last' caches the list tail */
562 if (debug_info->child) {
563 if (debug_info->child->last)
564 debug_info->child->last->next = info;
565 else
566 debug_info->child->next = info;
567 debug_info->child->last = info;
569 else
570 debug_info->child = info;
572 else
573 debug_info_root = info;
574 debug_info = info;
576 else {
577 debug_info->end = value;
578 debug_info = debug_info->parent;
/* Append the stabs type string for symbol 's' to 'result'. Walks pointer and
   array derivations first, resolves the base type (structs/unions are cached
   in debug_hash and their member list emitted once; enums are expanded
   inline; basic types map onto the default_debug table), then re-walks the
   derivation chain emitting '*' / array / function wrappers. */
582 static void tcc_get_debug_info(TCCState *s1, Sym *s, CString *result)
584 int type;
585 int n = 0;
586 int debug_type = -1;
587 Sym *t = s;
588 CString str;
/* first pass: strip pointer/array derivations, counting them in 'n' */
590 for (;;) {
591 type = t->type.t & ~(VT_STORAGE | VT_CONSTANT | VT_VOLATILE);
592 if ((type & VT_BTYPE) != VT_BYTE)
593 type &= ~VT_DEFSIGN;
594 if (type == VT_PTR || type == (VT_PTR | VT_ARRAY))
595 n++, t = t->type.ref;
596 else
597 break;
599 if ((type & VT_BTYPE) == VT_STRUCT) {
600 int i;
602 t = t->type.ref;
/* reuse the type number if this struct was already emitted */
603 for (i = 0; i < n_debug_hash; i++) {
604 if (t == debug_hash[i].type) {
605 debug_type = debug_hash[i].debug_type;
606 break;
609 if (debug_type == -1) {
610 debug_type = ++debug_next_type;
611 debug_hash = (struct debug_hash *)
612 tcc_realloc (debug_hash,
613 (n_debug_hash + 1) * sizeof(*debug_hash));
614 debug_hash[n_debug_hash].debug_type = debug_type;
615 debug_hash[n_debug_hash++].type = t;
616 cstr_new (&str);
617 cstr_printf (&str, "%s:T%d=%c%d",
618 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
619 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
620 debug_type,
621 IS_UNION (t->type.t) ? 'u' : 's',
622 t->c);
/* emit each member as name:type,bitpos,bitsize; */
623 while (t->next) {
624 int pos, size, align;
626 t = t->next;
627 cstr_printf (&str, "%s:",
628 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
629 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
630 tcc_get_debug_info (s1, t, &str);
631 if (t->type.t & VT_BITFIELD) {
632 pos = t->c * 8 + BIT_POS(t->type.t);
633 size = BIT_SIZE(t->type.t);
635 else {
636 pos = t->c * 8;
637 size = type_size(&t->type, &align) * 8;
639 cstr_printf (&str, ",%d,%d;", pos, size);
641 cstr_printf (&str, ";");
642 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
643 cstr_free (&str);
646 else if (IS_ENUM(type)) {
647 Sym *e = t = t->type.ref;
649 debug_type = ++debug_next_type;
650 cstr_new (&str);
651 cstr_printf (&str, "%s:T%d=e",
652 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
653 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
654 debug_type);
655 while (t->next) {
656 t = t->next;
657 cstr_printf (&str, "%s:",
658 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
659 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
660 cstr_printf (&str, e->type.t & VT_UNSIGNED ? "%u," : "%d,",
661 (int)t->enum_val);
663 cstr_printf (&str, ";");
664 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
665 cstr_free (&str);
667 else if ((type & VT_BTYPE) != VT_FUNC) {
/* basic type: look up its fixed number in default_debug (1-based) */
668 type &= ~VT_STRUCT_MASK;
669 for (debug_type = 1;
670 debug_type <= sizeof(default_debug) / sizeof(default_debug[0]);
671 debug_type++)
672 if (default_debug[debug_type - 1].type == type)
673 break;
674 if (debug_type > sizeof(default_debug) / sizeof(default_debug[0]))
675 return;
677 if (n > 0)
678 cstr_printf (result, "%d=", ++debug_next_type);
679 t = s;
/* second pass: emit the derivation wrappers around the base type */
680 for (;;) {
681 type = t->type.t & ~(VT_STORAGE | VT_CONSTANT | VT_VOLATILE);
682 if ((type & VT_BTYPE) != VT_BYTE)
683 type &= ~VT_DEFSIGN;
684 if (type == VT_PTR)
685 cstr_printf (result, "%d=*", ++debug_next_type);
686 else if (type == (VT_PTR | VT_ARRAY))
687 cstr_printf (result, "%d=ar1;0;%d;",
688 ++debug_next_type, t->type.ref->c - 1);
689 else if (type == VT_FUNC) {
690 cstr_printf (result, "%d=f", ++debug_next_type);
691 tcc_get_debug_info (s1, t->type.ref, result);
692 return;
694 else
695 break;
696 t = t->type.ref;
698 cstr_printf (result, "%d", debug_type);
/* Flush a buffered debug_info sibling list: emit each node's symbols, then
   the N_LBRAC/N_RBRAC bracket pair around its recursively-flushed children,
   freeing the tree as it goes. */
701 static void tcc_debug_finish (TCCState *s1, struct debug_info *cur)
703 while (cur) {
704 int i;
705 struct debug_info *next = cur->next;
707 for (i = 0; i < cur->n_sym; i++) {
708 struct debug_sym *s = &cur->sym[i];
710 if (s->sec)
711 put_stabs_r(s1, s->str, s->type, 0, 0, s->value,
712 s->sec, s->sym_index);
713 else
714 put_stabs(s1, s->str, s->type, 0, 0, s->value);
715 tcc_free (s->str);
717 tcc_free (cur->sym);
718 put_stabn(s1, N_LBRAC, 0, 0, cur->start);
719 tcc_debug_finish (s1, cur->child);
720 put_stabn(s1, N_RBRAC, 0, 0, cur->end);
721 tcc_free (cur);
722 cur = next;
/* Emit N_PSYM (param != 0) or N_LSYM stabs for the stack-resident symbols in
   the sym-stack range [s, e); symbols without a name or not stored at
   VT_LOCAL are skipped. */
726 static void tcc_add_debug_info(TCCState *s1, int param, Sym *s, Sym *e)
728 CString debug_str;
729 if (!s1->do_debug)
730 return;
731 cstr_new (&debug_str);
732 for (; s != e; s = s->prev) {
733 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
734 continue;
735 cstr_reset (&debug_str);
736 cstr_printf (&debug_str, "%s:%s", get_tok_str(s->v, NULL), param ? "p" : "");
737 tcc_get_debug_info(s1, s, &debug_str);
/* s->c holds the stack offset of the variable */
738 tcc_debug_stabs(s1, debug_str.data, param ? N_PSYM : N_LSYM, s->c, NULL, 0);
740 cstr_free (&debug_str);
743 /* put function symbol */
/* Start function-level debug info: reset the block tree, open the outermost
   lexical block and emit the N_FUN record ('f' = static, 'F' = global). */
744 static void tcc_debug_funcstart(TCCState *s1, Sym *sym)
746 CString debug_str;
747 BufferedFile *f;
748 if (!s1->do_debug)
749 return;
750 debug_info_root = NULL;
751 debug_info = NULL;
752 tcc_debug_stabn(s1, N_LBRAC, ind - func_ind);
753 if (!(f = put_new_file(s1)))
754 return;
755 cstr_new (&debug_str);
756 cstr_printf(&debug_str, "%s:%c", funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
757 tcc_get_debug_info(s1, sym->type.ref, &debug_str);
758 put_stabs_r(s1, debug_str.data, N_FUN, 0, f->line_num, 0, cur_text_section, sym->c);
759 cstr_free (&debug_str);
761 tcc_debug_line(s1);
764 /* put function size */
765 static void tcc_debug_funcend(TCCState *s1, int size)
767 if (!s1->do_debug)
768 return;
769 tcc_debug_stabn(s1, N_RBRAC, size);
770 tcc_debug_finish (s1, debug_info_root);
/* Emit a stabs record for a data symbol that just got an ELF symbol:
   N_GSYM for globals, N_STSYM/N_LCSYM for statics (initialized data vs
   other sections). Functions and anonymous symbols are skipped. */
774 static void tcc_debug_extern_sym(TCCState *s1, Sym *sym, int sh_num, int sym_bind, int sym_type)
776 Section *s;
777 CString str;
779 if (!s1->do_debug)
780 return;
781 if (sym_type == STT_FUNC || sym->v >= SYM_FIRST_ANOM)
782 return;
783 s = s1->sections[sh_num];
785 cstr_new (&str);
786 cstr_printf (&str, "%s:%c",
787 get_tok_str(sym->v, NULL),
788 sym_bind == STB_GLOBAL ? 'G' : local_scope ? 'V' : 'S'
790 tcc_get_debug_info(s1, sym, &str);
791 if (sym_bind == STB_GLOBAL)
792 tcc_debug_stabs(s1, str.data, N_GSYM, 0, NULL, 0);
793 else
794 tcc_debug_stabs(s1, str.data,
795 (sym->type.t & VT_STATIC) && data_section == s
796 ? N_STSYM : N_LCSYM, 0, s, sym->c);
797 cstr_free (&str);
/* Emit an N_LSYM stabs record describing a typedef; anonymous names are
   emitted with an empty label. */
800 static void tcc_debug_typedef(TCCState *s1, Sym *sym)
802 CString str;
804 if (!s1->do_debug)
805 return;
806 cstr_new (&str);
807 cstr_printf (&str, "%s:t",
808 (sym->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
809 ? "" : get_tok_str(sym->v & ~SYM_FIELD, NULL));
810 tcc_get_debug_info(s1, sym, &str);
811 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
812 cstr_free (&str);
815 /* ------------------------------------------------------------------------- */
816 /* for section layout see lib/tcov.c */
818 static void tcc_tcov_block_end(int line);
/* Start a coverage block: lazily record the current file and function names
   into the tcov section, then emit a 16-byte counter slot for the current
   line and generate code that increments it at runtime. */
820 static void tcc_tcov_block_begin(void)
822 SValue sv;
823 void *ptr;
824 unsigned long last_offset = tcov_data.offset;
826 tcc_tcov_block_end (0);
827 if (tcc_state->test_coverage == 0 || nocode_wanted)
828 return;
/* new source file: terminate the previous name lists and store the
   absolute path of the current file */
830 if (tcov_data.last_file_name == 0 ||
831 strcmp ((const char *)(tcov_section->data + tcov_data.last_file_name),
832 file->true_filename) != 0) {
833 char wd[1024];
834 CString cstr;
836 if (tcov_data.last_func_name)
837 section_ptr_add(tcov_section, 1);
838 if (tcov_data.last_file_name)
839 section_ptr_add(tcov_section, 1);
840 tcov_data.last_func_name = 0;
841 cstr_new (&cstr);
842 if (file->true_filename[0] == '/') {
843 tcov_data.last_file_name = tcov_section->data_offset;
844 cstr_printf (&cstr, "%s", file->true_filename);
846 else {
/* NOTE(review): getcwd() return value is ignored — confirm acceptable */
847 getcwd (wd, sizeof(wd));
848 tcov_data.last_file_name = tcov_section->data_offset + strlen(wd) + 1;
849 cstr_printf (&cstr, "%s/%s", wd, file->true_filename);
851 ptr = section_ptr_add(tcov_section, cstr.size + 1);
852 strncpy((char *)ptr, cstr.data, cstr.size);
853 #ifdef _WIN32
854 normalize_slashes((char *)ptr);
855 #endif
856 cstr_free (&cstr);
/* new function: store its name, align to 8 and record the start line */
858 if (tcov_data.last_func_name == 0 ||
859 strcmp ((const char *)(tcov_section->data + tcov_data.last_func_name),
860 funcname) != 0) {
861 size_t len;
863 if (tcov_data.last_func_name)
864 section_ptr_add(tcov_section, 1);
865 tcov_data.last_func_name = tcov_section->data_offset;
866 len = strlen (funcname);
867 ptr = section_ptr_add(tcov_section, len + 1);
868 strncpy((char *)ptr, funcname, len);
869 section_ptr_add(tcov_section, -tcov_section->data_offset & 7);
870 ptr = section_ptr_add(tcov_section, 8);
871 write64le (ptr, file->line_num);
/* same code position as last time: reuse the previous counter slot */
873 if (ind == tcov_data.ind && tcov_data.line == file->line_num)
874 tcov_data.offset = last_offset;
875 else {
876 Sym label = {0};
877 label.type.t = VT_LLONG | VT_STATIC;
879 ptr = section_ptr_add(tcov_section, 16);
880 tcov_data.line = file->line_num;
881 write64le (ptr, (tcov_data.line << 8) | 0xff);
882 put_extern_sym(&label, tcov_section,
883 ((unsigned char *)ptr - tcov_section->data) + 8, 0);
884 sv.type = label.type;
885 sv.r = VT_SYM | VT_LVAL | VT_CONST;
886 sv.r2 = VT_CONST;
887 sv.c.i = 0;
888 sv.sym = &label;
889 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || \
890 defined TCC_TARGET_ARM || defined TCC_TARGET_ARM64 || \
891 defined TCC_TARGET_RISCV64
892 gen_increment_tcov (&sv);
893 #else
894 vpushv(&sv);
895 inc(0, TOK_INC);
896 vpop();
897 #endif
898 tcov_data.offset = (unsigned char *)ptr - tcov_section->data;
899 tcov_data.ind = ind;
/* Close the pending coverage block, if any, by patching its end line number
   (bits 36+ of the 64-bit header word) into the tcov section. */
903 static void tcc_tcov_block_end(int line)
905 if (tcc_state->test_coverage == 0)
906 return;
907 if (tcov_data.offset) {
908 void *ptr = tcov_section->data + tcov_data.offset;
909 unsigned long long nline = line ? line : file->line_num;
911 write64le (ptr, (read64le (ptr) & 0xfffffffffull) | (nline << 36));
912 tcov_data.offset = 0;
/* On a line change, close the current coverage block and (when 'start' is
   set) open a new one; a move to the directly following line only updates
   the recorded line number. */
916 static void tcc_tcov_check_line(int start)
918 if (tcc_state->test_coverage == 0)
919 return;
920 if (tcov_data.line != file->line_num) {
921 if ((tcov_data.line + 1) != file->line_num) {
922 tcc_tcov_block_end (tcov_data.line);
923 if (start)
924 tcc_tcov_block_begin ();
926 else
927 tcov_data.line = file->line_num;
/* Reset coverage state and create the .tcov output section on first use. */
931 static void tcc_tcov_start(void)
933 if (tcc_state->test_coverage == 0)
934 return;
935 memset (&tcov_data, 0, sizeof (tcov_data));
936 if (tcov_section == NULL) {
937 tcov_section = new_section(tcc_state, ".tcov", SHT_PROGBITS,
938 SHF_ALLOC | SHF_WRITE);
939 section_ptr_add(tcov_section, 4); // pointer to executable name
943 static void tcc_tcov_end(void)
945 if (tcc_state->test_coverage == 0)
946 return;
947 if (tcov_data.last_func_name)
948 section_ptr_add(tcov_section, 1);
949 if (tcov_data.last_file_name)
950 section_ptr_add(tcov_section, 1);
953 /* ------------------------------------------------------------------------- */
954 /* initialize vstack and types. This must be done also for tcc -E */
955 ST_FUNC void tccgen_init(TCCState *s1)
/* empty value stack: vtop sits on the guard slot below vstack */
957 vtop = vstack - 1;
958 memset(vtop, 0, sizeof *vtop);
960 /* define some often used types */
961 int_type.t = VT_INT;
963 char_type.t = VT_BYTE;
964 if (s1->char_is_unsigned)
965 char_type.t |= VT_UNSIGNED;
966 char_pointer_type = char_type;
967 mk_pointer(&char_pointer_type);
/* old-style (unprototyped) function type returning int */
969 func_old_type.t = VT_FUNC;
970 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
971 func_old_type.ref->f.func_call = FUNC_CDECL;
972 func_old_type.ref->f.func_type = FUNC_OLD;
973 #ifdef precedence_parser
974 init_prec();
975 #endif
976 cstr_new(&initstr);
/* Compile the current input file: reset per-unit state, parse all external
   declarations, then emit inline functions and close debug/coverage info.
   Returns 0. */
979 ST_FUNC int tccgen_compile(TCCState *s1)
981 cur_text_section = NULL;
982 funcname = "";
983 anon_sym = SYM_FIRST_ANOM;
984 section_sym = 0;
985 const_wanted = 0;
/* data output allowed, code output off until a function body is entered */
986 nocode_wanted = 0x80000000;
987 local_scope = 0;
988 debug_modes = s1->do_debug | s1->test_coverage << 1;
990 tcc_debug_start(s1);
991 tcc_tcov_start ();
992 #ifdef TCC_TARGET_ARM
993 arm_init(s1);
994 #endif
995 #ifdef INC_DEBUG
996 printf("%s: **** new file\n", file->filename);
997 #endif
998 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
999 next();
1000 decl(VT_CONST);
1001 gen_inline_functions(s1);
1002 check_vstack();
1003 /* end of translation unit info */
1004 tcc_debug_end(s1);
1005 tcc_tcov_end ();
1006 return 0;
/* Release all code-generator memory: symbol stacks, macros and the Sym
   allocation pools. */
1009 ST_FUNC void tccgen_finish(TCCState *s1)
1011 cstr_free(&initstr);
1012 free_inline_functions(s1);
1013 sym_pop(&global_stack, NULL, 0);
1014 sym_pop(&local_stack, NULL, 0);
1015 /* free preprocessor macros */
1016 free_defines(NULL);
1017 /* free sym_pools */
1018 dynarray_reset(&sym_pools, &nb_sym_pools);
1019 sym_free_first = NULL;
1022 /* ------------------------------------------------------------------------- */
1023 ST_FUNC ElfSym *elfsym(Sym *s)
1025 if (!s || !s->c)
1026 return NULL;
1027 return &((ElfSym *)symtab_section->data)[s->c];
1030 /* apply storage attributes to Elf symbol */
/* Propagate visibility, binding (local/weak/global) and PE dllimport/export
   flags from the tcc Sym to its ELF symbol, if one exists. */
1031 ST_FUNC void update_storage(Sym *sym)
1033 ElfSym *esym;
1034 int sym_bind, old_sym_bind;
1036 esym = elfsym(sym);
1037 if (!esym)
1038 return;
1040 if (sym->a.visibility)
1041 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
1042 | sym->a.visibility;
1044 if (sym->type.t & (VT_STATIC | VT_INLINE))
1045 sym_bind = STB_LOCAL;
1046 else if (sym->a.weak)
1047 sym_bind = STB_WEAK;
1048 else
1049 sym_bind = STB_GLOBAL;
1050 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
1051 if (sym_bind != old_sym_bind) {
1052 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
1055 #ifdef TCC_TARGET_PE
1056 if (sym->a.dllimport)
1057 esym->st_other |= ST_PE_IMPORT;
1058 if (sym->a.dllexport)
1059 esym->st_other |= ST_PE_EXPORT;
1060 #endif
1062 #if 0
1063 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
1064 get_tok_str(sym->v, NULL),
1065 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
1066 sym->a.visibility,
1067 sym->a.dllexport,
1068 sym->a.dllimport
1070 #endif
1073 /* ------------------------------------------------------------------------- */
1074 /* update sym->c so that it points to an external symbol in section
1075 'section' with value 'value' */
1077 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
1078 addr_t value, unsigned long size,
1079 int can_add_underscore)
1081 int sym_type, sym_bind, info, other, t;
1082 ElfSym *esym;
1083 const char *name;
1084 char buf1[256];
/* First time this Sym is emitted: create a new ELF symbol entry. */
1086 if (!sym->c) {
1087 name = get_tok_str(sym->v, NULL);
1088 t = sym->type.t;
/* derive the ELF symbol type from the C type */
1089 if ((t & VT_BTYPE) == VT_FUNC) {
1090 sym_type = STT_FUNC;
1091 } else if ((t & VT_BTYPE) == VT_VOID) {
1092 sym_type = STT_NOTYPE;
/* asm labels without a C prototype are still functions */
1093 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
1094 sym_type = STT_FUNC;
1095 } else {
1096 sym_type = STT_OBJECT;
/* static/inline symbols get local binding, everything else global */
1098 if (t & (VT_STATIC | VT_INLINE))
1099 sym_bind = STB_LOCAL;
1100 else
1101 sym_bind = STB_GLOBAL;
1102 other = 0;
1104 #ifdef TCC_TARGET_PE
/* PE name decoration: stdcall functions become "_name@N" unless
   decoration is suppressed */
1105 if (sym_type == STT_FUNC && sym->type.ref) {
1106 Sym *ref = sym->type.ref;
1107 if (ref->a.nodecorate) {
1108 can_add_underscore = 0;
1110 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
1111 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
1112 name = buf1;
1113 other |= ST_PE_STDCALL;
1114 can_add_underscore = 0;
1117 #endif
/* an explicit asm label overrides the C name and is used verbatim */
1119 if (sym->asm_label) {
1120 name = get_tok_str(sym->asm_label, NULL);
1121 can_add_underscore = 0;
/* targets with a leading-underscore ABI get "_" prepended */
1124 if (tcc_state->leading_underscore && can_add_underscore) {
1125 buf1[0] = '_';
1126 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
1127 name = buf1;
1130 info = ELFW(ST_INFO)(sym_bind, sym_type);
1131 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
1133 if (debug_modes)
1134 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
1136 } else {
/* already emitted: just refresh value/size/section of the ELF entry */
1137 esym = elfsym(sym);
1138 esym->st_value = value;
1139 esym->st_size = size;
1140 esym->st_shndx = sh_num;
1142 update_storage(sym);
1145 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
1146 addr_t value, unsigned long size)
1148 int sh_num = section ? section->sh_num : SHN_UNDEF;
1149 put_extern_sym2(sym, sh_num, value, size, 1);
1152 /* add a new relocation entry to symbol 'sym' in section 's' */
1153 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
1154 addr_t addend)
1156 int c = 0;
1158 if (nocode_wanted && s == cur_text_section)
1159 return;
1161 if (sym) {
1162 if (0 == sym->c)
1163 put_extern_sym(sym, NULL, 0, 0);
1164 c = sym->c;
1167 /* now we can add ELF relocation info */
1168 put_elf_reloca(symtab_section, s, offset, type, c, addend);
1171 #if PTR_SIZE == 4
/* add a relocation without addend (PTR_SIZE == 4 targets only) */
1172 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
1174 greloca(s, sym, offset, type, 0);
1176 #endif
1178 /* ------------------------------------------------------------------------- */
1179 /* symbol allocator */
1180 static Sym *__sym_malloc(void)
1182 Sym *sym_pool, *sym, *last_sym;
1183 int i;
1185 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
1186 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
1188 last_sym = sym_free_first;
1189 sym = sym_pool;
1190 for(i = 0; i < SYM_POOL_NB; i++) {
1191 sym->next = last_sym;
1192 last_sym = sym;
1193 sym++;
1195 sym_free_first = last_sym;
1196 return last_sym;
1199 static inline Sym *sym_malloc(void)
1201 Sym *sym;
1202 #ifndef SYM_DEBUG
1203 sym = sym_free_first;
1204 if (!sym)
1205 sym = __sym_malloc();
1206 sym_free_first = sym->next;
1207 return sym;
1208 #else
1209 sym = tcc_malloc(sizeof(Sym));
1210 return sym;
1211 #endif
/* Release a Sym: push it back on the pool free list (or really free it
   when SYM_DEBUG is defined). */
1214 ST_INLN void sym_free(Sym *sym)
1216 #ifndef SYM_DEBUG
1217 sym->next = sym_free_first;
1218 sym_free_first = sym;
1219 #else
1220 tcc_free(sym);
1221 #endif
1224 /* push, without hashing */
1225 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
1227 Sym *s;
1229 s = sym_malloc();
1230 memset(s, 0, sizeof *s);
1231 s->v = v;
1232 s->type.t = t;
1233 s->c = c;
1234 /* add in stack */
1235 s->prev = *ps;
1236 *ps = s;
1237 return s;
1240 /* find a symbol and return its associated structure. 's' is the top
1241 of the symbol stack */
1242 ST_FUNC Sym *sym_find2(Sym *s, int v)
1244 while (s) {
1245 if (s->v == v)
1246 return s;
1247 else if (s->v == -1)
1248 return NULL;
1249 s = s->prev;
1251 return NULL;
1254 /* structure lookup */
1255 ST_INLN Sym *struct_find(int v)
1257 v -= TOK_IDENT;
1258 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1259 return NULL;
1260 return table_ident[v]->sym_struct;
1263 /* find an identifier */
1264 ST_INLN Sym *sym_find(int v)
1266 v -= TOK_IDENT;
1267 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1268 return NULL;
1269 return table_ident[v]->sym_identifier;
1272 static int sym_scope(Sym *s)
1274 if (IS_ENUM_VAL (s->type.t))
1275 return s->type.ref->sym_scope;
1276 else
1277 return s->sym_scope;
1280 /* push a given symbol on the symbol stack */
1281 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
1283 Sym *s, **ps;
1284 TokenSym *ts;
/* push on the innermost stack: local if we are inside a function */
1286 if (local_stack)
1287 ps = &local_stack;
1288 else
1289 ps = &global_stack;
1290 s = sym_push2(ps, v, type->t, c);
1291 s->type.ref = type->ref;
1292 s->r = r;
1293 /* don't record fields or anonymous symbols */
1294 /* XXX: simplify */
1295 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
1296 /* record symbol in token array */
1297 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
/* SYM_STRUCT selects the tag namespace, otherwise ordinary identifiers */
1298 if (v & SYM_STRUCT)
1299 ps = &ts->sym_struct;
1300 else
1301 ps = &ts->sym_identifier;
1302 s->prev_tok = *ps;
1303 *ps = s;
1304 s->sym_scope = local_scope;
/* two declarations of the same name in the same scope is an error */
1305 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
1306 tcc_error("redeclaration of '%s'",
1307 get_tok_str(v & ~SYM_STRUCT, NULL));
1309 return s;
1312 /* push a global identifier */
1313 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
1315 Sym *s, **ps;
1316 s = sym_push2(&global_stack, v, t, c);
1317 s->r = VT_CONST | VT_SYM;
1318 /* don't record anonymous symbol */
1319 if (v < SYM_FIRST_ANOM) {
1320 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
1321 /* modify the top most local identifier, so that sym_identifier will
1322 point to 's' when popped; happens when called from inline asm */
/* walk below any symbols that belong to a local scope */
1323 while (*ps != NULL && (*ps)->sym_scope)
1324 ps = &(*ps)->prev_tok;
1325 s->prev_tok = *ps;
1326 *ps = s;
1328 return s;
1331 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1332 pop them yet from the list, but do remove them from the token array. */
1333 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
1335 Sym *s, *ss, **ps;
1336 TokenSym *ts;
1337 int v;
1339 s = *ptop;
/* walk from the top of the stack down to (but excluding) 'b' */
1340 while(s != b) {
1341 ss = s->prev;
1342 v = s->v;
1343 /* remove symbol in token array */
1344 /* XXX: simplify */
1345 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
1346 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
1347 if (v & SYM_STRUCT)
1348 ps = &ts->sym_struct;
1349 else
1350 ps = &ts->sym_identifier;
/* unlink from the per-token chain */
1351 *ps = s->prev_tok;
1353 if (!keep)
1354 sym_free(s);
1355 s = ss;
/* with 'keep' the list itself stays intact (symbols only removed
   from the token array), so don't touch *ptop */
1357 if (!keep)
1358 *ptop = b;
1361 /* ------------------------------------------------------------------------- */
1362 static void vcheck_cmp(void)
/* We cannot rely on the CPU flags surviving if other instructions are
   generated.  Also avoid leaving VT_JMP anywhere except on the top of
   the stack because it would complicate the code generator.

   Don't do this when nocode_wanted. vtop might come from
   !nocode_wanted regions (see 88_codeopt.c) and transforming
   it to a register without actually generating code is wrong
   as their value might still be used for real. All values
   we push under nocode_wanted will eventually be popped
   again, so that the VT_CMP/VT_JMP value will be in vtop
   when code is unsuppressed again. */
1376 if (vtop->r == VT_CMP && !nocode_wanted)
1377 gv(RC_INT);
/* push a new value described by (type, r, vc) onto the value stack */
1380 static void vsetc(CType *type, int r, CValue *vc)
1382 if (vtop >= vstack + (VSTACK_SIZE - 1))
1383 tcc_error("memory full (vstack)");
/* materialize a pending comparison before pushing over it */
1384 vcheck_cmp();
1385 vtop++;
1386 vtop->type = *type;
1387 vtop->r = r;
1388 vtop->r2 = VT_CONST;
1389 vtop->c = *vc;
1390 vtop->sym = NULL;
/* exchange the two topmost value-stack entries */
1393 ST_FUNC void vswap(void)
1395 SValue tmp;
1397 vcheck_cmp();
1398 tmp = vtop[0];
1399 vtop[0] = vtop[-1];
1400 vtop[-1] = tmp;
1403 /* pop stack value */
1404 ST_FUNC void vpop(void)
1406 int v;
1407 v = vtop->r & VT_VALMASK;
1408 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1409 /* for x86, we need to pop the FP stack */
1410 if (v == TREG_ST0) {
1411 o(0xd8dd); /* fstp %st(0) */
1412 } else
1413 #endif
1414 if (v == VT_CMP) {
1415 /* need to put correct jump if && or || without test */
1416 gsym(vtop->jtrue);
1417 gsym(vtop->jfalse);
1419 vtop--;
1422 /* push constant of type "type" with useless value */
/* push constant of type "type" with useless value */
1423 static void vpush(CType *type)
1425 vset(type, VT_CONST, 0);
1428 /* push arbitrary 64bit constant */
1429 static void vpush64(int ty, unsigned long long v)
1431 CValue cval;
1432 CType ctype;
1433 ctype.t = ty;
1434 ctype.ref = NULL;
1435 cval.i = v;
1436 vsetc(&ctype, VT_CONST, &cval);
1439 /* push integer constant */
/* push integer constant */
1440 ST_FUNC void vpushi(int v)
1442 vpush64(VT_INT, v);
1445 /* push a pointer sized constant */
/* push a pointer sized constant */
1446 static void vpushs(addr_t v)
1448 vpush64(VT_SIZE_T, v);
1451 /* push long long constant */
/* push long long constant */
1452 static inline void vpushll(long long v)
1454 vpush64(VT_LLONG, v);
1457 ST_FUNC void vset(CType *type, int r, int v)
1459 CValue cval;
1460 cval.i = v;
1461 vsetc(type, r, &cval);
1464 static void vseti(int r, int v)
1466 CType type;
1467 type.t = VT_INT;
1468 type.ref = NULL;
1469 vset(&type, r, v);
/* push a copy of the SValue 'v' (no vcheck_cmp: used to duplicate vtop) */
1472 ST_FUNC void vpushv(SValue *v)
1474 if (vtop >= vstack + (VSTACK_SIZE - 1))
1475 tcc_error("memory full (vstack)");
1476 vtop++;
1477 *vtop = *v;
/* duplicate the top of the value stack */
1480 static void vdup(void)
1482 vpushv(vtop);
1485 /* rotate n first stack elements to the bottom
1486 I1 ... In -> I2 ... In I1 [top is right]
1488 ST_FUNC void vrotb(int n)
1490 int i;
1491 SValue tmp;
1493 vcheck_cmp();
1494 tmp = vtop[-n + 1];
1495 for(i=-n+1;i!=0;i++)
1496 vtop[i] = vtop[i+1];
1497 vtop[0] = tmp;
1500 /* rotate the n elements before entry e towards the top
1501 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1503 ST_FUNC void vrote(SValue *e, int n)
1505 int i;
1506 SValue tmp;
1508 vcheck_cmp();
1509 tmp = *e;
1510 for(i = 0;i < n - 1; i++)
1511 e[-i] = e[-i - 1];
1512 e[-n + 1] = tmp;
1515 /* rotate n first stack elements to the top
1516 I1 ... In -> In I1 ... I(n-1) [top is right]
/* rotate n first stack elements to the top
   I1 ... In -> In I1 ... I(n-1)  [top is right] */
1518 ST_FUNC void vrott(int n)
1520 vrote(vtop, n);
1523 /* ------------------------------------------------------------------------- */
1524 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1526 /* called from generators to set the result from relational ops */
/* called from generators to set the result from relational ops:
   mark vtop as a comparison with operator 'op' and empty jump chains */
1527 ST_FUNC void vset_VT_CMP(int op)
1529 vtop->r = VT_CMP;
1530 vtop->cmp_op = op;
1531 vtop->jfalse = 0;
1532 vtop->jtrue = 0;
1535 /* called once before asking generators to load VT_CMP to a register */
/* called once before asking generators to load VT_CMP to a register */
1536 static void vset_VT_JMP(void)
1538 int op = vtop->cmp_op;
/* with pending jump chains the value must come from the jump targets */
1540 if (vtop->jtrue || vtop->jfalse) {
1541 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1542 int inv = op & (op < 2); /* small optimization */
1543 vseti(VT_JMP+inv, gvtst(inv, 0));
1544 } else {
1545 /* otherwise convert flags (rsp. 0/1) to register */
1546 vtop->c.i = op;
1547 if (op < 2) /* doesn't seem to happen */
1548 vtop->r = VT_CONST;
1552 /* Set CPU Flags, doesn't yet jump */
/* Set CPU Flags, doesn't yet jump: ensure vtop is a VT_CMP and append
   't' to the appropriate (true or false, per 'inv') jump chain */
1553 static void gvtst_set(int inv, int t)
1555 int *p;
/* not a comparison yet: compare against 0 to get one */
1557 if (vtop->r != VT_CMP) {
1558 vpushi(0);
1559 gen_op(TOK_NE);
1560 if (vtop->r != VT_CMP) /* must be VT_CONST then */
1561 vset_VT_CMP(vtop->c.i != 0);
1564 p = inv ? &vtop->jfalse : &vtop->jtrue;
1565 *p = gjmp_append(*p, t);
1568 /* Generate value test
1570 * Generate a test for any value (jump, comparison and integers) */
/* Generate value test

 * Generate a test for any value (jump, comparison and integers) and
   return the resulting jump chain; 'inv' inverts the sense of the test */
1571 static int gvtst(int inv, int t)
1573 int op, x, u;
1575 gvtst_set(inv, t);
1576 t = vtop->jtrue, u = vtop->jfalse;
/* inverted test: swap the roles of the two chains */
1577 if (inv)
1578 x = u, u = t, t = x;
1579 op = vtop->cmp_op;
1581 /* jump to the wanted target */
1582 if (op > 1)
1583 t = gjmp_cond(op ^ inv, t);
1584 else if (op != inv)
1585 t = gjmp(t);
1586 /* resolve complementary jumps to here */
1587 gsym(u);
1589 vtop--;
1590 return t;
1593 /* generate a zero or nozero test */
/* generate a zero or nozero test (op is TOK_EQ or TOK_NE) */
1594 static void gen_test_zero(int op)
/* when vtop is already a comparison, "== 0" is just the inverse
   comparison: swap the jump chains and flip the low condition bit */
1596 if (vtop->r == VT_CMP) {
1597 int j;
1598 if (op == TOK_EQ) {
1599 j = vtop->jfalse;
1600 vtop->jfalse = vtop->jtrue;
1601 vtop->jtrue = j;
1602 vtop->cmp_op ^= 1;
1604 } else {
1605 vpushi(0);
1606 gen_op(op);
1610 /* ------------------------------------------------------------------------- */
1611 /* push a symbol value of TYPE */
/* push a symbol value of TYPE (constant address of 'sym', offset 0) */
1612 ST_FUNC void vpushsym(CType *type, Sym *sym)
1614 CValue cval;
1615 cval.i = 0;
1616 vsetc(type, VT_CONST | VT_SYM, &cval);
1617 vtop->sym = sym;
1620 /* Return a static symbol pointing to a section */
1621 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1623 int v;
1624 Sym *sym;
1626 v = anon_sym++;
1627 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1628 sym->type.t |= VT_STATIC;
1629 put_extern_sym(sym, sec, offset, size);
1630 return sym;
1633 /* push a reference to a section offset by adding a dummy symbol */
/* push a reference to a section offset by adding a dummy symbol */
1634 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1636 vpushsym(type, get_sym_ref(type, sec, offset, size));
1639 /* define a new external reference to a symbol 'v' of type 'u' */
/* define a new external reference to a symbol 'v' of type 'type' */
1640 ST_FUNC Sym *external_global_sym(int v, CType *type)
1642 Sym *s;
1644 s = sym_find(v);
1645 if (!s) {
1646 /* push forward reference */
1647 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1648 s->type.ref = type->ref;
/* an asm-only symbol now gets its real C type */
1649 } else if (IS_ASM_SYM(s)) {
1650 s->type.t = type->t | (s->type.t & VT_EXTERN);
1651 s->type.ref = type->ref;
1652 update_storage(s);
1654 return s;
1657 /* create an external reference with no specific type similar to asm labels.
1658 This avoids type conflicts if the symbol is used from C too */
/* create an external reference with no specific type similar to asm labels.
   This avoids type conflicts if the symbol is used from C too */
1659 ST_FUNC Sym *external_helper_sym(int v)
1661 CType ct = { VT_ASM_FUNC, NULL };
1662 return external_global_sym(v, &ct);
1665 /* push a reference to an helper function (such as memmove) */
/* push a reference to an helper function (such as memmove) */
1666 ST_FUNC void vpush_helper_func(int v)
1668 vpushsym(&func_old_type, external_helper_sym(v));
1671 /* Merge symbol attributes. */
1672 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1674 if (sa1->aligned && !sa->aligned)
1675 sa->aligned = sa1->aligned;
1676 sa->packed |= sa1->packed;
1677 sa->weak |= sa1->weak;
1678 if (sa1->visibility != STV_DEFAULT) {
1679 int vis = sa->visibility;
1680 if (vis == STV_DEFAULT
1681 || vis > sa1->visibility)
1682 vis = sa1->visibility;
1683 sa->visibility = vis;
1685 sa->dllexport |= sa1->dllexport;
1686 sa->nodecorate |= sa1->nodecorate;
1687 sa->dllimport |= sa1->dllimport;
1690 /* Merge function attributes. */
1691 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1693 if (fa1->func_call && !fa->func_call)
1694 fa->func_call = fa1->func_call;
1695 if (fa1->func_type && !fa->func_type)
1696 fa->func_type = fa1->func_type;
1697 if (fa1->func_args && !fa->func_args)
1698 fa->func_args = fa1->func_args;
1699 if (fa1->func_noreturn)
1700 fa->func_noreturn = 1;
1701 if (fa1->func_ctor)
1702 fa->func_ctor = 1;
1703 if (fa1->func_dtor)
1704 fa->func_dtor = 1;
1707 /* Merge attributes. */
1708 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1710 merge_symattr(&ad->a, &ad1->a);
1711 merge_funcattr(&ad->f, &ad1->f);
1713 if (ad1->section)
1714 ad->section = ad1->section;
1715 if (ad1->alias_target)
1716 ad->alias_target = ad1->alias_target;
1717 if (ad1->asm_label)
1718 ad->asm_label = ad1->asm_label;
1719 if (ad1->attr_mode)
1720 ad->attr_mode = ad1->attr_mode;
1723 /* Merge some type attributes. */
/* Merge some type attributes when 'sym' is redeclared with 'type'. */
1724 static void patch_type(Sym *sym, CType *type)
/* a non-extern redeclaration is a definition: the previous one must
   have been extern (or an enum value), and extern is dropped now */
1726 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1727 if (!(sym->type.t & VT_EXTERN))
1728 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1729 sym->type.t &= ~VT_EXTERN;
1732 if (IS_ASM_SYM(sym)) {
1733 /* stay static if both are static */
1734 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1735 sym->type.ref = type->ref;
1738 if (!is_compatible_types(&sym->type, type)) {
1739 tcc_error("incompatible types for redefinition of '%s'",
1740 get_tok_str(sym->v, NULL));
1742 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1743 int static_proto = sym->type.t & VT_STATIC;
1744 /* warn if static follows non-static function declaration */
1745 if ((type->t & VT_STATIC) && !static_proto
1746 /* XXX this test for inline shouldn't be here. Until we
1747 implement gnu-inline mode again it silences a warning for
1748 mingw caused by our workarounds. */
1749 && !((type->t | sym->type.t) & VT_INLINE))
1750 tcc_warning("static storage ignored for redefinition of '%s'",
1751 get_tok_str(sym->v, NULL))
1753 /* set 'inline' if both agree or if one has static */
1754 if ((type->t | sym->type.t) & VT_INLINE) {
1755 if (!((type->t ^ sym->type.t) & VT_INLINE)
1756 || ((type->t | sym->type.t) & VT_STATIC))
1757 static_proto |= VT_INLINE;
/* function definition (not just a prototype) */
1760 if (0 == (type->t & VT_EXTERN)) {
1761 struct FuncAttr f = sym->type.ref->f;
1762 /* put complete type, use static from prototype */
1763 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1764 sym->type.ref = type->ref;
1765 merge_funcattr(&sym->type.ref->f, &f);
1766 } else {
1767 sym->type.t &= ~VT_INLINE | static_proto;
/* prefer a prototype with real parameter info over a K&R one */
1770 if (sym->type.ref->f.func_type == FUNC_OLD
1771 && type->ref->f.func_type != FUNC_OLD) {
1772 sym->type.ref = type->ref;
1775 } else {
1776 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1777 /* set array size if it was omitted in extern declaration */
1778 sym->type.ref->c = type->ref->c;
1780 if ((type->t ^ sym->type.t) & VT_STATIC)
1781 tcc_warning("storage mismatch for redefinition of '%s'",
1782 get_tok_str(sym->v, NULL));
1786 /* Merge some storage attributes. */
/* Merge some storage attributes when 'sym' is redeclared; 'type' may be
   NULL when only attributes are being merged. */
1787 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1789 if (type)
1790 patch_type(sym, type);
1792 #ifdef TCC_TARGET_PE
1793 if (sym->a.dllimport != ad->a.dllimport)
1794 tcc_error("incompatible dll linkage for redefinition of '%s'",
1795 get_tok_str(sym->v, NULL));
1796 #endif
1797 merge_symattr(&sym->a, &ad->a);
1798 if (ad->asm_label)
1799 sym->asm_label = ad->asm_label;
1800 update_storage(sym);
1803 /* copy sym to other stack */
/* copy sym to other stack: duplicate 's0' and push the copy on *ps,
   also relinking it in the token array when it has a real name */
1804 static Sym *sym_copy(Sym *s0, Sym **ps)
1806 Sym *s;
1807 s = sym_malloc(), *s = *s0;
1808 s->prev = *ps, *ps = s;
/* anonymous symbols are not recorded in the token array */
1809 if (s->v < SYM_FIRST_ANOM) {
1810 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1811 s->prev_tok = *ps, *ps = s;
1813 return s;
1816 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
/* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR: deep-copies the
   whole ->next chain, rebuilding it from the copies */
1817 static void sym_copy_ref(Sym *s, Sym **ps)
1819 int bt = s->type.t & VT_BTYPE;
1820 if (bt == VT_FUNC || bt == VT_PTR) {
1821 Sym **sp = &s->type.ref;
1822 for (s = *sp, *sp = NULL; s; s = s->next) {
1823 Sym *s2 = sym_copy(s, ps);
/* append the copy and recurse into its own ref chain */
1824 sp = &(*sp = s2)->next;
1825 sym_copy_ref(s2, ps);
1830 /* define a new external reference to a symbol 'v' */
/* define a new external reference to a symbol 'v' */
1831 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1833 Sym *s;
1835 /* look for global symbol */
1836 s = sym_find(v);
/* skip any local declarations shadowing the global one */
1837 while (s && s->sym_scope)
1838 s = s->prev_tok;
1840 if (!s) {
1841 /* push forward reference */
1842 s = global_identifier_push(v, type->t, 0);
1843 s->r |= r;
1844 s->a = ad->a;
1845 s->asm_label = ad->asm_label;
1846 s->type.ref = type->ref;
1847 /* copy type to the global stack */
1848 if (local_stack)
1849 sym_copy_ref(s, &global_stack);
1850 } else {
1851 patch_storage(s, ad, type);
1853 /* push variables on local_stack if any */
1854 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1855 s = sym_copy(s, &local_stack);
1856 return s;
1859 /* save registers up to (vtop - n) stack entry */
1860 ST_FUNC void save_regs(int n)
1862 SValue *p, *p1;
1863 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1864 save_reg(p->r);
1867 /* save r to the memory stack, and mark it as being free */
/* save r to the memory stack, and mark it as being free */
1868 ST_FUNC void save_reg(int r)
1870 save_reg_upstack(r, 0);
1873 /* save r to the memory stack, and mark it as being free,
1874 if seen up to (vtop - n) stack entry */
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry */
1875 ST_FUNC void save_reg_upstack(int r, int n)
1877 int l, size, align, bt;
1878 SValue *p, *p1, sv;
/* ignore non-register storage classes */
1880 if ((r &= VT_VALMASK) >= VT_CONST)
1881 return;
1882 if (nocode_wanted)
1883 return;
/* l == 0 until a stack slot has been allocated for this register */
1884 l = 0;
1885 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1886 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1887 /* must save value on stack if not already done */
1888 if (!l) {
1889 bt = p->type.t & VT_BTYPE;
1890 if (bt == VT_VOID)
1891 continue;
/* an lvalue holds an address, a function value decays to one */
1892 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1893 bt = VT_PTR;
1894 sv.type.t = bt;
1895 size = type_size(&sv.type, &align);
1896 l = get_temp_local_var(size,align);
1897 sv.r = VT_LOCAL | VT_LVAL;
1898 sv.c.i = l;
1899 store(p->r & VT_VALMASK, &sv);
1900 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1901 /* x86 specific: need to pop fp register ST0 if saved */
1902 if (r == TREG_ST0) {
1903 o(0xd8dd); /* fstp %st(0) */
1905 #endif
1906 /* special long long case */
1907 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1908 sv.c.i += PTR_SIZE;
1909 store(p->r2, &sv);
1912 /* mark that stack entry as being saved on the stack */
1913 if (p->r & VT_LVAL) {
1914 /* also clear the bounded flag because the
1915 relocation address of the function was stored in
1916 p->c.i */
1917 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1918 } else {
1919 p->r = VT_LVAL | VT_LOCAL;
/* the value now lives in memory: forget symbol/second register */
1921 p->sym = NULL;
1922 p->r2 = VT_CONST;
1923 p->c.i = l;
1928 #ifdef TCC_TARGET_ARM
1929 /* find a register of class 'rc2' with at most one reference on stack.
1930 * If none, call get_reg(rc) */
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
1931 ST_FUNC int get_reg_ex(int rc, int rc2)
1933 int r;
1934 SValue *p;
1936 for(r=0;r<NB_REGS;r++) {
1937 if (reg_classes[r] & rc2) {
1938 int n;
/* count how many stack entries currently reference r */
1939 n=0;
1940 for(p = vstack; p <= vtop; p++) {
1941 if ((p->r & VT_VALMASK) == r ||
1942 p->r2 == r)
1943 n++;
1945 if (n <= 1)
1946 return r;
/* no lightly-used rc2 register: fall back to a plain allocation */
1949 return get_reg(rc);
1951 #endif
1953 /* find a free register of class 'rc'. If none, save one register */
/* find a free register of class 'rc'. If none, save one register */
1954 ST_FUNC int get_reg(int rc)
1956 int r;
1957 SValue *p;
1959 /* find a free register */
1960 for(r=0;r<NB_REGS;r++) {
1961 if (reg_classes[r] & rc) {
/* without code generation any register will do */
1962 if (nocode_wanted)
1963 return r;
1964 for(p=vstack;p<=vtop;p++) {
1965 if ((p->r & VT_VALMASK) == r ||
1966 p->r2 == r)
1967 goto notfound;
1969 return r;
1971 notfound: ;
1974 /* no register left : free the first one on the stack (VERY
1975 IMPORTANT to start from the bottom to ensure that we don't
1976 spill registers used in gen_opi()) */
1977 for(p=vstack;p<=vtop;p++) {
1978 /* look at second register (if long long) */
1979 r = p->r2;
1980 if (r < VT_CONST && (reg_classes[r] & rc))
1981 goto save_found;
1982 r = p->r & VT_VALMASK;
1983 if (r < VT_CONST && (reg_classes[r] & rc)) {
1984 save_found:
1985 save_reg(r);
1986 return r;
1989 /* Should never comes here */
1990 return -1;
1993 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
/* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1994 static int get_temp_local_var(int size,int align){
1995 int i;
1996 struct temp_local_variable *temp_var;
1997 int found_var;
1998 SValue *p;
1999 int r;
2000 char free;
2001 char found;
2002 found=0;
/* first try to reuse an already allocated slot of suitable size/align */
2003 for(i=0;i<nb_temp_local_vars;i++){
2004 temp_var=&arr_temp_local_vars[i];
2005 if(temp_var->size<size||align!=temp_var->align){
2006 continue;
2008 /*check if temp_var is free*/
/* a slot is in use when any value-stack entry points at its offset */
2009 free=1;
2010 for(p=vstack;p<=vtop;p++) {
2011 r=p->r&VT_VALMASK;
2012 if(r==VT_LOCAL||r==VT_LLOCAL){
2013 if(p->c.i==temp_var->location){
2014 free=0;
2015 break;
2019 if(free){
2020 found_var=temp_var->location;
2021 found=1;
2022 break;
2025 if(!found){
/* carve a new aligned slot out of the function's local frame */
2026 loc = (loc - size) & -align;
/* remember it for reuse, unless the bookkeeping table is full */
2027 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
2028 temp_var=&arr_temp_local_vars[i];
2029 temp_var->location=loc;
2030 temp_var->size=size;
2031 temp_var->align=align;
2032 nb_temp_local_vars++;
2034 found_var=loc;
2036 return found_var;
2039 static void clear_temp_local_var_list(){
2040 nb_temp_local_vars=0;
2043 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
2044 if needed */
2045 static void move_reg(int r, int s, int t)
2047 SValue sv;
2049 if (r != s) {
2050 save_reg(r);
2051 sv.type.t = t;
2052 sv.type.ref = NULL;
2053 sv.r = s;
2054 sv.c.i = 0;
2055 load(r, &sv);
2059 /* get address of vtop (vtop MUST BE an lvalue) */
/* get address of vtop (vtop MUST BE an lvalue) */
2060 ST_FUNC void gaddrof(void)
2062 vtop->r &= ~VT_LVAL;
2063 /* tricky: if saved lvalue, then we can go back to lvalue */
2064 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
2065 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
2068 #ifdef CONFIG_TCC_BCHECK
2069 /* generate a bounded pointer addition */
/* generate a bounded pointer addition: calls __bound_ptr_add on the
   two top stack entries and leaves the checked pointer in REG_IRET */
2070 static void gen_bounded_ptr_add(void)
/* a VT_LOCAL base must survive the call: keep a copy of it */
2072 int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
2073 if (save) {
2074 vpushv(&vtop[-1]);
2075 vrott(3);
2077 vpush_helper_func(TOK___bound_ptr_add);
2078 vrott(3);
2079 gfunc_call(2);
/* drop the saved copy again (save is 0 or 1) */
2080 vtop -= save;
2081 vpushi(0);
2082 /* returned pointer is in REG_IRET */
2083 vtop->r = REG_IRET | VT_BOUNDED;
2084 if (nocode_wanted)
2085 return;
2086 /* relocation offset of the bounding function call point */
2087 vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
2090 /* patch pointer addition in vtop so that pointer dereferencing is
2091 also tested */
/* patch pointer addition in vtop so that pointer dereferencing is
   also tested */
2092 static void gen_bounded_ptr_deref(void)
2094 addr_t func;
2095 int size, align;
2096 ElfW_Rel *rel;
2097 Sym *sym;
2099 if (nocode_wanted)
2100 return;
/* pick the __bound_ptr_indirN helper matching the access size */
2102 size = type_size(&vtop->type, &align);
2103 switch(size) {
2104 case 1: func = TOK___bound_ptr_indir1; break;
2105 case 2: func = TOK___bound_ptr_indir2; break;
2106 case 4: func = TOK___bound_ptr_indir4; break;
2107 case 8: func = TOK___bound_ptr_indir8; break;
2108 case 12: func = TOK___bound_ptr_indir12; break;
2109 case 16: func = TOK___bound_ptr_indir16; break;
2110 default:
2111 /* may happen with struct member access */
2112 return;
2114 sym = external_helper_sym(func);
2115 if (!sym->c)
2116 put_extern_sym(sym, NULL, 0, 0);
2117 /* patch relocation */
2118 /* XXX: find a better solution ? */
/* vtop->c.i was set by gen_bounded_ptr_add to the reloc offset */
2119 rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
2120 rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
2123 /* generate lvalue bound code */
/* generate lvalue bound code */
2124 static void gbound(void)
2126 CType type1;
2128 vtop->r &= ~VT_MUSTBOUND;
2129 /* if lvalue, then use checking code before dereferencing */
2130 if (vtop->r & VT_LVAL) {
2131 /* if not VT_BOUNDED value, then make one */
2132 if (!(vtop->r & VT_BOUNDED)) {
2133 /* must save type because we must set it to int to get pointer */
2134 type1 = vtop->type;
2135 vtop->type.t = VT_PTR;
2136 gaddrof();
/* bound-check address + 0 */
2137 vpushi(0);
2138 gen_bounded_ptr_add();
2139 vtop->r |= VT_LVAL;
2140 vtop->type = type1;
2142 /* then check for dereferencing */
2143 gen_bounded_ptr_deref();
2147 /* we need to call __bound_ptr_add before we start to load function
2148 args into registers */
/* we need to call __bound_ptr_add before we start to load function
   args into registers */
2149 ST_FUNC void gbound_args(int nb_args)
2151 int i, v;
2152 SValue *sv;
/* bound-check each argument that still needs it, rotating it to the
   top of the stack and back */
2154 for (i = 1; i <= nb_args; ++i)
2155 if (vtop[1 - i].r & VT_MUSTBOUND) {
2156 vrotb(i);
2157 gbound();
2158 vrott(i);
/* sv is the called function's symbol entry */
2161 sv = vtop - nb_args;
2162 if (sv->r & VT_SYM) {
2163 v = sv->sym->v;
/* setjmp-family calls need the bounds runtime to be informed */
2164 if (v == TOK_setjmp
2165 || v == TOK__setjmp
2166 #ifndef TCC_TARGET_PE
2167 || v == TOK_sigsetjmp
2168 || v == TOK___sigsetjmp
2169 #endif
2171 vpush_helper_func(TOK___bound_setjmp);
2172 vpushv(sv + 1);
2173 gfunc_call(1);
2174 func_bound_add_epilog = 1;
2176 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
2177 if (v == TOK_alloca)
2178 func_bound_add_epilog = 1;
2179 #endif
2180 #if TARGETOS_NetBSD
2181 if (v == TOK_longjmp) /* undo rename to __longjmp14 */
2182 sv->sym->asm_label = TOK___bound_longjmp;
2183 #endif
2187 /* Add bounds for local symbols from S to E (via ->prev) */
/* Add bounds for local symbols from S to E (via ->prev) */
2188 static void add_local_bounds(Sym *s, Sym *e)
2190 for (; s != e; s = s->prev) {
/* skip anonymous symbols and anything not stored on the stack frame */
2191 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
2192 continue;
2193 /* Add arrays/structs/unions because we always take address */
2194 if ((s->type.t & VT_ARRAY)
2195 || (s->type.t & VT_BTYPE) == VT_STRUCT
2196 || s->a.addrtaken) {
2197 /* add local bound info */
/* each entry is a (frame offset, size) pair in lbounds_section */
2198 int align, size = type_size(&s->type, &align);
2199 addr_t *bounds_ptr = section_ptr_add(lbounds_section,
2200 2 * sizeof(addr_t));
2201 bounds_ptr[0] = s->c;
2202 bounds_ptr[1] = size;
2206 #endif
2208 /* Wrapper around sym_pop, that potentially also registers local bounds. */
/* Wrapper around sym_pop, that potentially also registers local bounds. */
2209 static void pop_local_syms(Sym *b, int keep)
2211 #ifdef CONFIG_TCC_BCHECK
2212 if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
2213 add_local_bounds(local_stack, b);
2214 #endif
/* emit debug info for the symbols about to go out of scope */
2215 if (debug_modes)
2216 tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
2217 sym_pop(&local_stack, b, keep);
/* advance the bitfield address on vtop by 'o' bytes and turn it back
   into an unsigned byte lvalue (helper for packed bitfield access) */
2220 static void incr_bf_adr(int o)
2222 vtop->type = char_pointer_type;
2223 gaddrof();
2224 vpushs(o);
2225 gen_op('+');
2226 vtop->type.t = VT_BYTE | VT_UNSIGNED;
2227 vtop->r |= VT_LVAL;
2230 /* single-byte load mode for packed or otherwise unaligned bitfields */
/* single-byte load mode for packed or otherwise unaligned bitfields.
   Stack comments name the entries: B = byte lvalue, X = accumulator,
   Y = extracted byte value; top is right. */
2231 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
2233 int n, o, bits;
2234 save_reg_upstack(vtop->r, 1);
2235 vpush64(type->t & VT_BTYPE, 0); // B X
/* split bit_pos into byte offset o and in-byte position */
2236 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
2237 do {
2238 vswap(); // X B
2239 incr_bf_adr(o);
2240 vdup(); // X B B
/* n = number of bits taken from the current byte */
2241 n = 8 - bit_pos;
2242 if (n > bit_size)
2243 n = bit_size;
2244 if (bit_pos)
2245 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
2246 if (n < 8)
2247 vpushi((1 << n) - 1), gen_op('&');
2248 gen_cast(type);
2249 if (bits)
2250 vpushi(bits), gen_op(TOK_SHL);
2251 vrotb(3); // B Y X
2252 gen_op('|'); // B X
2253 bits += n, bit_size -= n, o = 1;
2254 } while (bit_size);
2255 vswap(), vpop();
/* sign-extend the result for signed bitfields */
2256 if (!(type->t & VT_UNSIGNED)) {
2257 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
2258 vpushi(n), gen_op(TOK_SHL);
2259 vpushi(n), gen_op(TOK_SAR);
2263 /* single-byte store mode for packed or otherwise unaligned bitfields */
/* single-byte store mode for packed or otherwise unaligned bitfields.
   Stack comments: X = value being stored, B = byte lvalue, V = byte
   chunk of the value; top is right. */
2264 static void store_packed_bf(int bit_pos, int bit_size)
2266 int bits, n, o, m, c;
/* c: value is a plain constant, so vdup is enough to copy it */
2267 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2268 vswap(); // X B
2269 save_reg_upstack(vtop->r, 1);
2270 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
2271 do {
2272 incr_bf_adr(o); // X B
2273 vswap(); //B X
2274 c ? vdup() : gv_dup(); // B V X
2275 vrott(3); // X B V
2276 if (bits)
2277 vpushi(bits), gen_op(TOK_SHR);
2278 if (bit_pos)
2279 vpushi(bit_pos), gen_op(TOK_SHL);
2280 n = 8 - bit_pos;
2281 if (n > bit_size)
2282 n = bit_size;
/* partial byte: merge new bits with the preserved ones */
2283 if (n < 8) {
2284 m = ((1 << n) - 1) << bit_pos;
2285 vpushi(m), gen_op('&'); // X B V1
2286 vpushv(vtop-1); // X B V1 B
2287 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
2288 gen_op('&'); // X B V1 B1
2289 gen_op('|'); // X B V2
2291 vdup(), vtop[-1] = vtop[-2]; // X B B V2
2292 vstore(), vpop(); // X B
2293 bits += n, bit_size -= n, bit_pos = 0, o = 1;
2294 } while (bit_size);
2295 vpop(), vpop();
/* adjust the type of a bitfield SValue according to the auxiliary type
   recorded in its ref; returns that auxtype (VT_STRUCT means the
   packed single-byte access path must be used), or 0 without a ref */
2298 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
2300 int t;
2301 if (0 == sv->type.ref)
2302 return 0;
2303 t = sv->type.ref->auxtype;
2304 if (t != -1 && t != VT_STRUCT) {
/* access the field through the recorded alternative base type */
2305 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
2306 sv->r |= VT_LVAL;
2308 return t;
2311 /* store vtop a register belonging to class 'rc'. lvalues are
2312 converted to values. Cannot be used if cannot be converted to
2313 register value (such as structures). */
2314 ST_FUNC int gv(int rc)
2316 int r, r2, r_ok, r2_ok, rc2, bt;
2317 int bit_pos, bit_size, size, align;
2319 /* NOTE: get_reg can modify vstack[] */
/* bitfields are first extracted into a plain integer value, then gv()
   recurses on the result */
2320 if (vtop->type.t & VT_BITFIELD) {
2321 CType type;
2323 bit_pos = BIT_POS(vtop->type.t);
2324 bit_size = BIT_SIZE(vtop->type.t);
2325 /* remove bit field info to avoid loops */
2326 vtop->type.t &= ~VT_STRUCT_MASK;
2328 type.ref = NULL;
2329 type.t = vtop->type.t & VT_UNSIGNED;
/* _Bool bitfields behave as unsigned */
2330 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
2331 type.t |= VT_UNSIGNED;
2333 r = adjust_bf(vtop, bit_pos, bit_size);
2335 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2336 type.t |= VT_LLONG;
2337 else
2338 type.t |= VT_INT;
/* VT_STRUCT auxtype: unaligned field, use byte-wise load */
2340 if (r == VT_STRUCT) {
2341 load_packed_bf(&type, bit_pos, bit_size);
2342 } else {
2343 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
2344 /* cast to int to propagate signedness in following ops */
2345 gen_cast(&type);
2346 /* generate shifts */
2347 vpushi(bits - (bit_pos + bit_size));
2348 gen_op(TOK_SHL);
2349 vpushi(bits - bit_size);
2350 /* NOTE: transformed to SHR if unsigned */
2351 gen_op(TOK_SAR);
2353 r = gv(rc);
2354 } else {
2355 if (is_float(vtop->type.t) &&
2356 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
2357 /* CPUs usually cannot use float constants, so we store them
2358 generically in data segment */
2359 init_params p = { data_section };
2360 unsigned long offset;
2361 size = type_size(&vtop->type, &align);
2362 if (NODATA_WANTED)
2363 size = 0, align = 1;
2364 offset = section_add(p.sec, size, align);
2365 vpush_ref(&vtop->type, p.sec, offset, size);
2366 vswap();
2367 init_putv(&p, &vtop->type, offset);
2368 vtop->r |= VT_LVAL;
2370 #ifdef CONFIG_TCC_BCHECK
2371 if (vtop->r & VT_MUSTBOUND)
2372 gbound();
2373 #endif
2375 bt = vtop->type.t & VT_BTYPE;
2377 #ifdef TCC_TARGET_RISCV64
2378 /* XXX mega hack */
2379 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
2380 rc = RC_INT;
2381 #endif
/* rc2 is the class for a second register when the type needs two */
2382 rc2 = RC2_TYPE(bt, rc);
2384 /* need to reload if:
2385 - constant
2386 - lvalue (need to dereference pointer)
2387 - already a register, but not in the right class */
2388 r = vtop->r & VT_VALMASK;
2389 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
2390 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
2392 if (!r_ok || !r2_ok) {
2393 if (!r_ok)
2394 r = get_reg(rc);
2395 if (rc2) {
2396 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
2397 int original_type = vtop->type.t;
2399 /* two register type load :
2400 expand to two words temporarily */
2401 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
2402 /* load constant */
2403 unsigned long long ll = vtop->c.i;
2404 vtop->c.i = ll; /* first word */
2405 load(r, vtop);
2406 vtop->r = r; /* save register value */
2407 vpushi(ll >> 32); /* second word */
2408 } else if (vtop->r & VT_LVAL) {
/* We do not want to modify the long long pointer here.
   So we save any other instances down the stack */
2411 save_reg_upstack(vtop->r, 1);
2412 /* load from memory */
2413 vtop->type.t = load_type;
2414 load(r, vtop);
2415 vdup();
2416 vtop[-1].r = r; /* save register value */
2417 /* increment pointer to get second word */
2418 vtop->type.t = VT_PTRDIFF_T;
2419 gaddrof();
2420 vpushs(PTR_SIZE);
2421 gen_op('+');
2422 vtop->r |= VT_LVAL;
2423 vtop->type.t = load_type;
2424 } else {
2425 /* move registers */
2426 if (!r_ok)
2427 load(r, vtop);
2428 if (r2_ok && vtop->r2 < VT_CONST)
2429 goto done;
2430 vdup();
2431 vtop[-1].r = r; /* save register value */
2432 vtop->r = vtop[-1].r2;
2434 /* Allocate second register. Here we rely on the fact that
2435 get_reg() tries first to free r2 of an SValue. */
2436 r2 = get_reg(rc2);
2437 load(r2, vtop);
2438 vpop();
2439 /* write second register */
2440 vtop->r2 = r2;
2441 done:
2442 vtop->type.t = original_type;
2443 } else {
/* a pending comparison must become a 0/1 value first */
2444 if (vtop->r == VT_CMP)
2445 vset_VT_JMP();
2446 /* one register type load */
2447 load(r, vtop);
2450 vtop->r = r;
2451 #ifdef TCC_TARGET_C67
2452 /* uses register pairs for doubles */
2453 if (bt == VT_DOUBLE)
2454 vtop->r2 = r+1;
2455 #endif
2457 return r;
2460 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2461 ST_FUNC void gv2(int rc1, int rc2)
2463 /* generate more generic register first. But VT_JMP or VT_CMP
2464 values must be generated first in all cases to avoid possible
2465 reload errors */
2466 if (vtop->r != VT_CMP && rc1 <= rc2) {
2467 vswap();
2468 gv(rc1);
2469 vswap();
2470 gv(rc2);
2471 /* test if reload is needed for first register */
2472 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
2473 vswap();
2474 gv(rc1);
2475 vswap();
2477 } else {
2478 gv(rc2);
2479 vswap();
2480 gv(rc1);
2481 vswap();
2482 /* test if reload is needed for first register */
2483 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
2484 gv(rc2);
#if PTR_SIZE == 4
/* expand 64bit on stack in two ints */
ST_FUNC void lexpand(void)
{
    int u, v;
    u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
    v = vtop->r & (VT_VALMASK | VT_LVAL);
    if (v == VT_CONST) {
        /* constant: duplicate and shift to get the high word */
        vdup();
        vtop[0].c.i >>= 32;
    } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
        /* lvalue at a known address: second word is 4 bytes further */
        vdup();
        vtop[0].c.i += 4;
    } else {
        /* in registers: split the register pair into two entries */
        gv(RC_INT);
        vdup();
        vtop[0].r = vtop[-1].r2;
        vtop[0].r2 = vtop[-1].r2 = VT_CONST;
    }
    vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
}
#endif
#if PTR_SIZE == 4
/* build a long long from two ints */
static void lbuild(int t)
{
    gv2(RC_INT, RC_INT);
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    vpop();
}
#endif
2523 /* convert stack entry to register and duplicate its value in another
2524 register */
2525 static void gv_dup(void)
2527 int t, rc, r;
2529 t = vtop->type.t;
2530 #if PTR_SIZE == 4
2531 if ((t & VT_BTYPE) == VT_LLONG) {
2532 if (t & VT_BITFIELD) {
2533 gv(RC_INT);
2534 t = vtop->type.t;
2536 lexpand();
2537 gv_dup();
2538 vswap();
2539 vrotb(3);
2540 gv_dup();
2541 vrotb(4);
2542 /* stack: H L L1 H1 */
2543 lbuild(t);
2544 vrotb(3);
2545 vrotb(3);
2546 vswap();
2547 lbuild(t);
2548 vswap();
2549 return;
2551 #endif
2552 /* duplicate value */
2553 rc = RC_TYPE(t);
2554 gv(rc);
2555 r = get_reg(rc);
2556 vdup();
2557 load(r, vtop);
2558 vtop->r = r;
#if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_IRE2;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_helper_func(func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
#endif
/* signed 64-bit divide implemented on unsigned operands: divide the
   magnitudes, then restore the sign of the quotient (negative iff the
   operands' signs differ).  Avoids relying on the host's signed
   division for INT64_MIN-style edge cases. */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = a >> 63 ? -a : a;   /* |a| */
    uint64_t ub = b >> 63 ? -b : b;   /* |b| */
    uint64_t q = ua / ub;
    return (a ^ b) >> 63 ? -q : q;
}
/* signed 64-bit "less than" on unsigned operands: flipping the sign
   bit maps the signed ordering onto the unsigned one. */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign = (uint64_t)1 << 63;
    return (a ^ sign) < (b ^ sign);
}
2805 /* handle integer constant optimizations and various machine
2806 independent opt */
2807 static void gen_opic(int op)
2809 SValue *v1 = vtop - 1;
2810 SValue *v2 = vtop;
2811 int t1 = v1->type.t & VT_BTYPE;
2812 int t2 = v2->type.t & VT_BTYPE;
2813 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2814 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2815 uint64_t l1 = c1 ? v1->c.i : 0;
2816 uint64_t l2 = c2 ? v2->c.i : 0;
2817 int shm = (t1 == VT_LLONG) ? 63 : 31;
2819 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2820 l1 = ((uint32_t)l1 |
2821 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2822 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2823 l2 = ((uint32_t)l2 |
2824 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2826 if (c1 && c2) {
2827 switch(op) {
2828 case '+': l1 += l2; break;
2829 case '-': l1 -= l2; break;
2830 case '&': l1 &= l2; break;
2831 case '^': l1 ^= l2; break;
2832 case '|': l1 |= l2; break;
2833 case '*': l1 *= l2; break;
2835 case TOK_PDIV:
2836 case '/':
2837 case '%':
2838 case TOK_UDIV:
2839 case TOK_UMOD:
2840 /* if division by zero, generate explicit division */
2841 if (l2 == 0) {
2842 if (const_wanted && !(nocode_wanted & unevalmask))
2843 tcc_error("division by zero in constant");
2844 goto general_case;
2846 switch(op) {
2847 default: l1 = gen_opic_sdiv(l1, l2); break;
2848 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2849 case TOK_UDIV: l1 = l1 / l2; break;
2850 case TOK_UMOD: l1 = l1 % l2; break;
2852 break;
2853 case TOK_SHL: l1 <<= (l2 & shm); break;
2854 case TOK_SHR: l1 >>= (l2 & shm); break;
2855 case TOK_SAR:
2856 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2857 break;
2858 /* tests */
2859 case TOK_ULT: l1 = l1 < l2; break;
2860 case TOK_UGE: l1 = l1 >= l2; break;
2861 case TOK_EQ: l1 = l1 == l2; break;
2862 case TOK_NE: l1 = l1 != l2; break;
2863 case TOK_ULE: l1 = l1 <= l2; break;
2864 case TOK_UGT: l1 = l1 > l2; break;
2865 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2866 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2867 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2868 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2869 /* logical */
2870 case TOK_LAND: l1 = l1 && l2; break;
2871 case TOK_LOR: l1 = l1 || l2; break;
2872 default:
2873 goto general_case;
2875 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2876 l1 = ((uint32_t)l1 |
2877 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2878 v1->c.i = l1;
2879 vtop--;
2880 } else {
2881 /* if commutative ops, put c2 as constant */
2882 if (c1 && (op == '+' || op == '&' || op == '^' ||
2883 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2884 vswap();
2885 c2 = c1; //c = c1, c1 = c2, c2 = c;
2886 l2 = l1; //l = l1, l1 = l2, l2 = l;
2888 if (!const_wanted &&
2889 c1 && ((l1 == 0 &&
2890 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2891 (l1 == -1 && op == TOK_SAR))) {
2892 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2893 vtop--;
2894 } else if (!const_wanted &&
2895 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2896 (op == '|' &&
2897 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2898 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2899 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2900 if (l2 == 1)
2901 vtop->c.i = 0;
2902 vswap();
2903 vtop--;
2904 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2905 op == TOK_PDIV) &&
2906 l2 == 1) ||
2907 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2908 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2909 l2 == 0) ||
2910 (op == '&' &&
2911 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2912 /* filter out NOP operations like x*1, x-0, x&-1... */
2913 vtop--;
2914 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2915 /* try to use shifts instead of muls or divs */
2916 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2917 int n = -1;
2918 while (l2) {
2919 l2 >>= 1;
2920 n++;
2922 vtop->c.i = n;
2923 if (op == '*')
2924 op = TOK_SHL;
2925 else if (op == TOK_PDIV)
2926 op = TOK_SAR;
2927 else
2928 op = TOK_SHR;
2930 goto general_case;
2931 } else if (c2 && (op == '+' || op == '-') &&
2932 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2933 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2934 /* symbol + constant case */
2935 if (op == '-')
2936 l2 = -l2;
2937 l2 += vtop[-1].c.i;
2938 /* The backends can't always deal with addends to symbols
2939 larger than +-1<<31. Don't construct such. */
2940 if ((int)l2 != l2)
2941 goto general_case;
2942 vtop--;
2943 vtop->c.i = l2;
2944 } else {
2945 general_case:
2946 /* call low level op generator */
2947 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2948 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2949 gen_opl(op);
2950 else
2951 gen_opi(op);
2956 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2957 # define gen_negf gen_opf
2958 #elif defined TCC_TARGET_ARM
2959 void gen_negf(int op)
2961 /* arm will detect 0-x and replace by vneg */
2962 vpushi(0), vswap(), gen_op('-');
2964 #else
2965 /* XXX: implement in gen_opf() for other backends too */
2966 void gen_negf(int op)
2968 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2969 subtract(-0, x), but with them it's really a sign flip
2970 operation. We implement this with bit manipulation and have
2971 to do some type reinterpretation for this, which TCC can do
2972 only via memory. */
2974 int align, size, bt;
2976 size = type_size(&vtop->type, &align);
2977 bt = vtop->type.t & VT_BTYPE;
2978 save_reg(gv(RC_TYPE(bt)));
2979 vdup();
2980 incr_bf_adr(size - 1);
2981 vdup();
2982 vpushi(0x80); /* flip sign */
2983 gen_op('^');
2984 vstore();
2985 vpop();
2987 #endif
2989 /* generate a floating point operation with constant propagation */
2990 static void gen_opif(int op)
2992 int c1, c2;
2993 SValue *v1, *v2;
2994 #if defined _MSC_VER && defined __x86_64__
2995 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2996 volatile
2997 #endif
2998 long double f1, f2;
3000 v1 = vtop - 1;
3001 v2 = vtop;
3002 if (op == TOK_NEG)
3003 v1 = v2;
3005 /* currently, we cannot do computations with forward symbols */
3006 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3007 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3008 if (c1 && c2) {
3009 if (v1->type.t == VT_FLOAT) {
3010 f1 = v1->c.f;
3011 f2 = v2->c.f;
3012 } else if (v1->type.t == VT_DOUBLE) {
3013 f1 = v1->c.d;
3014 f2 = v2->c.d;
3015 } else {
3016 f1 = v1->c.ld;
3017 f2 = v2->c.ld;
3019 /* NOTE: we only do constant propagation if finite number (not
3020 NaN or infinity) (ANSI spec) */
3021 if (!(ieee_finite(f1) || !ieee_finite(f2)) && !const_wanted)
3022 goto general_case;
3023 switch(op) {
3024 case '+': f1 += f2; break;
3025 case '-': f1 -= f2; break;
3026 case '*': f1 *= f2; break;
3027 case '/':
3028 if (f2 == 0.0) {
3029 union { float f; unsigned u; } x1, x2, y;
3030 /* If not in initializer we need to potentially generate
3031 FP exceptions at runtime, otherwise we want to fold. */
3032 if (!const_wanted)
3033 goto general_case;
3034 /* the run-time result of 0.0/0.0 on x87, also of other compilers
3035 when used to compile the f1 /= f2 below, would be -nan */
3036 x1.f = f1, x2.f = f2;
3037 if (f1 == 0.0)
3038 y.u = 0x7fc00000; /* nan */
3039 else
3040 y.u = 0x7f800000; /* infinity */
3041 y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
3042 f1 = y.f;
3043 break;
3045 f1 /= f2;
3046 break;
3047 case TOK_NEG:
3048 f1 = -f1;
3049 goto unary_result;
3050 /* XXX: also handles tests ? */
3051 default:
3052 goto general_case;
3054 vtop--;
3055 unary_result:
3056 /* XXX: overflow test ? */
3057 if (v1->type.t == VT_FLOAT) {
3058 v1->c.f = f1;
3059 } else if (v1->type.t == VT_DOUBLE) {
3060 v1->c.d = f1;
3061 } else {
3062 v1->c.ld = f1;
3064 } else {
3065 general_case:
3066 if (op == TOK_NEG) {
3067 gen_negf(op);
3068 } else {
3069 gen_opf(op);
3074 /* print a type. If 'varstr' is not NULL, then the variable is also
3075 printed in the type */
3076 /* XXX: union */
3077 /* XXX: add array and function pointers */
3078 static void type_to_str(char *buf, int buf_size,
3079 CType *type, const char *varstr)
3081 int bt, v, t;
3082 Sym *s, *sa;
3083 char buf1[256];
3084 const char *tstr;
3086 t = type->t;
3087 bt = t & VT_BTYPE;
3088 buf[0] = '\0';
3090 if (t & VT_EXTERN)
3091 pstrcat(buf, buf_size, "extern ");
3092 if (t & VT_STATIC)
3093 pstrcat(buf, buf_size, "static ");
3094 if (t & VT_TYPEDEF)
3095 pstrcat(buf, buf_size, "typedef ");
3096 if (t & VT_INLINE)
3097 pstrcat(buf, buf_size, "inline ");
3098 if (t & VT_VOLATILE)
3099 pstrcat(buf, buf_size, "volatile ");
3100 if (t & VT_CONSTANT)
3101 pstrcat(buf, buf_size, "const ");
3103 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3104 || ((t & VT_UNSIGNED)
3105 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3106 && !IS_ENUM(t)
3108 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3110 buf_size -= strlen(buf);
3111 buf += strlen(buf);
3113 switch(bt) {
3114 case VT_VOID:
3115 tstr = "void";
3116 goto add_tstr;
3117 case VT_BOOL:
3118 tstr = "_Bool";
3119 goto add_tstr;
3120 case VT_BYTE:
3121 tstr = "char";
3122 goto add_tstr;
3123 case VT_SHORT:
3124 tstr = "short";
3125 goto add_tstr;
3126 case VT_INT:
3127 tstr = "int";
3128 goto maybe_long;
3129 case VT_LLONG:
3130 tstr = "long long";
3131 maybe_long:
3132 if (t & VT_LONG)
3133 tstr = "long";
3134 if (!IS_ENUM(t))
3135 goto add_tstr;
3136 tstr = "enum ";
3137 goto tstruct;
3138 case VT_FLOAT:
3139 tstr = "float";
3140 goto add_tstr;
3141 case VT_DOUBLE:
3142 tstr = "double";
3143 if (!(t & VT_LONG))
3144 goto add_tstr;
3145 case VT_LDOUBLE:
3146 tstr = "long double";
3147 add_tstr:
3148 pstrcat(buf, buf_size, tstr);
3149 break;
3150 case VT_STRUCT:
3151 tstr = "struct ";
3152 if (IS_UNION(t))
3153 tstr = "union ";
3154 tstruct:
3155 pstrcat(buf, buf_size, tstr);
3156 v = type->ref->v & ~SYM_STRUCT;
3157 if (v >= SYM_FIRST_ANOM)
3158 pstrcat(buf, buf_size, "<anonymous>");
3159 else
3160 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3161 break;
3162 case VT_FUNC:
3163 s = type->ref;
3164 buf1[0]=0;
3165 if (varstr && '*' == *varstr) {
3166 pstrcat(buf1, sizeof(buf1), "(");
3167 pstrcat(buf1, sizeof(buf1), varstr);
3168 pstrcat(buf1, sizeof(buf1), ")");
3170 pstrcat(buf1, buf_size, "(");
3171 sa = s->next;
3172 while (sa != NULL) {
3173 char buf2[256];
3174 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3175 pstrcat(buf1, sizeof(buf1), buf2);
3176 sa = sa->next;
3177 if (sa)
3178 pstrcat(buf1, sizeof(buf1), ", ");
3180 if (s->f.func_type == FUNC_ELLIPSIS)
3181 pstrcat(buf1, sizeof(buf1), ", ...");
3182 pstrcat(buf1, sizeof(buf1), ")");
3183 type_to_str(buf, buf_size, &s->type, buf1);
3184 goto no_var;
3185 case VT_PTR:
3186 s = type->ref;
3187 if (t & VT_ARRAY) {
3188 if (varstr && '*' == *varstr)
3189 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3190 else
3191 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3192 type_to_str(buf, buf_size, &s->type, buf1);
3193 goto no_var;
3195 pstrcpy(buf1, sizeof(buf1), "*");
3196 if (t & VT_CONSTANT)
3197 pstrcat(buf1, buf_size, "const ");
3198 if (t & VT_VOLATILE)
3199 pstrcat(buf1, buf_size, "volatile ");
3200 if (varstr)
3201 pstrcat(buf1, sizeof(buf1), varstr);
3202 type_to_str(buf, buf_size, &s->type, buf1);
3203 goto no_var;
3205 if (varstr) {
3206 pstrcat(buf, buf_size, " ");
3207 pstrcat(buf, buf_size, varstr);
3209 no_var: ;
3212 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
3214 char buf1[256], buf2[256];
3215 type_to_str(buf1, sizeof(buf1), st, NULL);
3216 type_to_str(buf2, sizeof(buf2), dt, NULL);
3217 tcc_error(fmt, buf1, buf2);
3220 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
3222 char buf1[256], buf2[256];
3223 type_to_str(buf1, sizeof(buf1), st, NULL);
3224 type_to_str(buf2, sizeof(buf2), dt, NULL);
3225 tcc_warning(fmt, buf1, buf2);
3228 static int pointed_size(CType *type)
3230 int align;
3231 return type_size(pointed_type(type), &align);
3234 static void vla_runtime_pointed_size(CType *type)
3236 int align;
3237 vla_runtime_type_size(pointed_type(type), &align);
3240 static inline int is_null_pointer(SValue *p)
3242 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
3243 return 0;
3244 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
3245 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
3246 ((p->type.t & VT_BTYPE) == VT_PTR &&
3247 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
3248 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
3249 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
3253 /* compare function types. OLD functions match any new functions */
3254 static int is_compatible_func(CType *type1, CType *type2)
3256 Sym *s1, *s2;
3258 s1 = type1->ref;
3259 s2 = type2->ref;
3260 if (s1->f.func_call != s2->f.func_call)
3261 return 0;
3262 if (s1->f.func_type != s2->f.func_type
3263 && s1->f.func_type != FUNC_OLD
3264 && s2->f.func_type != FUNC_OLD)
3265 return 0;
3266 for (;;) {
3267 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
3268 return 0;
3269 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
3270 return 1;
3271 s1 = s1->next;
3272 s2 = s2->next;
3273 if (!s1)
3274 return !s2;
3275 if (!s2)
3276 return 0;
3280 /* return true if type1 and type2 are the same. If unqualified is
3281 true, qualifiers on the types are ignored.
3283 static int compare_types(CType *type1, CType *type2, int unqualified)
3285 int bt1, t1, t2;
3287 t1 = type1->t & VT_TYPE;
3288 t2 = type2->t & VT_TYPE;
3289 if (unqualified) {
3290 /* strip qualifiers before comparing */
3291 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3292 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3295 /* Default Vs explicit signedness only matters for char */
3296 if ((t1 & VT_BTYPE) != VT_BYTE) {
3297 t1 &= ~VT_DEFSIGN;
3298 t2 &= ~VT_DEFSIGN;
3300 /* XXX: bitfields ? */
3301 if (t1 != t2)
3302 return 0;
3304 if ((t1 & VT_ARRAY)
3305 && !(type1->ref->c < 0
3306 || type2->ref->c < 0
3307 || type1->ref->c == type2->ref->c))
3308 return 0;
3310 /* test more complicated cases */
3311 bt1 = t1 & VT_BTYPE;
3312 if (bt1 == VT_PTR) {
3313 type1 = pointed_type(type1);
3314 type2 = pointed_type(type2);
3315 return is_compatible_types(type1, type2);
3316 } else if (bt1 == VT_STRUCT) {
3317 return (type1->ref == type2->ref);
3318 } else if (bt1 == VT_FUNC) {
3319 return is_compatible_func(type1, type2);
3320 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
3321 /* If both are enums then they must be the same, if only one is then
3322 t1 and t2 must be equal, which was checked above already. */
3323 return type1->ref == type2->ref;
3324 } else {
3325 return 1;
3329 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3330 type is stored in DEST if non-null (except for pointer plus/minus) . */
3331 static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
3333 CType *type1 = &op1->type, *type2 = &op2->type, type;
3334 int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
3335 int ret = 1;
3337 type.t = VT_VOID;
3338 type.ref = NULL;
3340 if (bt1 == VT_VOID || bt2 == VT_VOID) {
3341 ret = op == '?' ? 1 : 0;
3342 /* NOTE: as an extension, we accept void on only one side */
3343 type.t = VT_VOID;
3344 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3345 if (op == '+') ; /* Handled in caller */
3346 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3347 /* If one is a null ptr constant the result type is the other. */
3348 else if (is_null_pointer (op2)) type = *type1;
3349 else if (is_null_pointer (op1)) type = *type2;
3350 else if (bt1 != bt2) {
3351 /* accept comparison or cond-expr between pointer and integer
3352 with a warning */
3353 if ((op == '?' || TOK_ISCOND(op))
3354 && (is_integer_btype(bt1) || is_integer_btype(bt2)))
3355 tcc_warning("pointer/integer mismatch in %s",
3356 op == '?' ? "conditional expression" : "comparison");
3357 else if (op != '-' || !is_integer_btype(bt2))
3358 ret = 0;
3359 type = *(bt1 == VT_PTR ? type1 : type2);
3360 } else {
3361 CType *pt1 = pointed_type(type1);
3362 CType *pt2 = pointed_type(type2);
3363 int pbt1 = pt1->t & VT_BTYPE;
3364 int pbt2 = pt2->t & VT_BTYPE;
3365 int newquals, copied = 0;
3366 if (pbt1 != VT_VOID && pbt2 != VT_VOID
3367 && !compare_types(pt1, pt2, 1/*unqualif*/)) {
3368 if (op != '?' && !TOK_ISCOND(op))
3369 ret = 0;
3370 else
3371 type_incompatibility_warning(type1, type2,
3372 op == '?'
3373 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3374 : "pointer type mismatch in comparison('%s' and '%s')");
3376 if (op == '?') {
3377 /* pointers to void get preferred, otherwise the
3378 pointed to types minus qualifs should be compatible */
3379 type = *((pbt1 == VT_VOID) ? type1 : type2);
3380 /* combine qualifs */
3381 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
3382 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
3383 & newquals)
3385 /* copy the pointer target symbol */
3386 type.ref = sym_push(SYM_FIELD, &type.ref->type,
3387 0, type.ref->c);
3388 copied = 1;
3389 pointed_type(&type)->t |= newquals;
3391 /* pointers to incomplete arrays get converted to
3392 pointers to completed ones if possible */
3393 if (pt1->t & VT_ARRAY
3394 && pt2->t & VT_ARRAY
3395 && pointed_type(&type)->ref->c < 0
3396 && (pt1->ref->c > 0 || pt2->ref->c > 0))
3398 if (!copied)
3399 type.ref = sym_push(SYM_FIELD, &type.ref->type,
3400 0, type.ref->c);
3401 pointed_type(&type)->ref =
3402 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
3403 0, pointed_type(&type)->ref->c);
3404 pointed_type(&type)->ref->c =
3405 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
3409 if (TOK_ISCOND(op))
3410 type.t = VT_SIZE_T;
3411 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
3412 if (op != '?' || !compare_types(type1, type2, 1))
3413 ret = 0;
3414 type = *type1;
3415 } else if (is_float(bt1) || is_float(bt2)) {
3416 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
3417 type.t = VT_LDOUBLE;
3418 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
3419 type.t = VT_DOUBLE;
3420 } else {
3421 type.t = VT_FLOAT;
3423 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
3424 /* cast to biggest op */
3425 type.t = VT_LLONG | VT_LONG;
3426 if (bt1 == VT_LLONG)
3427 type.t &= t1;
3428 if (bt2 == VT_LLONG)
3429 type.t &= t2;
3430 /* convert to unsigned if it does not fit in a long long */
3431 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
3432 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
3433 type.t |= VT_UNSIGNED;
3434 } else {
3435 /* integer operations */
3436 type.t = VT_INT | (VT_LONG & (t1 | t2));
3437 /* convert to unsigned if it does not fit in an integer */
3438 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
3439 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
3440 type.t |= VT_UNSIGNED;
3442 if (dest)
3443 *dest = type;
3444 return ret;
3447 /* generic gen_op: handles types problems */
3448 ST_FUNC void gen_op(int op)
3450 int u, t1, t2, bt1, bt2, t;
3451 CType type1, combtype;
3453 redo:
3454 t1 = vtop[-1].type.t;
3455 t2 = vtop[0].type.t;
3456 bt1 = t1 & VT_BTYPE;
3457 bt2 = t2 & VT_BTYPE;
3459 if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
3460 if (bt2 == VT_FUNC) {
3461 mk_pointer(&vtop->type);
3462 gaddrof();
3464 if (bt1 == VT_FUNC) {
3465 vswap();
3466 mk_pointer(&vtop->type);
3467 gaddrof();
3468 vswap();
3470 goto redo;
3471 } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
3472 tcc_error_noabort("invalid operand types for binary operation");
3473 vpop();
3474 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3475 /* at least one operand is a pointer */
3476 /* relational op: must be both pointers */
3477 if (TOK_ISCOND(op))
3478 goto std_op;
3479 /* if both pointers, then it must be the '-' op */
3480 if (bt1 == VT_PTR && bt2 == VT_PTR) {
3481 if (op != '-')
3482 tcc_error("cannot use pointers here");
3483 if (vtop[-1].type.t & VT_VLA) {
3484 vla_runtime_pointed_size(&vtop[-1].type);
3485 } else {
3486 vpushi(pointed_size(&vtop[-1].type));
3488 vrott(3);
3489 gen_opic(op);
3490 vtop->type.t = VT_PTRDIFF_T;
3491 vswap();
3492 gen_op(TOK_PDIV);
3493 } else {
3494 /* exactly one pointer : must be '+' or '-'. */
3495 if (op != '-' && op != '+')
3496 tcc_error("cannot use pointers here");
3497 /* Put pointer as first operand */
3498 if (bt2 == VT_PTR) {
3499 vswap();
3500 t = t1, t1 = t2, t2 = t;
3502 #if PTR_SIZE == 4
3503 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
3504 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3505 gen_cast_s(VT_INT);
3506 #endif
3507 type1 = vtop[-1].type;
3508 if (vtop[-1].type.t & VT_VLA)
3509 vla_runtime_pointed_size(&vtop[-1].type);
3510 else {
3511 u = pointed_size(&vtop[-1].type);
3512 if (u < 0)
3513 tcc_error("unknown array element size");
3514 #if PTR_SIZE == 8
3515 vpushll(u);
3516 #else
3517 /* XXX: cast to int ? (long long case) */
3518 vpushi(u);
3519 #endif
3521 gen_op('*');
3522 #ifdef CONFIG_TCC_BCHECK
3523 if (tcc_state->do_bounds_check && !const_wanted) {
3524 /* if bounded pointers, we generate a special code to
3525 test bounds */
3526 if (op == '-') {
3527 vpushi(0);
3528 vswap();
3529 gen_op('-');
3531 gen_bounded_ptr_add();
3532 } else
3533 #endif
3535 gen_opic(op);
3537 type1.t &= ~VT_ARRAY;
3538 /* put again type if gen_opic() swaped operands */
3539 vtop->type = type1;
3541 } else {
3542 /* floats can only be used for a few operations */
3543 if (is_float(combtype.t)
3544 && op != '+' && op != '-' && op != '*' && op != '/'
3545 && !TOK_ISCOND(op))
3546 tcc_error("invalid operands for binary operation");
3547 else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
3548 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
3549 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
3550 t |= VT_UNSIGNED;
3551 t |= (VT_LONG & t1);
3552 combtype.t = t;
3554 std_op:
3555 t = t2 = combtype.t;
3556 /* XXX: currently, some unsigned operations are explicit, so
3557 we modify them here */
3558 if (t & VT_UNSIGNED) {
3559 if (op == TOK_SAR)
3560 op = TOK_SHR;
3561 else if (op == '/')
3562 op = TOK_UDIV;
3563 else if (op == '%')
3564 op = TOK_UMOD;
3565 else if (op == TOK_LT)
3566 op = TOK_ULT;
3567 else if (op == TOK_GT)
3568 op = TOK_UGT;
3569 else if (op == TOK_LE)
3570 op = TOK_ULE;
3571 else if (op == TOK_GE)
3572 op = TOK_UGE;
3574 vswap();
3575 gen_cast_s(t);
3576 vswap();
3577 /* special case for shifts and long long: we keep the shift as
3578 an integer */
3579 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
3580 t2 = VT_INT;
3581 gen_cast_s(t2);
3582 if (is_float(t))
3583 gen_opif(op);
3584 else
3585 gen_opic(op);
3586 if (TOK_ISCOND(op)) {
3587 /* relational op: the result is an int */
3588 vtop->type.t = VT_INT;
3589 } else {
3590 vtop->type.t = t;
3593 // Make sure that we have converted to an rvalue:
3594 if (vtop->r & VT_LVAL)
3595 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
#define gen_cvt_itof1 gen_cvt_itof
#else
/* generic itof for unsigned long long case */
/* Convert the integer on top of the value stack to floating point type
   't'.  The unsigned 64-bit case is not handled natively by these
   backends, so it is routed through a libtcc1 helper function; all
   other integer types go through the target's gen_cvt_itof(). */
static void gen_cvt_itof1(int t)
{
    if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
        (VT_LLONG | VT_UNSIGNED)) {

        /* pick the helper matching the destination float type */
        if (t == VT_FLOAT)
            vpush_helper_func(TOK___floatundisf);
#if LDOUBLE_SIZE != 8
        else if (t == VT_LDOUBLE)
            vpush_helper_func(TOK___floatundixf);
#endif
        else
            vpush_helper_func(TOK___floatundidf);
        vrott(2);       /* bring the integer argument back on top */
        gfunc_call(1);
        vpushi(0);      /* fake value; real result sits in the return reg */
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_itof(t);
    }
}
#endif
#if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
#define gen_cvt_ftoi1 gen_cvt_ftoi
#else
/* generic ftoi for unsigned long long case */
/* Convert the float on top of the value stack to integer type 't'.
   Conversion to unsigned 64-bit is done via a libtcc1 helper call;
   everything else goes through the target's gen_cvt_ftoi(). */
static void gen_cvt_ftoi1(int t)
{
    int st;
    if (t == (VT_LLONG | VT_UNSIGNED)) {
        /* not handled natively */
        st = vtop->type.t & VT_BTYPE;
        /* pick the helper matching the source float type */
        if (st == VT_FLOAT)
            vpush_helper_func(TOK___fixunssfdi);
#if LDOUBLE_SIZE != 8
        else if (st == VT_LDOUBLE)
            vpush_helper_func(TOK___fixunsxfdi);
#endif
        else
            vpush_helper_func(TOK___fixunsdfdi);
        vrott(2);       /* bring the float argument back on top */
        gfunc_call(1);
        vpushi(0);      /* fake value; real result sits in the return reg */
        PUT_R_RET(vtop, t);
    } else {
        gen_cvt_ftoi(t);
    }
}
#endif
/* special delayed cast for char/short */
/* Apply the pending VT_MUSTCAST conversion on vtop: the value currently
   has full int/long long register width while its declared type is
   char/short/_Bool.  Temporarily widen the type, emit the cast, then
   restore the declared type. */
static void force_charshort_cast(void)
{
    /* a MUSTCAST bitfield value of 2 marks a long long source */
    int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
    int dbt = vtop->type.t;
    vtop->r &= ~VT_MUSTCAST;
    vtop->type.t = sbt;
    /* _Bool is materialized as unsigned char here */
    gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
    vtop->type.t = dbt;
}
/* Convenience wrapper: cast vtop to the plain basic type 't'
   (no struct/function reference required). */
static void gen_cast_s(int t)
{
    CType type;
    type.t = t;
    type.ref = NULL;
    gen_cast(&type);
}
/* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
static void gen_cast(CType *type)
{
    int sbt, dbt, sf, df, c;
    int dbt_bt, sbt_bt, ds, ss, bits, trunc;

    /* special delayed cast for char/short */
    if (vtop->r & VT_MUSTCAST)
        force_charshort_cast();

    /* bitfields first get cast to ints */
    if (vtop->type.t & VT_BITFIELD)
        gv(RC_INT);

    dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
    sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
    if (sbt == VT_FUNC)
        sbt = VT_PTR;

again:
    if (sbt != dbt) {
        sf = is_float(sbt);
        df = is_float(dbt);
        dbt_bt = dbt & VT_BTYPE;
        sbt_bt = sbt & VT_BTYPE;

        /* 'c' is true when vtop is a plain constant (no lvalue, no
           register, no symbol address): convert at compile time */
        c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
#if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
        /* when cross-compiling, the host cannot emulate the target's
           long double at compile time unless no code is wanted anyway */
        c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
#endif
        if (c) {
            /* constant case: we can do it now */
            /* XXX: in ISOC, cannot do it if error in convert */
            if (sbt == VT_FLOAT)
                vtop->c.ld = vtop->c.f;
            else if (sbt == VT_DOUBLE)
                vtop->c.ld = vtop->c.d;

            if (df) {
                if (sbt_bt == VT_LLONG) {
                    /* signed negative values are negated first so the
                       int->long double conversion stays in range */
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
                        vtop->c.ld = vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-vtop->c.i;
                } else if(!sf) {
                    if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
                        vtop->c.ld = (uint32_t)vtop->c.i;
                    else
                        vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
                }

                if (dbt == VT_FLOAT)
                    vtop->c.f = (float)vtop->c.ld;
                else if (dbt == VT_DOUBLE)
                    vtop->c.d = (double)vtop->c.ld;
            } else if (sf && dbt == VT_BOOL) {
                vtop->c.i = (vtop->c.ld != 0);
            } else {
                /* integer -> integer: first normalize the source into
                   c.i, then truncate/sign-extend to the destination */
                if(sf)
                    vtop->c.i = vtop->c.ld;
                else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
                    ;
                else if (sbt & VT_UNSIGNED)
                    vtop->c.i = (uint32_t)vtop->c.i;
                else
                    vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));

                if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
                    ;
                else if (dbt == VT_BOOL)
                    vtop->c.i = (vtop->c.i != 0);
                else {
                    uint32_t m = dbt_bt == VT_BYTE ? 0xff :
                                 dbt_bt == VT_SHORT ? 0xffff :
                                  0xffffffff;
                    vtop->c.i &= m;
                    if (!(dbt & VT_UNSIGNED))
                        vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
                }
            }
            goto done;

        } else if (dbt == VT_BOOL
            && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
                == (VT_CONST | VT_SYM)) {
            /* addresses are considered non-zero (see tcctest.c:sinit23) */
            vtop->r = VT_CONST;
            vtop->c.i = 1;
            goto done;
        }

        /* cannot generate code for global or static initializers */
        if (STATIC_DATA_WANTED)
            goto done;

        /* non constant case: generate code */
        if (dbt == VT_BOOL) {
            gen_test_zero(TOK_NE);
            goto done;
        }

        if (sf || df) {
            if (sf && df) {
                /* convert from fp to fp */
                gen_cvt_ftof(dbt);
            } else if (df) {
                /* convert int to fp */
                gen_cvt_itof1(dbt);
            } else {
                /* convert fp to int */
                sbt = dbt;
                if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
                    sbt = VT_INT;
                gen_cvt_ftoi1(sbt);
                goto again; /* may need char/short cast */
            }
            goto done;
        }

        ds = btype_size(dbt_bt);
        ss = btype_size(sbt_bt);
        if (ds == 0 || ss == 0) {
            if (dbt_bt == VT_VOID)
                goto done;
            cast_error(&vtop->type, type);
        }
        if (IS_ENUM(type->t) && type->ref->c < 0)
            tcc_error("cast to incomplete type");

        /* same size and no sign conversion needed */
        if (ds == ss && ds >= 4)
            goto done;
        if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
            tcc_warning("cast between pointer and integer of different size");
            if (sbt_bt == VT_PTR) {
                /* put integer type to allow logical operations below */
                vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
            }
        }

        /* processor allows { int a = 0, b = *(char*)&a; }
           That means that if we cast to less width, we can just
           change the type and read it still later. */
#define ALLOW_SUBTYPE_ACCESS 1

        if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
            /* value still in memory */
            if (ds <= ss)
                goto done;
            /* ss <= 4 here */
            if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
                gv(RC_INT);
                goto done; /* no 64bit envolved */
            }
        }
        gv(RC_INT);

        trunc = 0;
#if PTR_SIZE == 4
        if (ds == 8) {
            /* generate high word */
            if (sbt & VT_UNSIGNED) {
                vpushi(0);
                gv(RC_INT);
            } else {
                gv_dup();
                vpushi(31);
                gen_op(TOK_SAR);
            }
            lbuild(dbt);
        } else if (ss == 8) {
            /* from long long: just take low order word */
            lexpand();
            vpop();
        }
        ss = 4;

#elif PTR_SIZE == 8
        if (ds == 8) {
            /* need to convert from 32bit to 64bit */
            if (sbt & VT_UNSIGNED) {
#if defined(TCC_TARGET_RISCV64)
                /* RISC-V keeps 32bit vals in registers sign-extended.
                   So here we need a zero-extension.  */
                trunc = 32;
#else
                goto done;
#endif
            } else {
                gen_cvt_sxtw();
                goto done;
            }
            ss = ds, ds = 4, dbt = sbt;
        } else if (ss == 8) {
            /* RISC-V keeps 32bit vals in registers sign-extended.
               So here we need a sign-extension for signed types and
               zero-extension. for unsigned types. */
#if !defined(TCC_TARGET_RISCV64)
            trunc = 32; /* zero upper 32 bits for non RISC-V targets */
#endif
        } else {
            ss = 4;
        }
#endif

        if (ds >= ss)
            goto done;
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
        if (ss == 4) {
            gen_cvt_csti(dbt);
            goto done;
        }
#endif
        /* narrow via shift-left then arithmetic/logical shift-right */
        bits = (ss - ds) * 8;
        /* for unsigned, gen_op will convert SAR to SHR */
        vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
        vpushi(bits);
        gen_op(TOK_SHL);
        vpushi(bits - trunc);
        gen_op(TOK_SAR);
        vpushi(trunc);
        gen_op(TOK_SHR);
    }
done:
    vtop->type = *type;
    vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
}
/* return type size as known at compile time. Put alignment at 'a' */
/* Returns -1 (or a negative value for arrays of incomplete element
   type) when the size is not known at compile time. */
ST_FUNC int type_size(CType *type, int *a)
{
    Sym *s;
    int bt;

    bt = type->t & VT_BTYPE;
    if (bt == VT_STRUCT) {
        /* struct/union */
        s = type->ref;
        *a = s->r;      /* struct_layout() stores alignment in r, size in c */
        return s->c;
    } else if (bt == VT_PTR) {
        if (type->t & VT_ARRAY) {
            int ts;

            s = type->ref;
            ts = type_size(&s->type, a);

            /* incomplete element of incomplete array: report positive */
            if (ts < 0 && s->c < 0)
                ts = -ts;

            return ts * s->c;   /* s->c is the element count */
        } else {
            *a = PTR_SIZE;
            return PTR_SIZE;
        }
    } else if (IS_ENUM(type->t) && type->ref->c < 0) {
        return -1; /* incomplete enum */
    } else if (bt == VT_LDOUBLE) {
        *a = LDOUBLE_ALIGN;
        return LDOUBLE_SIZE;
    } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
        /* 8-byte scalars: alignment is target ABI dependent */
#ifdef TCC_TARGET_I386
#ifdef TCC_TARGET_PE
        *a = 8;
#else
        *a = 4;
#endif
#elif defined(TCC_TARGET_ARM)
#ifdef TCC_ARM_EABI
        *a = 8;
#else
        *a = 4;
#endif
#else
        *a = 8;
#endif
        return 8;
    } else if (bt == VT_INT || bt == VT_FLOAT) {
        *a = 4;
        return 4;
    } else if (bt == VT_SHORT) {
        *a = 2;
        return 2;
    } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
        *a = 8;
        return 16;
    } else {
        /* char, void, function, _Bool */
        *a = 1;
        return 1;
    }
}
/* push type size as known at runtime time on top of value stack. Put
   alignment at 'a' */
ST_FUNC void vla_runtime_type_size(CType *type, int *a)
{
    if (type->t & VT_VLA) {
        /* for a VLA the size was computed at declaration time and saved
           in a local variable at stack offset type->ref->c */
        type_size(&type->ref->type, a);
        vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
    } else {
        vpushi(type_size(type, a));
    }
}
/* return the pointed type of t */
static inline CType *pointed_type(CType *type)
{
    return &type->ref->type;
}
/* modify type so that its it is a pointer to type. */
ST_FUNC void mk_pointer(CType *type)
{
    Sym *s;
    /* push an anonymous symbol holding the pointed-to type */
    s = sym_push(SYM_FIELD, type, 0, -1);
    type->t = VT_PTR | (type->t & VT_STORAGE);  /* keep storage qualifiers */
    type->ref = s;
}
/* return true if type1 and type2 are exactly the same (including
   qualifiers).
*/
static int is_compatible_types(CType *type1, CType *type2)
{
    return compare_types(type1,type2,0);
}
/* return true if type1 and type2 are the same (ignoring qualifiers).
*/
static int is_compatible_unqualified_types(CType *type1, CType *type2)
{
    return compare_types(type1,type2,1);
}
/* Report a fatal "cannot convert" diagnostic for a cast from 'st' to 'dt'. */
static void cast_error(CType *st, CType *dt)
{
    type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
}
/* verify type compatibility to store vtop in 'dt' type */
/* Emits warnings/errors only; does not modify the value stack. */
static void verify_assign_cast(CType *dt)
{
    CType *st, *type1, *type2;
    int dbt, sbt, qualwarn, lvl;

    st = &vtop->type; /* source type */
    dbt = dt->t & VT_BTYPE;
    sbt = st->t & VT_BTYPE;
    if (dt->t & VT_CONSTANT)
        tcc_warning("assignment of read-only location");
    switch(dbt) {
    case VT_VOID:
        if (sbt != dbt)
            tcc_error("assignment to void expression");
        break;
    case VT_PTR:
        /* special cases for pointers */
        /* '0' can also be a pointer */
        if (is_null_pointer(vtop))
            break;
        /* accept implicit pointer to integer cast with warning */
        if (is_integer_btype(sbt)) {
            tcc_warning("assignment makes pointer from integer without a cast");
            break;
        }
        type1 = pointed_type(dt);
        if (sbt == VT_PTR)
            type2 = pointed_type(st);
        else if (sbt == VT_FUNC)
            type2 = st; /* a function is implicitly a function pointer */
        else
            goto error;
        if (is_compatible_types(type1, type2))
            break;
        /* walk down matching pointer levels, collecting any dropped
           const/volatile qualifiers on the way */
        for (qualwarn = lvl = 0;; ++lvl) {
            if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
                ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
                qualwarn = 1;
            dbt = type1->t & (VT_BTYPE|VT_LONG);
            sbt = type2->t & (VT_BTYPE|VT_LONG);
            if (dbt != VT_PTR || sbt != VT_PTR)
                break;
            type1 = pointed_type(type1);
            type2 = pointed_type(type2);
        }
        if (!is_compatible_unqualified_types(type1, type2)) {
            if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
                /* void * can match anything */
            } else if (dbt == sbt
                && is_integer_btype(sbt & VT_BTYPE)
                && IS_ENUM(type1->t) + IS_ENUM(type2->t)
                    + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
		/* Like GCC don't warn by default for merely changes
		   in pointer target signedness.  Do warn for different
		   base types, though, in particular for unsigned enums
		   and signed int targets.  */
            } else {
                tcc_warning("assignment from incompatible pointer type");
                break;
            }
        }
        if (qualwarn)
            tcc_warning("assignment discards qualifiers from pointer target type");
        break;
    case VT_BYTE:
    case VT_SHORT:
    case VT_INT:
    case VT_LLONG:
        if (sbt == VT_PTR || sbt == VT_FUNC) {
            tcc_warning("assignment makes integer from pointer without a cast");
        } else if (sbt == VT_STRUCT) {
            goto case_VT_STRUCT;
        }
        /* XXX: more tests */
        break;
    case VT_STRUCT:
    case_VT_STRUCT:
        if (!is_compatible_unqualified_types(dt, st)) {
    error:
            cast_error(st, dt);
        }
        break;
    }
}
/* Check assignment compatibility of vtop against 'dt', then cast vtop
   to it (diagnostics first, conversion second). */
static void gen_assign_cast(CType *dt)
{
    verify_assign_cast(dt);
    gen_cast(dt);
}
/* store vtop in lvalue pushed on stack */
/* Stack layout on entry: vtop[-1] is the destination lvalue, vtop is
   the value to store.  On exit a single entry (the stored value, usable
   as the result of the assignment expression) remains. */
ST_FUNC void vstore(void)
{
    int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;

    ft = vtop[-1].type.t;
    sbt = vtop->type.t & VT_BTYPE;
    dbt = ft & VT_BTYPE;

    verify_assign_cast(&vtop[-1].type);

    if (sbt == VT_STRUCT) {
        /* if structure, only generate pointer */
        /* structure assignment : generate memcpy */
        /* XXX: optimize if small size */
        size = type_size(&vtop->type, &align);

        /* destination */
        vswap();
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound(); /* check would be wrong after gaddrof() */
#endif
        vtop->type.t = VT_PTR;
        gaddrof();

        /* address of memcpy() */
#ifdef TCC_ARM_EABI
        if(!(align & 7))
            vpush_helper_func(TOK_memmove8);
        else if(!(align & 3))
            vpush_helper_func(TOK_memmove4);
        else
#endif
        /* Use memmove, rather than memcpy, as dest and src may be same: */
        vpush_helper_func(TOK_memmove);

        vswap();
        /* source */
        vpushv(vtop - 2);
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif
        vtop->type.t = VT_PTR;
        gaddrof();
        /* type size */
        vpushi(size);
        gfunc_call(3);
        /* leave source on stack */

    } else if (ft & VT_BITFIELD) {
        /* bitfield store handling */

        /* save lvalue as expression result (example: s.b = s.a = n;) */
        vdup(), vtop[-1] = vtop[-2];

        bit_pos = BIT_POS(ft);
        bit_size = BIT_SIZE(ft);
        /* remove bit field info to avoid loops */
        vtop[-1].type.t = ft & ~VT_STRUCT_MASK;

        if (dbt == VT_BOOL) {
            /* bool bitfields are stored as 0/1 bytes */
            gen_cast(&vtop[-1].type);
            vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
        }
        r = adjust_bf(vtop - 1, bit_pos, bit_size);
        if (dbt != VT_BOOL) {
            gen_cast(&vtop[-1].type);
            dbt = vtop[-1].type.t & VT_BTYPE;
        }
        if (r == VT_STRUCT) {
            /* misaligned/odd bitfield: byte-wise packed store */
            store_packed_bf(bit_pos, bit_size);
        } else {
            unsigned long long mask = (1ULL << bit_size) - 1;
            if (dbt != VT_BOOL) {
                /* mask source */
                if (dbt == VT_LLONG)
                    vpushll(mask);
                else
                    vpushi((unsigned)mask);
                gen_op('&');
            }
            /* shift source */
            vpushi(bit_pos);
            gen_op(TOK_SHL);
            vswap();
            /* duplicate destination */
            vdup();
            vrott(3);
            /* load destination, mask and or with source */
            if (dbt == VT_LLONG)
                vpushll(~(mask << bit_pos));
            else
                vpushi(~((unsigned)mask << bit_pos));
            gen_op('&');
            gen_op('|');
            /* store result */
            vstore();
            /* ... and discard */
            vpop();
        }
    } else if (dbt == VT_VOID) {
        --vtop;
    } else {
        /* optimize char/short casts */
        delayed_cast = 0;
        if ((dbt == VT_BYTE || dbt == VT_SHORT)
            && is_integer_btype(sbt)
            ) {
            if ((vtop->r & VT_MUSTCAST)
                && btype_size(dbt) > btype_size(sbt)
                )
                force_charshort_cast();
            delayed_cast = 1;
        } else {
            gen_cast(&vtop[-1].type);
        }

#ifdef CONFIG_TCC_BCHECK
        /* bound check case */
        if (vtop[-1].r & VT_MUSTBOUND) {
            vswap();
            gbound();
            vswap();
        }
#endif
        gv(RC_TYPE(dbt)); /* generate value */

        if (delayed_cast) {
            /* mark the value as needing a char/short cast later */
            vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
            //tcc_warning("deley cast %x -> %x", sbt, dbt);
            vtop->type.t = ft & VT_TYPE;
        }

        /* if lvalue was saved on stack, must read it */
        if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
            SValue sv;
            r = get_reg(RC_INT);
            sv.type.t = VT_PTRDIFF_T;
            sv.r = VT_LOCAL | VT_LVAL;
            sv.c.i = vtop[-1].c.i;
            load(r, &sv);
            vtop[-1].r = r | VT_LVAL;
        }

        r = vtop->r & VT_VALMASK;
        /* two word case handling :
           store second register at word + 4 (or +8 for x86-64) */
        if (USING_TWO_WORDS(dbt)) {
            int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
            vtop[-1].type.t = load_type;
            store(r, vtop - 1);
            vswap();
            /* convert to int to increment easily */
            vtop->type.t = VT_PTRDIFF_T;
            gaddrof();
            vpushs(PTR_SIZE);
            gen_op('+');
            vtop->r |= VT_LVAL;
            vswap();
            vtop[-1].type.t = load_type;
            /* XXX: it works because r2 is spilled last ! */
            store(vtop->r2, vtop - 1);
        } else {
            /* single word */
            store(r, vtop - 1);
        }
        vswap();
        vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
    }
}
/* post defines POST/PRE add. c is the token ++ or -- */
/* Generate code for ++/-- applied to the lvalue on vtop.  For postfix
   forms the original value is left as the expression result. */
ST_FUNC void inc(int post, int c)
{
    test_lvalue();
    vdup(); /* save lvalue */
    if (post) {
        gv_dup(); /* duplicate value */
        vrotb(3);
        vrotb(3);
    }
    /* add constant */
    /* c - TOK_MID yields +1 for ++ and -1 for -- (TOK_MID lies between
       the two tokens) -- NOTE(review): relies on token numbering */
    vpushi(c - TOK_MID);
    gen_op('+');
    vstore(); /* store value */
    if (post)
        vpop(); /* if post op, return saved value */
}
/* Parse one or more adjacent string literal tokens, concatenating them
   into 'astr' (NUL terminated).  'msg' is the expect() message used
   when the current token is not a string literal. */
ST_FUNC void parse_mult_str (CString *astr, const char *msg)
{
    /* read the string */
    if (tok != TOK_STR)
        expect(msg);
    cstr_new(astr);
    while (tok == TOK_STR) {
        /* XXX: add \0 handling too ? */
        cstr_cat(astr, tokc.str.data, -1);
        next();
    }
    cstr_ccat(astr, '\0');
}
4309 /* If I is >= 1 and a power of two, returns log2(i)+1.
4310 If I is 0 returns 0. */
4311 ST_FUNC int exact_log2p1(int i)
4313 int ret;
4314 if (!i)
4315 return 0;
4316 for (ret = 1; i >= 1 << 8; ret += 8)
4317 i >>= 8;
4318 if (i >= 1 << 4)
4319 ret += 4, i >>= 4;
4320 if (i >= 1 << 2)
4321 ret += 2, i >>= 2;
4322 if (i >= 1 << 1)
4323 ret++;
4324 return ret;
/* Parse __attribute__((...)) GNUC extension. */
/* Accumulates the recognized attributes into 'ad'.  Consecutive
   __attribute__ specifiers are handled via the 'redo' loop; unknown
   attributes are warned about (if enabled) and their argument list is
   skipped. */
static void parse_attribute(AttributeDef *ad)
{
    int t, n;
    CString astr;

redo:
    if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
        return;
    next();
    skip('(');
    skip('(');
    while (tok != ')') {
        if (tok < TOK_IDENT)
            expect("attribute name");
        t = tok;
        next();
        switch(t) {
        case TOK_CLEANUP1:
        case TOK_CLEANUP2:
        {
            Sym *s;

            skip('(');
            s = sym_find(tok);
            if (!s) {
                tcc_warning("implicit declaration of function '%s'",
                            get_tok_str(tok, &tokc));
                s = external_global_sym(tok, &func_old_type);
            } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
                tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
            ad->cleanup_func = s;
            next();
            skip(')');
            break;
        }
        case TOK_CONSTRUCTOR1:
        case TOK_CONSTRUCTOR2:
            ad->f.func_ctor = 1;
            break;
        case TOK_DESTRUCTOR1:
        case TOK_DESTRUCTOR2:
            ad->f.func_dtor = 1;
            break;
        case TOK_ALWAYS_INLINE1:
        case TOK_ALWAYS_INLINE2:
            ad->f.func_alwinl = 1;
            break;
        case TOK_SECTION1:
        case TOK_SECTION2:
            skip('(');
            parse_mult_str(&astr, "section name");
            ad->section = find_section(tcc_state, (char *)astr.data);
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_ALIAS1:
        case TOK_ALIAS2:
            skip('(');
            parse_mult_str(&astr, "alias(\"target\")");
            ad->alias_target = /* save string as token, for later */
                tok_alloc((char*)astr.data, astr.size-1)->tok;
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_VISIBILITY1:
        case TOK_VISIBILITY2:
            skip('(');
            parse_mult_str(&astr,
                           "visibility(\"default|hidden|internal|protected\")");
            if (!strcmp (astr.data, "default"))
                ad->a.visibility = STV_DEFAULT;
            else if (!strcmp (astr.data, "hidden"))
                ad->a.visibility = STV_HIDDEN;
            else if (!strcmp (astr.data, "internal"))
                ad->a.visibility = STV_INTERNAL;
            else if (!strcmp (astr.data, "protected"))
                ad->a.visibility = STV_PROTECTED;
            else
                expect("visibility(\"default|hidden|internal|protected\")");
            skip(')');
            cstr_free(&astr);
            break;
        case TOK_ALIGNED1:
        case TOK_ALIGNED2:
            if (tok == '(') {
                next();
                n = expr_const();
                if (n <= 0 || (n & (n - 1)) != 0)
                    tcc_error("alignment must be a positive power of two");
                skip(')');
            } else {
                n = MAX_ALIGN;
            }
            /* aligned is stored as log2(n)+1; 0 means "not specified" */
            ad->a.aligned = exact_log2p1(n);
            if (n != 1 << (ad->a.aligned - 1))
                tcc_error("alignment of %d is larger than implemented", n);
            break;
        case TOK_PACKED1:
        case TOK_PACKED2:
            ad->a.packed = 1;
            break;
        case TOK_WEAK1:
        case TOK_WEAK2:
            ad->a.weak = 1;
            break;
        case TOK_UNUSED1:
        case TOK_UNUSED2:
            /* currently, no need to handle it because tcc does not
               track unused objects */
            break;
        case TOK_NORETURN1:
        case TOK_NORETURN2:
            ad->f.func_noreturn = 1;
            break;
        case TOK_CDECL1:
        case TOK_CDECL2:
        case TOK_CDECL3:
            ad->f.func_call = FUNC_CDECL;
            break;
        case TOK_STDCALL1:
        case TOK_STDCALL2:
        case TOK_STDCALL3:
            ad->f.func_call = FUNC_STDCALL;
            break;
#ifdef TCC_TARGET_I386
        case TOK_REGPARM1:
        case TOK_REGPARM2:
            skip('(');
            n = expr_const();
            /* clamp register count into the supported 0..3 range */
            if (n > 3)
                n = 3;
            else if (n < 0)
                n = 0;
            if (n > 0)
                ad->f.func_call = FUNC_FASTCALL1 + n - 1;
            skip(')');
            break;
        case TOK_FASTCALL1:
        case TOK_FASTCALL2:
        case TOK_FASTCALL3:
            ad->f.func_call = FUNC_FASTCALLW;
            break;
#endif
        case TOK_MODE:
            skip('(');
            switch(tok) {
            case TOK_MODE_DI:
                ad->attr_mode = VT_LLONG + 1;
                break;
            case TOK_MODE_QI:
                ad->attr_mode = VT_BYTE + 1;
                break;
            case TOK_MODE_HI:
                ad->attr_mode = VT_SHORT + 1;
                break;
            case TOK_MODE_SI:
            case TOK_MODE_word:
                ad->attr_mode = VT_INT + 1;
                break;
            default:
                tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
                break;
            }
            next();
            skip(')');
            break;
        case TOK_DLLEXPORT:
            ad->a.dllexport = 1;
            break;
        case TOK_NODECORATE:
            ad->a.nodecorate = 1;
            break;
        case TOK_DLLIMPORT:
            ad->a.dllimport = 1;
            break;
        default:
            if (tcc_state->warn_unsupported)
                tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
            /* skip parameters */
            if (tok == '(') {
                int parenthesis = 0;
                do {
                    if (tok == '(')
                        parenthesis++;
                    else if (tok == ')')
                        parenthesis--;
                    next();
                } while (parenthesis && tok != -1);
            }
            break;
        }
        if (tok != ',')
            break;
        next();
    }
    skip(')');
    skip(')');
    goto redo;
}
4528 static Sym * find_field (CType *type, int v, int *cumofs)
4530 Sym *s = type->ref;
4531 v |= SYM_FIELD;
4532 while ((s = s->next) != NULL) {
4533 if ((s->v & SYM_FIELD) &&
4534 (s->type.t & VT_BTYPE) == VT_STRUCT &&
4535 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
4536 Sym *ret = find_field (&s->type, v, cumofs);
4537 if (ret) {
4538 *cumofs += s->c;
4539 return ret;
4542 if (s->v == v)
4543 break;
4545 return s;
/* Toggle the SYM_FIELD marker on the TokenSym of every named member,
   recursing into struct-typed members.  With check != 0 an already-set
   marker means the name occurs twice -> "duplicate member" error.
   Callers run it twice (check=1 then check=0) so the markers end up
   cleared again -- NOTE(review): inferred from the XOR toggle; confirm
   against the call sites. */
static void check_fields (CType *type, int check)
{
    Sym *s = type->ref;

    while ((s = s->next) != NULL) {
        int v = s->v & ~SYM_FIELD;
        if (v < SYM_FIRST_ANOM) {
            TokenSym *ts = table_ident[v - TOK_IDENT];
            if (check && (ts->tok & SYM_FIELD))
                tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
            ts->tok ^= SYM_FIELD;
        } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
            check_fields (&s->type, check);
    }
}
/* Compute the layout (member offsets, bitfield positions, total size
   and alignment) of a freshly parsed struct/union 'type', honoring
   attribute packed/aligned, #pragma pack, and either PCC/GCC or MS
   bitfield rules.  Results: f->c = member offset, bitfield pos/size in
   f->type.t, type->ref->c = size, type->ref->r = alignment. */
static void struct_layout(CType *type, AttributeDef *ad)
{
    int size, align, maxalign, offset, c, bit_pos, bit_size;
    int packed, a, bt, prevbt, prev_bit_size;
    int pcc = !tcc_state->ms_bitfields;
    int pragma_pack = *tcc_state->pack_stack_ptr;
    Sym *f;

    maxalign = 1;
    offset = 0;
    c = 0;               /* running byte offset */
    bit_pos = 0;         /* bit offset within the current storage unit */
    prevbt = VT_STRUCT; /* make it never match */
    prev_bit_size = 0;

//#define BF_DEBUG

    for (f = type->ref->next; f; f = f->next) {
        if (f->type.t & VT_BITFIELD)
            bit_size = BIT_SIZE(f->type.t);
        else
            bit_size = -1;  /* not a bitfield */
        size = type_size(&f->type, &align);
        a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
        packed = 0;

        if (pcc && bit_size == 0) {
            /* in pcc mode, packing does not affect zero-width bitfields */

        } else {
            /* in pcc mode, attribute packed overrides if set. */
            if (pcc && (f->a.packed || ad->a.packed))
                align = packed = 1;

            /* pragma pack overrides align if lesser and packs bitfields always */
            if (pragma_pack) {
                packed = 1;
                if (pragma_pack < align)
                    align = pragma_pack;
                /* in pcc mode pragma pack also overrides individual align */
                if (pcc && pragma_pack < a)
                    a = 0;
            }
        }
        /* some individual align was specified */
        if (a)
            align = a;

        if (type->ref->type.t == VT_UNION) {
            /* union: every member starts at offset 0 */
            if (pcc && bit_size >= 0)
                size = (bit_size + 7) >> 3;
            offset = 0;
            if (size > c)
                c = size;

        } else if (bit_size < 0) {
            /* plain (non-bitfield) struct member */
            if (pcc)
                c += (bit_pos + 7) >> 3;    /* flush pending bitfield bits */
            c = (c + align - 1) & -align;
            offset = c;
            if (size > 0)
                c += size;
            bit_pos = 0;
            prevbt = VT_STRUCT;
            prev_bit_size = 0;

        } else {
            /* A bit-field.  Layout is more complicated.  There are two
               options: PCC (GCC) compatible and MS compatible */
            if (pcc) {
                /* In PCC layout a bit-field is placed adjacent to the
                   preceding bit-fields, except if:
                   - it has zero-width
                   - an individual alignment was given
                   - it would overflow its base type container and
                     there is no packing */
                if (bit_size == 0) {
            new_field:
                    c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
                    bit_pos = 0;
                } else if (f->a.aligned) {
                    goto new_field;
                } else if (!packed) {
                    int a8 = align * 8;
                    int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
                    if (ofs > size / align)
                        goto new_field;
                }

                /* in pcc mode, long long bitfields have type int if they fit */
                if (size == 8 && bit_size <= 32)
                    f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;

                while (bit_pos >= align * 8)
                    c += align, bit_pos -= align * 8;
                offset = c;

                /* In PCC layout named bit-fields influence the alignment
                   of the containing struct using the base types alignment,
                   except for packed fields (which here have correct align).  */
                if (f->v & SYM_FIRST_ANOM
                    // && bit_size // ??? gcc on ARM/rpi does that
                    )
                    align = 1;

            } else {
                bt = f->type.t & VT_BTYPE;
                if ((bit_pos + bit_size > size * 8)
                    || (bit_size > 0) == (bt != prevbt)
                    ) {
                    c = (c + align - 1) & -align;
                    offset = c;
                    bit_pos = 0;
                    /* In MS bitfield mode a bit-field run always uses
                       at least as many bits as the underlying type.
                       To start a new run it's also required that this
                       or the last bit-field had non-zero width.  */
                    if (bit_size || prev_bit_size)
                        c += size;
                }
                /* In MS layout the records alignment is normally
                   influenced by the field, except for a zero-width
                   field at the start of a run (but by further zero-width
                   fields it is again).  */
                if (bit_size == 0 && prevbt != bt)
                    align = 1;
                prevbt = bt;
                prev_bit_size = bit_size;
            }

            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            bit_pos += bit_size;
        }
        if (align > maxalign)
            maxalign = align;

#ifdef BF_DEBUG
        printf("set field %s offset %-2d size %-2d align %-2d",
               get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
        if (f->type.t & VT_BITFIELD) {
            printf(" pos %-2d bits %-2d",
                    BIT_POS(f->type.t),
                    BIT_SIZE(f->type.t)
                    );
        }
        printf("\n");
#endif

        f->c = offset;
        f->r = 0;
    }

    if (pcc)
        c += (bit_pos + 7) >> 3;    /* flush trailing bitfield bits */

    /* store size and alignment */
    a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
    if (a < maxalign)
        a = maxalign;
    type->ref->r = a;
    if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
        /* can happen if individual align for some member was given.  In
           this case MSVC ignores maxalign when aligning the size */
        a = pragma_pack;
        if (a < bt)
            a = bt;
    }
    c = (c + a - 1) & -a;
    type->ref->c = c;

#ifdef BF_DEBUG
    printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
#endif

    /* check whether we can access bitfields by their type */
    for (f = type->ref->next; f; f = f->next) {
        int s, px, cx, c0;
        CType t;

        if (0 == (f->type.t & VT_BITFIELD))
            continue;
        f->type.ref = f;
        f->auxtype = -1;
        bit_size = BIT_SIZE(f->type.t);
        if (bit_size == 0)
            continue;
        bit_pos = BIT_POS(f->type.t);
        size = type_size(&f->type, &align);

        if (bit_pos + bit_size <= size * 8 && f->c + size <= c
#ifdef TCC_TARGET_ARM
            && !(f->c & (align - 1))
#endif
            )
            continue;

        /* try to access the field using a different type */
        c0 = -1, s = align = 1;
        t.t = VT_BYTE;
        for (;;) {
            px = f->c * 8 + bit_pos;
            cx = (px >> 3) & -align;    /* candidate byte offset */
            px = px - (cx << 3);        /* bit position within candidate */
            if (c0 == cx)
                break;                  /* no progress: give up widening */
            s = (px + bit_size + 7) >> 3;
            if (s > 4) {
                t.t = VT_LLONG;
            } else if (s > 2) {
                t.t = VT_INT;
            } else if (s > 1) {
                t.t = VT_SHORT;
            } else {
                t.t = VT_BYTE;
            }
            s = type_size(&t, &align);
            c0 = cx;
        }

        if (px + bit_size <= s * 8 && cx + s <= c
#ifdef TCC_TARGET_ARM
            && !(cx & (align - 1))
#endif
            ) {
            /* update offset and bit position */
            f->c = cx;
            bit_pos = px;
            f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
                        | (bit_pos << VT_STRUCT_SHIFT);
            if (s != size)
                f->auxtype = t.t;
#ifdef BF_DEBUG
            printf("FIX field %s offset %-2d size %-2d align %-2d "
                   "pos %-2d bits %-2d\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL),
                   cx, s, align, px, bit_size);
#endif
        } else {
            /* fall back to load/store single-byte wise */
            f->auxtype = VT_STRUCT;
#ifdef BF_DEBUG
            printf("FIX field %s : load byte-wise\n",
                   get_tok_str(f->v & ~SYM_FIELD, NULL));
#endif
        }
    }
}
4813 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
/* On return 'type' describes the tagged type (possibly still incomplete).
   The tag symbol's 'c' field encodes the definition state: -1 = declared
   but not defined, -2 = body currently being parsed, >= 0 once defined
   (set to 0 here for enums; presumably the size after struct_layout —
   TODO confirm). */
4814 static void struct_decl(CType *type, int u)
4816 int v, c, size, align, flexible;
4817 int bit_size, bsize, bt;
4818 Sym *s, *ss, **ps;
4819 AttributeDef ad, ad1;
4820 CType type1, btype;
4822 memset(&ad, 0, sizeof ad);
4823 next();
4824 parse_attribute(&ad);
4825 if (tok != '{') {
4826 v = tok;
4827 next();
4828 /* struct already defined ? return it */
4829 if (v < TOK_IDENT)
4830 expect("struct/union/enum name");
4831 s = struct_find(v);
4832 if (s && (s->sym_scope == local_scope || tok != '{')) {
4833 if (u == s->type.t)
4834 goto do_decl;
4835 if (u == VT_ENUM && IS_ENUM(s->type.t))
4836 goto do_decl;
4837 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4839 } else {
/* anonymous tag: invent a symbol so the type can still be referenced */
4840 v = anon_sym++;
4842 /* Record the original enum/struct/union token. */
4843 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4844 type1.ref = NULL;
4845 /* we put an undefined size for struct/union */
4846 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4847 s->r = 0; /* default alignment is zero as gcc */
4848 do_decl:
4849 type->t = s->type.t;
4850 type->ref = s;
4852 if (tok == '{') {
4853 next();
4854 if (s->c != -1)
4855 tcc_error("struct/union/enum already defined");
4856 s->c = -2;
4857 /* cannot be empty */
4858 /* non empty enums are not allowed */
4859 ps = &s->next;
4860 if (u == VT_ENUM) {
/* ll = current enumerator value, pl/nl = largest/smallest seen,
   used below to choose the enum's integral base type */
4861 long long ll = 0, pl = 0, nl = 0;
4862 CType t;
4863 t.ref = s;
4864 /* enum symbols have static storage */
4865 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4866 for(;;) {
4867 v = tok;
4868 if (v < TOK_UIDENT)
4869 expect("identifier");
4870 ss = sym_find(v);
4871 if (ss && !local_stack)
4872 tcc_error("redefinition of enumerator '%s'",
4873 get_tok_str(v, NULL));
4874 next();
4875 if (tok == '=') {
4876 next();
4877 ll = expr_const64();
4879 ss = sym_push(v, &t, VT_CONST, 0);
4880 ss->enum_val = ll;
4881 *ps = ss, ps = &ss->next;
4882 if (ll < nl)
4883 nl = ll;
4884 if (ll > pl)
4885 pl = ll;
4886 if (tok != ',')
4887 break;
4888 next();
4889 ll++;
4890 /* NOTE: we accept a trailing comma */
4891 if (tok == '}')
4892 break;
4894 skip('}');
4895 /* set integral type of the enum */
4896 t.t = VT_INT;
4897 if (nl >= 0) {
/* no negative value: unsigned; widen if the max doesn't fit in 32 bits */
4898 if (pl != (unsigned)pl)
4899 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4900 t.t |= VT_UNSIGNED;
4901 } else if (pl != (int)pl || nl != (int)nl)
4902 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4903 s->type.t = type->t = t.t | VT_ENUM;
4904 s->c = 0;
4905 /* set type for enum members */
4906 for (ss = s->next; ss; ss = ss->next) {
4907 ll = ss->enum_val;
4908 if (ll == (int)ll) /* default is int if it fits */
4909 continue;
4910 if (t.t & VT_UNSIGNED) {
4911 ss->type.t |= VT_UNSIGNED;
4912 if (ll == (unsigned)ll)
4913 continue;
4915 ss->type.t = (ss->type.t & ~VT_BTYPE)
4916 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4918 } else {
/* struct/union member list: 'c' becomes non-zero once a real field
   has been seen, 'flexible' marks a trailing flexible array member */
4919 c = 0;
4920 flexible = 0;
4921 while (tok != '}') {
4922 if (!parse_btype(&btype, &ad1)) {
4923 skip(';');
4924 continue;
4926 while (1) {
4927 if (flexible)
4928 tcc_error("flexible array member '%s' not at the end of struct",
4929 get_tok_str(v, NULL));
4930 bit_size = -1;
4931 v = 0;
4932 type1 = btype;
4933 if (tok != ':') {
4934 if (tok != ';')
4935 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4936 if (v == 0) {
4937 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4938 expect("identifier");
4939 else {
4940 int v = btype.ref->v;
4941 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4942 if (tcc_state->ms_extensions == 0)
4943 expect("identifier");
4947 if (type_size(&type1, &align) < 0) {
4948 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4949 flexible = 1;
4950 else
4951 tcc_error("field '%s' has incomplete type",
4952 get_tok_str(v, NULL));
4954 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4955 (type1.t & VT_BTYPE) == VT_VOID ||
4956 (type1.t & VT_STORAGE))
4957 tcc_error("invalid type for '%s'",
4958 get_tok_str(v, NULL));
4960 if (tok == ':') {
4961 next();
4962 bit_size = expr_const();
4963 /* XXX: handle v = 0 case for messages */
4964 if (bit_size < 0)
4965 tcc_error("negative width in bit-field '%s'",
4966 get_tok_str(v, NULL));
4967 if (v && bit_size == 0)
4968 tcc_error("zero width for bit-field '%s'",
4969 get_tok_str(v, NULL));
4970 parse_attribute(&ad1);
4972 size = type_size(&type1, &align);
4973 if (bit_size >= 0) {
4974 bt = type1.t & VT_BTYPE;
4975 if (bt != VT_INT &&
4976 bt != VT_BYTE &&
4977 bt != VT_SHORT &&
4978 bt != VT_BOOL &&
4979 bt != VT_LLONG)
4980 tcc_error("bitfields must have scalar type");
4981 bsize = size * 8;
4982 if (bit_size > bsize) {
4983 tcc_error("width of '%s' exceeds its type",
4984 get_tok_str(v, NULL));
4985 } else if (bit_size == bsize
4986 && !ad.a.packed && !ad1.a.packed) {
4987 /* no need for bit fields */
4989 } else if (bit_size == 64) {
4990 tcc_error("field width 64 not implemented");
4991 } else {
/* encode the width above the bit-position field in type.t */
4992 type1.t = (type1.t & ~VT_STRUCT_MASK)
4993 | VT_BITFIELD
4994 | (bit_size << (VT_STRUCT_SHIFT + 6));
4997 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4998 /* Remember we've seen a real field to check
4999 for placement of flexible array member. */
5000 c = 1;
5002 /* If member is a struct or bit-field, enforce
5003 placing into the struct (as anonymous). */
5004 if (v == 0 &&
5005 ((type1.t & VT_BTYPE) == VT_STRUCT ||
5006 bit_size >= 0)) {
5007 v = anon_sym++;
5009 if (v) {
5010 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
5011 ss->a = ad1.a;
5012 *ps = ss;
5013 ps = &ss->next;
5015 if (tok == ';' || tok == TOK_EOF)
5016 break;
5017 skip(',');
5019 skip(';');
5021 skip('}');
5022 parse_attribute(&ad);
5023 if (ad.cleanup_func) {
5024 tcc_warning("attribute '__cleanup__' ignored on type");
5026 check_fields(type, 1);
5027 check_fields(type, 0);
5028 struct_layout(type, &ad);
5033 static void sym_to_attr(AttributeDef *ad, Sym *s)
5035 merge_symattr(&ad->a, &s->a);
5036 merge_funcattr(&ad->f, &s->f);
5039 /* Add type qualifiers to a type. If the type is an array then the qualifiers
5040 are added to the element type, copied because it could be a typedef. */
5041 static void parse_btype_qualify(CType *type, int qualifiers)
5043 while (type->t & VT_ARRAY) {
5044 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
5045 type = &type->ref->type;
5047 type->t |= qualifiers;
5050 /* return 0 if no type declaration. otherwise, return the basic type
5051 and skip it.
5053 static int parse_btype(CType *type, AttributeDef *ad)
/* 't' accumulates the type bits; 'bt' remembers the basic type keyword
   already seen and 'st' the size modifier (short/long), each -1 while
   unseen and -2 after a typedef name, so that combinations like
   "short short" are diagnosed as "too many basic types". */
5055 int t, u, bt, st, type_found, typespec_found, g, n;
5056 Sym *s;
5057 CType type1;
5059 memset(ad, 0, sizeof(AttributeDef));
5060 type_found = 0;
5061 typespec_found = 0;
5062 t = VT_INT;
5063 bt = st = -1;
5064 type->ref = NULL;
5066 while(1) {
5067 switch(tok) {
5068 case TOK_EXTENSION:
5069 /* currently, we really ignore extension */
5070 next();
5071 continue;
5073 /* basic types */
5074 case TOK_CHAR:
5075 u = VT_BYTE;
5076 basic_type:
5077 next();
5078 basic_type1:
5079 if (u == VT_SHORT || u == VT_LONG) {
5080 if (st != -1 || (bt != -1 && bt != VT_INT))
5081 tmbt: tcc_error("too many basic types");
5082 st = u;
5083 } else {
5084 if (bt != -1 || (st != -1 && u != VT_INT))
5085 goto tmbt;
5086 bt = u;
5088 if (u != VT_INT)
5089 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
5090 typespec_found = 1;
5091 break;
5092 case TOK_VOID:
5093 u = VT_VOID;
5094 goto basic_type;
5095 case TOK_SHORT:
5096 u = VT_SHORT;
5097 goto basic_type;
5098 case TOK_INT:
5099 u = VT_INT;
5100 goto basic_type;
5101 case TOK_ALIGNAS:
/* _Alignas(type) or _Alignas(constant-expression) */
5102 { int n;
5103 AttributeDef ad1;
5104 next();
5105 skip('(');
5106 memset(&ad1, 0, sizeof(AttributeDef));
5107 if (parse_btype(&type1, &ad1)) {
5108 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
5109 if (ad1.a.aligned)
5110 n = 1 << (ad1.a.aligned - 1);
5111 else
5112 type_size(&type1, &n);
5113 } else {
5114 n = expr_const();
5115 if (n <= 0 || (n & (n - 1)) != 0)
5116 tcc_error("alignment must be a positive power of two");
5118 skip(')');
5119 ad->a.aligned = exact_log2p1(n);
5121 continue;
5122 case TOK_LONG:
5123 if ((t & VT_BTYPE) == VT_DOUBLE) {
5124 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
5125 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
5126 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
5127 } else {
5128 u = VT_LONG;
5129 goto basic_type;
5131 next();
5132 break;
5133 #ifdef TCC_TARGET_ARM64
5134 case TOK_UINT128:
5135 /* GCC's __uint128_t appears in some Linux header files. Make it a
5136 synonym for long double to get the size and alignment right. */
5137 u = VT_LDOUBLE;
5138 goto basic_type;
5139 #endif
5140 case TOK_BOOL:
5141 u = VT_BOOL;
5142 goto basic_type;
5143 case TOK_FLOAT:
5144 u = VT_FLOAT;
5145 goto basic_type;
5146 case TOK_DOUBLE:
5147 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
5148 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
5149 } else {
5150 u = VT_DOUBLE;
5151 goto basic_type;
5153 next();
5154 break;
5155 case TOK_ENUM:
5156 struct_decl(&type1, VT_ENUM);
5157 basic_type2:
5158 u = type1.t;
5159 type->ref = type1.ref;
5160 goto basic_type1;
5161 case TOK_STRUCT:
5162 struct_decl(&type1, VT_STRUCT);
5163 goto basic_type2;
5164 case TOK_UNION:
5165 struct_decl(&type1, VT_UNION);
5166 goto basic_type2;
5168 /* type modifiers */
5169 case TOK__Atomic:
5170 next();
5171 type->t = t;
5172 parse_btype_qualify(type, VT_ATOMIC);
5173 t = type->t;
5174 if (tok == '(') {
/* _Atomic(type-name) form */
5175 parse_expr_type(&type1);
5176 /* remove all storage modifiers except typedef */
5177 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
5178 if (type1.ref)
5179 sym_to_attr(ad, type1.ref);
5180 goto basic_type2;
5182 break;
5183 case TOK_CONST1:
5184 case TOK_CONST2:
5185 case TOK_CONST3:
5186 type->t = t;
5187 parse_btype_qualify(type, VT_CONSTANT);
5188 t = type->t;
5189 next();
5190 break;
5191 case TOK_VOLATILE1:
5192 case TOK_VOLATILE2:
5193 case TOK_VOLATILE3:
5194 type->t = t;
5195 parse_btype_qualify(type, VT_VOLATILE);
5196 t = type->t;
5197 next();
5198 break;
5199 case TOK_SIGNED1:
5200 case TOK_SIGNED2:
5201 case TOK_SIGNED3:
5202 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
5203 tcc_error("signed and unsigned modifier");
5204 t |= VT_DEFSIGN;
5205 next();
5206 typespec_found = 1;
5207 break;
5208 case TOK_REGISTER:
5209 case TOK_AUTO:
5210 case TOK_RESTRICT1:
5211 case TOK_RESTRICT2:
5212 case TOK_RESTRICT3:
/* register/auto/restrict are accepted and ignored */
5213 next();
5214 break;
5215 case TOK_UNSIGNED:
5216 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
5217 tcc_error("signed and unsigned modifier");
5218 t |= VT_DEFSIGN | VT_UNSIGNED;
5219 next();
5220 typespec_found = 1;
5221 break;
5223 /* storage */
5224 case TOK_EXTERN:
5225 g = VT_EXTERN;
5226 goto storage;
5227 case TOK_STATIC:
5228 g = VT_STATIC;
5229 goto storage;
5230 case TOK_TYPEDEF:
5231 g = VT_TYPEDEF;
5232 goto storage;
5233 storage:
5234 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
5235 tcc_error("multiple storage classes");
5236 t |= g;
5237 next();
5238 break;
5239 case TOK_INLINE1:
5240 case TOK_INLINE2:
5241 case TOK_INLINE3:
5242 t |= VT_INLINE;
5243 next();
5244 break;
5245 case TOK_NORETURN3:
5246 next();
5247 ad->f.func_noreturn = 1;
5248 break;
5249 /* GNUC attribute */
5250 case TOK_ATTRIBUTE1:
5251 case TOK_ATTRIBUTE2:
5252 parse_attribute(ad);
5253 if (ad->attr_mode) {
/* __attribute__((mode(...))) overrides the basic type */
5254 u = ad->attr_mode -1;
5255 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
5257 continue;
5258 /* GNUC typeof */
5259 case TOK_TYPEOF1:
5260 case TOK_TYPEOF2:
5261 case TOK_TYPEOF3:
5262 next();
5263 parse_expr_type(&type1);
5264 /* remove all storage modifiers except typedef */
5265 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
5266 if (type1.ref)
5267 sym_to_attr(ad, type1.ref);
5268 goto basic_type2;
5269 default:
/* possibly a typedef name; only if no type specifier seen yet */
5270 if (typespec_found)
5271 goto the_end;
5272 s = sym_find(tok);
5273 if (!s || !(s->type.t & VT_TYPEDEF))
5274 goto the_end;
5276 n = tok, next();
5277 if (tok == ':' && !in_generic) {
5278 /* ignore if it's a label */
5279 unget_tok(n);
5280 goto the_end;
5283 t &= ~(VT_BTYPE|VT_LONG);
5284 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
5285 type->t = (s->type.t & ~VT_TYPEDEF) | u;
5286 type->ref = s->type.ref;
5287 if (t)
5288 parse_btype_qualify(type, t);
5289 t = type->t;
5290 /* get attributes from typedef */
5291 sym_to_attr(ad, s);
5292 typespec_found = 1;
5293 st = bt = -2;
5294 break;
5296 type_found = 1;
/* apply default signedness and long-size adjustments */
5298 the_end:
5299 if (tcc_state->char_is_unsigned) {
5300 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
5301 t |= VT_UNSIGNED;
5303 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
5304 bt = t & (VT_BTYPE|VT_LONG);
5305 if (bt == VT_LONG)
5306 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
5307 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5308 if (bt == VT_LDOUBLE)
5309 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
5310 #endif
5311 type->t = t;
5312 return type_found;
5315 /* convert a function parameter type (array to pointer and function to
5316 function pointer) */
5317 static inline void convert_parameter_type(CType *pt)
5319 /* remove const and volatile qualifiers (XXX: const could be used
5320 to indicate a const function parameter */
5321 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
5322 /* array must be transformed to pointer according to ANSI C */
5323 pt->t &= ~VT_ARRAY;
5324 if ((pt->t & VT_BTYPE) == VT_FUNC) {
5325 mk_pointer(pt);
5329 ST_FUNC void parse_asm_str(CString *astr)
5331 skip('(');
5332 parse_mult_str(astr, "string constant");
5335 /* Parse an asm label and return the token */
5336 static int asm_label_instr(void)
5338 int v;
5339 CString astr;
5341 next();
5342 parse_asm_str(&astr);
5343 skip(')');
5344 #ifdef ASM_DEBUG
5345 printf("asm_alias: \"%s\"\n", (char *)astr.data);
5346 #endif
5347 v = tok_alloc(astr.data, astr.size - 1)->tok;
5348 cstr_free(&astr);
5349 return v;
/* Parse declarator suffixes: a parameter type list '(...)' or array
   declarators '[...]'. Returns 0 when '(' turns out to start a nested
   (recursive) declarator instead of a parameter list, 1 otherwise.
   'storage' decides whether an array size must be a constant (VLAs are
   only allowed for non-static locals). */
5352 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
5354 int n, l, t1, arg_size, align, unused_align;
5355 Sym **plast, *s, *first;
5356 AttributeDef ad1;
5357 CType pt;
5359 if (tok == '(') {
5360 /* function type, or recursive declarator (return if so) */
5361 next();
5362 if (td && !(td & TYPE_ABSTRACT))
5363 return 0;
5364 if (tok == ')')
5365 l = 0;
5366 else if (parse_btype(&pt, &ad1))
5367 l = FUNC_NEW;
5368 else if (td) {
5369 merge_attr (ad, &ad1);
5370 return 0;
5371 } else
5372 l = FUNC_OLD;
5373 first = NULL;
5374 plast = &first;
5375 arg_size = 0;
5376 if (l) {
5377 for(;;) {
5378 /* read param name and compute offset */
5379 if (l != FUNC_OLD) {
5380 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
5381 break;
5382 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
5383 if ((pt.t & VT_BTYPE) == VT_VOID)
5384 tcc_error("parameter declared as void");
5385 } else {
5386 n = tok;
5387 if (n < TOK_UIDENT)
5388 expect("identifier");
5389 pt.t = VT_VOID; /* invalid type */
5390 pt.ref = NULL;
5391 next();
5393 convert_parameter_type(&pt);
5394 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
5395 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
5396 *plast = s;
5397 plast = &s->next;
5398 if (tok == ')')
5399 break;
5400 skip(',');
5401 if (l == FUNC_NEW && tok == TOK_DOTS) {
5402 l = FUNC_ELLIPSIS;
5403 next();
5404 break;
5406 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
5407 tcc_error("invalid type");
5409 } else
5410 /* if no parameters, then old type prototype */
5411 l = FUNC_OLD;
5412 skip(')');
5413 /* NOTE: const is ignored in returned type as it has a special
5414 meaning in gcc / C++ */
5415 type->t &= ~VT_CONSTANT;
5416 /* some ancient pre-K&R C allows a function to return an array
5417 and the array brackets to be put after the arguments, such
5418 that "int c()[]" means something like "int[] c()" */
5419 if (tok == '[') {
5420 next();
5421 skip(']'); /* only handle simple "[]" */
5422 mk_pointer(type);
5424 /* we push a anonymous symbol which will contain the function prototype */
5425 ad->f.func_args = arg_size;
5426 ad->f.func_type = l;
5427 s = sym_push(SYM_FIELD, type, 0, 0);
5428 s->a = ad->a;
5429 s->f = ad->f;
5430 s->next = first;
5431 type->t = VT_FUNC;
5432 type->ref = s;
5433 } else if (tok == '[') {
5434 int saved_nocode_wanted = nocode_wanted;
5435 /* array definition */
5436 next();
5437 while (1) {
5438 /* XXX The optional type-quals and static should only be accepted
5439 in parameter decls. The '*' as well, and then even only
5440 in prototypes (not function defs). */
5441 switch (tok) {
5442 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
5443 case TOK_CONST1:
5444 case TOK_VOLATILE1:
5445 case TOK_STATIC:
5446 case '*':
5447 next();
5448 continue;
5449 default:
5450 break;
5452 break;
5454 n = -1;
5455 t1 = 0;
5456 if (tok != ']') {
5457 if (!local_stack || (storage & VT_STATIC))
5458 vpushi(expr_const());
5459 else {
5460 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5461 length must always be evaluated, even under nocode_wanted,
5462 so that its size slot is initialized (e.g. under sizeof
5463 or typeof). */
5464 nocode_wanted = 0;
5465 gexpr();
5467 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5468 n = vtop->c.i;
5469 if (n < 0)
5470 tcc_error("invalid array size");
5471 } else {
5472 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5473 tcc_error("size of variable length array should be an integer");
5474 n = 0;
5475 t1 = VT_VLA;
5478 skip(']');
5479 /* parse next post type */
5480 post_type(type, ad, storage, 0);
5482 if ((type->t & VT_BTYPE) == VT_FUNC)
5483 tcc_error("declaration of an array of functions");
5484 if ((type->t & VT_BTYPE) == VT_VOID
5485 || type_size(type, &unused_align) < 0)
5486 tcc_error("declaration of an array of incomplete type elements");
5488 t1 |= type->t & VT_VLA;
5490 if (t1 & VT_VLA) {
/* VLA: reserve a local slot and store the computed runtime size there */
5491 if (n < 0)
5492 tcc_error("need explicit inner array size in VLAs");
5493 loc -= type_size(&int_type, &align);
5494 loc &= -align;
5495 n = loc;
5497 vla_runtime_type_size(type, &align);
5498 gen_op('*');
5499 vset(&int_type, VT_LOCAL|VT_LVAL, n);
5500 vswap();
5501 vstore();
5503 if (n != -1)
5504 vpop();
5505 nocode_wanted = saved_nocode_wanted;
5507 /* we push an anonymous symbol which will contain the array
5508 element type */
5509 s = sym_push(SYM_FIELD, type, 0, n);
5510 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
5511 type->ref = s;
5513 return 1;
5516 /* Parse a type declarator (except basic type), and return the type
5517 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5518 expected. 'type' should contain the basic type. 'ad' is the
5519 attribute definition of the basic type. It can be modified by
5520 type_decl(). If this (possibly abstract) declarator is a pointer chain
5521 it returns the innermost pointed to type (equals *type, but is a different
5522 pointer), otherwise returns type itself, that's used for recursive calls. */
5523 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
5525 CType *post, *ret;
5526 int qualifiers, storage;
5528 /* recursive type, remove storage bits first, apply them later again */
5529 storage = type->t & VT_STORAGE;
5530 type->t &= ~VT_STORAGE;
5531 post = ret = type;
/* pointer derivations, each with an optional list of qualifiers */
5533 while (tok == '*') {
5534 qualifiers = 0;
5535 redo:
5536 next();
5537 switch(tok) {
5538 case TOK__Atomic:
5539 qualifiers |= VT_ATOMIC;
5540 goto redo;
5541 case TOK_CONST1:
5542 case TOK_CONST2:
5543 case TOK_CONST3:
5544 qualifiers |= VT_CONSTANT;
5545 goto redo;
5546 case TOK_VOLATILE1:
5547 case TOK_VOLATILE2:
5548 case TOK_VOLATILE3:
5549 qualifiers |= VT_VOLATILE;
5550 goto redo;
/* 'restrict' is parsed but ignored */
5551 case TOK_RESTRICT1:
5552 case TOK_RESTRICT2:
5553 case TOK_RESTRICT3:
5554 goto redo;
5555 /* XXX: clarify attribute handling */
5556 case TOK_ATTRIBUTE1:
5557 case TOK_ATTRIBUTE2:
5558 parse_attribute(ad);
5559 break;
5561 mk_pointer(type);
5562 type->t |= qualifiers;
5563 if (ret == type)
5564 /* innermost pointed to type is the one for the first derivation */
5565 ret = pointed_type(type);
5568 if (tok == '(') {
5569 /* This is possibly a parameter type list for abstract declarators
5570 ('int ()'), use post_type for testing this. */
5571 if (!post_type(type, ad, 0, td)) {
5572 /* It's not, so it's a nested declarator, and the post operations
5573 apply to the innermost pointed to type (if any). */
5574 /* XXX: this is not correct to modify 'ad' at this point, but
5575 the syntax is not clear */
5576 parse_attribute(ad);
5577 post = type_decl(type, ad, v, td);
5578 skip(')');
5579 } else
5580 goto abstract;
5581 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5582 /* type identifier */
5583 *v = tok;
5584 next();
5585 } else {
5586 abstract:
5587 if (!(td & TYPE_ABSTRACT))
5588 expect("identifier");
5589 *v = 0;
/* apply suffixes to the innermost nested declarator, then restore storage */
5591 post_type(post, ad, storage, 0);
5592 parse_attribute(ad);
5593 type->t |= storage;
5594 return ret;
5597 /* indirection with full error checking and bound check */
5598 ST_FUNC void indir(void)
5600 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5601 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5602 return;
5603 expect("pointer");
5605 if (vtop->r & VT_LVAL)
5606 gv(RC_INT);
5607 vtop->type = *pointed_type(&vtop->type);
5608 /* Arrays and functions are never lvalues */
5609 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5610 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5611 vtop->r |= VT_LVAL;
5612 /* if bound checking, the referenced pointer must be checked */
5613 #ifdef CONFIG_TCC_BCHECK
5614 if (tcc_state->do_bounds_check)
5615 vtop->r |= VT_MUSTBOUND;
5616 #endif
5620 /* pass a parameter to a function and do type checking and casting */
/* 'arg' is the declared parameter symbol, or NULL past the last declared
   parameter (variadic tail / too many arguments). */
5621 static void gfunc_param_typed(Sym *func, Sym *arg)
5623 int func_type;
5624 CType type;
5626 func_type = func->f.func_type;
/* old-style prototype or variadic tail: apply default promotions only */
5627 if (func_type == FUNC_OLD ||
5628 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5629 /* default casting : only need to convert float to double */
5630 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5631 gen_cast_s(VT_DOUBLE);
5632 } else if (vtop->type.t & VT_BITFIELD) {
5633 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5634 type.ref = vtop->type.ref;
5635 gen_cast(&type);
5636 } else if (vtop->r & VT_MUSTCAST) {
5637 force_charshort_cast();
5639 } else if (arg == NULL) {
5640 tcc_error("too many arguments to function");
5641 } else {
/* prototyped parameter: cast the argument to the declared type */
5642 type = arg->type;
5643 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5644 gen_assign_cast(&type);
5648 /* parse an expression and return its type without any side effect. */
5649 static void expr_type(CType *type, void (*expr_fn)(void))
5651 nocode_wanted++;
5652 expr_fn();
5653 *type = vtop->type;
5654 vpop();
5655 nocode_wanted--;
5658 /* parse an expression of the form '(type)' or '(expr)' and return its
5659 type */
5660 static void parse_expr_type(CType *type)
5662 int n;
5663 AttributeDef ad;
5665 skip('(');
5666 if (parse_btype(type, &ad)) {
5667 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5668 } else {
5669 expr_type(type, gexpr);
5671 skip(')');
5674 static void parse_type(CType *type)
5676 AttributeDef ad;
5677 int n;
5679 if (!parse_btype(type, &ad)) {
5680 expect("type");
5682 type_decl(type, &ad, &n, TYPE_ABSTRACT);
/* Parse a builtin's parenthesized argument list according to 'args':
     'e' expression, left as-is      't' type name (pushed on the stack)
     'v' void*   'V' const void*     's' char*   'S' const char*
     'i' int     'l' size_t
   Each non-'e'/'t' argument is cast to the indicated type. If 'nc' is
   non-zero, code generation is suppressed for the whole list. */
5685 static void parse_builtin_params(int nc, const char *args)
5687 char c, sep = '(';
5688 CType type;
5689 if (nc)
5690 nocode_wanted++;
5691 next();
/* with an empty arg spec still consume the opening '(' */
5692 if (*args == 0)
5693 skip(sep);
5694 while ((c = *args++)) {
5695 skip(sep);
5696 sep = ',';
5697 if (c == 't') {
5698 parse_type(&type);
5699 vpush(&type);
5700 continue;
5702 expr_eq();
5703 type.ref = NULL;
5704 type.t = 0;
5705 switch (c) {
5706 case 'e':
5707 continue;
5708 case 'V':
5709 type.t = VT_CONSTANT;
/* fall through */
5710 case 'v':
5711 type.t |= VT_VOID;
5712 mk_pointer (&type);
5713 break;
5714 case 'S':
5715 type.t = VT_CONSTANT;
/* fall through */
5716 case 's':
5717 type.t |= char_type.t;
5718 mk_pointer (&type);
5719 break;
5720 case 'i':
5721 type.t = VT_INT;
5722 break;
5723 case 'l':
5724 type.t = VT_SIZE_T;
5725 break;
5726 default:
5727 break;
5729 gen_assign_cast(&type);
5731 skip(')');
5732 if (nc)
5733 nocode_wanted--;
5736 static inline int is_memory_model(const SValue *sv)
5739 * FIXME
5740 * The memory models should better be backed by an enumeration.
5742 * const int t = sv->type.t;
5744 * if (!IS_ENUM_VAL(t))
5745 * return 0;
5747 * if (!(t & VT_STATIC))
5748 * return 0;
5750 * Ideally we should check whether the model matches 1:1.
5751 * If it is possible, we should check by the name of the value.
5753 return (((sv->type.t & VT_BTYPE) == VT_INT) && (sv->c.i < 6));
/* Parse a __c11_atomic_* builtin call ('atok' is the generic token) and
   emit a call to the size-specific runtime helper. */
5756 static void parse_atomic(int atok)
5758 size_t op;
5759 size_t arg;
5760 size_t argc;
5761 CType *atom = NULL;
5762 char const *params = NULL;
5763 static struct {
5764 int const tok;
5765 char const *const params;
5766 } const ops[] = {
5768 * a -- atomic
5769 * A -- read-only atomic
5770 * p -- pointer to memory
5771 * P -- pointer to read-only memory
5772 * v -- value
5773 * m -- memory model
/* a leading '-' marks operations whose helper returns void */
5775 {TOK___c11_atomic_init, "-av"},
5776 {TOK___c11_atomic_store, "-avm"},
5777 {TOK___c11_atomic_load, "am"},
5778 {TOK___c11_atomic_exchange, "avm"},
5779 {TOK___c11_atomic_compare_exchange_strong, "apvmm"},
5780 {TOK___c11_atomic_compare_exchange_weak, "apvmm"},
5781 {TOK___c11_atomic_fetch_add, "avm"},
5782 {TOK___c11_atomic_fetch_sub, "avm"},
5783 {TOK___c11_atomic_fetch_or, "avm"},
5784 {TOK___c11_atomic_fetch_xor, "avm"},
5785 {TOK___c11_atomic_fetch_and, "avm"},
5788 next();
/* look up the parameter signature for this builtin */
5790 for (op = 0; op < (sizeof(ops) / sizeof(*ops)); ++op) {
5791 if (ops[op].tok == atok) {
5792 params = ops[op].params;
5793 break;
5796 if (!params)
5797 tcc_error("unknown atomic operation");
5799 argc = strlen(params);
5800 if (params[0] == '-') {
5801 ++params;
5802 --argc;
/* placeholder value and function address; the real helper is pushed
   once the atomic's size is known (see case 'a'/'A' below) */
5805 vpushi(0);
5806 vpushi(0); /* function address */
5808 skip('(');
5809 for (arg = 0; arg < argc; ++arg) {
5810 expr_eq();
5812 switch (params[arg]) {
5813 case 'a':
5814 case 'A':
5815 if (atom)
5816 expect_arg("exactly one pointer to atomic", arg);
5817 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5818 expect_arg("pointer to atomic expected", arg);
5819 atom = pointed_type(&vtop->type);
5820 if (!(atom->t & VT_ATOMIC))
5821 expect_arg("qualified pointer to atomic", arg);
5822 if ((params[arg] == 'a') && (atom->t & VT_CONSTANT))
5823 expect_arg("pointer to writable atomic", arg);
5824 atom->t &= ~VT_ATOMIC;
/* helper tokens follow the generic one, presumably ordered by the
   operand size 1/2/4/8 — TODO confirm against the token definitions */
5825 switch (btype_size(atom->t & VT_BTYPE)) {
5826 case 1: atok += 1; break;
5827 case 2: atok += 2; break;
5828 case 4: atok += 3; break;
5829 case 8: atok += 4; break;
5830 default: tcc_error("only integer-sized types are supported");
/* replace the function-address placeholder with the selected helper */
5832 vswap();
5833 vpop();
5834 vpush_helper_func(atok);
5835 vswap();
5836 break;
5838 case 'p':
5839 if (((vtop->type.t & VT_BTYPE) != VT_PTR)
5840 || !is_compatible_unqualified_types(atom, pointed_type(&vtop->type)))
5841 expect_arg("pointer to compatible type", arg);
5842 break;
5844 case 'v':
5845 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5846 expect_arg("integer type", arg);
5847 break;
5849 case 'm':
5850 if (!is_memory_model(vtop))
5851 expect_arg("memory model", arg);
5852 vtop->type.t &= ~VT_MEMMODEL;
5853 break;
5855 default:
5856 tcc_error("unknown parameter type");
5858 if (tok == ')')
5859 break;
5860 skip(',');
5862 if (arg < (argc - 1))
5863 expect("more parameters");
5864 if (arg > (argc - 1))
5865 expect("less parameters");
5866 skip(')');
5868 gfunc_call(argc);
5871 ST_FUNC void unary(void)
5873 int n, t, align, size, r, sizeof_caller;
5874 CType type;
5875 Sym *s;
5876 AttributeDef ad;
5878 /* generate line number info */
5879 if (debug_modes)
5880 tcc_debug_line(tcc_state), tcc_tcov_check_line (1);
5882 sizeof_caller = in_sizeof;
5883 in_sizeof = 0;
5884 type.ref = NULL;
5885 /* XXX: GCC 2.95.3 does not generate a table although it should be
5886 better here */
5887 tok_next:
5888 switch(tok) {
5889 case TOK_EXTENSION:
5890 next();
5891 goto tok_next;
5892 case TOK_LCHAR:
5893 #ifdef TCC_TARGET_PE
5894 t = VT_SHORT|VT_UNSIGNED;
5895 goto push_tokc;
5896 #endif
5897 case TOK_CINT:
5898 case TOK_CCHAR:
5899 t = VT_INT;
5900 push_tokc:
5901 type.t = t;
5902 vsetc(&type, VT_CONST, &tokc);
5903 next();
5904 break;
5905 case TOK_CUINT:
5906 t = VT_INT | VT_UNSIGNED;
5907 goto push_tokc;
5908 case TOK_CLLONG:
5909 t = VT_LLONG;
5910 goto push_tokc;
5911 case TOK_CULLONG:
5912 t = VT_LLONG | VT_UNSIGNED;
5913 goto push_tokc;
5914 case TOK_CFLOAT:
5915 t = VT_FLOAT;
5916 goto push_tokc;
5917 case TOK_CDOUBLE:
5918 t = VT_DOUBLE;
5919 goto push_tokc;
5920 case TOK_CLDOUBLE:
5921 t = VT_LDOUBLE;
5922 goto push_tokc;
5923 case TOK_CLONG:
5924 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5925 goto push_tokc;
5926 case TOK_CULONG:
5927 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5928 goto push_tokc;
5929 case TOK___FUNCTION__:
5930 if (!gnu_ext)
5931 goto tok_identifier;
5932 /* fall thru */
5933 case TOK___FUNC__:
5935 void *ptr;
5936 int len;
5937 /* special function name identifier */
5938 len = strlen(funcname) + 1;
5939 /* generate char[len] type */
5940 type.t = VT_BYTE;
5941 mk_pointer(&type);
5942 type.t |= VT_ARRAY;
5943 type.ref->c = len;
5944 vpush_ref(&type, data_section, data_section->data_offset, len);
5945 if (!NODATA_WANTED) {
5946 ptr = section_ptr_add(data_section, len);
5947 memcpy(ptr, funcname, len);
5949 next();
5951 break;
5952 case TOK_LSTR:
5953 #ifdef TCC_TARGET_PE
5954 t = VT_SHORT | VT_UNSIGNED;
5955 #else
5956 t = VT_INT;
5957 #endif
5958 goto str_init;
5959 case TOK_STR:
5960 /* string parsing */
5961 t = VT_BYTE;
5962 if (tcc_state->char_is_unsigned)
5963 t = VT_BYTE | VT_UNSIGNED;
5964 str_init:
5965 if (tcc_state->warn_write_strings)
5966 t |= VT_CONSTANT;
5967 type.t = t;
5968 mk_pointer(&type);
5969 type.t |= VT_ARRAY;
5970 memset(&ad, 0, sizeof(AttributeDef));
5971 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5972 break;
5973 case '(':
5974 next();
5975 /* cast ? */
5976 if (parse_btype(&type, &ad)) {
5977 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5978 skip(')');
5979 /* check ISOC99 compound literal */
5980 if (tok == '{') {
5981 /* data is allocated locally by default */
5982 if (global_expr)
5983 r = VT_CONST;
5984 else
5985 r = VT_LOCAL;
5986 /* all except arrays are lvalues */
5987 if (!(type.t & VT_ARRAY))
5988 r |= VT_LVAL;
5989 memset(&ad, 0, sizeof(AttributeDef));
5990 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5991 } else {
5992 if (sizeof_caller) {
5993 vpush(&type);
5994 return;
5996 unary();
5997 gen_cast(&type);
5999 } else if (tok == '{') {
6000 int saved_nocode_wanted = nocode_wanted;
6001 if (const_wanted && !(nocode_wanted & unevalmask))
6002 expect("constant");
6003 if (0 == local_scope)
6004 tcc_error("statement expression outside of function");
6005 /* save all registers */
6006 save_regs(0);
6007 /* statement expression : we do not accept break/continue
6008 inside as GCC does. We do retain the nocode_wanted state,
6009 as statement expressions can't ever be entered from the
6010 outside, so any reactivation of code emission (from labels
6011 or loop heads) can be disabled again after the end of it. */
6012 block(1);
6013 nocode_wanted = saved_nocode_wanted;
6014 skip(')');
6015 } else {
6016 gexpr();
6017 skip(')');
6019 break;
6020 case '*':
6021 next();
6022 unary();
6023 indir();
6024 break;
6025 case '&':
6026 next();
6027 unary();
6028 /* functions names must be treated as function pointers,
6029 except for unary '&' and sizeof. Since we consider that
6030 functions are not lvalues, we only have to handle it
6031 there and in function calls. */
6032 /* arrays can also be used although they are not lvalues */
6033 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
6034 !(vtop->type.t & VT_ARRAY))
6035 test_lvalue();
6036 if (vtop->sym)
6037 vtop->sym->a.addrtaken = 1;
6038 mk_pointer(&vtop->type);
6039 gaddrof();
6040 break;
6041 case '!':
6042 next();
6043 unary();
6044 gen_test_zero(TOK_EQ);
6045 break;
6046 case '~':
6047 next();
6048 unary();
6049 vpushi(-1);
6050 gen_op('^');
6051 break;
6052 case '+':
6053 next();
6054 unary();
6055 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
6056 tcc_error("pointer not accepted for unary plus");
6057 /* In order to force cast, we add zero, except for floating point
6058 where we really need an noop (otherwise -0.0 will be transformed
6059 into +0.0). */
6060 if (!is_float(vtop->type.t)) {
6061 vpushi(0);
6062 gen_op('+');
6064 break;
6065 case TOK_SIZEOF:
6066 case TOK_ALIGNOF1:
6067 case TOK_ALIGNOF2:
6068 case TOK_ALIGNOF3:
6069 t = tok;
6070 next();
6071 in_sizeof++;
6072 expr_type(&type, unary); /* Perform a in_sizeof = 0; */
6073 s = NULL;
6074 if (vtop[1].r & VT_SYM)
6075 s = vtop[1].sym; /* hack: accessing previous vtop */
6076 size = type_size(&type, &align);
6077 if (s && s->a.aligned)
6078 align = 1 << (s->a.aligned - 1);
6079 if (t == TOK_SIZEOF) {
6080 if (!(type.t & VT_VLA)) {
6081 if (size < 0)
6082 tcc_error("sizeof applied to an incomplete type");
6083 vpushs(size);
6084 } else {
6085 vla_runtime_type_size(&type, &align);
6087 } else {
6088 vpushs(align);
6090 vtop->type.t |= VT_UNSIGNED;
6091 break;
6093 case TOK_builtin_expect:
6094 /* __builtin_expect is a no-op for now */
6095 parse_builtin_params(0, "ee");
6096 vpop();
6097 break;
6098 case TOK_builtin_types_compatible_p:
6099 parse_builtin_params(0, "tt");
6100 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
6101 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
6102 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
6103 vtop -= 2;
6104 vpushi(n);
6105 break;
6106 case TOK_builtin_choose_expr:
6108 int64_t c;
6109 next();
6110 skip('(');
6111 c = expr_const64();
6112 skip(',');
6113 if (!c) {
6114 nocode_wanted++;
6116 expr_eq();
6117 if (!c) {
6118 vpop();
6119 nocode_wanted--;
6121 skip(',');
6122 if (c) {
6123 nocode_wanted++;
6125 expr_eq();
6126 if (c) {
6127 vpop();
6128 nocode_wanted--;
6130 skip(')');
6132 break;
6133 case TOK_builtin_constant_p:
6134 parse_builtin_params(1, "e");
6135 n = (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6136 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
6137 vtop--;
6138 vpushi(n);
6139 break;
6140 case TOK_builtin_frame_address:
6141 case TOK_builtin_return_address:
6143 int tok1 = tok;
6144 int level;
6145 next();
6146 skip('(');
6147 if (tok != TOK_CINT) {
6148 tcc_error("%s only takes positive integers",
6149 tok1 == TOK_builtin_return_address ?
6150 "__builtin_return_address" :
6151 "__builtin_frame_address");
6153 level = (uint32_t)tokc.i;
6154 next();
6155 skip(')');
6156 type.t = VT_VOID;
6157 mk_pointer(&type);
6158 vset(&type, VT_LOCAL, 0); /* local frame */
6159 while (level--) {
6160 #ifdef TCC_TARGET_RISCV64
6161 vpushi(2*PTR_SIZE);
6162 gen_op('-');
6163 #endif
6164 mk_pointer(&vtop->type);
6165 indir(); /* -> parent frame */
6167 if (tok1 == TOK_builtin_return_address) {
6168 // assume return address is just above frame pointer on stack
6169 #ifdef TCC_TARGET_ARM
6170 vpushi(2*PTR_SIZE);
6171 gen_op('+');
6172 #elif defined TCC_TARGET_RISCV64
6173 vpushi(PTR_SIZE);
6174 gen_op('-');
6175 #else
6176 vpushi(PTR_SIZE);
6177 gen_op('+');
6178 #endif
6179 mk_pointer(&vtop->type);
6180 indir();
6183 break;
6184 #ifdef TCC_TARGET_RISCV64
6185 case TOK_builtin_va_start:
6186 parse_builtin_params(0, "ee");
6187 r = vtop->r & VT_VALMASK;
6188 if (r == VT_LLOCAL)
6189 r = VT_LOCAL;
6190 if (r != VT_LOCAL)
6191 tcc_error("__builtin_va_start expects a local variable");
6192 gen_va_start();
6193 vstore();
6194 break;
6195 #endif
6196 #ifdef TCC_TARGET_X86_64
6197 #ifdef TCC_TARGET_PE
6198 case TOK_builtin_va_start:
6199 parse_builtin_params(0, "ee");
6200 r = vtop->r & VT_VALMASK;
6201 if (r == VT_LLOCAL)
6202 r = VT_LOCAL;
6203 if (r != VT_LOCAL)
6204 tcc_error("__builtin_va_start expects a local variable");
6205 vtop->r = r;
6206 vtop->type = char_pointer_type;
6207 vtop->c.i += 8;
6208 vstore();
6209 break;
6210 #else
6211 case TOK_builtin_va_arg_types:
6212 parse_builtin_params(0, "t");
6213 vpushi(classify_x86_64_va_arg(&vtop->type));
6214 vswap();
6215 vpop();
6216 break;
6217 #endif
6218 #endif
6220 #ifdef TCC_TARGET_ARM64
6221 case TOK_builtin_va_start: {
6222 parse_builtin_params(0, "ee");
6223 //xx check types
6224 gen_va_start();
6225 vpushi(0);
6226 vtop->type.t = VT_VOID;
6227 break;
6229 case TOK_builtin_va_arg: {
6230 parse_builtin_params(0, "et");
6231 type = vtop->type;
6232 vpop();
6233 //xx check types
6234 gen_va_arg(&type);
6235 vtop->type = type;
6236 break;
6238 case TOK___arm64_clear_cache: {
6239 parse_builtin_params(0, "ee");
6240 gen_clear_cache();
6241 vpushi(0);
6242 vtop->type.t = VT_VOID;
6243 break;
6245 #endif
6247 /* atomic operations */
6248 case TOK___c11_atomic_init:
6249 case TOK___c11_atomic_store:
6250 case TOK___c11_atomic_load:
6251 case TOK___c11_atomic_exchange:
6252 case TOK___c11_atomic_compare_exchange_strong:
6253 case TOK___c11_atomic_compare_exchange_weak:
6254 case TOK___c11_atomic_fetch_add:
6255 case TOK___c11_atomic_fetch_sub:
6256 case TOK___c11_atomic_fetch_or:
6257 case TOK___c11_atomic_fetch_xor:
6258 case TOK___c11_atomic_fetch_and:
6259 parse_atomic(tok);
6260 break;
6262 /* pre operations */
6263 case TOK_INC:
6264 case TOK_DEC:
6265 t = tok;
6266 next();
6267 unary();
6268 inc(0, t);
6269 break;
6270 case '-':
6271 next();
6272 unary();
6273 if (is_float(vtop->type.t)) {
6274 gen_opif(TOK_NEG);
6275 } else {
6276 vpushi(0);
6277 vswap();
6278 gen_op('-');
6280 break;
6281 case TOK_LAND:
6282 if (!gnu_ext)
6283 goto tok_identifier;
6284 next();
6285 /* allow to take the address of a label */
6286 if (tok < TOK_UIDENT)
6287 expect("label identifier");
6288 s = label_find(tok);
6289 if (!s) {
6290 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6291 } else {
6292 if (s->r == LABEL_DECLARED)
6293 s->r = LABEL_FORWARD;
6295 if (!s->type.t) {
6296 s->type.t = VT_VOID;
6297 mk_pointer(&s->type);
6298 s->type.t |= VT_STATIC;
6300 vpushsym(&s->type, s);
6301 next();
6302 break;
6304 case TOK_GENERIC:
6306 CType controlling_type;
6307 int has_default = 0;
6308 int has_match = 0;
6309 int learn = 0;
6310 TokenString *str = NULL;
6311 int saved_const_wanted = const_wanted;
6313 next();
6314 skip('(');
6315 const_wanted = 0;
6316 expr_type(&controlling_type, expr_eq);
6317 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
6318 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
6319 mk_pointer(&controlling_type);
6320 const_wanted = saved_const_wanted;
6321 for (;;) {
6322 learn = 0;
6323 skip(',');
6324 if (tok == TOK_DEFAULT) {
6325 if (has_default)
6326 tcc_error("too many 'default'");
6327 has_default = 1;
6328 if (!has_match)
6329 learn = 1;
6330 next();
6331 } else {
6332 AttributeDef ad_tmp;
6333 int itmp;
6334 CType cur_type;
6336 in_generic++;
6337 parse_btype(&cur_type, &ad_tmp);
6338 in_generic--;
6340 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
6341 if (compare_types(&controlling_type, &cur_type, 0)) {
6342 if (has_match) {
6343 tcc_error("type match twice");
6345 has_match = 1;
6346 learn = 1;
6349 skip(':');
6350 if (learn) {
6351 if (str)
6352 tok_str_free(str);
6353 skip_or_save_block(&str);
6354 } else {
6355 skip_or_save_block(NULL);
6357 if (tok == ')')
6358 break;
6360 if (!str) {
6361 char buf[60];
6362 type_to_str(buf, sizeof buf, &controlling_type, NULL);
6363 tcc_error("type '%s' does not match any association", buf);
6365 begin_macro(str, 1);
6366 next();
6367 expr_eq();
6368 if (tok != TOK_EOF)
6369 expect(",");
6370 end_macro();
6371 next();
6372 break;
6374 // special qnan , snan and infinity values
6375 case TOK___NAN__:
6376 n = 0x7fc00000;
6377 special_math_val:
6378 vpushi(n);
6379 vtop->type.t = VT_FLOAT;
6380 next();
6381 break;
6382 case TOK___SNAN__:
6383 n = 0x7f800001;
6384 goto special_math_val;
6385 case TOK___INF__:
6386 n = 0x7f800000;
6387 goto special_math_val;
6389 default:
6390 tok_identifier:
6391 t = tok;
6392 next();
6393 if (t < TOK_UIDENT)
6394 expect("identifier");
6395 s = sym_find(t);
6396 if (!s || IS_ASM_SYM(s)) {
6397 const char *name = get_tok_str(t, NULL);
6398 if (tok != '(')
6399 tcc_error("'%s' undeclared", name);
6400 /* for simple function calls, we tolerate undeclared
6401 external reference to int() function */
6402 if (tcc_state->warn_implicit_function_declaration
6403 #ifdef TCC_TARGET_PE
6404 /* people must be warned about using undeclared WINAPI functions
6405 (which usually start with uppercase letter) */
6406 || (name[0] >= 'A' && name[0] <= 'Z')
6407 #endif
6409 tcc_warning("implicit declaration of function '%s'", name);
6410 s = external_global_sym(t, &func_old_type);
6413 r = s->r;
6414 /* A symbol that has a register is a local register variable,
6415 which starts out as VT_LOCAL value. */
6416 if ((r & VT_VALMASK) < VT_CONST)
6417 r = (r & ~VT_VALMASK) | VT_LOCAL;
6419 vset(&s->type, r, s->c);
6420 /* Point to s as backpointer (even without r&VT_SYM).
6421 Will be used by at least the x86 inline asm parser for
6422 regvars. */
6423 vtop->sym = s;
6425 if (r & VT_SYM) {
6426 vtop->c.i = 0;
6427 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
6428 vtop->c.i = s->enum_val;
6430 break;
6433 /* post operations */
6434 while (1) {
6435 if (tok == TOK_INC || tok == TOK_DEC) {
6436 inc(1, tok);
6437 next();
6438 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
6439 int qualifiers, cumofs = 0;
6440 /* field */
6441 if (tok == TOK_ARROW)
6442 indir();
6443 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
6444 test_lvalue();
6445 gaddrof();
6446 /* expect pointer on structure */
6447 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
6448 expect("struct or union");
6449 if (tok == TOK_CDOUBLE)
6450 expect("field name");
6451 next();
6452 if (tok == TOK_CINT || tok == TOK_CUINT)
6453 expect("field name");
6454 s = find_field(&vtop->type, tok, &cumofs);
6455 if (!s)
6456 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
6457 /* add field offset to pointer */
6458 vtop->type = char_pointer_type; /* change type to 'char *' */
6459 vpushi(cumofs + s->c);
6460 gen_op('+');
6461 /* change type to field type, and set to lvalue */
6462 vtop->type = s->type;
6463 vtop->type.t |= qualifiers;
6464 /* an array is never an lvalue */
6465 if (!(vtop->type.t & VT_ARRAY)) {
6466 vtop->r |= VT_LVAL;
6467 #ifdef CONFIG_TCC_BCHECK
6468 /* if bound checking, the referenced pointer must be checked */
6469 if (tcc_state->do_bounds_check)
6470 vtop->r |= VT_MUSTBOUND;
6471 #endif
6473 next();
6474 } else if (tok == '[') {
6475 next();
6476 gexpr();
6477 gen_op('+');
6478 indir();
6479 skip(']');
6480 } else if (tok == '(') {
6481 SValue ret;
6482 Sym *sa;
6483 int nb_args, ret_nregs, ret_align, regsize, variadic;
6485 /* function call */
6486 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
6487 /* pointer test (no array accepted) */
6488 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
6489 vtop->type = *pointed_type(&vtop->type);
6490 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
6491 goto error_func;
6492 } else {
6493 error_func:
6494 expect("function pointer");
6496 } else {
6497 vtop->r &= ~VT_LVAL; /* no lvalue */
6499 /* get return type */
6500 s = vtop->type.ref;
6501 next();
6502 sa = s->next; /* first parameter */
6503 nb_args = regsize = 0;
6504 ret.r2 = VT_CONST;
6505 /* compute first implicit argument if a structure is returned */
6506 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
6507 variadic = (s->f.func_type == FUNC_ELLIPSIS);
6508 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
6509 &ret_align, &regsize);
6510 if (ret_nregs <= 0) {
6511 /* get some space for the returned structure */
6512 size = type_size(&s->type, &align);
6513 #ifdef TCC_TARGET_ARM64
6514 /* On arm64, a small struct is return in registers.
6515 It is much easier to write it to memory if we know
6516 that we are allowed to write some extra bytes, so
6517 round the allocated space up to a power of 2: */
6518 if (size < 16)
6519 while (size & (size - 1))
6520 size = (size | (size - 1)) + 1;
6521 #endif
6522 loc = (loc - size) & -align;
6523 ret.type = s->type;
6524 ret.r = VT_LOCAL | VT_LVAL;
6525 /* pass it as 'int' to avoid structure arg passing
6526 problems */
6527 vseti(VT_LOCAL, loc);
6528 #ifdef CONFIG_TCC_BCHECK
6529 if (tcc_state->do_bounds_check)
6530 --loc;
6531 #endif
6532 ret.c = vtop->c;
6533 if (ret_nregs < 0)
6534 vtop--;
6535 else
6536 nb_args++;
6538 } else {
6539 ret_nregs = 1;
6540 ret.type = s->type;
6543 if (ret_nregs > 0) {
6544 /* return in register */
6545 ret.c.i = 0;
6546 PUT_R_RET(&ret, ret.type.t);
6548 if (tok != ')') {
6549 for(;;) {
6550 expr_eq();
6551 gfunc_param_typed(s, sa);
6552 nb_args++;
6553 if (sa)
6554 sa = sa->next;
6555 if (tok == ')')
6556 break;
6557 skip(',');
6560 if (sa)
6561 tcc_error("too few arguments to function");
6562 skip(')');
6563 gfunc_call(nb_args);
6565 if (ret_nregs < 0) {
6566 vsetc(&ret.type, ret.r, &ret.c);
6567 #ifdef TCC_TARGET_RISCV64
6568 arch_transfer_ret_regs(1);
6569 #endif
6570 } else {
6571 /* return value */
6572 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
6573 vsetc(&ret.type, r, &ret.c);
6574 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
6577 /* handle packed struct return */
6578 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6579 int addr, offset;
6581 size = type_size(&s->type, &align);
6582 /* We're writing whole regs often, make sure there's enough
6583 space. Assume register size is power of 2. */
6584 if (regsize > align)
6585 align = regsize;
6586 loc = (loc - size) & -align;
6587 addr = loc;
6588 offset = 0;
6589 for (;;) {
6590 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6591 vswap();
6592 vstore();
6593 vtop--;
6594 if (--ret_nregs == 0)
6595 break;
6596 offset += regsize;
6598 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
6601 /* Promote char/short return values. This is matters only
6602 for calling function that were not compiled by TCC and
6603 only on some architectures. For those where it doesn't
6604 matter we expect things to be already promoted to int,
6605 but not larger. */
6606 t = s->type.t & VT_BTYPE;
6607 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6608 #ifdef PROMOTE_RET
6609 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6610 #else
6611 vtop->type.t = VT_INT;
6612 #endif
6615 if (s->f.func_noreturn) {
6616 if (debug_modes)
6617 tcc_tcov_block_end (tcov_data.line);
6618 CODE_OFF();
6620 } else {
6621 break;
6626 #ifndef precedence_parser /* original top-down parser */
6628 static void expr_prod(void)
6630 int t;
6632 unary();
6633 while ((t = tok) == '*' || t == '/' || t == '%') {
6634 next();
6635 unary();
6636 gen_op(t);
6640 static void expr_sum(void)
6642 int t;
6644 expr_prod();
6645 while ((t = tok) == '+' || t == '-') {
6646 next();
6647 expr_prod();
6648 gen_op(t);
6652 static void expr_shift(void)
6654 int t;
6656 expr_sum();
6657 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6658 next();
6659 expr_sum();
6660 gen_op(t);
6664 static void expr_cmp(void)
6666 int t;
6668 expr_shift();
6669 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6670 t == TOK_ULT || t == TOK_UGE) {
6671 next();
6672 expr_shift();
6673 gen_op(t);
6677 static void expr_cmpeq(void)
6679 int t;
6681 expr_cmp();
6682 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6683 next();
6684 expr_cmp();
6685 gen_op(t);
6689 static void expr_and(void)
6691 expr_cmpeq();
6692 while (tok == '&') {
6693 next();
6694 expr_cmpeq();
6695 gen_op('&');
6699 static void expr_xor(void)
6701 expr_and();
6702 while (tok == '^') {
6703 next();
6704 expr_and();
6705 gen_op('^');
6709 static void expr_or(void)
6711 expr_xor();
6712 while (tok == '|') {
6713 next();
6714 expr_xor();
6715 gen_op('|');
6719 static void expr_landor(int op);
6721 static void expr_land(void)
6723 expr_or();
6724 if (tok == TOK_LAND)
6725 expr_landor(tok);
6728 static void expr_lor(void)
6730 expr_land();
6731 if (tok == TOK_LOR)
6732 expr_landor(tok);
6735 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6736 #else /* defined precedence_parser */
6737 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6738 # define expr_lor() unary(), expr_infix(1)
6740 static int precedence(int tok)
6742 switch (tok) {
6743 case TOK_LOR: return 1;
6744 case TOK_LAND: return 2;
6745 case '|': return 3;
6746 case '^': return 4;
6747 case '&': return 5;
6748 case TOK_EQ: case TOK_NE: return 6;
6749 relat: case TOK_ULT: case TOK_UGE: return 7;
6750 case TOK_SHL: case TOK_SAR: return 8;
6751 case '+': case '-': return 9;
6752 case '*': case '/': case '%': return 10;
6753 default:
6754 if (tok >= TOK_ULE && tok <= TOK_GT)
6755 goto relat;
6756 return 0;
6759 static unsigned char prec[256];
6760 static void init_prec(void)
6762 int i;
6763 for (i = 0; i < 256; i++)
6764 prec[i] = precedence(i);
6766 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6768 static void expr_landor(int op);
/* Precedence-climbing parser for all binary operators.  The left
   operand has already been parsed (by unary()); consume operators
   whose precedence is at least 'p', recursing for tighter-binding
   right-hand sides. */
static void expr_infix(int p)
{
    int t = tok, p2;
    while ((p2 = precedence(t)) >= p) {
        if (t == TOK_LOR || t == TOK_LAND) {
            /* && and || need jump-based short-circuit code generation */
            expr_landor(t);
        } else {
            next();
            unary();
            /* if the following operator binds tighter than the current
               one, parse its whole subexpression first (right operand) */
            if (precedence(tok) > p2)
                expr_infix(p2 + 1);
            gen_op(t);
        }
        t = tok;
    }
}
6786 #endif
6788 /* Assuming vtop is a value used in a conditional context
6789 (i.e. compared with zero) return 0 if it's false, 1 if
6790 true and -1 if it can't be statically determined. */
6791 static int condition_3way(void)
6793 int c = -1;
6794 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6795 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6796 vdup();
6797 gen_cast_s(VT_BOOL);
6798 c = vtop->c.i;
6799 vpop();
6801 return c;
/* Generate code for a chain of '&&' (op == TOK_LAND) or '||'
   (op == TOK_LOR) operators with short-circuit evaluation.
   i  = the "keep going" value (1 for &&, 0 for ||);
   cc = all operands so far were compile-time constants;
   f  = the overall result is statically known (a constant operand
        short-circuited the chain), remaining operands are parsed with
        nocode_wanted set;
   t  = chain of pending short-circuit jump addresses. */
static void expr_landor(int op)
{
    int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
    for(;;) {
        c = f ? i : condition_3way();
        if (c < 0)
            /* runtime value: registers must be consistent across branches */
            save_regs(1), cc = 0;
        else if (c != i)
            /* constant that decides the result: suppress code for the rest */
            nocode_wanted++, f = 1;
        if (tok != op)
            break;
        if (c < 0)
            t = gvtst(i, t); /* emit test, accumulate jump chain */
        else
            vpop(); /* constant operand: just discard it */
        next();
        expr_landor_next(op);
    }
    if (cc || f) {
        /* result is a compile-time constant: i if no short-circuit, !i if so */
        vpop();
        vpushi(i ^ f);
        gsym(t);
        nocode_wanted -= f; /* undo the suppression from above */
    } else {
        /* combine the jump chain with the final comparison result */
        gvtst_set(i, t);
    }
}
6832 static int is_cond_bool(SValue *sv)
6834 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6835 && (sv->type.t & VT_BTYPE) == VT_INT)
6836 return (unsigned)sv->c.i < 2;
6837 if (sv->r == VT_CMP)
6838 return 1;
6839 return 0;
/* Parse and generate code for a conditional expression 'a ? b : c',
   including the GNU extension 'a ?: c'.
   c  = 3-way constant status of the condition (0/1/unknown);
   g  = GNU omitted-middle-operand form;
   tt/u = jump addresses used to skip over the untaken branch;
   islv = both branches are struct lvalues, kept as lvalue via pointers. */
static void expr_cond(void)
{
    int tt, u, r1, r2, rc, t1, t2, islv, c, g;
    SValue sv;
    CType type;
    int ncw_prev;

    expr_lor();
    if (tok == '?') {
        next();
        c = condition_3way();
        g = (tok == ':' && gnu_ext);
        tt = 0;
        if (!g) {
            if (c < 0) {
                /* runtime condition: test it and jump over the true branch */
                save_regs(1);
                tt = gvtst(1, 0);
            } else {
                /* constant condition: the test value is no longer needed */
                vpop();
            }
        } else if (c < 0) {
            /* needed to avoid having different registers saved in
               each branch */
            save_regs(1);
            gv_dup(); /* the condition value is also the 'true' result */
            tt = gvtst(0, 0);
        }

        ncw_prev = nocode_wanted;
        if (c == 0)
            nocode_wanted++; /* true branch statically dead */
        if (!g)
            gexpr();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);
        sv = *vtop; /* save value to handle it later */
        vtop--; /* no vpop so that FP stack is not flushed */

        if (g) {
            u = tt;
        } else if (c < 0) {
            u = gjmp(0); /* jump over the false branch */
            gsym(tt);
        } else
            u = 0;

        nocode_wanted = ncw_prev;
        if (c == 1)
            nocode_wanted++; /* false branch statically dead */
        skip(':');
        expr_cond();

        if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
            /* optimize "if (f ? a > b : c || d) ..." for example, where normally
               "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
               this code jumps directly to the if's then/else branches. */
            t1 = gvtst(0, 0);
            t2 = gjmp(0);
            gsym(u);
            vpushv(&sv);
            /* combine jump targets of 2nd op with VT_CMP of 1st op */
            gvtst_set(0, t1);
            gvtst_set(1, t2);
            nocode_wanted = ncw_prev;
            //  tcc_warning("two conditions expr_cond");
            return;
        }

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);

        /* cast operands to correct type according to ISOC rules */
        if (!combine_types(&type, &sv, vtop, '?'))
            type_incompatibility_error(&sv.type, &vtop->type,
                "type mismatch in conditional expression (have '%s' and '%s')");
        /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
           that `(expr ? a : b).mem` does not error with "lvalue expected" */
        islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);

        /* now we convert second operand */
        if (c != 1) {
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        rc = RC_TYPE(type.t);
        /* for long longs, we use fixed registers to avoid having
           to handle a complicated move */
        if (USING_TWO_WORDS(type.t))
            rc = RC_RET(type.t);

        tt = r2 = 0;
        if (c < 0) {
            r2 = gv(rc);
            tt = gjmp(0);
        }
        gsym(u);
        nocode_wanted = ncw_prev;

        /* this is horrible, but we must also convert first
           operand */
        if (c != 0) {
            *vtop = sv;
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        if (c < 0) {
            /* move the first operand into the same register as the second */
            r1 = gv(rc);
            move_reg(r2, r1, islv ? VT_PTR : type.t);
            vtop->r = r2;
            gsym(tt);
        }

        if (islv)
            indir(); /* undo the &a/&b transformation from above */
    }
}
6970 static void expr_eq(void)
6972 int t;
6974 expr_cond();
6975 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6976 test_lvalue();
6977 next();
6978 if (t == '=') {
6979 expr_eq();
6980 } else {
6981 vdup();
6982 expr_eq();
6983 gen_op(TOK_ASSIGN_OP(t));
6985 vstore();
6989 ST_FUNC void gexpr(void)
6991 while (1) {
6992 expr_eq();
6993 if (tok != ',')
6994 break;
6995 vpop();
6996 next();
7000 /* parse a constant expression and return value in vtop. */
7001 static void expr_const1(void)
7003 const_wanted++;
7004 nocode_wanted += unevalmask + 1;
7005 expr_cond();
7006 nocode_wanted -= unevalmask + 1;
7007 const_wanted--;
7010 /* parse an integer constant and return its value. */
7011 static inline int64_t expr_const64(void)
7013 int64_t c;
7014 expr_const1();
7015 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
7016 expect("constant expression");
7017 c = vtop->c.i;
7018 vpop();
7019 return c;
7022 /* parse an integer constant and return its value.
7023 Complain if it doesn't fit 32bit (signed or unsigned). */
7024 ST_FUNC int expr_const(void)
7026 int c;
7027 int64_t wc = expr_const64();
7028 c = wc;
7029 if (c != wc && (unsigned)c != wc)
7030 tcc_error("constant exceeds 32 bit");
7031 return c;
7034 /* ------------------------------------------------------------------------- */
7035 /* return from function */
7037 #ifndef TCC_TARGET_ARM64
/* Generate code to return the value currently in vtop from a function
   of type 'func_type'.  Handles the three struct-return ABIs:
   ret_nregs < 0  -> target-specific register transfer;
   ret_nregs == 0 -> copy through the implicit first pointer argument;
   ret_nregs > 0  -> pack the struct into return registers. */
static void gfunc_return(CType *func_type)
{
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (ret_nregs < 0) {
#ifdef TCC_TARGET_RISCV64
            arch_transfer_ret_regs(0);
#endif
        } else if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            type = *func_type;
            mk_pointer(&type);
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            indir();
            vswap();
            /* copy structure value to pointer */
            vstore();
        } else {
            /* returning structure packed into registers */
            int size, addr, align, rc;
            size = type_size(func_type,&align);
            /* if the value is not suitably aligned for whole-register
               loads, spill it to a properly aligned stack slot first */
            if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
                 (vtop->c.i & (ret_align-1)))
                && (align & (ret_align-1))) {
                loc = (loc - size) & -ret_align;
                addr = loc;
                type = *func_type;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
                vswap();
                vstore();
                vpop();
                vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            }
            vtop->type = ret_type;
            rc = RC_RET(ret_type.t);
            if (ret_nregs == 1)
                gv(rc);
            else {
                for (;;) {
                    vdup();
                    gv(rc);
                    vpop();
                    if (--ret_nregs == 0)
                        break;
                    /* We assume that when a structure is returned in multiple
                       registers, their classes are consecutive values of the
                       suite s(n) = 2^n */
                    rc <<= 1;
                    vtop->c.i += regsize;
                }
            }
        }
    } else {
        gv(RC_RET(func_type->t));
    }
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
}
7099 #endif
7101 static void check_func_return(void)
7103 if ((func_vt.t & VT_BTYPE) == VT_VOID)
7104 return;
7105 if (!strcmp (funcname, "main")
7106 && (func_vt.t & VT_BTYPE) == VT_INT) {
7107 /* main returns 0 by default */
7108 vpushi(0);
7109 gen_assign_cast(&func_vt);
7110 gfunc_return(&func_vt);
7111 } else {
7112 tcc_warning("function might return no value: '%s'", funcname);
7116 /* ------------------------------------------------------------------------- */
7117 /* switch/case */
7119 static int case_cmpi(const void *pa, const void *pb)
7121 int64_t a = (*(struct case_t**) pa)->v1;
7122 int64_t b = (*(struct case_t**) pb)->v1;
7123 return a < b ? -1 : a > b;
7126 static int case_cmpu(const void *pa, const void *pb)
7128 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
7129 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
7130 return a < b ? -1 : a > b;
/* emit a test on vtop and resolve the resulting jump to address 'a';
   't' is an existing jump chain to merge with */
static void gtst_addr(int t, int a)
{
    int jmp = gvtst(0, t);
    gsym_addr(jmp, a);
}
/* Generate the dispatch code for a sorted array of switch cases.
   The switch value is on vtop (and is kept there via vdup for each
   test).  Large sets use a balanced binary search over the sorted
   ranges; small sets (<= 8) use a linear scan.  '*bsym' accumulates
   the jump taken when no case matches (the default/break target). */
static void gcase(struct case_t **base, int len, int *bsym)
{
    struct case_t *p;
    int e;
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
    while (len > 8) {
        /* binary search */
        p = base[len/2];
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        gen_op(TOK_LE);
        e = gvtst(1, 0); /* jump here if value > v2: upper half */
        vdup();
        if (ll)
            vpushll(p->v1);
        else
            vpushi(p->v1);
        gen_op(TOK_GE);
        gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        /* x < v1 */
        gcase(base, len/2, bsym);
        /* x > v2 */
        gsym(e);
        e = len/2 + 1;
        base += e; len -= e; /* continue with the upper half iteratively */
    }
    /* linear scan */
    while (len--) {
        p = *base++;
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        if (p->v1 == p->v2) {
            /* single-value case: one equality test */
            gen_op(TOK_EQ);
            gtst_addr(0, p->sym);
        } else {
            /* GNU case range: v1 <= x <= v2 */
            gen_op(TOK_LE);
            e = gvtst(1, 0);
            vdup();
            if (ll)
                vpushll(p->v1);
            else
                vpushi(p->v1);
            gen_op(TOK_GE);
            gtst_addr(0, p->sym);
            gsym(e);
        }
    }
    *bsym = gjmp(*bsym); /* no case matched */
}
7194 /* ------------------------------------------------------------------------- */
7195 /* __attribute__((cleanup(fn))) */
7197 static void try_call_scope_cleanup(Sym *stop)
7199 Sym *cls = cur_scope->cl.s;
7201 for (; cls != stop; cls = cls->ncl) {
7202 Sym *fs = cls->next;
7203 Sym *vs = cls->prev_tok;
7205 vpushsym(&fs->type, fs);
7206 vset(&vs->type, vs->r, vs->c);
7207 vtop->sym = vs;
7208 mk_pointer(&vtop->type);
7209 gaddrof();
7210 gfunc_call(1);
7214 static void try_call_cleanup_goto(Sym *cleanupstate)
7216 Sym *oc, *cc;
7217 int ocd, ccd;
7219 if (!cur_scope->cl.s)
7220 return;
7222 /* search NCA of both cleanup chains given parents and initial depth */
7223 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
7224 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
7226 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
7228 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
7231 try_call_scope_cleanup(cc);
7234 /* call 'func' for each __attribute__((cleanup(func))) */
/* When leaving a scope, emit cleanup calls for any still-pending
   forward gotos that jump out of scope 'o'.  Each pending goto's jump
   is redirected through a stub that runs the scope's cleanups and then
   jumps on; gotos that no longer cross a cleanup scope are removed
   from the pending list. */
static void block_cleanup(struct scope *o)
{
    int jmp = 0;
    Sym *g, **pg;
    for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
        if (g->prev_tok->r & LABEL_FORWARD) {
            Sym *pcl = g->next;
            if (!jmp)
                jmp = gjmp(0); /* skip the stubs in normal control flow */
            gsym(pcl->jnext);
            try_call_scope_cleanup(o->cl.s);
            pcl->jnext = gjmp(0); /* goto continues from here later */
            if (!o->cl.n)
                goto remove_pending;
            g->c = o->cl.n; /* remaining cleanup depth for this goto */
            pg = &g->prev;
        } else {
    remove_pending:
            *pg = g->prev;
            sym_free(g);
        }
    }
    gsym(jmp);
    /* cleanups for the normal (non-goto) exit from the scope */
    try_call_scope_cleanup(o->cl.s);
}
7261 /* ------------------------------------------------------------------------- */
7262 /* VLA */
/* restore the stack pointer from the saved location 'loc' (0 = nothing
   to restore) after leaving a scope containing VLAs */
static void vla_restore(int loc)
{
    if (!loc)
        return;
    gen_vla_sp_restore(loc);
}
7270 static void vla_leave(struct scope *o)
7272 struct scope *c = cur_scope, *v = NULL;
7273 for (; c != o && c; c = c->prev)
7274 if (c->vla.num)
7275 v = c;
7276 if (v)
7277 vla_restore(v->vla.locorig);
7280 /* ------------------------------------------------------------------------- */
7281 /* local scopes */
7283 void new_scope(struct scope *o)
7285 /* copy and link previous scope */
7286 *o = *cur_scope;
7287 o->prev = cur_scope;
7288 cur_scope = o;
7289 cur_scope->vla.num = 0;
7291 /* record local declaration stack position */
7292 o->lstk = local_stack;
7293 o->llstk = local_label_stack;
7294 ++local_scope;
7296 if (debug_modes)
7297 tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
/* leave scope 'o' and return to its parent: restore VLA stack state,
   run block cleanups, and pop locally declared labels and symbols */
void prev_scope(struct scope *o, int is_expr)
{
    vla_leave(o->prev);

    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack.  Either via the
       type or via vtop->sym.  We can't pop those nor any that in turn
       might be referred to.  To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that.  We do have to remove such symbols from the lookup
       tables, though.  sym_pop will do that.  */

    /* pop locally defined symbols */
    pop_local_syms(o->lstk, is_expr);
    cur_scope = o->prev;
    --local_scope;

    if (debug_modes)
        tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
}
7327 /* leave a scope via break/continue(/goto) */
7328 void leave_scope(struct scope *o)
7330 if (!o)
7331 return;
7332 try_call_scope_cleanup(o->cl.s);
7333 vla_leave(o);
7336 /* ------------------------------------------------------------------------- */
7337 /* call block from 'for do while' loops */
7339 static void lblock(int *bsym, int *csym)
7341 struct scope *lo = loop_scope, *co = cur_scope;
7342 int *b = co->bsym, *c = co->csym;
7343 if (csym) {
7344 co->csym = csym;
7345 loop_scope = co;
7347 co->bsym = bsym;
7348 block(0);
7349 co->bsym = b;
7350 if (csym) {
7351 co->csym = c;
7352 loop_scope = lo;
7356 static void block(int is_expr)
7358 int a, b, c, d, e, t;
7359 struct scope o;
7360 Sym *s;
7362 if (is_expr) {
7363 /* default return value is (void) */
7364 vpushi(0);
7365 vtop->type.t = VT_VOID;
7368 again:
7369 t = tok;
7370 /* If the token carries a value, next() might destroy it. Only with
7371 invalid code such as f(){"123"4;} */
7372 if (TOK_HAS_VALUE(t))
7373 goto expr;
7374 next();
7376 if (debug_modes)
7377 tcc_tcov_check_line (0), tcc_tcov_block_begin ();
7379 if (t == TOK_IF) {
7380 skip('(');
7381 gexpr();
7382 skip(')');
7383 a = gvtst(1, 0);
7384 block(0);
7385 if (tok == TOK_ELSE) {
7386 d = gjmp(0);
7387 gsym(a);
7388 next();
7389 block(0);
7390 gsym(d); /* patch else jmp */
7391 } else {
7392 gsym(a);
7395 } else if (t == TOK_WHILE) {
7396 d = gind();
7397 skip('(');
7398 gexpr();
7399 skip(')');
7400 a = gvtst(1, 0);
7401 b = 0;
7402 lblock(&a, &b);
7403 gjmp_addr(d);
7404 gsym_addr(b, d);
7405 gsym(a);
7407 } else if (t == '{') {
7408 new_scope(&o);
7410 /* handle local labels declarations */
7411 while (tok == TOK_LABEL) {
7412 do {
7413 next();
7414 if (tok < TOK_UIDENT)
7415 expect("label identifier");
7416 label_push(&local_label_stack, tok, LABEL_DECLARED);
7417 next();
7418 } while (tok == ',');
7419 skip(';');
7422 while (tok != '}') {
7423 decl(VT_LOCAL);
7424 if (tok != '}') {
7425 if (is_expr)
7426 vpop();
7427 block(is_expr);
7431 prev_scope(&o, is_expr);
7432 if (local_scope)
7433 next();
7434 else if (!nocode_wanted)
7435 check_func_return();
7437 } else if (t == TOK_RETURN) {
7438 b = (func_vt.t & VT_BTYPE) != VT_VOID;
7439 if (tok != ';') {
7440 gexpr();
7441 if (b) {
7442 gen_assign_cast(&func_vt);
7443 } else {
7444 if (vtop->type.t != VT_VOID)
7445 tcc_warning("void function returns a value");
7446 vtop--;
7448 } else if (b) {
7449 tcc_warning("'return' with no value");
7450 b = 0;
7452 leave_scope(root_scope);
7453 if (b)
7454 gfunc_return(&func_vt);
7455 skip(';');
7456 /* jump unless last stmt in top-level block */
7457 if (tok != '}' || local_scope != 1)
7458 rsym = gjmp(rsym);
7459 if (debug_modes)
7460 tcc_tcov_block_end (tcov_data.line);
7461 CODE_OFF();
7463 } else if (t == TOK_BREAK) {
7464 /* compute jump */
7465 if (!cur_scope->bsym)
7466 tcc_error("cannot break");
7467 if (cur_switch && cur_scope->bsym == cur_switch->bsym)
7468 leave_scope(cur_switch->scope);
7469 else
7470 leave_scope(loop_scope);
7471 *cur_scope->bsym = gjmp(*cur_scope->bsym);
7472 skip(';');
7474 } else if (t == TOK_CONTINUE) {
7475 /* compute jump */
7476 if (!cur_scope->csym)
7477 tcc_error("cannot continue");
7478 leave_scope(loop_scope);
7479 *cur_scope->csym = gjmp(*cur_scope->csym);
7480 skip(';');
7482 } else if (t == TOK_FOR) {
7483 new_scope(&o);
7485 skip('(');
7486 if (tok != ';') {
7487 /* c99 for-loop init decl? */
7488 if (!decl0(VT_LOCAL, 1, NULL)) {
7489 /* no, regular for-loop init expr */
7490 gexpr();
7491 vpop();
7494 skip(';');
7495 a = b = 0;
7496 c = d = gind();
7497 if (tok != ';') {
7498 gexpr();
7499 a = gvtst(1, 0);
7501 skip(';');
7502 if (tok != ')') {
7503 e = gjmp(0);
7504 d = gind();
7505 gexpr();
7506 vpop();
7507 gjmp_addr(c);
7508 gsym(e);
7510 skip(')');
7511 lblock(&a, &b);
7512 gjmp_addr(d);
7513 gsym_addr(b, d);
7514 gsym(a);
7515 prev_scope(&o, 0);
7517 } else if (t == TOK_DO) {
7518 a = b = 0;
7519 d = gind();
7520 lblock(&a, &b);
7521 gsym(b);
7522 skip(TOK_WHILE);
7523 skip('(');
7524 gexpr();
7525 skip(')');
7526 skip(';');
7527 c = gvtst(0, 0);
7528 gsym_addr(c, d);
7529 gsym(a);
7531 } else if (t == TOK_SWITCH) {
7532 struct switch_t *sw;
7534 sw = tcc_mallocz(sizeof *sw);
7535 sw->bsym = &a;
7536 sw->scope = cur_scope;
7537 sw->prev = cur_switch;
7538 cur_switch = sw;
7540 skip('(');
7541 gexpr();
7542 skip(')');
7543 sw->sv = *vtop--; /* save switch value */
7545 a = 0;
7546 b = gjmp(0); /* jump to first case */
7547 lblock(&a, NULL);
7548 a = gjmp(a); /* add implicit break */
7549 /* case lookup */
7550 gsym(b);
7552 if (sw->sv.type.t & VT_UNSIGNED)
7553 qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
7554 else
7555 qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
7557 for (b = 1; b < sw->n; b++)
7558 if (sw->sv.type.t & VT_UNSIGNED
7559 ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
7560 : sw->p[b - 1]->v2 >= sw->p[b]->v1)
7561 tcc_error("duplicate case value");
7563 vpushv(&sw->sv);
7564 gv(RC_INT);
7565 d = 0, gcase(sw->p, sw->n, &d);
7566 vpop();
7567 if (sw->def_sym)
7568 gsym_addr(d, sw->def_sym);
7569 else
7570 gsym(d);
7571 /* break label */
7572 gsym(a);
7574 dynarray_reset(&sw->p, &sw->n);
7575 cur_switch = sw->prev;
7576 tcc_free(sw);
7578 } else if (t == TOK_CASE) {
7579 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
7580 if (!cur_switch)
7581 expect("switch");
7582 cr->v1 = cr->v2 = expr_const64();
7583 if (gnu_ext && tok == TOK_DOTS) {
7584 next();
7585 cr->v2 = expr_const64();
7586 if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
7587 || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
7588 tcc_warning("empty case range");
7590 tcov_data.ind = 0;
7591 cr->sym = gind();
7592 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
7593 skip(':');
7594 is_expr = 0;
7595 goto block_after_label;
7597 } else if (t == TOK_DEFAULT) {
7598 if (!cur_switch)
7599 expect("switch");
7600 if (cur_switch->def_sym)
7601 tcc_error("too many 'default'");
7602 tcov_data.ind = 0;
7603 cur_switch->def_sym = gind();
7604 skip(':');
7605 is_expr = 0;
7606 goto block_after_label;
7608 } else if (t == TOK_GOTO) {
7609 if (cur_scope->vla.num)
7610 vla_restore(cur_scope->vla.locorig);
7611 if (tok == '*' && gnu_ext) {
7612 /* computed goto */
7613 next();
7614 gexpr();
7615 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
7616 expect("pointer");
7617 ggoto();
7619 } else if (tok >= TOK_UIDENT) {
7620 s = label_find(tok);
7621 /* put forward definition if needed */
7622 if (!s)
7623 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
7624 else if (s->r == LABEL_DECLARED)
7625 s->r = LABEL_FORWARD;
7627 if (s->r & LABEL_FORWARD) {
7628 /* start new goto chain for cleanups, linked via label->next */
7629 if (cur_scope->cl.s && !nocode_wanted) {
7630 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
7631 pending_gotos->prev_tok = s;
7632 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
7633 pending_gotos->next = s;
7635 s->jnext = gjmp(s->jnext);
7636 } else {
7637 try_call_cleanup_goto(s->cleanupstate);
7638 gjmp_addr(s->jnext);
7640 next();
7642 } else {
7643 expect("label identifier");
7645 skip(';');
7647 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
7648 asm_instr();
7650 } else {
7651 if (tok == ':' && t >= TOK_UIDENT) {
7652 /* label case */
7653 next();
7654 s = label_find(t);
7655 if (s) {
7656 if (s->r == LABEL_DEFINED)
7657 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
7658 s->r = LABEL_DEFINED;
7659 if (s->next) {
7660 Sym *pcl; /* pending cleanup goto */
7661 for (pcl = s->next; pcl; pcl = pcl->prev)
7662 gsym(pcl->jnext);
7663 sym_pop(&s->next, NULL, 0);
7664 } else
7665 gsym(s->jnext);
7666 } else {
7667 s = label_push(&global_label_stack, t, LABEL_DEFINED);
7669 s->jnext = gind();
7670 s->cleanupstate = cur_scope->cl.s;
7672 block_after_label:
7673 vla_restore(cur_scope->vla.loc);
7674 /* we accept this, but it is a mistake */
7675 if (tok == '}') {
7676 tcc_warning("deprecated use of label at end of compound statement");
7677 } else {
7678 goto again;
7681 } else {
7682 /* expression case */
7683 if (t != ';') {
7684 unget_tok(t);
7685 expr:
7686 if (is_expr) {
7687 vpop();
7688 gexpr();
7689 } else {
7690 gexpr();
7691 vpop();
7693 skip(';');
7698 if (debug_modes)
7699 tcc_tcov_check_line (0), tcc_tcov_block_end (0);
/* This skips over a stream of tokens containing balanced {} and ()
   pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
   with a '{').  If STR then allocates and stores the skipped tokens
   in *STR.  This doesn't check if () and {} are nested correctly,
   i.e. "({)}" is accepted. */
static void skip_or_save_block(TokenString **str)
{
    int braces = tok == '{';  /* whether we must consume a whole {...} group */
    int level = 0;            /* current ()/{} nesting depth */
    if (str)
        *str = tok_str_alloc();

    while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
        int t;
        if (tok == TOK_EOF) {
            /* EOF inside a nested group, or while saving, is an error;
               otherwise we simply stop at end of input */
            if (str || level > 0)
                tcc_error("unexpected end of file");
            else
                break;
        }
        /* record the token before advancing so the saved stream
           includes the delimiters themselves */
        if (str)
            tok_str_add_tok(*str);
        t = tok;
        next();
        if (t == '{' || t == '(') {
            level++;
        } else if (t == '}' || t == ')') {
            level--;
            /* the '}' matching our initial '{' terminates the block */
            if (level == 0 && braces && t == '}')
                break;
        }
    }
    /* terminate the saved token string (line-number marker + 0) */
    if (str) {
        tok_str_add(*str, -1);
        tok_str_add(*str, 0);
    }
}
#define EXPR_CONST 1
#define EXPR_ANY 2

/* Parse a single initializer element expression.  EXPR_CONST requires a
   load-time constant (static storage); EXPR_ANY accepts any assignment
   expression (automatic storage).  The result is left on the value stack. */
static void parse_init_elem(int expr_type)
{
    int saved_global_expr;
    switch(expr_type) {
    case EXPR_CONST:
        /* compound literals must be allocated globally in this case */
        saved_global_expr = global_expr;
        global_expr = 1;
        expr_const1();
        global_expr = saved_global_expr;
        /* NOTE: symbols are accepted, as well as lvalue for anon symbols
           (compound literals). */
        if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
             && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
                 || vtop->sym->v < SYM_FIRST_ANOM))
#ifdef TCC_TARGET_PE
                /* dllimport symbols have no load-time address */
                || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
#endif
            )
            tcc_error("initializer element is not constant");
        break;
    case EXPR_ANY:
        expr_eq();
        break;
    }
}
7770 #if 1
7771 static void init_assert(init_params *p, int offset)
7773 if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
7774 : !nocode_wanted && offset > p->local_offset)
7775 tcc_internal_error("initializer overflow");
7777 #else
7778 #define init_assert(sec, offset)
7779 #endif
/* put zeros for variable based init: zero SIZE bytes at offset C of the
   object described by P.  For static storage nothing is emitted because
   section data is already zero; for automatic storage a memset call is
   generated. */
static void init_putz(init_params *p, unsigned long c, int size)
{
    init_assert(p, c + size);
    if (p->sec) {
        /* nothing to do because globals are already set to zero */
    } else {
        /* emit memset(local_base + c, 0, size) */
        vpush_helper_func(TOK_memset);
        vseti(VT_LOCAL, c);
#ifdef TCC_TARGET_ARM
        /* NOTE(review): argument push order differs on ARM — presumably to
           match that target's memset-helper calling convention; confirm
           against the ARM backend */
        vpushs(size);
        vpushi(0);
#else
        vpushi(0);
        vpushs(size);
#endif
        gfunc_call(3);
    }
}
7801 #define DIF_FIRST 1
7802 #define DIF_SIZE_ONLY 2
7803 #define DIF_HAVE_ELEM 4
7804 #define DIF_CLEAR 8
7806 /* delete relocations for specified range c ... c + size. Unfortunatly
7807 in very special cases, relocations may occur unordered */
7808 static void decl_design_delrels(Section *sec, int c, int size)
7810 ElfW_Rel *rel, *rel2, *rel_end;
7811 if (!sec || !sec->reloc)
7812 return;
7813 rel = rel2 = (ElfW_Rel*)sec->reloc->data;
7814 rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
7815 while (rel < rel_end) {
7816 if (rel->r_offset >= c && rel->r_offset < c + size) {
7817 sec->reloc->data_offset -= sizeof *rel;
7818 } else {
7819 if (rel2 != rel)
7820 memcpy(rel2, rel, sizeof *rel);
7821 ++rel2;
7823 ++rel;
7827 static void decl_design_flex(init_params *p, Sym *ref, int index)
7829 if (ref == p->flex_array_ref) {
7830 if (index >= ref->c)
7831 ref->c = index + 1;
7832 } else if (ref->c < 0)
7833 tcc_error("flexible array has zero size in this context");
/* t is the array or struct type. c is the array or struct
   address. cur_field is the pointer to the current
   field, for arrays the 'c' member contains the current start
   index.  'flags' is as in decl_initializer.
   'al' contains the already initialized length of the
   current container (starting at c).  This returns the new length of that.  */
static int decl_designator(init_params *p, CType *type, unsigned long c,
                           Sym **cur_field, int flags, int al)
{
    Sym *s, *f;
    int index, index_last, align, l, nb_elems, elem_size;
    unsigned long corig = c;  /* container base offset, for al accounting */

    elem_size = 0;
    nb_elems = 1;  /* >1 only for GNU case-range style [a ... b] designators */

    /* an expression was already parsed: it initializes the next member */
    if (flags & DIF_HAVE_ELEM)
        goto no_designator;

    /* GNU extension: "field:" designator syntax */
    if (gnu_ext && tok >= TOK_UIDENT) {
        l = tok, next();
        if (tok == ':')
            goto struct_field;
        unget_tok(l);
    }

    /* NOTE: we only support ranges for last designator */
    while (nb_elems == 1 && (tok == '[' || tok == '.')) {
        if (tok == '[') {
            if (!(type->t & VT_ARRAY))
                expect("array type");
            next();
            index = index_last = expr_const();
            /* GNU range designator: [index ... index_last] */
            if (tok == TOK_DOTS && gnu_ext) {
                next();
                index_last = expr_const();
            }
            skip(']');
            s = type->ref;
            decl_design_flex(p, s, index_last);
            if (index < 0 || index_last >= s->c || index_last < index)
                tcc_error("index exceeds array bounds or range is empty");
            if (cur_field)
                (*cur_field)->c = index_last;
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
            nb_elems = index_last - index + 1;
        } else {
            int cumofs;
            next();
            l = tok;
        struct_field:
            next();
            if ((type->t & VT_BTYPE) != VT_STRUCT)
                expect("struct/union type");
            cumofs = 0;
            /* cumofs accumulates offsets through anonymous members */
            f = find_field(type, l, &cumofs);
            if (!f)
                expect("field");
            if (cur_field)
                *cur_field = f;
            type = &f->type;
            c += cumofs + f->c;
        }
        /* a designator resets sequential-field tracking */
        cur_field = NULL;
    }
    if (!cur_field) {
        if (tok == '=') {
            next();
        } else if (!gnu_ext) {
            /* '=' after a designator is mandatory in ISO C */
            expect("=");
        }
    } else {
    no_designator:
        /* no designator: advance to the next array index or struct field */
        if (type->t & VT_ARRAY) {
            index = (*cur_field)->c;
            s = type->ref;
            decl_design_flex(p, s, index);
            if (index >= s->c)
                tcc_error("too many initializers");
            type = pointed_type(type);
            elem_size = type_size(type, &align);
            c += index * elem_size;
        } else {
            f = *cur_field;
            /* skip unnamed bitfield padding members */
            while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
                *cur_field = f = f->next;
            if (!f)
                tcc_error("too many initializers");
            type = &f->type;
            c += f->c;
        }
    }

    if (!elem_size) /* for structs */
        elem_size = type_size(type, &align);

    /* Using designators the same element can be initialized more
       than once.  In that case we need to delete possibly already
       existing relocations. */
    if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
        decl_design_delrels(p->sec, c, elem_size * nb_elems);
        flags &= ~DIF_CLEAR; /* mark stack dirty too */
    }

    decl_initializer(p, type, c, flags & ~DIF_FIRST);

    /* range designator: replicate the just-parsed element over the range */
    if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
        Sym aref = {0};
        CType t1;
        int i;
        if (p->sec || (type->t & VT_ARRAY)) {
            /* make init_putv/vstore believe it were a struct */
            aref.c = elem_size;
            t1.t = VT_STRUCT, t1.ref = &aref;
            type = &t1;
        }
        if (p->sec)
            vpush_ref(type, p->sec, c, elem_size);
        else
            vset(type, VT_LOCAL|VT_LVAL, c);
        for (i = 1; i < nb_elems; i++) {
            vdup();
            init_putv(p, type, c + elem_size * i);
        }
        vpop();
    }

    c += nb_elems * elem_size;
    if (c - corig > al)
        al = c - corig;
    return al;
}
/* store a value or an expression directly in global data or in local array.
   The value to store is on top of the value stack and is popped.  For
   static storage (p->sec set) the bytes/relocations are written directly
   into the section; for automatic storage a runtime store is generated. */
static void init_putv(init_params *p, CType *type, unsigned long c)
{
    int bt;
    void *ptr;
    CType dtype;
    int size, align;
    Section *sec = p->sec;
    uint64_t val;

    dtype = *type;
    dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */

    size = type_size(type, &align);
    if (type->t & VT_BITFIELD)
        /* only the bytes actually covered by the bitfield are written */
        size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
    init_assert(p, c + size);

    if (sec) {
        /* XXX: not portable */
        /* XXX: generate error if incorrect relocation */
        gen_assign_cast(&dtype);
        bt = type->t & VT_BTYPE;

        /* a symbolic value is only representable in data as a pointer-sized
           relocation (or via an anonymous constant symbol, handled below) */
        if ((vtop->r & VT_SYM)
            && bt != VT_PTR
            && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
                || (type->t & VT_BITFIELD))
            && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
            )
            tcc_error("initializer element is not computable at load time");

        if (NODATA_WANTED) {
            vtop--;
            return;
        }

        ptr = sec->data + c;
        val = vtop->c.i;

        /* XXX: make code faster ? */
        if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
            vtop->sym->v >= SYM_FIRST_ANOM &&
            /* XXX This rejects compound literals like
               '(void *){ptr}'.  The problem is that '&sym' is
               represented the same way, which would be ruled out
               by the SYM_FIRST_ANOM check above, but also '"string"'
               in 'char *p = "string"' is represented the same
               with the type being VT_PTR and the symbol being an
               anonymous one.  That is, there's no difference in vtop
               between '(void *){x}' and '&(void *){x}'.  Ignore
               pointer typed entities here.  Hopefully no real code
               will ever use compound literals with scalar type.  */
            (vtop->type.t & VT_BTYPE) != VT_PTR) {
            /* These come from compound literals, memcpy stuff over. */
            Section *ssec;
            ElfSym *esym;
            ElfW_Rel *rel;
            esym = elfsym(vtop->sym);
            ssec = tcc_state->sections[esym->st_shndx];
            memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
            if (ssec->reloc) {
                /* We need to copy over all memory contents, and that
                   includes relocations.  Use the fact that relocs are
                   created it order, so look from the end of relocs
                   until we hit one before the copied region.  */
                int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
                rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
                while (num_relocs--) {
                    rel--;
                    if (rel->r_offset >= esym->st_value + size)
                        continue;
                    if (rel->r_offset < esym->st_value)
                        break;
                    /* duplicate the reloc at the corresponding offset of
                       the destination section */
                    put_elf_reloca(symtab_section, sec,
                                   c + rel->r_offset - esym->st_value,
                                   ELFW(R_TYPE)(rel->r_info),
                                   ELFW(R_SYM)(rel->r_info),
#if PTR_SIZE == 8
                                   rel->r_addend
#else
                                   0
#endif
                                  );
                }
            }
        } else {
            if (type->t & VT_BITFIELD) {
                /* merge the value into the destination bytes bit by bit,
                   preserving the bits outside the field */
                int bit_pos, bit_size, bits, n;
                unsigned char *p, v, m;
                bit_pos = BIT_POS(vtop->type.t);
                bit_size = BIT_SIZE(vtop->type.t);
                p = (unsigned char*)ptr + (bit_pos >> 3);
                bit_pos &= 7, bits = 0;
                while (bit_size) {
                    n = 8 - bit_pos;
                    if (n > bit_size)
                        n = bit_size;
                    v = val >> bits << bit_pos;
                    m = ((1 << n) - 1) << bit_pos;
                    *p = (*p & ~m) | (v & m);
                    bits += n, bit_size -= n, bit_pos = 0, ++p;
                }
            } else
            switch(bt) {
            case VT_BOOL:
                *(char *)ptr = val != 0;
                break;
            case VT_BYTE:
                *(char *)ptr = val;
                break;
            case VT_SHORT:
                write16le(ptr, val);
                break;
            case VT_FLOAT:
                write32le(ptr, val);
                break;
            case VT_DOUBLE:
                write64le(ptr, val);
                break;
            case VT_LDOUBLE:
#if defined TCC_IS_NATIVE_387
                /* Host and target platform may be different but both have x87.
                   On windows, tcc does not use VT_LDOUBLE, except when it is a
                   cross compiler.  In this case a mingw gcc as host compiler
                   comes here with 10-byte long doubles, while msvc or tcc won't.
                   tcc itself can still translate by asm.
                   In any case we avoid possibly random bytes 11 and 12.
                */
                if (sizeof (long double) >= 10)
                    memcpy(ptr, &vtop->c.ld, 10);
#ifdef __TINYC__
                else if (sizeof (long double) == sizeof (double))
                    __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
#endif
                else if (vtop->c.ld == 0.0)
                    ;  /* zero is already in the section data */
                else
#endif
                /* For other platforms it should work natively, but may not work
                   for cross compilers */
                if (sizeof(long double) == LDOUBLE_SIZE)
                    memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
                else if (sizeof(double) == LDOUBLE_SIZE)
                    memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
#ifndef TCC_CROSS_TEST
                else
                    tcc_error("can't cross compile long double constants");
#endif
                break;

#if PTR_SIZE == 8
            /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
            case VT_LLONG:
            case VT_PTR:
                if (vtop->r & VT_SYM)
                    greloca(sec, vtop->sym, c, R_DATA_PTR, val);
                else
                    write64le(ptr, val);
                break;
            case VT_INT:
                write32le(ptr, val);
                break;
#else
            case VT_LLONG:
                write64le(ptr, val);
                break;
            case VT_PTR:
            case VT_INT:
                if (vtop->r & VT_SYM)
                    greloc(sec, vtop->sym, c, R_DATA_PTR);
                write32le(ptr, val);
                break;
#endif
            default:
                //tcc_internal_error("unexpected type");
                break;
            }
        }
        vtop--;
    } else {
        /* automatic storage: emit a regular store into the stack slot */
        vset(&dtype, VT_LOCAL|VT_LVAL, c);
        vswap();
        vstore();
        vpop();
    }
}
8159 /* 't' contains the type and storage info. 'c' is the offset of the
8160 object in section 'sec'. If 'sec' is NULL, it means stack based
8161 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
8162 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
8163 size only evaluation is wanted (only for arrays). */
8164 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
8166 int len, n, no_oblock, i;
8167 int size1, align1;
8168 Sym *s, *f;
8169 Sym indexsym;
8170 CType *t1;
8172 /* generate line number info */
8173 if (debug_modes && !p->sec)
8174 tcc_debug_line(tcc_state), tcc_tcov_check_line (1);
8176 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
8177 /* In case of strings we have special handling for arrays, so
8178 don't consume them as initializer value (which would commit them
8179 to some anonymous symbol). */
8180 tok != TOK_LSTR && tok != TOK_STR &&
8181 !(flags & DIF_SIZE_ONLY)) {
8182 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
8183 flags |= DIF_HAVE_ELEM;
8186 if ((flags & DIF_HAVE_ELEM) &&
8187 !(type->t & VT_ARRAY) &&
8188 /* Use i_c_parameter_t, to strip toplevel qualifiers.
8189 The source type might have VT_CONSTANT set, which is
8190 of course assignable to non-const elements. */
8191 is_compatible_unqualified_types(type, &vtop->type)) {
8192 goto init_putv;
8194 } else if (type->t & VT_ARRAY) {
8195 no_oblock = 1;
8196 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
8197 tok == '{') {
8198 skip('{');
8199 no_oblock = 0;
8202 s = type->ref;
8203 n = s->c;
8204 t1 = pointed_type(type);
8205 size1 = type_size(t1, &align1);
8207 /* only parse strings here if correct type (otherwise: handle
8208 them as ((w)char *) expressions */
8209 if ((tok == TOK_LSTR &&
8210 #ifdef TCC_TARGET_PE
8211 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
8212 #else
8213 (t1->t & VT_BTYPE) == VT_INT
8214 #endif
8215 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
8216 len = 0;
8217 cstr_reset(&initstr);
8218 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
8219 tcc_error("unhandled string literal merging");
8220 while (tok == TOK_STR || tok == TOK_LSTR) {
8221 if (initstr.size)
8222 initstr.size -= size1;
8223 if (tok == TOK_STR)
8224 len += tokc.str.size;
8225 else
8226 len += tokc.str.size / sizeof(nwchar_t);
8227 len--;
8228 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
8229 next();
8231 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
8232 && tok != TOK_EOF) {
8233 /* Not a lone literal but part of a bigger expression. */
8234 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
8235 tokc.str.size = initstr.size;
8236 tokc.str.data = initstr.data;
8237 goto do_init_array;
8240 if (!(flags & DIF_SIZE_ONLY)) {
8241 int nb = n;
8242 if (len < nb)
8243 nb = len;
8244 if (len > nb)
8245 tcc_warning("initializer-string for array is too long");
8246 /* in order to go faster for common case (char
8247 string in global variable, we handle it
8248 specifically */
8249 if (p->sec && size1 == 1) {
8250 init_assert(p, c + nb);
8251 if (!NODATA_WANTED)
8252 memcpy(p->sec->data + c, initstr.data, nb);
8253 } else {
8254 for(i=0;i<n;i++) {
8255 if (i >= nb) {
8256 /* only add trailing zero if enough storage (no
8257 warning in this case since it is standard) */
8258 if (flags & DIF_CLEAR)
8259 break;
8260 if (n - i >= 4) {
8261 init_putz(p, c + i * size1, (n - i) * size1);
8262 break;
8264 ch = 0;
8265 } else if (size1 == 1)
8266 ch = ((unsigned char *)initstr.data)[i];
8267 else
8268 ch = ((nwchar_t *)initstr.data)[i];
8269 vpushi(ch);
8270 init_putv(p, t1, c + i * size1);
8273 } else {
8274 decl_design_flex(p, s, len);
8276 } else {
8278 do_init_array:
8279 indexsym.c = 0;
8280 f = &indexsym;
8282 do_init_list:
8283 /* zero memory once in advance */
8284 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
8285 init_putz(p, c, n*size1);
8286 flags |= DIF_CLEAR;
8289 len = 0;
8290 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
8291 len = decl_designator(p, type, c, &f, flags, len);
8292 flags &= ~DIF_HAVE_ELEM;
8293 if (type->t & VT_ARRAY) {
8294 ++indexsym.c;
8295 /* special test for multi dimensional arrays (may not
8296 be strictly correct if designators are used at the
8297 same time) */
8298 if (no_oblock && len >= n*size1)
8299 break;
8300 } else {
8301 if (s->type.t == VT_UNION)
8302 f = NULL;
8303 else
8304 f = f->next;
8305 if (no_oblock && f == NULL)
8306 break;
8309 if (tok == '}')
8310 break;
8311 skip(',');
8314 if (!no_oblock)
8315 skip('}');
8316 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
8317 no_oblock = 1;
8318 if ((flags & DIF_FIRST) || tok == '{') {
8319 skip('{');
8320 no_oblock = 0;
8322 s = type->ref;
8323 f = s->next;
8324 n = s->c;
8325 size1 = 1;
8326 goto do_init_list;
8327 } else if (tok == '{') {
8328 if (flags & DIF_HAVE_ELEM)
8329 skip(';');
8330 next();
8331 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
8332 skip('}');
8333 } else if ((flags & DIF_SIZE_ONLY)) {
8334 /* If we supported only ISO C we wouldn't have to accept calling
8335 this on anything than an array if DIF_SIZE_ONLY (and even then
8336 only on the outermost level, so no recursion would be needed),
8337 because initializing a flex array member isn't supported.
8338 But GNU C supports it, so we need to recurse even into
8339 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
8340 /* just skip expression */
8341 skip_or_save_block(NULL);
8342 } else {
8343 if (!(flags & DIF_HAVE_ELEM)) {
8344 /* This should happen only when we haven't parsed
8345 the init element above for fear of committing a
8346 string constant to memory too early. */
8347 if (tok != TOK_STR && tok != TOK_LSTR)
8348 expect("string constant");
8349 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
8351 init_putv:
8352 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
8353 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
8354 && vtop->c.i == 0
8355 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
8357 vpop();
8358 else
8359 init_putv(p, type, c);
/* parse an initializer for type 't' if 'has_init' is non zero, and
   allocate space in local or global data space ('r' is either
   VT_LOCAL or VT_CONST).  If 'v' is non zero, then an associated
   variable 'v' of scope 'scope' is declared before initializers
   are parsed.  If 'v' is zero, then a reference to the new object
   is put in the value stack.  If 'has_init' is 2, a special parsing
   is done to handle string constants. */
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
                                   int has_init, int v, int scope)
{
    int size, align, addr;
    TokenString *init_str = NULL;

    Section *sec;
    Sym *flexible_array;
    Sym *sym = NULL;
    int saved_nocode_wanted = nocode_wanted;
#ifdef CONFIG_TCC_BCHECK
    int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
#endif
    init_params p = {0};

    /* Always allocate static or global variables */
    if (v && (r & VT_VALMASK) == VT_CONST)
        nocode_wanted |= 0x80000000;

    flexible_array = NULL;
    size = type_size(type, &align);

    /* exactly one flexible array may be initialized, either the
       toplevel array or the last member of the toplevel struct */

    if (size < 0) {
        /* If the base type itself was an array type of unspecified size
           (like in 'typedef int arr[]; arr x = {1};') then we will
           overwrite the unknown size by the real one for this decl.
           We need to unshare the ref symbol holding that size. */
        type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
        p.flex_array_ref = type->ref;

    } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
        Sym *field = type->ref->next;
        if (field) {
            /* locate the last member to check for a flexible array */
            while (field->next)
                field = field->next;
            if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
                flexible_array = field;
                p.flex_array_ref = field->type.ref;
                size = -1;
            }
        }
    }

    if (size < 0) {
        /* If unknown size, do a dry-run 1st pass */
        if (!has_init)
            tcc_error("unknown type size");
        if (has_init == 2) {
            /* only get strings */
            init_str = tok_str_alloc();
            while (tok == TOK_STR || tok == TOK_LSTR) {
                tok_str_add_tok(init_str);
                next();
            }
            tok_str_add(init_str, -1);
            tok_str_add(init_str, 0);
        } else
            skip_or_save_block(&init_str);
        unget_tok(0);

        /* compute size */
        begin_macro(init_str, 1);
        next();
        decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
        /* prepare second initializer parsing */
        macro_ptr = init_str->str;
        next();

        /* if still unknown size, error */
        size = type_size(type, &align);
        if (size < 0)
            tcc_error("unknown type size");

        /* If there's a flex member and it was used in the initializer
           adjust size.  */
        if (flexible_array && flexible_array->type.ref->c > 0)
            size += flexible_array->type.ref->c
                    * pointed_size(&flexible_array->type);
    }

    /* take into account specified alignment if bigger */
    if (ad->a.aligned) {
        int speca = 1 << (ad->a.aligned - 1);
        if (speca > align)
            align = speca;
    } else if (ad->a.packed) {
        align = 1;
    }

    if (!v && NODATA_WANTED)
        size = 0, align = 1;

    if ((r & VT_VALMASK) == VT_LOCAL) {
        sec = NULL;
#ifdef CONFIG_TCC_BCHECK
        if (bcheck && v) {
            /* add padding between stack variables for bound checking */
            loc -= align;
        }
#endif
        /* stack grows downwards: reserve and align the slot */
        loc = (loc - size) & -align;
        addr = loc;
        p.local_offset = addr + size;
#ifdef CONFIG_TCC_BCHECK
        if (bcheck && v) {
            /* add padding between stack variables for bound checking */
            loc -= align;
        }
#endif
        if (v) {
            /* local variable */
#ifdef CONFIG_TCC_ASM
            if (ad->asm_label) {
                int reg = asm_parse_regvar(ad->asm_label);
                if (reg >= 0)
                    r = (r & ~VT_VALMASK) | reg;
            }
#endif
            sym = sym_push(v, type, r, addr);
            if (ad->cleanup_func) {
                /* register a cleanup attribute handler for this variable */
                Sym *cls = sym_push2(&all_cleanups,
                    SYM_FIELD | ++cur_scope->cl.n, 0, 0);
                cls->prev_tok = sym;
                cls->next = ad->cleanup_func;
                cls->ncl = cur_scope->cl.s;
                cur_scope->cl.s = cls;
            }

            sym->a = ad->a;
        } else {
            /* push local reference */
            vset(type, r, addr);
        }
    } else {
        if (v && scope == VT_CONST) {
            /* see if the symbol was already defined */
            sym = sym_find(v);
            if (sym) {
                patch_storage(sym, ad, type);
                /* we accept several definitions of the same global variable. */
                if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
                    goto no_alloc;
            }
        }

        /* allocate symbol in corresponding section */
        sec = ad->section;
        if (!sec) {
            if (type->t & VT_CONSTANT)
                sec = data_ro_section;
            else if (has_init)
                sec = data_section;
            else if (tcc_state->nocommon)
                sec = bss_section;
        }

        if (sec) {
            addr = section_add(sec, size, align);
#ifdef CONFIG_TCC_BCHECK
            /* add padding if bound check */
            if (bcheck)
                section_add(sec, 1, 1);
#endif
        } else {
            addr = align; /* SHN_COMMON is special, symbol value is align */
            sec = common_section;
        }

        if (v) {
            if (!sym) {
                sym = sym_push(v, type, r | VT_SYM, 0);
                patch_storage(sym, ad, NULL);
            }
            /* update symbol definition */
            put_extern_sym(sym, sec, addr, size);
        } else {
            /* push global reference */
            vpush_ref(type, sec, addr, size);
            sym = vtop->sym;
            vtop->r |= r;
        }

#ifdef CONFIG_TCC_BCHECK
        /* handles bounds now because the symbol must be defined
           before for the relocation */
        if (bcheck) {
            addr_t *bounds_ptr;

            greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
            /* then add global bound info */
            bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
            bounds_ptr[0] = 0; /* relocated */
            bounds_ptr[1] = size;
        }
#endif
    }

    if (type->t & VT_VLA) {
        int a;

        if (NODATA_WANTED)
            goto no_alloc;

        /* save before-VLA stack pointer if needed */
        if (cur_scope->vla.num == 0) {
            if (cur_scope->prev && cur_scope->prev->vla.num) {
                cur_scope->vla.locorig = cur_scope->prev->vla.loc;
            } else {
                gen_vla_sp_save(loc -= PTR_SIZE);
                cur_scope->vla.locorig = loc;
            }
        }

        vla_runtime_type_size(type, &a);
        gen_vla_alloc(type, a);
#if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
        /* on _WIN64, because of the function args scratch area, the
           result of alloca differs from RSP and is returned in RAX.  */
        gen_vla_result(addr), addr = (loc -= PTR_SIZE);
#endif
        gen_vla_sp_save(addr);
        cur_scope->vla.loc = addr;
        cur_scope->vla.num++;
    } else if (has_init) {
        p.sec = sec;
        decl_initializer(&p, type, addr, DIF_FIRST);
        /* patch flexible array member size back to -1, */
        /* for possible subsequent similar declarations */
        if (flexible_array)
            flexible_array->type.ref->c = -1;
    }

 no_alloc:
    /* restore parse state if needed */
    if (init_str) {
        end_macro();
        next();
    }

    nocode_wanted = saved_nocode_wanted;
}
/* parse a function defined by symbol 'sym' and generate its code in
   'cur_text_section' */
static void gen_function(Sym *sym)
{
    struct scope f = { 0 };
    cur_scope = root_scope = &f;
    nocode_wanted = 0;  /* re-enable code generation for the body */
    ind = cur_text_section->data_offset;
    if (sym->a.aligned) {
        /* pad with nops up to the requested function alignment */
        size_t newoff = section_add(cur_text_section, 0,
                                    1 << (sym->a.aligned - 1));
        gen_fill_nops(newoff - ind);
    }
    /* NOTE: we patch the symbol size later */
    put_extern_sym(sym, cur_text_section, ind, 0);
    if (sym->type.ref->f.func_ctor)
        add_array (tcc_state, ".init_array", sym->c);
    if (sym->type.ref->f.func_dtor)
        add_array (tcc_state, ".fini_array", sym->c);

    funcname = get_tok_str(sym->v, NULL);
    func_ind = ind;
    func_vt = sym->type.ref->type;
    func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;

    /* put debug symbol */
    tcc_debug_funcstart(tcc_state, sym);
    /* push a dummy symbol to enable local sym storage */
    sym_push2(&local_stack, SYM_FIELD, 0, 0);
    local_scope = 1; /* for function parameters */
    gfunc_prolog(sym);
    local_scope = 0;
    rsym = 0;  /* chain of jumps to the function epilogue */
    clear_temp_local_var_list();
    block(0);
    gsym(rsym);
    nocode_wanted = 0;
    /* reset local stack */
    pop_local_syms(NULL, 0);
    gfunc_epilog();
    cur_text_section->data_offset = ind;
    local_scope = 0;
    label_pop(&global_label_stack, NULL, 0);
    sym_pop(&all_cleanups, NULL, 0);
    /* patch symbol size */
    elfsym(sym)->st_size = ind - func_ind;
    /* end of function */
    tcc_debug_funcend(tcc_state, ind - func_ind);
    /* It's better to crash than to generate wrong code */
    cur_text_section = NULL;
    funcname = ""; /* for safety */
    func_vt.t = VT_VOID; /* for safety */
    func_var = 0; /* for safety */
    ind = 0; /* for safety */
    nocode_wanted = 0x80000000;
    check_vstack();
    /* do this after funcend debug info */
    next();
}
/* emit code for every inline function that ended up referenced;
   iterates to a fixed point because generating one inline function may
   reference further inline functions */
static void gen_inline_functions(TCCState *s)
{
    Sym *sym;
    int inline_generated, i;
    struct InlineFunc *fn;

    /* pseudo input file for replaying the saved token streams */
    tcc_open_bf(s, ":inline:", 0);
    /* iterate while inline function are referenced */
    do {
        inline_generated = 0;
        for (i = 0; i < s->nb_inline_fns; ++i) {
            fn = s->inline_fns[i];
            sym = fn->sym;
            if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
                /* the function was used or forced (and then not internal):
                   generate its code and convert it to a normal function */
                fn->sym = NULL; /* mark as already generated */
                tcc_debug_putfile(s, fn->filename);
                begin_macro(fn->func_str, 1); /* replay saved tokens */
                next();
                cur_text_section = text_section;
                gen_function(sym);
                end_macro();

                inline_generated = 1;
            }
        }
    } while (inline_generated);
    tcc_close();
}
/* release the saved token strings of inline functions that were never
   emitted, then drop the inline-function list itself */
static void free_inline_functions(TCCState *s)
{
    int i;
    /* free tokens of unused inline functions */
    for (i = 0; i < s->nb_inline_fns; ++i) {
        struct InlineFunc *fn = s->inline_fns[i];
        if (fn->sym) /* sym still set => code was never generated */
            tok_str_free(fn->func_str);
    }
    dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
}
/* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
   if parsing old style parameter decl list (and FUNC_SYM is set then) */
/* Parses a sequence of declarations/definitions until EOF, a closing
   scope, or (when is_for_loop_init) the first non-declaration token.
   Returns 1 if a for-loop init declaration was consumed, else 0. */
static int decl0(int l, int is_for_loop_init, Sym *func_sym)
{
    int v, has_init, r, oldint;
    CType type, btype;
    Sym *sym;
    AttributeDef ad, adbase;

    while (1) {
        /* C11 _Static_assert(expr [, message]) */
        if (tok == TOK_STATIC_ASSERT) {
            CString error_str;
            int c;

            next();
            skip('(');
            c = expr_const();

            if (tok == ')') {
                /* C23-style form without a message string */
                if (!c)
                    tcc_error("_Static_assert fail");
                next();
                goto static_assert_out;
            }

            skip(',');
            parse_mult_str(&error_str, "string constant");
            if (c == 0)
                tcc_error("%s", (char *)error_str.data);
            cstr_free(&error_str);
            skip(')');
        static_assert_out:
            skip(';');
            continue;
        }

        oldint = 0;
        if (!parse_btype(&btype, &adbase)) {
            /* no base type could be parsed */
            if (is_for_loop_init)
                return 0;
            /* skip redundant ';' if not in old parameter decl scope */
            if (tok == ';' && l != VT_CMP) {
                next();
                continue;
            }
            if (l != VT_CONST)
                break;
            if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
                /* global asm block */
                asm_global_instr();
                continue;
            }
            if (tok >= TOK_UIDENT) {
                /* special test for old K&R protos without explicit int
                   type. Only accepted when defining global data */
                btype.t = VT_INT;
                oldint = 1;
            } else {
                if (tok != TOK_EOF)
                    expect("declaration");
                break;
            }
        }

        if (tok == ';') {
            /* base type with no declarator: only meaningful for
               struct/union/enum tag declarations */
            if ((btype.t & VT_BTYPE) == VT_STRUCT) {
                v = btype.ref->v;
                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
                    tcc_warning("unnamed struct/union that defines no instances");
                next();
                continue;
            }
            if (IS_ENUM(btype.t)) {
                next();
                continue;
            }
        }

        while (1) { /* iterate thru each declaration */
            type = btype;
            ad = adbase;
            type_decl(&type, &ad, &v, TYPE_DIRECT);
#if 0
            {
                char buf[500];
                type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
                printf("type = '%s'\n", buf);
            }
#endif
            if ((type.t & VT_BTYPE) == VT_FUNC) {
                if ((type.t & VT_STATIC) && (l == VT_LOCAL))
                    tcc_error("function without file scope cannot be static");
                /* if old style function prototype, we accept a
                   declaration list */
                sym = type.ref;
                if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
                    decl0(VT_CMP, 0, sym); /* recurse for K&R param decls */
#ifdef TCC_TARGET_MACHO
                if (sym->f.func_alwinl
                    && ((type.t & (VT_EXTERN | VT_INLINE))
                        == (VT_EXTERN | VT_INLINE))) {
                    /* always_inline functions must be handled as if they
                       don't generate multiple global defs, even if extern
                       inline, i.e. GNU inline semantics for those. Rewrite
                       them into static inline. */
                    type.t &= ~VT_EXTERN;
                    type.t |= VT_STATIC;
                }
#endif
                /* always compile 'extern inline' */
                if (type.t & VT_EXTERN)
                    type.t &= ~VT_INLINE;

            } else if (oldint) {
                tcc_warning("type defaults to int");
            }

            if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                ad.asm_label = asm_label_instr();
                /* parse one last attribute list, after asm label */
                parse_attribute(&ad);
#if 0
                /* gcc does not allow __asm__("label") with function definition,
                   but why not ... */
                if (tok == '{')
                    expect(";");
#endif
            }

#ifdef TCC_TARGET_PE
            if (ad.a.dllimport || ad.a.dllexport) {
                if (type.t & VT_STATIC)
                    tcc_error("cannot have dll linkage with static");
                if (type.t & VT_TYPEDEF) {
                    tcc_warning("'%s' attribute ignored for typedef",
                        ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
                        (ad.a.dllexport = 0, "dllexport"));
                } else if (ad.a.dllimport) {
                    if ((type.t & VT_BTYPE) == VT_FUNC)
                        ad.a.dllimport = 0; /* functions are imported lazily */
                    else
                        type.t |= VT_EXTERN;
                }
            }
#endif
            if (tok == '{') {
                /* function definition */
                if (l != VT_CONST)
                    tcc_error("cannot use local functions");
                if ((type.t & VT_BTYPE) != VT_FUNC)
                    expect("function definition");

                /* reject abstract declarators in function definition
                   make old style params without decl have int type */
                sym = type.ref;
                while ((sym = sym->next) != NULL) {
                    if (!(sym->v & ~SYM_FIELD))
                        expect("identifier");
                    if (sym->type.t == VT_VOID)
                        sym->type = int_type;
                }

                /* apply post-declaraton attributes */
                merge_funcattr(&type.ref->f, &ad.f);

                /* put function symbol */
                type.t &= ~VT_EXTERN;
                sym = external_sym(v, &type, 0, &ad);

                /* static inline functions are just recorded as a kind
                   of macro. Their code will be emitted at the end of
                   the compilation unit only if they are used */
                if (sym->type.t & VT_INLINE) {
                    struct InlineFunc *fn;
                    /* filename is stored in the struct's trailing array */
                    fn = tcc_malloc(sizeof *fn + strlen(file->filename));
                    strcpy(fn->filename, file->filename);
                    fn->sym = sym;
                    skip_or_save_block(&fn->func_str);
                    dynarray_add(&tcc_state->inline_fns,
                                 &tcc_state->nb_inline_fns, fn);
                } else {
                    /* compute text section */
                    cur_text_section = ad.section;
                    if (!cur_text_section)
                        cur_text_section = text_section;
                    gen_function(sym);
                }
                break;
            } else {
                if (l == VT_CMP) {
                    /* K&R declaration list: find parameter in function
                       parameter list and give it its declared type */
                    for (sym = func_sym->next; sym; sym = sym->next)
                        if ((sym->v & ~SYM_FIELD) == v)
                            goto found;
                    tcc_error("declaration for parameter '%s' but no such parameter",
                              get_tok_str(v, NULL));
                found:
                    if (type.t & VT_STORAGE) /* 'register' is okay */
                        tcc_error("storage class specified for '%s'",
                                  get_tok_str(v, NULL));
                    if (sym->type.t != VT_VOID)
                        tcc_error("redefinition of parameter '%s'",
                                  get_tok_str(v, NULL));
                    convert_parameter_type(&type);
                    sym->type = type;
                } else if (type.t & VT_TYPEDEF) {
                    /* save typedefed type */
                    /* XXX: test storage specifiers ? */
                    sym = sym_find(v);
                    if (sym && sym->sym_scope == local_scope) {
                        /* redefinition in the same scope must match */
                        if (!is_compatible_types(&sym->type, &type)
                            || !(sym->type.t & VT_TYPEDEF))
                            tcc_error("incompatible redefinition of '%s'",
                                get_tok_str(v, NULL));
                        sym->type = type;
                    } else {
                        sym = sym_push(v, &type, 0, 0);
                    }
                    sym->a = ad.a;
                    sym->f = ad.f;
                    if (debug_modes)
                        tcc_debug_typedef (tcc_state, sym);
                } else if ((type.t & VT_BTYPE) == VT_VOID
                           && !(type.t & VT_EXTERN)) {
                    tcc_error("declaration of void object");
                } else {
                    r = 0;
                    if ((type.t & VT_BTYPE) == VT_FUNC) {
                        /* external function definition */
                        /* specific case for func_call attribute */
                        type.ref->f = ad.f;
                    } else if (!(type.t & VT_ARRAY)) {
                        /* not lvalue if array */
                        r |= VT_LVAL;
                    }
                    has_init = (tok == '=');
                    if (has_init && (type.t & VT_VLA))
                        tcc_error("variable length array cannot be initialized");
                    if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
                        || (type.t & VT_BTYPE) == VT_FUNC
                        /* as with GCC, uninitialized global arrays with no size
                           are considered extern: */
                        || ((type.t & VT_ARRAY) && !has_init
                            && l == VT_CONST && type.ref->c < 0)
                        ) {
                        /* external variable or function */
                        type.t |= VT_EXTERN;
                        sym = external_sym(v, &type, r, &ad);
                        if (ad.alias_target) {
                            /* Aliases need to be emitted when their target
                               symbol is emitted, even if perhaps unreferenced.
                               We only support the case where the base is
                               already defined, otherwise we would need
                               deferring to emit the aliases until the end of
                               the compile unit. */
                            Sym *alias_target = sym_find(ad.alias_target);
                            ElfSym *esym = elfsym(alias_target);
                            if (!esym)
                                tcc_error("unsupported forward __alias__ attribute");
                            put_extern_sym2(sym, esym->st_shndx,
                                            esym->st_value, esym->st_size, 1);
                        }
                    } else {
                        if (type.t & VT_STATIC)
                            r |= VT_CONST;
                        else
                            r |= l;
                        if (has_init)
                            next();
                        else if (l == VT_CONST)
                            /* uninitialized global variables may be overridden */
                            type.t |= VT_EXTERN;
                        decl_initializer_alloc(&type, &ad, r, has_init, v, l);
                    }
                }
                if (tok != ',') {
                    if (is_for_loop_init)
                        return 1;
                    skip(';');
                    break;
                }
                next();
            }
        }
    }
    return 0;
}
/* parse declarations with default storage 'l' (VT_CONST for file scope,
   VT_LOCAL for block scope); thin wrapper around decl0 */
static void decl(int l)
{
    decl0(l, 0, NULL);
}
9010 /* ------------------------------------------------------------------------- */
9011 #undef gjmp_addr
9012 #undef gjmp
9013 /* ------------------------------------------------------------------------- */