[tinycc.git] / tccgen.c
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
31 */
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int in_generic;
48 static int section_sym;
50 ST_DATA SValue *vtop;
51 static SValue _vstack[1 + VSTACK_SIZE];
52 #define vstack (_vstack + 1)
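/* Added note: _vstack has one extra leading slot so that 'vtop' can start at
   vstack - 1 (i.e. &_vstack[0], see tccgen_init below); an empty value stack
   then has a valid address to point at, and check_vstack() can detect leaks
   by comparing vtop against vstack - 1. */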
54 ST_DATA int const_wanted; /* true if constant wanted */
55 ST_DATA int nocode_wanted; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
60 /* Automagical code suppression ----> */
61 #define CODE_OFF() (nocode_wanted |= 0x20000000)
62 #define CODE_ON() (nocode_wanted &= ~0x20000000)
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
66 static int gind(void) { CODE_ON(); return ind; }
68 /* Set 'nocode_wanted' after unconditional jumps */
69 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
70 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
74 #define gjmp gjmp_acs
75 /* <---- */
77 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
78 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
79 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
80 ST_DATA int func_vc;
81 static int last_line_num, new_file, func_ind; /* debug info control */
82 ST_DATA const char *funcname;
83 ST_DATA CType int_type, func_old_type, char_pointer_type;
85 #if PTR_SIZE == 4
86 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
87 #define VT_PTRDIFF_T VT_INT
88 #elif LONG_SIZE == 4
89 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
90 #define VT_PTRDIFF_T VT_LLONG
91 #else
92 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
93 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
94 #endif
96 ST_DATA struct switch_t {
97 struct case_t {
98 int64_t v1, v2;
99 int sym;
100 } **p; int n; /* list of case ranges */
101 int def_sym; /* default symbol */
102 int *bsym;
103 struct scope *scope;
104 struct switch_t *prev;
105 SValue sv;
106 } *cur_switch; /* current switch */
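/* Added note: each case_t describes one case label or GNU-style case range
   with inclusive bounds v1..v2; 'sym' records the label of the matching
   statement, 'bsym' points at the break jump chain and 'def_sym' records the
   default label.  Hypothetical source the structure has to represent: */
#if 0
switch (c) {
case '0' ... '9': digit(); break;   /* one case_t with v1 = '0', v2 = '9' */
default: other();                   /* def_sym */
}
#endif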
108 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
109 /* list of temporary local variables on the stack in the current function. */
110 ST_DATA struct temp_local_variable {
111 int location; //offset on stack. Svalue.c.i
112 short size;
113 short align;
114 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
115 short nb_temp_local_vars;
117 static struct scope {
118 struct scope *prev;
119 struct { int loc, num; } vla;
120 struct { Sym *s; int n; } cl;
121 int *bsym, *csym;
122 Sym *lstk, *llstk;
123 } *cur_scope, *loop_scope, *root_scope;
125 static struct {
126 int type;
127 const char *name;
128 } default_debug[] = {
129 { VT_INT, "int:t1=r1;-2147483648;2147483647;", },
130 { VT_BYTE, "char:t2=r2;0;127;", },
131 #if LONG_SIZE == 4
132 { VT_LONG | VT_INT, "long int:t3=r3;-2147483648;2147483647;", },
133 #else
134 { VT_LLONG | VT_LONG, "long int:t3=r3;-9223372036854775808;9223372036854775807;", },
135 #endif
136 { VT_INT | VT_UNSIGNED, "unsigned int:t4=r4;0;4294967295;", },
137 #if LONG_SIZE == 4
138 { VT_LONG | VT_INT | VT_UNSIGNED, "long unsigned int:t5=r5;0;4294967295;", },
139 #else
140 { VT_LLONG | VT_LONG | VT_UNSIGNED, "long unsigned int:t5=r5;0;-1;", },
141 #endif
142 { VT_QLONG, "__int128:t6=r6;0;-1;", },
143 { VT_QLONG | VT_UNSIGNED, "__int128 unsigned:t7=r7;0;-1;", },
144 { VT_LLONG, "long long int:t8=r8;-9223372036854775808;9223372036854775807;", },
145 { VT_LLONG | VT_UNSIGNED, "long long unsigned int:t9=r9;0;-1;", },
146 { VT_SHORT, "short int:t10=r10;-32768;32767;", },
147 { VT_SHORT | VT_UNSIGNED, "short unsigned int:t11=r11;0;65535;", },
148 { VT_BYTE | VT_DEFSIGN, "signed char:t12=r12;-128;127;", },
149 { VT_BYTE | VT_DEFSIGN | VT_UNSIGNED, "unsigned char:t13=r13;0;255;", },
150 { VT_FLOAT, "float:t14=r1;4;0;", },
151 { VT_DOUBLE, "double:t15=r1;8;0;", },
152 { VT_LDOUBLE, "long double:t16=r1;16;0;", },
153 { -1, "_Float32:t17=r1;4;0;", },
154 { -1, "_Float64:t18=r1;8;0;", },
155 { -1, "_Float128:t19=r1;16;0;", },
156 { -1, "_Float32x:t20=r1;8;0;", },
157 { -1, "_Float64x:t21=r1;16;0;", },
158 { -1, "_Decimal32:t22=r1;4;0;", },
159 { -1, "_Decimal64:t23=r1;8;0;", },
160 { -1, "_Decimal128:t24=r1;16;0;", },
161 { VT_VOID, "void:t25=25", },
164 static int debug_next_type;
166 static struct debug_hash {
167 int debug_type;
168 Sym *type;
169 } *debug_hash;
171 static int n_debug_hash;
173 static struct debug_info {
174 int start;
175 int end;
176 int n_sym;
177 struct debug_sym {
178 int type;
179 unsigned long value;
180 char *str;
181 Section *sec;
182 int sym_index;
183 } *sym;
184 struct debug_info *child, *next, *last, *parent;
185 } *debug_info, *debug_info_root;
187 static CString debug_str;
189 /********************************************************/
190 #if 1
191 #define precedence_parser
192 static void init_prec(void);
193 #endif
194 /********************************************************/
195 #ifndef CONFIG_TCC_ASM
196 ST_FUNC void asm_instr(void)
197 {
198 tcc_error("inline asm() not supported");
199 }
200 ST_FUNC void asm_global_instr(void)
201 {
202 tcc_error("inline asm() not supported");
203 }
204 #endif
206 /* ------------------------------------------------------------------------- */
207 static void gen_cast(CType *type);
208 static void gen_cast_s(int t);
209 static inline CType *pointed_type(CType *type);
210 static int is_compatible_types(CType *type1, CType *type2);
211 static int parse_btype(CType *type, AttributeDef *ad);
212 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
213 static void parse_expr_type(CType *type);
214 static void init_putv(CType *type, Section *sec, unsigned long c);
215 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
216 static void block(int is_expr);
217 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
218 static void decl(int l);
219 static int decl0(int l, int is_for_loop_init, Sym *);
220 static void expr_eq(void);
221 static void vla_runtime_type_size(CType *type, int *a);
222 static int is_compatible_unqualified_types(CType *type1, CType *type2);
223 static inline int64_t expr_const64(void);
224 static void vpush64(int ty, unsigned long long v);
225 static void vpush(CType *type);
226 static int gvtst(int inv, int t);
227 static void gen_inline_functions(TCCState *s);
228 static void free_inline_functions(TCCState *s);
229 static void skip_or_save_block(TokenString **str);
230 static void gv_dup(void);
231 static int get_temp_local_var(int size,int align);
232 static void clear_temp_local_var_list();
233 static void cast_error(CType *st, CType *dt);
235 ST_INLN int is_float(int t)
237 int bt = t & VT_BTYPE;
238 return bt == VT_LDOUBLE
239 || bt == VT_DOUBLE
240 || bt == VT_FLOAT
241 || bt == VT_QFLOAT;
244 static inline int is_integer_btype(int bt)
246 return bt == VT_BYTE
247 || bt == VT_BOOL
248 || bt == VT_SHORT
249 || bt == VT_INT
250 || bt == VT_LLONG;
253 static int btype_size(int bt)
255 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
256 bt == VT_SHORT ? 2 :
257 bt == VT_INT ? 4 :
258 bt == VT_LLONG ? 8 :
259 bt == VT_PTR ? PTR_SIZE : 0;
262 /* returns function return register from type */
263 static int R_RET(int t)
265 if (!is_float(t))
266 return REG_IRET;
267 #ifdef TCC_TARGET_X86_64
268 if ((t & VT_BTYPE) == VT_LDOUBLE)
269 return TREG_ST0;
270 #elif defined TCC_TARGET_RISCV64
271 if ((t & VT_BTYPE) == VT_LDOUBLE)
272 return REG_IRET;
273 #endif
274 return REG_FRET;
277 /* returns 2nd function return register, if any */
278 static int R2_RET(int t)
280 t &= VT_BTYPE;
281 #if PTR_SIZE == 4
282 if (t == VT_LLONG)
283 return REG_IRE2;
284 #elif defined TCC_TARGET_X86_64
285 if (t == VT_QLONG)
286 return REG_IRE2;
287 if (t == VT_QFLOAT)
288 return REG_FRE2;
289 #elif defined TCC_TARGET_RISCV64
290 if (t == VT_LDOUBLE)
291 return REG_IRE2;
292 #endif
293 return VT_CONST;
296 /* returns true for two-word types */
297 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
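/* Added note: e.g. with PTR_SIZE == 4 a VT_LLONG result comes back in the
   register pair REG_IRET/REG_IRE2 (see R2_RET above), so
   USING_TWO_WORDS(VT_LLONG) is true there, while on 64-bit targets it is
   false for plain integer types. */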
299 /* store the function return register(s) into a stack value */
300 static void PUT_R_RET(SValue *sv, int t)
302 sv->r = R_RET(t), sv->r2 = R2_RET(t);
305 /* returns function return register class for type t */
306 static int RC_RET(int t)
308 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
311 /* returns generic register class for type t */
312 static int RC_TYPE(int t)
314 if (!is_float(t))
315 return RC_INT;
316 #ifdef TCC_TARGET_X86_64
317 if ((t & VT_BTYPE) == VT_LDOUBLE)
318 return RC_ST0;
319 if ((t & VT_BTYPE) == VT_QFLOAT)
320 return RC_FRET;
321 #elif defined TCC_TARGET_RISCV64
322 if ((t & VT_BTYPE) == VT_LDOUBLE)
323 return RC_INT;
324 #endif
325 return RC_FLOAT;
328 /* returns 2nd register class corresponding to t and rc */
329 static int RC2_TYPE(int t, int rc)
331 if (!USING_TWO_WORDS(t))
332 return 0;
333 #ifdef RC_IRE2
334 if (rc == RC_IRET)
335 return RC_IRE2;
336 #endif
337 #ifdef RC_FRE2
338 if (rc == RC_FRET)
339 return RC_FRE2;
340 #endif
341 if (rc & RC_FLOAT)
342 return RC_FLOAT;
343 return RC_INT;
346 /* we use our own 'finite' function to avoid potential problems with
347 non-standard math libs */
348 /* XXX: endianness dependent */
349 ST_FUNC int ieee_finite(double d)
351 int p[4];
352 memcpy(p, &d, sizeof(double));
353 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
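/* Added explanation: on a little-endian host p[1] holds the sign and exponent
   bits of the double.  OR-ing with 0x800fffff sets every bit except the 11
   exponent bits, so the +1 carries out of bit 31 only when the exponent is
   all ones (Inf/NaN); the final shift therefore yields 1 for finite values
   and 0 otherwise. */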
356 /* compiling intel long double natively */
357 #if (defined __i386__ || defined __x86_64__) \
358 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
359 # define TCC_IS_NATIVE_387
360 #endif
362 ST_FUNC void test_lvalue(void)
364 if (!(vtop->r & VT_LVAL))
365 expect("lvalue");
368 ST_FUNC void check_vstack(void)
370 if (vtop != vstack - 1)
371 tcc_error("internal compiler error: vstack leak (%d)",
372 (int)(vtop - vstack + 1));
375 /* ------------------------------------------------------------------------- */
376 /* vstack debugging aid */
378 #if 0
379 void pv (const char *lbl, int a, int b)
381 int i;
382 for (i = a; i < a + b; ++i) {
383 SValue *p = &vtop[-i];
384 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
385 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
388 #endif
390 /* ------------------------------------------------------------------------- */
391 /* start of translation unit info */
392 ST_FUNC void tcc_debug_start(TCCState *s1)
394 if (s1->do_debug) {
395 int i;
396 char buf[512];
398 /* file info: full path + filename */
399 section_sym = put_elf_sym(symtab_section, 0, 0,
400 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
401 text_section->sh_num, NULL);
402 getcwd(buf, sizeof(buf));
403 #ifdef _WIN32
404 normalize_slashes(buf);
405 #endif
406 pstrcat(buf, sizeof(buf), "/");
407 debug_next_type = sizeof(default_debug) / sizeof(default_debug[0]);
408 put_stabs_r(s1, buf, N_SO, 0, 0,
409 text_section->data_offset, text_section, section_sym);
410 put_stabs_r(s1, file->prev->filename, N_SO, 0, 0,
411 text_section->data_offset, text_section, section_sym);
412 for (i = 0; i < sizeof (default_debug) / sizeof (default_debug[0]); i++)
413 put_stabs(s1, default_debug[i].name, N_LSYM, 0, 0, 0);
414 new_file = last_line_num = 0;
415 func_ind = -1;
416 /* we're currently 'including' the <command line> */
417 tcc_debug_bincl(s1);
420 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
421 symbols can be safely used */
422 put_elf_sym(symtab_section, 0, 0,
423 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
424 SHN_ABS, file->filename);
427 static void tcc_debug_stabs (const char *str, int type, unsigned long value,
428 Section *sec, int sym_index)
430 struct debug_sym *s;
432 if (debug_info) {
433 debug_info->sym =
434 (struct debug_sym *)tcc_realloc (debug_info->sym,
435 sizeof(struct debug_sym) *
436 (debug_info->n_sym + 1));
437 s = debug_info->sym + debug_info->n_sym++;
438 s->type = type;
439 s->value = value;
440 s->str = tcc_strdup(str);
441 s->sec = sec;
442 s->sym_index = sym_index;
444 else if (sec)
445 put_stabs_r (tcc_state, str, type, 0, 0, value, sec, sym_index);
446 else
447 put_stabs (tcc_state, str, type, 0, 0, value);
450 static void tcc_debug_stabn(int type, int value)
452 if (!tcc_state->do_debug)
453 return;
454 if (type == N_LBRAC) {
455 struct debug_info *info =
456 (struct debug_info *) tcc_mallocz(sizeof (*info));
458 info->start = value;
459 info->parent = debug_info;
460 if (debug_info) {
461 if (debug_info->child) {
462 if (debug_info->child->last)
463 debug_info->child->last->next = info;
464 else
465 debug_info->child->next = info;
466 debug_info->child->last = info;
468 else
469 debug_info->child = info;
471 else
472 debug_info_root = info;
473 debug_info = info;
475 else {
476 debug_info->end = value;
477 debug_info = debug_info->parent;
481 static void tcc_get_debug_info(Sym *s, CString *result)
483 int type;
484 int n = 0;
485 int debug_type = -1;
486 Sym *t = s;
487 CString str;
489 for (;;) {
490 type = t->type.t & ~(VT_EXTERN | VT_STATIC | VT_CONSTANT | VT_VOLATILE);
491 if ((type & VT_BTYPE) != VT_BYTE)
492 type &= ~VT_DEFSIGN;
493 if (type == VT_PTR || type == (VT_PTR | VT_ARRAY))
494 n++, t = t->type.ref;
495 else
496 break;
498 if ((type & VT_BTYPE) == VT_STRUCT) {
499 int i;
501 t = t->type.ref;
502 for (i = 0; i < n_debug_hash; i++) {
503 if (t == debug_hash[i].type) {
504 debug_type = debug_hash[i].debug_type;
505 break;
508 if (debug_type == -1) {
509 debug_type = ++debug_next_type;
510 debug_hash = (struct debug_hash *)
511 tcc_realloc (debug_hash,
512 (n_debug_hash + 1) * sizeof(*debug_hash));
513 debug_hash[n_debug_hash].debug_type = debug_type;
514 debug_hash[n_debug_hash++].type = t;
515 cstr_new (&str);
516 cstr_printf (&str, "%s:T%d=%c%d",
517 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
518 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
519 debug_type,
520 IS_UNION (t->type.t) ? 'u' : 's',
521 t->c);
522 while (t->next) {
523 int pos, size, align;
525 t = t->next;
526 cstr_printf (&str, "%s:",
527 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
528 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
529 tcc_get_debug_info (t, &str);
530 if (t->type.t & VT_BITFIELD) {
531 pos = t->c * 8 + BIT_POS(t->type.t);
532 size = BIT_SIZE(t->type.t);
534 else {
535 pos = t->c * 8;
536 size = type_size(&t->type, &align) * 8;
538 cstr_printf (&str, ",%d,%d;", pos, size);
540 cstr_printf (&str, ";");
541 tcc_debug_stabs(str.data, N_LSYM, 0, NULL, 0);
542 cstr_free (&str);
545 else if (IS_ENUM(type)) {
546 Sym *e = t = t->type.ref;
548 debug_type = ++debug_next_type;
549 cstr_new (&str);
550 cstr_printf (&str, "%s:T%d=e",
551 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
552 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
553 debug_type);
554 while (t->next) {
555 t = t->next;
556 cstr_printf (&str, "%s:",
557 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
558 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
559 cstr_printf (&str, e->type.t & VT_UNSIGNED ? "%llu," : "%lld,",
560 t->enum_val);
562 cstr_printf (&str, ";");
563 tcc_debug_stabs(str.data, N_LSYM, 0, NULL, 0);
564 cstr_free (&str);
566 else {
567 type &= ~VT_STRUCT_MASK;
568 for (debug_type = 1;
569 debug_type <= sizeof(default_debug) / sizeof(default_debug[0]);
570 debug_type++)
571 if (default_debug[debug_type - 1].type == type)
572 break;
573 if (debug_type > sizeof(default_debug) / sizeof(default_debug[0]))
574 return;
576 if (n > 0)
577 cstr_printf (result, "%d=", ++debug_next_type);
578 t = s;
579 for (;;) {
580 type = t->type.t & ~(VT_EXTERN | VT_STATIC | VT_CONSTANT | VT_VOLATILE);
581 if ((type & VT_BTYPE) != VT_BYTE)
582 type &= ~VT_DEFSIGN;
583 if (type == VT_PTR)
584 cstr_printf (result, "%d=*", ++debug_next_type);
585 else if (type == (VT_PTR | VT_ARRAY))
586 cstr_printf (result, "%d=ar1;0;%d;",
587 ++debug_next_type, t->type.ref->c - 1);
588 else
589 break;
590 t = t->type.ref;
592 cstr_printf (result, "%d", debug_type);
595 static void tcc_debug_finish (struct debug_info *cur)
597 while (cur) {
598 int i;
599 struct debug_info *next = cur->next;
601 for (i = 0; i < cur->n_sym; i++) {
602 struct debug_sym *s = &cur->sym[i];
604 if (s->sec)
605 put_stabs_r(tcc_state, s->str, s->type, 0, 0, s->value,
606 s->sec, s->sym_index);
607 else
608 put_stabs(tcc_state, s->str, s->type, 0, 0, s->value);
609 tcc_free (s->str);
611 tcc_free (cur->sym);
612 put_stabn(tcc_state, N_LBRAC, 0, 0, cur->start);
613 tcc_debug_finish (cur->child);
614 put_stabn(tcc_state, N_RBRAC, 0, 0, cur->end);
615 tcc_free (cur);
616 cur = next;
620 static void tcc_add_debug_info(int param, Sym *s, Sym *e)
622 if (!tcc_state->do_debug)
623 return;
624 for (; s != e; s = s->prev) {
625 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
626 continue;
627 cstr_reset (&debug_str);
628 cstr_printf (&debug_str, "%s:%s", get_tok_str(s->v, NULL), param ? "p" : "");
629 tcc_get_debug_info(s, &debug_str);
630 tcc_debug_stabs(debug_str.data, param ? N_PSYM : N_LSYM, s->c, NULL, 0);
634 /* put end of translation unit info */
635 ST_FUNC void tcc_debug_end(TCCState *s1)
637 if (!s1->do_debug)
638 return;
639 put_stabs_r(s1, NULL, N_SO, 0, 0,
640 text_section->data_offset, text_section, section_sym);
641 tcc_free(debug_hash);
642 debug_hash = NULL;
643 n_debug_hash = 0;
646 static BufferedFile* put_new_file(TCCState *s1)
648 BufferedFile *f = file;
649 /* use upper file if from inline ":asm:" */
650 if (f->filename[0] == ':')
651 f = f->prev;
652 if (f && new_file) {
653 put_stabs_r(s1, f->filename, N_SOL, 0, 0, ind, text_section, section_sym);
654 new_file = last_line_num = 0;
656 return f;
659 /* generate line number info */
660 ST_FUNC void tcc_debug_line(TCCState *s1)
662 BufferedFile *f;
663 if (!s1->do_debug
664 || cur_text_section != text_section
665 || !(f = put_new_file(s1))
666 || last_line_num == f->line_num)
667 return;
668 if (func_ind != -1) {
669 put_stabn(s1, N_SLINE, 0, f->line_num, ind - func_ind);
670 } else {
671 /* from tcc_assemble */
672 put_stabs_r(s1, NULL, N_SLINE, 0, f->line_num, ind, text_section, section_sym);
674 last_line_num = f->line_num;
677 /* put function symbol */
678 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
680 BufferedFile *f;
681 if (!s1->do_debug || !(f = put_new_file(s1)))
682 return;
683 tcc_debug_stabn(N_LBRAC, ind - func_ind);
684 cstr_reset (&debug_str);
685 cstr_printf(&debug_str, "%s:%c", funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
686 tcc_get_debug_info(sym->type.ref, &debug_str);
687 put_stabs_r(s1, debug_str.data, N_FUN, 0, f->line_num, 0, cur_text_section, sym->c);
688 tcc_debug_line(s1);
691 /* put function size */
692 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
694 if (!s1->do_debug)
695 return;
696 tcc_debug_stabn(N_RBRAC, size);
697 tcc_debug_finish (debug_info_root);
698 debug_info_root = NULL;
699 debug_info = NULL;
700 cstr_free (&debug_str);
703 /* put alternative filename */
704 ST_FUNC void tcc_debug_putfile(TCCState *s1, const char *filename)
706 if (0 == strcmp(file->filename, filename))
707 return;
708 pstrcpy(file->filename, sizeof(file->filename), filename);
709 new_file = 1;
712 /* begin of #include */
713 ST_FUNC void tcc_debug_bincl(TCCState *s1)
715 if (!s1->do_debug)
716 return;
717 put_stabs(s1, file->filename, N_BINCL, 0, 0, 0);
718 new_file = 1;
721 /* end of #include */
722 ST_FUNC void tcc_debug_eincl(TCCState *s1)
724 if (!s1->do_debug)
725 return;
726 put_stabn(s1, N_EINCL, 0, 0, 0);
727 new_file = 1;
730 /* ------------------------------------------------------------------------- */
731 /* initialize vstack and types. This must also be done for tcc -E */
732 ST_FUNC void tccgen_init(TCCState *s1)
734 vtop = vstack - 1;
735 memset(vtop, 0, sizeof *vtop);
737 /* define some often used types */
738 int_type.t = VT_INT;
739 char_pointer_type.t = VT_BYTE;
740 mk_pointer(&char_pointer_type);
741 func_old_type.t = VT_FUNC;
742 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
743 func_old_type.ref->f.func_call = FUNC_CDECL;
744 func_old_type.ref->f.func_type = FUNC_OLD;
745 #ifdef precedence_parser
746 init_prec();
747 #endif
750 ST_FUNC int tccgen_compile(TCCState *s1)
752 cur_text_section = NULL;
753 funcname = "";
754 anon_sym = SYM_FIRST_ANOM;
755 section_sym = 0;
756 const_wanted = 0;
757 nocode_wanted = 0x80000000;
758 local_scope = 0;
760 tcc_debug_start(s1);
761 #ifdef TCC_TARGET_ARM
762 arm_init(s1);
763 #endif
764 #ifdef INC_DEBUG
765 printf("%s: **** new file\n", file->filename);
766 #endif
767 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
768 next();
769 decl(VT_CONST);
770 gen_inline_functions(s1);
771 check_vstack();
772 /* end of translation unit info */
773 tcc_debug_end(s1);
774 return 0;
777 ST_FUNC void tccgen_finish(TCCState *s1)
779 free_inline_functions(s1);
780 sym_pop(&global_stack, NULL, 0);
781 sym_pop(&local_stack, NULL, 0);
782 /* free preprocessor macros */
783 free_defines(NULL);
784 /* free sym_pools */
785 dynarray_reset(&sym_pools, &nb_sym_pools);
786 sym_free_first = NULL;
789 /* ------------------------------------------------------------------------- */
790 ST_FUNC ElfSym *elfsym(Sym *s)
792 if (!s || !s->c)
793 return NULL;
794 return &((ElfSym *)symtab_section->data)[s->c];
797 /* apply storage attributes to Elf symbol */
798 ST_FUNC void update_storage(Sym *sym)
800 ElfSym *esym;
801 int sym_bind, old_sym_bind;
803 esym = elfsym(sym);
804 if (!esym)
805 return;
807 if (sym->a.visibility)
808 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
809 | sym->a.visibility;
811 if (sym->type.t & (VT_STATIC | VT_INLINE))
812 sym_bind = STB_LOCAL;
813 else if (sym->a.weak)
814 sym_bind = STB_WEAK;
815 else
816 sym_bind = STB_GLOBAL;
817 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
818 if (sym_bind != old_sym_bind) {
819 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
822 #ifdef TCC_TARGET_PE
823 if (sym->a.dllimport)
824 esym->st_other |= ST_PE_IMPORT;
825 if (sym->a.dllexport)
826 esym->st_other |= ST_PE_EXPORT;
827 #endif
829 #if 0
830 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
831 get_tok_str(sym->v, NULL),
832 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
833 sym->a.visibility,
834 sym->a.dllexport,
835 sym->a.dllimport
837 #endif
840 /* ------------------------------------------------------------------------- */
841 /* update sym->c so that it points to an external symbol in section
842 'section' with value 'value' */
844 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
845 addr_t value, unsigned long size,
846 int can_add_underscore)
848 int sym_type, sym_bind, info, other, t;
849 ElfSym *esym;
850 const char *name;
851 char buf1[256];
852 #ifdef CONFIG_TCC_BCHECK
853 char buf[32];
854 #endif
855 if (!sym->c) {
856 name = get_tok_str(sym->v, NULL);
857 #ifdef CONFIG_TCC_BCHECK
858 if (tcc_state->do_bounds_check) {
859 /* XXX: avoid doing that for statics ? */
860 /* if bound checking is activated, we change some function
861 names by adding the "__bound" prefix */
862 switch(sym->v) {
863 #ifdef TCC_TARGET_PE
864 /* XXX: we rely only on malloc hooks */
865 case TOK_malloc:
866 case TOK_free:
867 case TOK_realloc:
868 case TOK_memalign:
869 case TOK_calloc:
870 #endif
871 case TOK_memcpy:
872 case TOK_memmove:
873 case TOK_memset:
874 case TOK_memcmp:
875 case TOK_strlen:
876 case TOK_strcpy:
877 case TOK_strncpy:
878 case TOK_strcmp:
879 case TOK_strncmp:
880 case TOK_strcat:
881 case TOK_strchr:
882 case TOK_strdup:
883 case TOK_alloca:
884 case TOK_mmap:
885 case TOK_munmap:
886 strcpy(buf, "__bound_");
887 strcat(buf, name);
888 name = buf;
889 break;
892 #endif
893 t = sym->type.t;
894 if ((t & VT_BTYPE) == VT_FUNC) {
895 sym_type = STT_FUNC;
896 } else if ((t & VT_BTYPE) == VT_VOID) {
897 sym_type = STT_NOTYPE;
898 } else {
899 sym_type = STT_OBJECT;
901 if (t & (VT_STATIC | VT_INLINE))
902 sym_bind = STB_LOCAL;
903 else
904 sym_bind = STB_GLOBAL;
905 other = 0;
906 #ifdef TCC_TARGET_PE
907 if (sym_type == STT_FUNC && sym->type.ref) {
908 Sym *ref = sym->type.ref;
909 if (ref->a.nodecorate) {
910 can_add_underscore = 0;
912 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
913 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
914 name = buf1;
915 other |= ST_PE_STDCALL;
916 can_add_underscore = 0;
919 #endif
920 if (tcc_state->leading_underscore && can_add_underscore) {
921 buf1[0] = '_';
922 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
923 name = buf1;
925 if (sym->asm_label)
926 name = get_tok_str(sym->asm_label, NULL);
927 info = ELFW(ST_INFO)(sym_bind, sym_type);
928 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
929 if (tcc_state->do_debug && sym_type != STT_FUNC && sym->v < SYM_FIRST_ANOM) {
930 CString str;
932 cstr_new (&str);
933 cstr_printf (&str, "%s:%c", get_tok_str(sym->v, NULL),
934 debug_info ? 'V' : sym_bind == STB_GLOBAL ? 'G' : 'S');
935 tcc_get_debug_info(sym, &str);
936 if (sym_bind == STB_GLOBAL)
937 tcc_debug_stabs(str.data, N_GSYM, 0, NULL, 0);
938 else
939 tcc_debug_stabs(str.data,
940 (t & VT_STATIC) && data_section->sh_num == sh_num
941 ? N_STSYM : N_LCSYM, 0,
942 data_section->sh_num == sh_num ? data_section :
943 bss_section->sh_num == sh_num ? bss_section :
944 common_section->sh_num == sh_num ? common_section :
945 text_section, sym->c);
946 cstr_free (&str);
948 } else {
949 esym = elfsym(sym);
950 esym->st_value = value;
951 esym->st_size = size;
952 esym->st_shndx = sh_num;
954 update_storage(sym);
957 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
958 addr_t value, unsigned long size)
960 int sh_num = section ? section->sh_num : SHN_UNDEF;
961 put_extern_sym2(sym, sh_num, value, size, 1);
964 /* add a new relocation entry to symbol 'sym' in section 's' */
965 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
966 addr_t addend)
968 int c = 0;
970 if (nocode_wanted && s == cur_text_section)
971 return;
973 if (sym) {
974 if (0 == sym->c)
975 put_extern_sym(sym, NULL, 0, 0);
976 c = sym->c;
979 /* now we can add ELF relocation info */
980 put_elf_reloca(symtab_section, s, offset, type, c, addend);
983 #if PTR_SIZE == 4
984 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
986 greloca(s, sym, offset, type, 0);
988 #endif
990 /* ------------------------------------------------------------------------- */
991 /* symbol allocator */
992 static Sym *__sym_malloc(void)
994 Sym *sym_pool, *sym, *last_sym;
995 int i;
997 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
998 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
1000 last_sym = sym_free_first;
1001 sym = sym_pool;
1002 for(i = 0; i < SYM_POOL_NB; i++) {
1003 sym->next = last_sym;
1004 last_sym = sym;
1005 sym++;
1007 sym_free_first = last_sym;
1008 return last_sym;
1011 static inline Sym *sym_malloc(void)
1013 Sym *sym;
1014 #ifndef SYM_DEBUG
1015 sym = sym_free_first;
1016 if (!sym)
1017 sym = __sym_malloc();
1018 sym_free_first = sym->next;
1019 return sym;
1020 #else
1021 sym = tcc_malloc(sizeof(Sym));
1022 return sym;
1023 #endif
1026 ST_INLN void sym_free(Sym *sym)
1028 #ifndef SYM_DEBUG
1029 sym->next = sym_free_first;
1030 sym_free_first = sym;
1031 #else
1032 tcc_free(sym);
1033 #endif
1036 /* push, without hashing */
1037 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
1039 Sym *s;
1041 s = sym_malloc();
1042 memset(s, 0, sizeof *s);
1043 s->v = v;
1044 s->type.t = t;
1045 s->c = c;
1046 /* add in stack */
1047 s->prev = *ps;
1048 *ps = s;
1049 return s;
1052 /* find a symbol and return its associated structure. 's' is the top
1053 of the symbol stack */
1054 ST_FUNC Sym *sym_find2(Sym *s, int v)
1056 while (s) {
1057 if (s->v == v)
1058 return s;
1059 else if (s->v == -1)
1060 return NULL;
1061 s = s->prev;
1063 return NULL;
1066 /* structure lookup */
1067 ST_INLN Sym *struct_find(int v)
1069 v -= TOK_IDENT;
1070 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1071 return NULL;
1072 return table_ident[v]->sym_struct;
1075 /* find an identifier */
1076 ST_INLN Sym *sym_find(int v)
1078 v -= TOK_IDENT;
1079 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1080 return NULL;
1081 return table_ident[v]->sym_identifier;
1084 static int sym_scope(Sym *s)
1086 if (IS_ENUM_VAL (s->type.t))
1087 return s->type.ref->sym_scope;
1088 else
1089 return s->sym_scope;
1092 /* push a given symbol on the symbol stack */
1093 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
1095 Sym *s, **ps;
1096 TokenSym *ts;
1098 if (local_stack)
1099 ps = &local_stack;
1100 else
1101 ps = &global_stack;
1102 s = sym_push2(ps, v, type->t, c);
1103 s->type.ref = type->ref;
1104 s->r = r;
1105 /* don't record fields or anonymous symbols */
1106 /* XXX: simplify */
1107 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
1108 /* record symbol in token array */
1109 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
1110 if (v & SYM_STRUCT)
1111 ps = &ts->sym_struct;
1112 else
1113 ps = &ts->sym_identifier;
1114 s->prev_tok = *ps;
1115 *ps = s;
1116 s->sym_scope = local_scope;
1117 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
1118 tcc_error("redeclaration of '%s'",
1119 get_tok_str(v & ~SYM_STRUCT, NULL));
1121 return s;
1124 /* push a global identifier */
1125 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
1127 Sym *s, **ps;
1128 s = sym_push2(&global_stack, v, t, c);
1129 s->r = VT_CONST | VT_SYM;
1130 /* don't record anonymous symbol */
1131 if (v < SYM_FIRST_ANOM) {
1132 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
1133 /* modify the topmost local identifier, so that sym_identifier will
1134 point to 's' when popped; happens when called from inline asm */
1135 while (*ps != NULL && (*ps)->sym_scope)
1136 ps = &(*ps)->prev_tok;
1137 s->prev_tok = *ps;
1138 *ps = s;
1140 return s;
1143 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1144 pop them yet from the list, but do remove them from the token array. */
1145 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
1147 Sym *s, *ss, **ps;
1148 TokenSym *ts;
1149 int v;
1151 s = *ptop;
1152 while(s != b) {
1153 ss = s->prev;
1154 v = s->v;
1155 /* remove symbol in token array */
1156 /* XXX: simplify */
1157 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
1158 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
1159 if (v & SYM_STRUCT)
1160 ps = &ts->sym_struct;
1161 else
1162 ps = &ts->sym_identifier;
1163 *ps = s->prev_tok;
1165 if (!keep)
1166 sym_free(s);
1167 s = ss;
1169 if (!keep)
1170 *ptop = b;
1173 /* ------------------------------------------------------------------------- */
1174 static void vcheck_cmp(void)
1176 /* cannot leave cpu flags set if other instructions are generated. Also
1177 avoid leaving VT_JMP anywhere except on the top of the stack
1178 because it would complicate the code generator.
1180 Don't do this when nocode_wanted. vtop might come from
1181 !nocode_wanted regions (see 88_codeopt.c) and transforming
1182 it to a register without actually generating code is wrong
1183 as its value might still be used for real. All values
1184 we push under nocode_wanted will eventually be popped
1185 again, so that the VT_CMP/VT_JMP value will be in vtop
1186 when code is unsuppressed again. */
1188 if (vtop->r == VT_CMP && !nocode_wanted)
1189 gv(RC_INT);
1192 static void vsetc(CType *type, int r, CValue *vc)
1194 if (vtop >= vstack + (VSTACK_SIZE - 1))
1195 tcc_error("memory full (vstack)");
1196 vcheck_cmp();
1197 vtop++;
1198 vtop->type = *type;
1199 vtop->r = r;
1200 vtop->r2 = VT_CONST;
1201 vtop->c = *vc;
1202 vtop->sym = NULL;
1205 ST_FUNC void vswap(void)
1207 SValue tmp;
1209 vcheck_cmp();
1210 tmp = vtop[0];
1211 vtop[0] = vtop[-1];
1212 vtop[-1] = tmp;
1215 /* pop stack value */
1216 ST_FUNC void vpop(void)
1218 int v;
1219 v = vtop->r & VT_VALMASK;
1220 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1221 /* for x86, we need to pop the FP stack */
1222 if (v == TREG_ST0) {
1223 o(0xd8dd); /* fstp %st(0) */
1224 } else
1225 #endif
1226 if (v == VT_CMP) {
1227 /* need to put correct jump if && or || without test */
1228 gsym(vtop->jtrue);
1229 gsym(vtop->jfalse);
1231 vtop--;
1234 /* push a constant of type "type" with a dummy value */
1235 static void vpush(CType *type)
1237 vset(type, VT_CONST, 0);
1240 /* push arbitrary 64bit constant */
1241 static void vpush64(int ty, unsigned long long v)
1243 CValue cval;
1244 CType ctype;
1245 ctype.t = ty;
1246 ctype.ref = NULL;
1247 cval.i = v;
1248 vsetc(&ctype, VT_CONST, &cval);
1251 /* push integer constant */
1252 ST_FUNC void vpushi(int v)
1254 vpush64(VT_INT, v);
1257 /* push a pointer sized constant */
1258 static void vpushs(addr_t v)
1260 vpush64(VT_SIZE_T, v);
1263 /* push long long constant */
1264 static inline void vpushll(long long v)
1266 vpush64(VT_LLONG, v);
1269 ST_FUNC void vset(CType *type, int r, int v)
1271 CValue cval;
1272 cval.i = v;
1273 vsetc(type, r, &cval);
1276 static void vseti(int r, int v)
1278 CType type;
1279 type.t = VT_INT;
1280 type.ref = NULL;
1281 vset(&type, r, v);
1284 ST_FUNC void vpushv(SValue *v)
1286 if (vtop >= vstack + (VSTACK_SIZE - 1))
1287 tcc_error("memory full (vstack)");
1288 vtop++;
1289 *vtop = *v;
1292 static void vdup(void)
1294 vpushv(vtop);
1297 /* rotate n first stack elements to the bottom
1298 I1 ... In -> I2 ... In I1 [top is right]
1300 ST_FUNC void vrotb(int n)
1302 int i;
1303 SValue tmp;
1305 vcheck_cmp();
1306 tmp = vtop[-n + 1];
1307 for(i=-n+1;i!=0;i++)
1308 vtop[i] = vtop[i+1];
1309 vtop[0] = tmp;
1312 /* rotate the n elements before entry e towards the top
1313 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1315 ST_FUNC void vrote(SValue *e, int n)
1317 int i;
1318 SValue tmp;
1320 vcheck_cmp();
1321 tmp = *e;
1322 for(i = 0;i < n - 1; i++)
1323 e[-i] = e[-i - 1];
1324 e[-n + 1] = tmp;
1327 /* rotate n first stack elements to the top
1328 I1 ... In -> In I1 ... I(n-1) [top is right]
1330 ST_FUNC void vrott(int n)
1332 vrote(vtop, n);
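/* Illustration (added): with the stack  ... A B C  (C on top),
   vrotb(3) gives  ... B C A   and  vrott(3) gives  ... C A B;
   vrote(e, n) applies the same rotation at entry 'e' instead of vtop. */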
1335 /* ------------------------------------------------------------------------- */
1336 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1338 /* called from generators to set the result from relational ops */
1339 ST_FUNC void vset_VT_CMP(int op)
1341 vtop->r = VT_CMP;
1342 vtop->cmp_op = op;
1343 vtop->jfalse = 0;
1344 vtop->jtrue = 0;
1347 /* called once before asking generators to load VT_CMP to a register */
1348 static void vset_VT_JMP(void)
1350 int op = vtop->cmp_op;
1352 if (vtop->jtrue || vtop->jfalse) {
1353 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1354 int inv = op & (op < 2); /* small optimization */
1355 vseti(VT_JMP+inv, gvtst(inv, 0));
1356 } else {
1357 /* otherwise convert flags (rsp. 0/1) to register */
1358 vtop->c.i = op;
1359 if (op < 2) /* doesn't seem to happen */
1360 vtop->r = VT_CONST;
1364 /* Set CPU Flags, doesn't yet jump */
1365 static void gvtst_set(int inv, int t)
1367 int *p;
1369 if (vtop->r != VT_CMP) {
1370 vpushi(0);
1371 gen_op(TOK_NE);
1372 if (vtop->r != VT_CMP) /* must be VT_CONST then */
1373 vset_VT_CMP(vtop->c.i != 0);
1376 p = inv ? &vtop->jfalse : &vtop->jtrue;
1377 *p = gjmp_append(*p, t);
1380 /* Generate value test
1382 * Generate a test for any value (jump, comparison and integers) */
1383 static int gvtst(int inv, int t)
1385 int op, x, u;
1387 gvtst_set(inv, t);
1388 t = vtop->jtrue, u = vtop->jfalse;
1389 if (inv)
1390 x = u, u = t, t = x;
1391 op = vtop->cmp_op;
1393 /* jump to the wanted target */
1394 if (op > 1)
1395 t = gjmp_cond(op ^ inv, t);
1396 else if (op != inv)
1397 t = gjmp(t);
1398 /* resolve complementary jumps to here */
1399 gsym(u);
1401 vtop--;
1402 return t;
1405 /* generate a zero or nozero test */
1406 static void gen_test_zero(int op)
1408 if (vtop->r == VT_CMP) {
1409 int j;
1410 if (op == TOK_EQ) {
1411 j = vtop->jfalse;
1412 vtop->jfalse = vtop->jtrue;
1413 vtop->jtrue = j;
1414 vtop->cmp_op ^= 1;
1416 } else {
1417 vpushi(0);
1418 gen_op(op);
1422 /* ------------------------------------------------------------------------- */
1423 /* push a symbol value of TYPE */
1424 static inline void vpushsym(CType *type, Sym *sym)
1426 CValue cval;
1427 cval.i = 0;
1428 vsetc(type, VT_CONST | VT_SYM, &cval);
1429 vtop->sym = sym;
1432 /* Return a static symbol pointing to a section */
1433 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1435 int v;
1436 Sym *sym;
1438 v = anon_sym++;
1439 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1440 sym->type.t |= VT_STATIC;
1441 put_extern_sym(sym, sec, offset, size);
1442 return sym;
1445 /* push a reference to a section offset by adding a dummy symbol */
1446 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1448 vpushsym(type, get_sym_ref(type, sec, offset, size));
1451 /* define a new external reference to a symbol 'v' of type 'u' */
1452 ST_FUNC Sym *external_global_sym(int v, CType *type)
1454 Sym *s;
1456 s = sym_find(v);
1457 if (!s) {
1458 /* push forward reference */
1459 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1460 s->type.ref = type->ref;
1461 } else if (IS_ASM_SYM(s)) {
1462 s->type.t = type->t | (s->type.t & VT_EXTERN);
1463 s->type.ref = type->ref;
1464 update_storage(s);
1466 return s;
1469 /* Merge symbol attributes. */
1470 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1472 if (sa1->aligned && !sa->aligned)
1473 sa->aligned = sa1->aligned;
1474 sa->packed |= sa1->packed;
1475 sa->weak |= sa1->weak;
1476 if (sa1->visibility != STV_DEFAULT) {
1477 int vis = sa->visibility;
1478 if (vis == STV_DEFAULT
1479 || vis > sa1->visibility)
1480 vis = sa1->visibility;
1481 sa->visibility = vis;
1483 sa->dllexport |= sa1->dllexport;
1484 sa->nodecorate |= sa1->nodecorate;
1485 sa->dllimport |= sa1->dllimport;
1488 /* Merge function attributes. */
1489 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1491 if (fa1->func_call && !fa->func_call)
1492 fa->func_call = fa1->func_call;
1493 if (fa1->func_type && !fa->func_type)
1494 fa->func_type = fa1->func_type;
1495 if (fa1->func_args && !fa->func_args)
1496 fa->func_args = fa1->func_args;
1499 /* Merge attributes. */
1500 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1502 merge_symattr(&ad->a, &ad1->a);
1503 merge_funcattr(&ad->f, &ad1->f);
1505 if (ad1->section)
1506 ad->section = ad1->section;
1507 if (ad1->alias_target)
1508 ad->alias_target = ad1->alias_target;
1509 if (ad1->asm_label)
1510 ad->asm_label = ad1->asm_label;
1511 if (ad1->attr_mode)
1512 ad->attr_mode = ad1->attr_mode;
1515 /* Merge some type attributes. */
1516 static void patch_type(Sym *sym, CType *type)
1518 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1519 if (!(sym->type.t & VT_EXTERN))
1520 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1521 sym->type.t &= ~VT_EXTERN;
1524 if (IS_ASM_SYM(sym)) {
1525 /* stay static if both are static */
1526 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1527 sym->type.ref = type->ref;
1530 if (!is_compatible_types(&sym->type, type)) {
1531 tcc_error("incompatible types for redefinition of '%s'",
1532 get_tok_str(sym->v, NULL));
1534 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1535 int static_proto = sym->type.t & VT_STATIC;
1536 /* warn if static follows non-static function declaration */
1537 if ((type->t & VT_STATIC) && !static_proto
1538 /* XXX this test for inline shouldn't be here. Until we
1539 implement gnu-inline mode again it silences a warning for
1540 mingw caused by our workarounds. */
1541 && !((type->t | sym->type.t) & VT_INLINE))
1542 tcc_warning("static storage ignored for redefinition of '%s'",
1543 get_tok_str(sym->v, NULL));
1545 /* set 'inline' if both agree or if one has static */
1546 if ((type->t | sym->type.t) & VT_INLINE) {
1547 if (!((type->t ^ sym->type.t) & VT_INLINE)
1548 || ((type->t | sym->type.t) & VT_STATIC))
1549 static_proto |= VT_INLINE;
1552 if (0 == (type->t & VT_EXTERN)) {
1553 /* put complete type, use static from prototype */
1554 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1555 sym->type.ref = type->ref;
1556 } else {
1557 sym->type.t &= ~VT_INLINE | static_proto;
1560 if (sym->type.ref->f.func_type == FUNC_OLD
1561 && type->ref->f.func_type != FUNC_OLD) {
1562 sym->type.ref = type->ref;
1565 } else {
1566 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1567 /* set array size if it was omitted in extern declaration */
1568 sym->type.ref->c = type->ref->c;
1570 if ((type->t ^ sym->type.t) & VT_STATIC)
1571 tcc_warning("storage mismatch for redefinition of '%s'",
1572 get_tok_str(sym->v, NULL));
1576 /* Merge some storage attributes. */
1577 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1579 if (type)
1580 patch_type(sym, type);
1582 #ifdef TCC_TARGET_PE
1583 if (sym->a.dllimport != ad->a.dllimport)
1584 tcc_error("incompatible dll linkage for redefinition of '%s'",
1585 get_tok_str(sym->v, NULL));
1586 #endif
1587 merge_symattr(&sym->a, &ad->a);
1588 if (ad->asm_label)
1589 sym->asm_label = ad->asm_label;
1590 update_storage(sym);
1593 /* copy sym to other stack */
1594 static Sym *sym_copy(Sym *s0, Sym **ps)
1596 Sym *s;
1597 s = sym_malloc(), *s = *s0;
1598 s->prev = *ps, *ps = s;
1599 if (s->v < SYM_FIRST_ANOM) {
1600 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1601 s->prev_tok = *ps, *ps = s;
1603 return s;
1606 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1607 static void sym_copy_ref(Sym *s, Sym **ps)
1609 int bt = s->type.t & VT_BTYPE;
1610 if (bt == VT_FUNC || bt == VT_PTR) {
1611 Sym **sp = &s->type.ref;
1612 for (s = *sp, *sp = NULL; s; s = s->next) {
1613 Sym *s2 = sym_copy(s, ps);
1614 sp = &(*sp = s2)->next;
1615 sym_copy_ref(s2, ps);
1620 /* define a new external reference to a symbol 'v' */
1621 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1623 Sym *s;
1625 /* look for global symbol */
1626 s = sym_find(v);
1627 while (s && s->sym_scope)
1628 s = s->prev_tok;
1630 if (!s) {
1631 /* push forward reference */
1632 s = global_identifier_push(v, type->t, 0);
1633 s->r |= r;
1634 s->a = ad->a;
1635 s->asm_label = ad->asm_label;
1636 s->type.ref = type->ref;
1637 /* copy type to the global stack */
1638 if (local_stack)
1639 sym_copy_ref(s, &global_stack);
1640 } else {
1641 patch_storage(s, ad, type);
1643 /* push variables on local_stack if any */
1644 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1645 s = sym_copy(s, &local_stack);
1646 return s;
1649 /* push a reference to global symbol v */
1650 ST_FUNC void vpush_global_sym(CType *type, int v)
1652 vpushsym(type, external_global_sym(v, type));
1655 /* save registers up to (vtop - n) stack entry */
1656 ST_FUNC void save_regs(int n)
1658 SValue *p, *p1;
1659 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1660 save_reg(p->r);
1663 /* save r to the memory stack, and mark it as being free */
1664 ST_FUNC void save_reg(int r)
1666 save_reg_upstack(r, 0);
1669 /* save r to the memory stack, and mark it as being free,
1670 if seen up to (vtop - n) stack entry */
1671 ST_FUNC void save_reg_upstack(int r, int n)
1673 int l, size, align, bt;
1674 SValue *p, *p1, sv;
1676 if ((r &= VT_VALMASK) >= VT_CONST)
1677 return;
1678 if (nocode_wanted)
1679 return;
1680 l = 0;
1681 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1682 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1683 /* must save value on stack if not already done */
1684 if (!l) {
1685 bt = p->type.t & VT_BTYPE;
1686 if (bt == VT_VOID)
1687 continue;
1688 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1689 bt = VT_PTR;
1690 sv.type.t = bt;
1691 size = type_size(&sv.type, &align);
1692 l = get_temp_local_var(size,align);
1693 sv.r = VT_LOCAL | VT_LVAL;
1694 sv.c.i = l;
1695 store(p->r & VT_VALMASK, &sv);
1696 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1697 /* x86 specific: need to pop fp register ST0 if saved */
1698 if (r == TREG_ST0) {
1699 o(0xd8dd); /* fstp %st(0) */
1701 #endif
1702 /* special long long case */
1703 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1704 sv.c.i += PTR_SIZE;
1705 store(p->r2, &sv);
1708 /* mark that stack entry as being saved on the stack */
1709 if (p->r & VT_LVAL) {
1710 /* also clear the bounded flag because the
1711 relocation address of the function was stored in
1712 p->c.i */
1713 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1714 } else {
1715 p->r = VT_LVAL | VT_LOCAL;
1717 p->r2 = VT_CONST;
1718 p->c.i = l;
1723 #ifdef TCC_TARGET_ARM
1724 /* find a register of class 'rc2' with at most one reference on stack.
1725 * If none, call get_reg(rc) */
1726 ST_FUNC int get_reg_ex(int rc, int rc2)
1728 int r;
1729 SValue *p;
1731 for(r=0;r<NB_REGS;r++) {
1732 if (reg_classes[r] & rc2) {
1733 int n;
1734 n=0;
1735 for(p = vstack; p <= vtop; p++) {
1736 if ((p->r & VT_VALMASK) == r ||
1737 p->r2 == r)
1738 n++;
1740 if (n <= 1)
1741 return r;
1744 return get_reg(rc);
1746 #endif
1748 /* find a free register of class 'rc'. If none, save one register */
1749 ST_FUNC int get_reg(int rc)
1751 int r;
1752 SValue *p;
1754 /* find a free register */
1755 for(r=0;r<NB_REGS;r++) {
1756 if (reg_classes[r] & rc) {
1757 if (nocode_wanted)
1758 return r;
1759 for(p=vstack;p<=vtop;p++) {
1760 if ((p->r & VT_VALMASK) == r ||
1761 p->r2 == r)
1762 goto notfound;
1764 return r;
1766 notfound: ;
1769 /* no register left : free the first one on the stack (VERY
1770 IMPORTANT to start from the bottom to ensure that we don't
1771 spill registers used in gen_opi()) */
1772 for(p=vstack;p<=vtop;p++) {
1773 /* look at second register (if long long) */
1774 r = p->r2;
1775 if (r < VT_CONST && (reg_classes[r] & rc))
1776 goto save_found;
1777 r = p->r & VT_VALMASK;
1778 if (r < VT_CONST && (reg_classes[r] & rc)) {
1779 save_found:
1780 save_reg(r);
1781 return r;
1784 /* Should never come here */
1785 return -1;
1788 /* find a free temporary local variable (return its offset on the stack) matching the given size and alignment. If none is found, add a new temporary stack variable */
1789 static int get_temp_local_var(int size,int align){
1790 int i;
1791 struct temp_local_variable *temp_var;
1792 int found_var;
1793 SValue *p;
1794 int r;
1795 char free;
1796 char found;
1797 found=0;
1798 for(i=0;i<nb_temp_local_vars;i++){
1799 temp_var=&arr_temp_local_vars[i];
1800 if(temp_var->size<size||align!=temp_var->align){
1801 continue;
1803 /*check if temp_var is free*/
1804 free=1;
1805 for(p=vstack;p<=vtop;p++) {
1806 r=p->r&VT_VALMASK;
1807 if(r==VT_LOCAL||r==VT_LLOCAL){
1808 if(p->c.i==temp_var->location){
1809 free=0;
1810 break;
1814 if(free){
1815 found_var=temp_var->location;
1816 found=1;
1817 break;
1820 if(!found){
1821 loc = (loc - size) & -align;
1822 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1823 temp_var=&arr_temp_local_vars[i];
1824 temp_var->location=loc;
1825 temp_var->size=size;
1826 temp_var->align=align;
1827 nb_temp_local_vars++;
1829 found_var=loc;
1831 return found_var;
1834 static void clear_temp_local_var_list(){
1835 nb_temp_local_vars=0;
1838 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1839 if needed */
1840 static void move_reg(int r, int s, int t)
1842 SValue sv;
1844 if (r != s) {
1845 save_reg(r);
1846 sv.type.t = t;
1847 sv.type.ref = NULL;
1848 sv.r = s;
1849 sv.c.i = 0;
1850 load(r, &sv);
1854 /* get address of vtop (vtop MUST BE an lvalue) */
1855 ST_FUNC void gaddrof(void)
1857 vtop->r &= ~VT_LVAL;
1858 /* tricky: if saved lvalue, then we can go back to lvalue */
1859 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1860 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
1863 #ifdef CONFIG_TCC_BCHECK
1864 /* generate lvalue bound code */
1865 static void gbound(void)
1867 CType type1;
1869 vtop->r &= ~VT_MUSTBOUND;
1870 /* if lvalue, then use checking code before dereferencing */
1871 if (vtop->r & VT_LVAL) {
1872 /* if not VT_BOUNDED value, then make one */
1873 if (!(vtop->r & VT_BOUNDED)) {
1874 /* must save type because we must set it to int to get pointer */
1875 type1 = vtop->type;
1876 vtop->type.t = VT_PTR;
1877 gaddrof();
1878 vpushi(0);
1879 gen_bounded_ptr_add();
1880 vtop->r |= VT_LVAL;
1881 vtop->type = type1;
1883 /* then check for dereferencing */
1884 gen_bounded_ptr_deref();
1888 /* we need to call __bound_ptr_add before we start to load function
1889 args into registers */
1890 ST_FUNC void gbound_args(int nb_args)
1892 int i;
1893 for (i = 1; i <= nb_args; ++i)
1894 if (vtop[1 - i].r & VT_MUSTBOUND) {
1895 vrotb(i);
1896 gbound();
1897 vrott(i);
1901 /* Add bounds for local symbols from S to E (via ->prev) */
1902 static void add_local_bounds(Sym *s, Sym *e)
1904 for (; s != e; s = s->prev) {
1905 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
1906 continue;
1907 /* Add arrays/structs/unions because we always take address */
1908 if ((s->type.t & VT_ARRAY)
1909 || (s->type.t & VT_BTYPE) == VT_STRUCT
1910 || s->a.addrtaken) {
1911 /* add local bound info */
1912 int align, size = type_size(&s->type, &align);
1913 addr_t *bounds_ptr = section_ptr_add(lbounds_section,
1914 2 * sizeof(addr_t));
1915 bounds_ptr[0] = s->c;
1916 bounds_ptr[1] = size;
1920 #endif
1922 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1923 static void pop_local_syms(int param, Sym **ptop, Sym *b, int keep, int ellipsis)
1925 #ifdef CONFIG_TCC_BCHECK
1926 if (!ellipsis && !keep && tcc_state->do_bounds_check)
1927 add_local_bounds(*ptop, b);
1928 #endif
1929 tcc_add_debug_info (param, *ptop, b);
1930 sym_pop(ptop, b, keep);
1933 static void incr_bf_adr(int o)
1935 vtop->type = char_pointer_type;
1936 gaddrof();
1937 vpushs(o);
1938 gen_op('+');
1939 vtop->type.t = VT_BYTE | VT_UNSIGNED;
1940 vtop->r |= VT_LVAL;
1943 /* single-byte load mode for packed or otherwise unaligned bitfields */
1944 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1946 int n, o, bits;
1947 save_reg_upstack(vtop->r, 1);
1948 vpush64(type->t & VT_BTYPE, 0); // B X
1949 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1950 do {
1951 vswap(); // X B
1952 incr_bf_adr(o);
1953 vdup(); // X B B
1954 n = 8 - bit_pos;
1955 if (n > bit_size)
1956 n = bit_size;
1957 if (bit_pos)
1958 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
1959 if (n < 8)
1960 vpushi((1 << n) - 1), gen_op('&');
1961 gen_cast(type);
1962 if (bits)
1963 vpushi(bits), gen_op(TOK_SHL);
1964 vrotb(3); // B Y X
1965 gen_op('|'); // B X
1966 bits += n, bit_size -= n, o = 1;
1967 } while (bit_size);
1968 vswap(), vpop();
1969 if (!(type->t & VT_UNSIGNED)) {
1970 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
1971 vpushi(n), gen_op(TOK_SHL);
1972 vpushi(n), gen_op(TOK_SAR);
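/* Illustration (added): a bitfield that is misaligned or crosses byte
   boundaries (e.g. in a packed struct) is reassembled above at most 8 bits
   per iteration: each byte is loaded, shifted into position and OR-ed into
   the accumulator, and the trailing SHL/SAR pair sign-extends the result
   when the field type is signed. */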
1976 /* single-byte store mode for packed or otherwise unaligned bitfields */
1977 static void store_packed_bf(int bit_pos, int bit_size)
1979 int bits, n, o, m, c;
1981 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
1982 vswap(); // X B
1983 save_reg_upstack(vtop->r, 1);
1984 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
1985 do {
1986 incr_bf_adr(o); // X B
1987 vswap(); //B X
1988 c ? vdup() : gv_dup(); // B V X
1989 vrott(3); // X B V
1990 if (bits)
1991 vpushi(bits), gen_op(TOK_SHR);
1992 if (bit_pos)
1993 vpushi(bit_pos), gen_op(TOK_SHL);
1994 n = 8 - bit_pos;
1995 if (n > bit_size)
1996 n = bit_size;
1997 if (n < 8) {
1998 m = ((1 << n) - 1) << bit_pos;
1999 vpushi(m), gen_op('&'); // X B V1
2000 vpushv(vtop-1); // X B V1 B
2001 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
2002 gen_op('&'); // X B V1 B1
2003 gen_op('|'); // X B V2
2005 vdup(), vtop[-1] = vtop[-2]; // X B B V2
2006 vstore(), vpop(); // X B
2007 bits += n, bit_size -= n, bit_pos = 0, o = 1;
2008 } while (bit_size);
2009 vpop(), vpop();
2012 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
2014 int t;
2015 if (0 == sv->type.ref)
2016 return 0;
2017 t = sv->type.ref->auxtype;
2018 if (t != -1 && t != VT_STRUCT) {
2019 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
2020 sv->r |= VT_LVAL;
2022 return t;
2025 /* store vtop into a register belonging to class 'rc'. lvalues are
2026 converted to values. Cannot be used if the value cannot be converted
2027 to a register value (such as structures). */
2028 ST_FUNC int gv(int rc)
2030 int r, r2, r_ok, r2_ok, rc2, bt;
2031 int bit_pos, bit_size, size, align;
2033 /* NOTE: get_reg can modify vstack[] */
2034 if (vtop->type.t & VT_BITFIELD) {
2035 CType type;
2037 bit_pos = BIT_POS(vtop->type.t);
2038 bit_size = BIT_SIZE(vtop->type.t);
2039 /* remove bit field info to avoid loops */
2040 vtop->type.t &= ~VT_STRUCT_MASK;
2042 type.ref = NULL;
2043 type.t = vtop->type.t & VT_UNSIGNED;
2044 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
2045 type.t |= VT_UNSIGNED;
2047 r = adjust_bf(vtop, bit_pos, bit_size);
2049 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2050 type.t |= VT_LLONG;
2051 else
2052 type.t |= VT_INT;
2054 if (r == VT_STRUCT) {
2055 load_packed_bf(&type, bit_pos, bit_size);
2056 } else {
2057 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
2058 /* cast to int to propagate signedness in following ops */
2059 gen_cast(&type);
2060 /* generate shifts */
2061 vpushi(bits - (bit_pos + bit_size));
2062 gen_op(TOK_SHL);
2063 vpushi(bits - bit_size);
2064 /* NOTE: transformed to SHR if unsigned */
2065 gen_op(TOK_SAR);
2067 r = gv(rc);
2068 } else {
2069 if (is_float(vtop->type.t) &&
2070 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
2071 unsigned long offset;
2072 /* CPUs usually cannot use float constants, so we store them
2073 generically in the data segment */
2074 size = type_size(&vtop->type, &align);
2075 if (NODATA_WANTED)
2076 size = 0, align = 1;
2077 offset = section_add(data_section, size, align);
2078 vpush_ref(&vtop->type, data_section, offset, size);
2079 vswap();
2080 init_putv(&vtop->type, data_section, offset);
2081 vtop->r |= VT_LVAL;
2083 #ifdef CONFIG_TCC_BCHECK
2084 if (vtop->r & VT_MUSTBOUND)
2085 gbound();
2086 #endif
2088 bt = vtop->type.t & VT_BTYPE;
2090 #ifdef TCC_TARGET_RISCV64
2091 /* XXX mega hack */
2092 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
2093 rc = RC_INT;
2094 #endif
2095 rc2 = RC2_TYPE(bt, rc);
2097 /* need to reload if:
2098 - constant
2099 - lvalue (need to dereference pointer)
2100 - already a register, but not in the right class */
2101 r = vtop->r & VT_VALMASK;
2102 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
2103 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
2105 if (!r_ok || !r2_ok) {
2106 if (!r_ok)
2107 r = get_reg(rc);
2108 if (rc2) {
2109 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
2110 int original_type = vtop->type.t;
2112 /* two register type load :
2113 expand to two words temporarily */
2114 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
2115 /* load constant */
2116 unsigned long long ll = vtop->c.i;
2117 vtop->c.i = ll; /* first word */
2118 load(r, vtop);
2119 vtop->r = r; /* save register value */
2120 vpushi(ll >> 32); /* second word */
2121 } else if (vtop->r & VT_LVAL) {
2122 /* We do not want to modify the long long pointer here.
2123 So we save any other instances down the stack */
2124 save_reg_upstack(vtop->r, 1);
2125 /* load from memory */
2126 vtop->type.t = load_type;
2127 load(r, vtop);
2128 vdup();
2129 vtop[-1].r = r; /* save register value */
2130 /* increment pointer to get second word */
2131 vtop->type.t = VT_PTRDIFF_T;
2132 gaddrof();
2133 vpushs(PTR_SIZE);
2134 gen_op('+');
2135 vtop->r |= VT_LVAL;
2136 vtop->type.t = load_type;
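/* Editor's sketch of the lvalue path above, assuming a little-endian
   32-bit target (PTR_SIZE == 4): for a 'long long' stored at address p,
   the word at p is loaded into r, the address is then advanced by
   PTR_SIZE, and the word at p + 4 is loaded into the second register r2
   by the common code further below. */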
2137 } else {
2138 /* move registers */
2139 if (!r_ok)
2140 load(r, vtop);
2141 if (r2_ok && vtop->r2 < VT_CONST)
2142 goto done;
2143 vdup();
2144 vtop[-1].r = r; /* save register value */
2145 vtop->r = vtop[-1].r2;
2147 /* Allocate second register. Here we rely on the fact that
2148 get_reg() tries first to free r2 of an SValue. */
2149 r2 = get_reg(rc2);
2150 load(r2, vtop);
2151 vpop();
2152 /* write second register */
2153 vtop->r2 = r2;
2154 done:
2155 vtop->type.t = original_type;
2156 } else {
2157 if (vtop->r == VT_CMP)
2158 vset_VT_JMP();
2159 /* one register type load */
2160 load(r, vtop);
2163 vtop->r = r;
2164 #ifdef TCC_TARGET_C67
2165 /* uses register pairs for doubles */
2166 if (bt == VT_DOUBLE)
2167 vtop->r2 = r+1;
2168 #endif
2170 return r;
2173 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2174 ST_FUNC void gv2(int rc1, int rc2)
2176 /* generate more generic register first. But VT_JMP or VT_CMP
2177 values must be generated first in all cases to avoid possible
2178 reload errors */
2179 if (vtop->r != VT_CMP && rc1 <= rc2) {
2180 vswap();
2181 gv(rc1);
2182 vswap();
2183 gv(rc2);
2184 /* test if reload is needed for first register */
2185 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
2186 vswap();
2187 gv(rc1);
2188 vswap();
2190 } else {
2191 gv(rc2);
2192 vswap();
2193 gv(rc1);
2194 vswap();
2195 /* test if reload is needed for first register */
2196 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
2197 gv(rc2);
2202 #if PTR_SIZE == 4
2203 /* expand 64bit on stack in two ints */
2204 ST_FUNC void lexpand(void)
2206 int u, v;
2207 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
2208 v = vtop->r & (VT_VALMASK | VT_LVAL);
2209 if (v == VT_CONST) {
2210 vdup();
2211 vtop[0].c.i >>= 32;
2212 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
2213 vdup();
2214 vtop[0].c.i += 4;
2215 } else {
2216 gv(RC_INT);
2217 vdup();
2218 vtop[0].r = vtop[-1].r2;
2219 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
2221 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
2223 #endif
2225 #if PTR_SIZE == 4
2226 /* build a long long from two ints */
2227 static void lbuild(int t)
2229 gv2(RC_INT, RC_INT);
2230 vtop[-1].r2 = vtop[0].r;
2231 vtop[-1].type.t = t;
2232 vpop();
2234 #endif
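/* Editor's note (illustrative): lexpand() splits a 64-bit stack entry
   into its two 32-bit halves (low word below, high word on top) and
   lbuild() performs the inverse.  For a constant this is pure
   arithmetic, e.g.
       0x1122334455667788ULL  ->  low 0x55667788, high 0x11223344
   which lbuild() recombines as (high << 32) | low. */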
2236 /* convert stack entry to register and duplicate its value in another
2237 register */
2238 static void gv_dup(void)
2240 int t, rc, r;
2242 t = vtop->type.t;
2243 #if PTR_SIZE == 4
2244 if ((t & VT_BTYPE) == VT_LLONG) {
2245 if (t & VT_BITFIELD) {
2246 gv(RC_INT);
2247 t = vtop->type.t;
2249 lexpand();
2250 gv_dup();
2251 vswap();
2252 vrotb(3);
2253 gv_dup();
2254 vrotb(4);
2255 /* stack: H L L1 H1 */
2256 lbuild(t);
2257 vrotb(3);
2258 vrotb(3);
2259 vswap();
2260 lbuild(t);
2261 vswap();
2262 return;
2264 #endif
2265 /* duplicate value */
2266 rc = RC_TYPE(t);
2267 gv(rc);
2268 r = get_reg(rc);
2269 vdup();
2270 load(r, vtop);
2271 vtop->r = r;
2274 #if PTR_SIZE == 4
2275 /* generate CPU independent (unsigned) long long operations */
2276 static void gen_opl(int op)
2278 int t, a, b, op1, c, i;
2279 int func;
2280 unsigned short reg_iret = REG_IRET;
2281 unsigned short reg_lret = REG_IRE2;
2282 SValue tmp;
2284 switch(op) {
2285 case '/':
2286 case TOK_PDIV:
2287 func = TOK___divdi3;
2288 goto gen_func;
2289 case TOK_UDIV:
2290 func = TOK___udivdi3;
2291 goto gen_func;
2292 case '%':
2293 func = TOK___moddi3;
2294 goto gen_mod_func;
2295 case TOK_UMOD:
2296 func = TOK___umoddi3;
2297 gen_mod_func:
2298 #ifdef TCC_ARM_EABI
2299 reg_iret = TREG_R2;
2300 reg_lret = TREG_R3;
2301 #endif
2302 gen_func:
2303 /* call generic long long function */
2304 vpush_global_sym(&func_old_type, func);
2305 vrott(3);
2306 gfunc_call(2);
2307 vpushi(0);
2308 vtop->r = reg_iret;
2309 vtop->r2 = reg_lret;
2310 break;
2311 case '^':
2312 case '&':
2313 case '|':
2314 case '*':
2315 case '+':
2316 case '-':
2317 //pv("gen_opl A",0,2);
2318 t = vtop->type.t;
2319 vswap();
2320 lexpand();
2321 vrotb(3);
2322 lexpand();
2323 /* stack: L1 H1 L2 H2 */
2324 tmp = vtop[0];
2325 vtop[0] = vtop[-3];
2326 vtop[-3] = tmp;
2327 tmp = vtop[-2];
2328 vtop[-2] = vtop[-3];
2329 vtop[-3] = tmp;
2330 vswap();
2331 /* stack: H1 H2 L1 L2 */
2332 //pv("gen_opl B",0,4);
2333 if (op == '*') {
2334 vpushv(vtop - 1);
2335 vpushv(vtop - 1);
2336 gen_op(TOK_UMULL);
2337 lexpand();
2338 /* stack: H1 H2 L1 L2 ML MH */
2339 for(i=0;i<4;i++)
2340 vrotb(6);
2341 /* stack: ML MH H1 H2 L1 L2 */
2342 tmp = vtop[0];
2343 vtop[0] = vtop[-2];
2344 vtop[-2] = tmp;
2345 /* stack: ML MH H1 L2 H2 L1 */
2346 gen_op('*');
2347 vrotb(3);
2348 vrotb(3);
2349 gen_op('*');
2350 /* stack: ML MH M1 M2 */
2351 gen_op('+');
2352 gen_op('+');
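/* Editor's note (illustrative): the decomposition above follows from
       (H1*2^32 + L1) * (H2*2^32 + L2)  mod 2^64
           = L1*L2 + ((H1*L2 + H2*L1) << 32)
   i.e. the low result word is the low half of the unsigned L1*L2
   product (ML), and the high word is its upper half (MH) plus the two
   32-bit cross products M1 and M2 computed just above. */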
2353 } else if (op == '+' || op == '-') {
2354 /* XXX: add non carry method too (for MIPS or alpha) */
2355 if (op == '+')
2356 op1 = TOK_ADDC1;
2357 else
2358 op1 = TOK_SUBC1;
2359 gen_op(op1);
2360 /* stack: H1 H2 (L1 op L2) */
2361 vrotb(3);
2362 vrotb(3);
2363 gen_op(op1 + 1); /* TOK_xxxC2 */
2364 } else {
2365 gen_op(op);
2366 /* stack: H1 H2 (L1 op L2) */
2367 vrotb(3);
2368 vrotb(3);
2369 /* stack: (L1 op L2) H1 H2 */
2370 gen_op(op);
2371 /* stack: (L1 op L2) (H1 op H2) */
2373 /* stack: L H */
2374 lbuild(t);
2375 break;
2376 case TOK_SAR:
2377 case TOK_SHR:
2378 case TOK_SHL:
2379 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
2380 t = vtop[-1].type.t;
2381 vswap();
2382 lexpand();
2383 vrotb(3);
2384 /* stack: L H shift */
2385 c = (int)vtop->c.i;
2386 /* constant: simpler */
2387 /* NOTE: all comments are for SHL. the other cases are
2388 done by swapping words */
2389 vpop();
2390 if (op != TOK_SHL)
2391 vswap();
2392 if (c >= 32) {
2393 /* stack: L H */
2394 vpop();
2395 if (c > 32) {
2396 vpushi(c - 32);
2397 gen_op(op);
2399 if (op != TOK_SAR) {
2400 vpushi(0);
2401 } else {
2402 gv_dup();
2403 vpushi(31);
2404 gen_op(TOK_SAR);
2406 vswap();
2407 } else {
2408 vswap();
2409 gv_dup();
2410 /* stack: H L L */
2411 vpushi(c);
2412 gen_op(op);
2413 vswap();
2414 vpushi(32 - c);
2415 if (op == TOK_SHL)
2416 gen_op(TOK_SHR);
2417 else
2418 gen_op(TOK_SHL);
2419 vrotb(3);
2420 /* stack: L L H */
2421 vpushi(c);
2422 if (op == TOK_SHL)
2423 gen_op(TOK_SHL);
2424 else
2425 gen_op(TOK_SHR);
2426 gen_op('|');
2428 if (op != TOK_SHL)
2429 vswap();
2430 lbuild(t);
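/* Editor's worked example (not part of the original sources): for
   'x << c' with 0 < c < 32 the sequence above produces
       low  = L << c
       high = (H << c) | (L >> (32 - c))
   and for c >= 32 it degenerates to low = 0, high = L << (c - 32).
   SHR/SAR are handled by the same code with the words swapped. */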
2431 } else {
2432 /* XXX: should provide a faster fallback on x86 ? */
2433 switch(op) {
2434 case TOK_SAR:
2435 func = TOK___ashrdi3;
2436 goto gen_func;
2437 case TOK_SHR:
2438 func = TOK___lshrdi3;
2439 goto gen_func;
2440 case TOK_SHL:
2441 func = TOK___ashldi3;
2442 goto gen_func;
2445 break;
2446 default:
2447 /* compare operations */
2448 t = vtop->type.t;
2449 vswap();
2450 lexpand();
2451 vrotb(3);
2452 lexpand();
2453 /* stack: L1 H1 L2 H2 */
2454 tmp = vtop[-1];
2455 vtop[-1] = vtop[-2];
2456 vtop[-2] = tmp;
2457 /* stack: L1 L2 H1 H2 */
2458 save_regs(4);
2459 /* compare high */
2460 op1 = op;
2461 /* when values are equal, we need to compare low words. since
2462 the jump is inverted, we invert the test too. */
2463 if (op1 == TOK_LT)
2464 op1 = TOK_LE;
2465 else if (op1 == TOK_GT)
2466 op1 = TOK_GE;
2467 else if (op1 == TOK_ULT)
2468 op1 = TOK_ULE;
2469 else if (op1 == TOK_UGT)
2470 op1 = TOK_UGE;
2471 a = 0;
2472 b = 0;
2473 gen_op(op1);
2474 if (op == TOK_NE) {
2475 b = gvtst(0, 0);
2476 } else {
2477 a = gvtst(1, 0);
2478 if (op != TOK_EQ) {
2479 /* generate non equal test */
2480 vpushi(0);
2481 vset_VT_CMP(TOK_NE);
2482 b = gvtst(0, 0);
2485 /* compare low. Always unsigned */
2486 op1 = op;
2487 if (op1 == TOK_LT)
2488 op1 = TOK_ULT;
2489 else if (op1 == TOK_LE)
2490 op1 = TOK_ULE;
2491 else if (op1 == TOK_GT)
2492 op1 = TOK_UGT;
2493 else if (op1 == TOK_GE)
2494 op1 = TOK_UGE;
2495 gen_op(op1);
2496 #if 0//def TCC_TARGET_I386
2497 if (op == TOK_NE) { gsym(b); break; }
2498 if (op == TOK_EQ) { gsym(a); break; }
2499 #endif
2500 gvtst_set(1, a);
2501 gvtst_set(0, b);
2502 break;
2505 #endif
2507 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2509 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2510 return (a ^ b) >> 63 ? -x : x;
2513 static int gen_opic_lt(uint64_t a, uint64_t b)
2515 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
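/* Editor's note (illustrative): both helpers emulate signed semantics
   with unsigned arithmetic.  gen_opic_sdiv() negates negative operands,
   divides, and restores the sign from a^b; gen_opic_lt() flips the sign
   bit of both operands so that an unsigned compare orders them as
   signed values, e.g. -1 < 1 becomes
   0x7fffffffffffffff < 0x8000000000000001. */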
2518 /* handle integer constant optimizations and various machine
2519 independent opt */
2520 static void gen_opic(int op)
2522 SValue *v1 = vtop - 1;
2523 SValue *v2 = vtop;
2524 int t1 = v1->type.t & VT_BTYPE;
2525 int t2 = v2->type.t & VT_BTYPE;
2526 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2527 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2528 uint64_t l1 = c1 ? v1->c.i : 0;
2529 uint64_t l2 = c2 ? v2->c.i : 0;
2530 int shm = (t1 == VT_LLONG) ? 63 : 31;
2532 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2533 l1 = ((uint32_t)l1 |
2534 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2535 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2536 l2 = ((uint32_t)l2 |
2537 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2539 if (c1 && c2) {
2540 switch(op) {
2541 case '+': l1 += l2; break;
2542 case '-': l1 -= l2; break;
2543 case '&': l1 &= l2; break;
2544 case '^': l1 ^= l2; break;
2545 case '|': l1 |= l2; break;
2546 case '*': l1 *= l2; break;
2548 case TOK_PDIV:
2549 case '/':
2550 case '%':
2551 case TOK_UDIV:
2552 case TOK_UMOD:
2553 /* if division by zero, generate explicit division */
2554 if (l2 == 0) {
2555 if (const_wanted && !(nocode_wanted & unevalmask))
2556 tcc_error("division by zero in constant");
2557 goto general_case;
2559 switch(op) {
2560 default: l1 = gen_opic_sdiv(l1, l2); break;
2561 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2562 case TOK_UDIV: l1 = l1 / l2; break;
2563 case TOK_UMOD: l1 = l1 % l2; break;
2565 break;
2566 case TOK_SHL: l1 <<= (l2 & shm); break;
2567 case TOK_SHR: l1 >>= (l2 & shm); break;
2568 case TOK_SAR:
2569 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2570 break;
2571 /* tests */
2572 case TOK_ULT: l1 = l1 < l2; break;
2573 case TOK_UGE: l1 = l1 >= l2; break;
2574 case TOK_EQ: l1 = l1 == l2; break;
2575 case TOK_NE: l1 = l1 != l2; break;
2576 case TOK_ULE: l1 = l1 <= l2; break;
2577 case TOK_UGT: l1 = l1 > l2; break;
2578 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2579 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2580 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2581 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2582 /* logical */
2583 case TOK_LAND: l1 = l1 && l2; break;
2584 case TOK_LOR: l1 = l1 || l2; break;
2585 default:
2586 goto general_case;
2588 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2589 l1 = ((uint32_t)l1 |
2590 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2591 v1->c.i = l1;
2592 vtop--;
2593 } else {
2594 /* if commutative ops, put c2 as constant */
2595 if (c1 && (op == '+' || op == '&' || op == '^' ||
2596 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2597 vswap();
2598 c2 = c1; //c = c1, c1 = c2, c2 = c;
2599 l2 = l1; //l = l1, l1 = l2, l2 = l;
2601 if (!const_wanted &&
2602 c1 && ((l1 == 0 &&
2603 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2604 (l1 == -1 && op == TOK_SAR))) {
2605 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2606 vtop--;
2607 } else if (!const_wanted &&
2608 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2609 (op == '|' &&
2610 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2611 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2612 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2613 if (l2 == 1)
2614 vtop->c.i = 0;
2615 vswap();
2616 vtop--;
2617 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2618 op == TOK_PDIV) &&
2619 l2 == 1) ||
2620 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2621 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2622 l2 == 0) ||
2623 (op == '&' &&
2624 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2625 /* filter out NOP operations like x*1, x-0, x&-1... */
2626 vtop--;
2627 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2628 /* try to use shifts instead of muls or divs */
2629 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2630 int n = -1;
2631 while (l2) {
2632 l2 >>= 1;
2633 n++;
2635 vtop->c.i = n;
2636 if (op == '*')
2637 op = TOK_SHL;
2638 else if (op == TOK_PDIV)
2639 op = TOK_SAR;
2640 else
2641 op = TOK_SHR;
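/* Editor's example (illustrative): with l2 == 8 the loop above yields
   n == 3, so 'x * 8' becomes 'x << 3', a TOK_PDIV by 8 becomes an
   arithmetic 'x >> 3' and a TOK_UDIV by 8 a logical shift.  Plain
   signed '/' is not rewritten here since an arithmetic shift rounds
   towards minus infinity rather than towards zero. */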
2643 goto general_case;
2644 } else if (c2 && (op == '+' || op == '-') &&
2645 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2646 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2647 /* symbol + constant case */
2648 if (op == '-')
2649 l2 = -l2;
2650 l2 += vtop[-1].c.i;
2651 /* The backends can't always deal with addends to symbols
2652 larger than +-1<<31. Don't construct such. */
2653 if ((int)l2 != l2)
2654 goto general_case;
2655 vtop--;
2656 vtop->c.i = l2;
2657 } else {
2658 general_case:
2659 /* call low level op generator */
2660 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2661 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2662 gen_opl(op);
2663 else
2664 gen_opi(op);
2669 /* generate a floating point operation with constant propagation */
2670 static void gen_opif(int op)
2672 int c1, c2;
2673 SValue *v1, *v2;
2674 #if defined _MSC_VER && defined __x86_64__
2675 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2676 volatile
2677 #endif
2678 long double f1, f2;
2680 v1 = vtop - 1;
2681 v2 = vtop;
2682 /* currently, we cannot do computations with forward symbols */
2683 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2684 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2685 if (c1 && c2) {
2686 if (v1->type.t == VT_FLOAT) {
2687 f1 = v1->c.f;
2688 f2 = v2->c.f;
2689 } else if (v1->type.t == VT_DOUBLE) {
2690 f1 = v1->c.d;
2691 f2 = v2->c.d;
2692 } else {
2693 f1 = v1->c.ld;
2694 f2 = v2->c.ld;
2697 /* NOTE: we only do constant propagation if the operands are finite
2698 numbers (not NaN or infinity) (ANSI spec) */
2699 if (!ieee_finite(f1) || !ieee_finite(f2))
2700 goto general_case;
2702 switch(op) {
2703 case '+': f1 += f2; break;
2704 case '-': f1 -= f2; break;
2705 case '*': f1 *= f2; break;
2706 case '/':
2707 if (f2 == 0.0) {
2708 /* If not in initializer we need to potentially generate
2709 FP exceptions at runtime, otherwise we want to fold. */
2710 if (!const_wanted)
2711 goto general_case;
2713 f1 /= f2;
2714 break;
2715 /* XXX: also handles tests ? */
2716 default:
2717 goto general_case;
2719 /* XXX: overflow test ? */
2720 if (v1->type.t == VT_FLOAT) {
2721 v1->c.f = f1;
2722 } else if (v1->type.t == VT_DOUBLE) {
2723 v1->c.d = f1;
2724 } else {
2725 v1->c.ld = f1;
2727 vtop--;
2728 } else {
2729 general_case:
2730 gen_opf(op);
2734 /* print a type. If 'varstr' is not NULL, then the variable is also
2735 printed in the type */
2736 /* XXX: union */
2737 /* XXX: add array and function pointers */
2738 static void type_to_str(char *buf, int buf_size,
2739 CType *type, const char *varstr)
2741 int bt, v, t;
2742 Sym *s, *sa;
2743 char buf1[256];
2744 const char *tstr;
2746 t = type->t;
2747 bt = t & VT_BTYPE;
2748 buf[0] = '\0';
2750 if (t & VT_EXTERN)
2751 pstrcat(buf, buf_size, "extern ");
2752 if (t & VT_STATIC)
2753 pstrcat(buf, buf_size, "static ");
2754 if (t & VT_TYPEDEF)
2755 pstrcat(buf, buf_size, "typedef ");
2756 if (t & VT_INLINE)
2757 pstrcat(buf, buf_size, "inline ");
2758 if (t & VT_VOLATILE)
2759 pstrcat(buf, buf_size, "volatile ");
2760 if (t & VT_CONSTANT)
2761 pstrcat(buf, buf_size, "const ");
2763 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2764 || ((t & VT_UNSIGNED)
2765 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2766 && !IS_ENUM(t)
2768 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2770 buf_size -= strlen(buf);
2771 buf += strlen(buf);
2773 switch(bt) {
2774 case VT_VOID:
2775 tstr = "void";
2776 goto add_tstr;
2777 case VT_BOOL:
2778 tstr = "_Bool";
2779 goto add_tstr;
2780 case VT_BYTE:
2781 tstr = "char";
2782 goto add_tstr;
2783 case VT_SHORT:
2784 tstr = "short";
2785 goto add_tstr;
2786 case VT_INT:
2787 tstr = "int";
2788 goto maybe_long;
2789 case VT_LLONG:
2790 tstr = "long long";
2791 maybe_long:
2792 if (t & VT_LONG)
2793 tstr = "long";
2794 if (!IS_ENUM(t))
2795 goto add_tstr;
2796 tstr = "enum ";
2797 goto tstruct;
2798 case VT_FLOAT:
2799 tstr = "float";
2800 goto add_tstr;
2801 case VT_DOUBLE:
2802 tstr = "double";
2803 if (!(t & VT_LONG))
2804 goto add_tstr;
2805 case VT_LDOUBLE:
2806 tstr = "long double";
2807 add_tstr:
2808 pstrcat(buf, buf_size, tstr);
2809 break;
2810 case VT_STRUCT:
2811 tstr = "struct ";
2812 if (IS_UNION(t))
2813 tstr = "union ";
2814 tstruct:
2815 pstrcat(buf, buf_size, tstr);
2816 v = type->ref->v & ~SYM_STRUCT;
2817 if (v >= SYM_FIRST_ANOM)
2818 pstrcat(buf, buf_size, "<anonymous>");
2819 else
2820 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2821 break;
2822 case VT_FUNC:
2823 s = type->ref;
2824 buf1[0]=0;
2825 if (varstr && '*' == *varstr) {
2826 pstrcat(buf1, sizeof(buf1), "(");
2827 pstrcat(buf1, sizeof(buf1), varstr);
2828 pstrcat(buf1, sizeof(buf1), ")");
2830 pstrcat(buf1, buf_size, "(");
2831 sa = s->next;
2832 while (sa != NULL) {
2833 char buf2[256];
2834 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2835 pstrcat(buf1, sizeof(buf1), buf2);
2836 sa = sa->next;
2837 if (sa)
2838 pstrcat(buf1, sizeof(buf1), ", ");
2840 if (s->f.func_type == FUNC_ELLIPSIS)
2841 pstrcat(buf1, sizeof(buf1), ", ...");
2842 pstrcat(buf1, sizeof(buf1), ")");
2843 type_to_str(buf, buf_size, &s->type, buf1);
2844 goto no_var;
2845 case VT_PTR:
2846 s = type->ref;
2847 if (t & VT_ARRAY) {
2848 if (varstr && '*' == *varstr)
2849 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2850 else
2851 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2852 type_to_str(buf, buf_size, &s->type, buf1);
2853 goto no_var;
2855 pstrcpy(buf1, sizeof(buf1), "*");
2856 if (t & VT_CONSTANT)
2857 pstrcat(buf1, buf_size, "const ");
2858 if (t & VT_VOLATILE)
2859 pstrcat(buf1, buf_size, "volatile ");
2860 if (varstr)
2861 pstrcat(buf1, sizeof(buf1), varstr);
2862 type_to_str(buf, buf_size, &s->type, buf1);
2863 goto no_var;
2865 if (varstr) {
2866 pstrcat(buf, buf_size, " ");
2867 pstrcat(buf, buf_size, varstr);
2869 no_var: ;
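/* Editor's example (illustrative, not part of the original sources):
   the recursion above builds the string inside out, e.g. a pointer to
   a function taking (char, double) and returning int is printed with
   varstr "p" as "int (*p)(char, double)", and an array of 4 pointers
   to const char as "const char *p[4]". */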
2872 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2874 char buf1[256], buf2[256];
2875 type_to_str(buf1, sizeof(buf1), st, NULL);
2876 type_to_str(buf2, sizeof(buf2), dt, NULL);
2877 tcc_error(fmt, buf1, buf2);
2880 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2882 char buf1[256], buf2[256];
2883 type_to_str(buf1, sizeof(buf1), st, NULL);
2884 type_to_str(buf2, sizeof(buf2), dt, NULL);
2885 tcc_warning(fmt, buf1, buf2);
2888 static int pointed_size(CType *type)
2890 int align;
2891 return type_size(pointed_type(type), &align);
2894 static void vla_runtime_pointed_size(CType *type)
2896 int align;
2897 vla_runtime_type_size(pointed_type(type), &align);
2900 static inline int is_null_pointer(SValue *p)
2902 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2903 return 0;
2904 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2905 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2906 ((p->type.t & VT_BTYPE) == VT_PTR &&
2907 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2908 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2909 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2913 /* compare function types. OLD functions match any new functions */
2914 static int is_compatible_func(CType *type1, CType *type2)
2916 Sym *s1, *s2;
2918 s1 = type1->ref;
2919 s2 = type2->ref;
2920 if (s1->f.func_call != s2->f.func_call)
2921 return 0;
2922 if (s1->f.func_type != s2->f.func_type
2923 && s1->f.func_type != FUNC_OLD
2924 && s2->f.func_type != FUNC_OLD)
2925 return 0;
2926 /* we should check the function return type for FUNC_OLD too
2927 but that causes problems with the internally used support
2928 functions such as TOK_memmove */
2929 if (s1->f.func_type == FUNC_OLD && !s1->next)
2930 return 1;
2931 if (s2->f.func_type == FUNC_OLD && !s2->next)
2932 return 1;
2933 for (;;) {
2934 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2935 return 0;
2936 s1 = s1->next;
2937 s2 = s2->next;
2938 if (!s1)
2939 return !s2;
2940 if (!s2)
2941 return 0;
2945 /* return true if type1 and type2 are the same. If unqualified is
2946 true, qualifiers on the types are ignored. */
2948 static int compare_types(CType *type1, CType *type2, int unqualified)
2950 int bt1, t1, t2;
2952 t1 = type1->t & VT_TYPE;
2953 t2 = type2->t & VT_TYPE;
2954 if (unqualified) {
2955 /* strip qualifiers before comparing */
2956 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
2957 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
2960 /* Default vs. explicit signedness only matters for char */
2961 if ((t1 & VT_BTYPE) != VT_BYTE) {
2962 t1 &= ~VT_DEFSIGN;
2963 t2 &= ~VT_DEFSIGN;
2965 /* XXX: bitfields ? */
2966 if (t1 != t2)
2967 return 0;
2969 if ((t1 & VT_ARRAY)
2970 && !(type1->ref->c < 0
2971 || type2->ref->c < 0
2972 || type1->ref->c == type2->ref->c))
2973 return 0;
2975 /* test more complicated cases */
2976 bt1 = t1 & VT_BTYPE;
2977 if (bt1 == VT_PTR) {
2978 type1 = pointed_type(type1);
2979 type2 = pointed_type(type2);
2980 return is_compatible_types(type1, type2);
2981 } else if (bt1 == VT_STRUCT) {
2982 return (type1->ref == type2->ref);
2983 } else if (bt1 == VT_FUNC) {
2984 return is_compatible_func(type1, type2);
2985 } else if (IS_ENUM(type1->t) || IS_ENUM(type2->t)) {
2986 return type1->ref == type2->ref;
2987 } else {
2988 return 1;
2992 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
2993 type is stored in DEST if non-null (except for pointer plus/minus). */
2994 static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
2996 CType *type1 = &op1->type, *type2 = &op2->type, type;
2997 int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
2998 int ret = 1;
3000 type.t = VT_VOID;
3001 type.ref = NULL;
3003 if (bt1 == VT_VOID || bt2 == VT_VOID) {
3004 ret = op == '?' ? 1 : 0;
3005 /* NOTE: as an extension, we accept void on only one side */
3006 type.t = VT_VOID;
3007 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3008 if (op == '+') ; /* Handled in caller */
3009 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3010 /* If one is a null ptr constant the result type is the other. */
3011 else if (is_null_pointer (op2)) type = *type1;
3012 else if (is_null_pointer (op1)) type = *type2;
3013 else if (bt1 != bt2) {
3014 /* accept comparison or cond-expr between pointer and integer
3015 with a warning */
3016 if ((op == '?' || (op >= TOK_ULT && op <= TOK_LOR))
3017 && (is_integer_btype(bt1) || is_integer_btype(bt2)))
3018 tcc_warning("pointer/integer mismatch in %s",
3019 op == '?' ? "conditional expression" : "comparison");
3020 else if (op != '-' || !is_integer_btype(bt2))
3021 ret = 0;
3022 type = *(bt1 == VT_PTR ? type1 : type2);
3023 } else {
3024 CType *pt1 = pointed_type(type1);
3025 CType *pt2 = pointed_type(type2);
3026 int pbt1 = pt1->t & VT_BTYPE;
3027 int pbt2 = pt2->t & VT_BTYPE;
3028 int newquals, copied = 0;
3029 if (pbt1 != VT_VOID && pbt2 != VT_VOID
3030 && !compare_types(pt1, pt2, 1/*unqualif*/)) {
3031 if (op != '?' && (op < TOK_ULT || op > TOK_LOR))
3032 ret = 0;
3033 else
3034 type_incompatibility_warning(type1, type2,
3035 op == '?'
3036 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3037 : "pointer type mismatch in comparison('%s' and '%s')");
3039 if (op == '?') {
3040 /* pointers to void get preferred, otherwise the
3041 pointed to types minus qualifs should be compatible */
3042 type = *((pbt1 == VT_VOID) ? type1 : type2);
3043 /* combine qualifs */
3044 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
3045 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
3046 & newquals)
3048 /* copy the pointer target symbol */
3049 type.ref = sym_push(SYM_FIELD, &type.ref->type,
3050 0, type.ref->c);
3051 copied = 1;
3052 pointed_type(&type)->t |= newquals;
3054 /* pointers to incomplete arrays get converted to
3055 pointers to completed ones if possible */
3056 if (pt1->t & VT_ARRAY
3057 && pt2->t & VT_ARRAY
3058 && pointed_type(&type)->ref->c < 0
3059 && (pt1->ref->c > 0 || pt2->ref->c > 0))
3061 if (!copied)
3062 type.ref = sym_push(SYM_FIELD, &type.ref->type,
3063 0, type.ref->c);
3064 pointed_type(&type)->ref =
3065 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
3066 0, pointed_type(&type)->ref->c);
3067 pointed_type(&type)->ref->c =
3068 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
3072 if (op >= TOK_ULT && op <= TOK_LOR)
3073 type.t = VT_SIZE_T;
3074 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
3075 if (op != '?' || !compare_types(type1, type2, 1))
3076 ret = 0;
3077 type = *type1;
3078 } else if (is_float(bt1) || is_float(bt2)) {
3079 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
3080 type.t = VT_LDOUBLE;
3081 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
3082 type.t = VT_DOUBLE;
3083 } else {
3084 type.t = VT_FLOAT;
3086 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
3087 /* cast to biggest op */
3088 type.t = VT_LLONG | VT_LONG;
3089 if (bt1 == VT_LLONG)
3090 type.t &= t1;
3091 if (bt2 == VT_LLONG)
3092 type.t &= t2;
3093 /* convert to unsigned if it does not fit in a long long */
3094 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
3095 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
3096 type.t |= VT_UNSIGNED;
3097 } else {
3098 /* integer operations */
3099 type.t = VT_INT | (VT_LONG & (t1 | t2));
3100 /* convert to unsigned if it does not fit in an integer */
3101 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
3102 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
3103 type.t |= VT_UNSIGNED;
3105 if (dest)
3106 *dest = type;
3107 return ret;
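/* Editor's examples of the resulting arithmetic types (illustrative):
       int + unsigned int          -> unsigned int
       int + long long             -> long long
       unsigned long long + long   -> unsigned long long
       float + double              -> double
       anything + long double      -> long double
   matching the usual arithmetic conversions of C99 6.3.1.8. */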
3110 /* generic gen_op: handles types problems */
3111 ST_FUNC void gen_op(int op)
3113 int u, t1, t2, bt1, bt2, t;
3114 CType type1, combtype;
3116 redo:
3117 t1 = vtop[-1].type.t;
3118 t2 = vtop[0].type.t;
3119 bt1 = t1 & VT_BTYPE;
3120 bt2 = t2 & VT_BTYPE;
3122 if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
3123 if (bt2 == VT_FUNC) {
3124 mk_pointer(&vtop->type);
3125 gaddrof();
3127 if (bt1 == VT_FUNC) {
3128 vswap();
3129 mk_pointer(&vtop->type);
3130 gaddrof();
3131 vswap();
3133 goto redo;
3134 } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
3135 tcc_error_noabort("invalid operand types for binary operation");
3136 vpop();
3137 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3138 /* at least one operand is a pointer */
3139 /* relational op: must be both pointers */
3140 if (op >= TOK_ULT && op <= TOK_LOR)
3141 goto std_op;
3142 /* if both pointers, then it must be the '-' op */
3143 if (bt1 == VT_PTR && bt2 == VT_PTR) {
3144 if (op != '-')
3145 tcc_error("cannot use pointers here");
3146 if (vtop[-1].type.t & VT_VLA) {
3147 vla_runtime_pointed_size(&vtop[-1].type);
3148 } else {
3149 vpushi(pointed_size(&vtop[-1].type));
3151 vrott(3);
3152 gen_opic(op);
3153 vtop->type.t = VT_PTRDIFF_T;
3154 vswap();
3155 gen_op(TOK_PDIV);
3156 } else {
3157 /* exactly one pointer : must be '+' or '-'. */
3158 if (op != '-' && op != '+')
3159 tcc_error("cannot use pointers here");
3160 /* Put pointer as first operand */
3161 if (bt2 == VT_PTR) {
3162 vswap();
3163 t = t1, t1 = t2, t2 = t;
3165 #if PTR_SIZE == 4
3166 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
3167 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3168 gen_cast_s(VT_INT);
3169 #endif
3170 type1 = vtop[-1].type;
3171 type1.t &= ~VT_ARRAY;
3172 if (vtop[-1].type.t & VT_VLA)
3173 vla_runtime_pointed_size(&vtop[-1].type);
3174 else {
3175 u = pointed_size(&vtop[-1].type);
3176 if (u < 0)
3177 tcc_error("unknown array element size");
3178 #if PTR_SIZE == 8
3179 vpushll(u);
3180 #else
3181 /* XXX: cast to int ? (long long case) */
3182 vpushi(u);
3183 #endif
3185 gen_op('*');
3186 #ifdef CONFIG_TCC_BCHECK
3187 if (tcc_state->do_bounds_check && !const_wanted) {
3188 /* if bounded pointers, we generate a special code to
3189 test bounds */
3190 if (op == '-') {
3191 vpushi(0);
3192 vswap();
3193 gen_op('-');
3195 gen_bounded_ptr_add();
3196 } else
3197 #endif
3199 gen_opic(op);
3201 /* restore the type in case gen_opic() swapped the operands */
3202 vtop->type = type1;
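/* Editor's sketch (illustrative): for 'int *p', the code above turns
   'p + n' into 'p + n * sizeof(int)' (the vpushi/gen_op('*') pair),
   while the two-pointer branch further up turns 'p - q' into
   '(p - q) / sizeof(int)' via TOK_PDIV, with result type ptrdiff_t. */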
3204 } else {
3205 /* floats can only be used for a few operations */
3206 if (is_float(combtype.t)
3207 && op != '+' && op != '-' && op != '*' && op != '/'
3208 && (op < TOK_ULT || op > TOK_LOR))
3209 tcc_error("invalid operands for binary operation");
3210 else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
3211 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
3212 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
3213 t |= VT_UNSIGNED;
3214 t |= (VT_LONG & t1);
3215 combtype.t = t;
3217 std_op:
3218 t = t2 = combtype.t;
3219 /* XXX: currently, some unsigned operations are explicit, so
3220 we modify them here */
3221 if (t & VT_UNSIGNED) {
3222 if (op == TOK_SAR)
3223 op = TOK_SHR;
3224 else if (op == '/')
3225 op = TOK_UDIV;
3226 else if (op == '%')
3227 op = TOK_UMOD;
3228 else if (op == TOK_LT)
3229 op = TOK_ULT;
3230 else if (op == TOK_GT)
3231 op = TOK_UGT;
3232 else if (op == TOK_LE)
3233 op = TOK_ULE;
3234 else if (op == TOK_GE)
3235 op = TOK_UGE;
3237 vswap();
3238 gen_cast_s(t);
3239 vswap();
3240 /* special case for shifts and long long: we keep the shift as
3241 an integer */
3242 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
3243 t2 = VT_INT;
3244 gen_cast_s(t2);
3245 if (is_float(t))
3246 gen_opif(op);
3247 else
3248 gen_opic(op);
3249 if (op >= TOK_ULT && op <= TOK_LOR) {
3250 /* relational op: the result is an int */
3251 vtop->type.t = VT_INT;
3252 } else {
3253 vtop->type.t = t;
3256 // Make sure that we have converted to an rvalue:
3257 if (vtop->r & VT_LVAL)
3258 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
3261 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3262 #define gen_cvt_itof1 gen_cvt_itof
3263 #else
3264 /* generic itof for unsigned long long case */
3265 static void gen_cvt_itof1(int t)
3267 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
3268 (VT_LLONG | VT_UNSIGNED)) {
3270 if (t == VT_FLOAT)
3271 vpush_global_sym(&func_old_type, TOK___floatundisf);
3272 #if LDOUBLE_SIZE != 8
3273 else if (t == VT_LDOUBLE)
3274 vpush_global_sym(&func_old_type, TOK___floatundixf);
3275 #endif
3276 else
3277 vpush_global_sym(&func_old_type, TOK___floatundidf);
3278 vrott(2);
3279 gfunc_call(1);
3280 vpushi(0);
3281 PUT_R_RET(vtop, t);
3282 } else {
3283 gen_cvt_itof(t);
3286 #endif
3288 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3289 #define gen_cvt_ftoi1 gen_cvt_ftoi
3290 #else
3291 /* generic ftoi for unsigned long long case */
3292 static void gen_cvt_ftoi1(int t)
3294 int st;
3295 if (t == (VT_LLONG | VT_UNSIGNED)) {
3296 /* not handled natively */
3297 st = vtop->type.t & VT_BTYPE;
3298 if (st == VT_FLOAT)
3299 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
3300 #if LDOUBLE_SIZE != 8
3301 else if (st == VT_LDOUBLE)
3302 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
3303 #endif
3304 else
3305 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
3306 vrott(2);
3307 gfunc_call(1);
3308 vpushi(0);
3309 PUT_R_RET(vtop, t);
3310 } else {
3311 gen_cvt_ftoi(t);
3314 #endif
3316 /* special delayed cast for char/short */
3317 static void force_charshort_cast(void)
3319 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3320 int dbt = vtop->type.t;
3321 vtop->r &= ~VT_MUSTCAST;
3322 vtop->type.t = sbt;
3323 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3324 vtop->type.t = dbt;
3327 static void gen_cast_s(int t)
3329 CType type;
3330 type.t = t;
3331 type.ref = NULL;
3332 gen_cast(&type);
3335 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3336 static void gen_cast(CType *type)
3338 int sbt, dbt, sf, df, c;
3339 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
3341 /* special delayed cast for char/short */
3342 if (vtop->r & VT_MUSTCAST)
3343 force_charshort_cast();
3345 /* bitfields first get cast to ints */
3346 if (vtop->type.t & VT_BITFIELD)
3347 gv(RC_INT);
3349 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
3350 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
3351 if (sbt == VT_FUNC)
3352 sbt = VT_PTR;
3354 again:
3355 if (sbt != dbt) {
3356 sf = is_float(sbt);
3357 df = is_float(dbt);
3358 dbt_bt = dbt & VT_BTYPE;
3359 sbt_bt = sbt & VT_BTYPE;
3361 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3362 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3363 c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
3364 #endif
3365 if (c) {
3366 /* constant case: we can do it now */
3367 /* XXX: in ISOC, cannot do it if error in convert */
3368 if (sbt == VT_FLOAT)
3369 vtop->c.ld = vtop->c.f;
3370 else if (sbt == VT_DOUBLE)
3371 vtop->c.ld = vtop->c.d;
3373 if (df) {
3374 if (sbt_bt == VT_LLONG) {
3375 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
3376 vtop->c.ld = vtop->c.i;
3377 else
3378 vtop->c.ld = -(long double)-vtop->c.i;
3379 } else if(!sf) {
3380 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
3381 vtop->c.ld = (uint32_t)vtop->c.i;
3382 else
3383 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
3386 if (dbt == VT_FLOAT)
3387 vtop->c.f = (float)vtop->c.ld;
3388 else if (dbt == VT_DOUBLE)
3389 vtop->c.d = (double)vtop->c.ld;
3390 } else if (sf && dbt == VT_BOOL) {
3391 vtop->c.i = (vtop->c.ld != 0);
3392 } else {
3393 if(sf)
3394 vtop->c.i = vtop->c.ld;
3395 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
3397 else if (sbt & VT_UNSIGNED)
3398 vtop->c.i = (uint32_t)vtop->c.i;
3399 else
3400 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
3402 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
3404 else if (dbt == VT_BOOL)
3405 vtop->c.i = (vtop->c.i != 0);
3406 else {
3407 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
3408 dbt_bt == VT_SHORT ? 0xffff :
3409 0xffffffff;
3410 vtop->c.i &= m;
3411 if (!(dbt & VT_UNSIGNED))
3412 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
3415 goto done;
3417 } else if (dbt == VT_BOOL
3418 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
3419 == (VT_CONST | VT_SYM)) {
3420 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3421 vtop->r = VT_CONST;
3422 vtop->c.i = 1;
3423 goto done;
3426 /* cannot generate code for global or static initializers */
3427 if (STATIC_DATA_WANTED)
3428 goto done;
3430 /* non constant case: generate code */
3431 if (dbt == VT_BOOL) {
3432 gen_test_zero(TOK_NE);
3433 goto done;
3436 if (sf || df) {
3437 if (sf && df) {
3438 /* convert from fp to fp */
3439 gen_cvt_ftof(dbt);
3440 } else if (df) {
3441 /* convert int to fp */
3442 gen_cvt_itof1(dbt);
3443 } else {
3444 /* convert fp to int */
3445 sbt = dbt;
3446 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
3447 sbt = VT_INT;
3448 gen_cvt_ftoi1(sbt);
3449 goto again; /* may need char/short cast */
3451 goto done;
3454 ds = btype_size(dbt_bt);
3455 ss = btype_size(sbt_bt);
3456 if (ds == 0 || ss == 0) {
3457 if (dbt_bt == VT_VOID)
3458 goto done;
3459 cast_error(&vtop->type, type);
3461 if (IS_ENUM(type->t) && type->ref->c < 0)
3462 tcc_error("cast to incomplete type");
3464 /* same size and no sign conversion needed */
3465 if (ds == ss && ds >= 4)
3466 goto done;
3467 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
3468 tcc_warning("cast between pointer and integer of different size");
3469 if (sbt_bt == VT_PTR) {
3470 /* put integer type to allow logical operations below */
3471 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
3475 /* the processor allows { int a = 0, b = *(char*)&a; }
3476 That means that if we cast to a smaller width, we can just
3477 change the type and still read it later. */
3478 #define ALLOW_SUBTYPE_ACCESS 1
3480 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
3481 /* value still in memory */
3482 if (ds <= ss)
3483 goto done;
3484 /* ss <= 4 here */
3485 if (ds <= 4) {
3486 gv(RC_INT);
3487 goto done; /* no 64bit involved */
3490 gv(RC_INT);
3492 trunc = 0;
3493 #if PTR_SIZE == 4
3494 if (ds == 8) {
3495 /* generate high word */
3496 if (sbt & VT_UNSIGNED) {
3497 vpushi(0);
3498 gv(RC_INT);
3499 } else {
3500 gv_dup();
3501 vpushi(31);
3502 gen_op(TOK_SAR);
3504 lbuild(dbt);
3505 } else if (ss == 8) {
3506 /* from long long: just take low order word */
3507 lexpand();
3508 vpop();
3510 ss = 4;
3512 #elif PTR_SIZE == 8
3513 if (ds == 8) {
3514 /* need to convert from 32bit to 64bit */
3515 if (sbt & VT_UNSIGNED) {
3516 #if defined(TCC_TARGET_RISCV64)
3517 /* RISC-V keeps 32bit vals in registers sign-extended.
3518 So here we need a zero-extension. */
3519 trunc = 32;
3520 #else
3521 goto done;
3522 #endif
3523 } else {
3524 gen_cvt_sxtw();
3525 goto done;
3527 ss = ds, ds = 4, dbt = sbt;
3528 } else if (ss == 8) {
3529 /* XXX some architectures (e.g. risc-v) would like it
3530 better for this merely being a 32-to-64 sign or zero-
3531 extension. */
3532 trunc = 32; /* zero upper 32 bits */
3533 } else {
3534 ss = 4;
3536 #endif
3538 if (ds >= ss)
3539 goto done;
3540 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3541 if (ss == 4) {
3542 gen_cvt_csti(dbt);
3543 goto done;
3545 #endif
3546 bits = (ss - ds) * 8;
3547 /* for unsigned, gen_op will convert SAR to SHR */
3548 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
3549 vpushi(bits);
3550 gen_op(TOK_SHL);
3551 vpushi(bits - trunc);
3552 gen_op(TOK_SAR);
3553 vpushi(trunc);
3554 gen_op(TOK_SHR);
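/* Editor's note (illustrative): on targets without the gen_cvt_csti()
   shortcut above, narrowing a register value from int to signed char
   uses bits == 24, i.e. 'x << 24' followed by an arithmetic 'x >> 24'
   (SHR instead of SAR when the destination is unsigned).  The extra
   shift by 'trunc' only matters when a 64-bit register must have its
   upper 32 bits cleared (trunc == 32). */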
3556 done:
3557 vtop->type = *type;
3558 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
3561 /* return type size as known at compile time. Put alignment at 'a' */
3562 ST_FUNC int type_size(CType *type, int *a)
3564 Sym *s;
3565 int bt;
3567 bt = type->t & VT_BTYPE;
3568 if (bt == VT_STRUCT) {
3569 /* struct/union */
3570 s = type->ref;
3571 *a = s->r;
3572 return s->c;
3573 } else if (bt == VT_PTR) {
3574 if (type->t & VT_ARRAY) {
3575 int ts;
3577 s = type->ref;
3578 ts = type_size(&s->type, a);
3580 if (ts < 0 && s->c < 0)
3581 ts = -ts;
3583 return ts * s->c;
3584 } else {
3585 *a = PTR_SIZE;
3586 return PTR_SIZE;
3588 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3589 return -1; /* incomplete enum */
3590 } else if (bt == VT_LDOUBLE) {
3591 *a = LDOUBLE_ALIGN;
3592 return LDOUBLE_SIZE;
3593 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
3594 #ifdef TCC_TARGET_I386
3595 #ifdef TCC_TARGET_PE
3596 *a = 8;
3597 #else
3598 *a = 4;
3599 #endif
3600 #elif defined(TCC_TARGET_ARM)
3601 #ifdef TCC_ARM_EABI
3602 *a = 8;
3603 #else
3604 *a = 4;
3605 #endif
3606 #else
3607 *a = 8;
3608 #endif
3609 return 8;
3610 } else if (bt == VT_INT || bt == VT_FLOAT) {
3611 *a = 4;
3612 return 4;
3613 } else if (bt == VT_SHORT) {
3614 *a = 2;
3615 return 2;
3616 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3617 *a = 8;
3618 return 16;
3619 } else {
3620 /* char, void, function, _Bool */
3621 *a = 1;
3622 return 1;
3626 /* push type size as known at runtime on top of the value stack. Put
3627 alignment at 'a' */
3628 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
3630 if (type->t & VT_VLA) {
3631 type_size(&type->ref->type, a);
3632 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3633 } else {
3634 vpushi(type_size(type, a));
3638 /* return the pointed type of t */
3639 static inline CType *pointed_type(CType *type)
3641 return &type->ref->type;
3644 /* modify type so that it is a pointer to the original type. */
3645 ST_FUNC void mk_pointer(CType *type)
3647 Sym *s;
3648 s = sym_push(SYM_FIELD, type, 0, -1);
3649 type->t = VT_PTR | (type->t & VT_STORAGE);
3650 type->ref = s;
3653 /* return true if type1 and type2 are exactly the same (including
3654 qualifiers). */
3656 static int is_compatible_types(CType *type1, CType *type2)
3658 return compare_types(type1,type2,0);
3661 /* return true if type1 and type2 are the same (ignoring qualifiers). */
3663 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3665 return compare_types(type1,type2,1);
3668 static void cast_error(CType *st, CType *dt)
3670 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
3673 /* verify type compatibility to store vtop in 'dt' type */
3674 static void verify_assign_cast(CType *dt)
3676 CType *st, *type1, *type2;
3677 int dbt, sbt, qualwarn, lvl;
3679 st = &vtop->type; /* source type */
3680 dbt = dt->t & VT_BTYPE;
3681 sbt = st->t & VT_BTYPE;
3682 if (dt->t & VT_CONSTANT)
3683 tcc_warning("assignment of read-only location");
3684 switch(dbt) {
3685 case VT_VOID:
3686 if (sbt != dbt)
3687 tcc_error("assignment to void expression");
3688 break;
3689 case VT_PTR:
3690 /* special cases for pointers */
3691 /* '0' can also be a pointer */
3692 if (is_null_pointer(vtop))
3693 break;
3694 /* accept implicit integer to pointer cast with warning */
3695 if (is_integer_btype(sbt)) {
3696 tcc_warning("assignment makes pointer from integer without a cast");
3697 break;
3699 type1 = pointed_type(dt);
3700 if (sbt == VT_PTR)
3701 type2 = pointed_type(st);
3702 else if (sbt == VT_FUNC)
3703 type2 = st; /* a function is implicitly a function pointer */
3704 else
3705 goto error;
3706 if (is_compatible_types(type1, type2))
3707 break;
3708 for (qualwarn = lvl = 0;; ++lvl) {
3709 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3710 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3711 qualwarn = 1;
3712 dbt = type1->t & (VT_BTYPE|VT_LONG);
3713 sbt = type2->t & (VT_BTYPE|VT_LONG);
3714 if (dbt != VT_PTR || sbt != VT_PTR)
3715 break;
3716 type1 = pointed_type(type1);
3717 type2 = pointed_type(type2);
3719 if (!is_compatible_unqualified_types(type1, type2)) {
3720 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3721 /* void * can match anything */
3722 } else if (dbt == sbt
3723 && is_integer_btype(sbt & VT_BTYPE)
3724 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3725 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3726 /* Like GCC, don't warn by default for mere changes
3727 in pointer target signedness. Do warn for different
3728 base types, though, in particular for unsigned enums
3729 and signed int targets. */
3730 } else {
3731 tcc_warning("assignment from incompatible pointer type");
3732 break;
3735 if (qualwarn)
3736 tcc_warning("assignment discards qualifiers from pointer target type");
3737 break;
3738 case VT_BYTE:
3739 case VT_SHORT:
3740 case VT_INT:
3741 case VT_LLONG:
3742 if (sbt == VT_PTR || sbt == VT_FUNC) {
3743 tcc_warning("assignment makes integer from pointer without a cast");
3744 } else if (sbt == VT_STRUCT) {
3745 goto case_VT_STRUCT;
3747 /* XXX: more tests */
3748 break;
3749 case VT_STRUCT:
3750 case_VT_STRUCT:
3751 if (!is_compatible_unqualified_types(dt, st)) {
3752 error:
3753 cast_error(st, dt);
3755 break;
3759 static void gen_assign_cast(CType *dt)
3761 verify_assign_cast(dt);
3762 gen_cast(dt);
3765 /* store vtop in lvalue pushed on stack */
3766 ST_FUNC void vstore(void)
3768 int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;
3770 ft = vtop[-1].type.t;
3771 sbt = vtop->type.t & VT_BTYPE;
3772 dbt = ft & VT_BTYPE;
3774 verify_assign_cast(&vtop[-1].type);
3776 if (sbt == VT_STRUCT) {
3777 /* if structure, only generate pointer */
3778 /* structure assignment : generate memcpy */
3779 /* XXX: optimize if small size */
3780 size = type_size(&vtop->type, &align);
3782 /* destination */
3783 vswap();
3784 #ifdef CONFIG_TCC_BCHECK
3785 if (vtop->r & VT_MUSTBOUND)
3786 gbound(); /* check would be wrong after gaddrof() */
3787 #endif
3788 vtop->type.t = VT_PTR;
3789 gaddrof();
3791 /* address of memcpy() */
3792 #ifdef TCC_ARM_EABI
3793 if(!(align & 7))
3794 vpush_global_sym(&func_old_type, TOK_memcpy8);
3795 else if(!(align & 3))
3796 vpush_global_sym(&func_old_type, TOK_memcpy4);
3797 else
3798 #endif
3799 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3800 vpush_global_sym(&func_old_type, TOK_memmove);
3802 vswap();
3803 /* source */
3804 vpushv(vtop - 2);
3805 #ifdef CONFIG_TCC_BCHECK
3806 if (vtop->r & VT_MUSTBOUND)
3807 gbound();
3808 #endif
3809 vtop->type.t = VT_PTR;
3810 gaddrof();
3811 /* type size */
3812 vpushi(size);
3813 gfunc_call(3);
3814 /* leave source on stack */
3816 } else if (ft & VT_BITFIELD) {
3817 /* bitfield store handling */
3819 /* save lvalue as expression result (example: s.b = s.a = n;) */
3820 vdup(), vtop[-1] = vtop[-2];
3822 bit_pos = BIT_POS(ft);
3823 bit_size = BIT_SIZE(ft);
3824 /* remove bit field info to avoid loops */
3825 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3827 if (dbt == VT_BOOL) {
3828 gen_cast(&vtop[-1].type);
3829 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3831 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3832 if (dbt != VT_BOOL) {
3833 gen_cast(&vtop[-1].type);
3834 dbt = vtop[-1].type.t & VT_BTYPE;
3836 if (r == VT_STRUCT) {
3837 store_packed_bf(bit_pos, bit_size);
3838 } else {
3839 unsigned long long mask = (1ULL << bit_size) - 1;
3840 if (dbt != VT_BOOL) {
3841 /* mask source */
3842 if (dbt == VT_LLONG)
3843 vpushll(mask);
3844 else
3845 vpushi((unsigned)mask);
3846 gen_op('&');
3848 /* shift source */
3849 vpushi(bit_pos);
3850 gen_op(TOK_SHL);
3851 vswap();
3852 /* duplicate destination */
3853 vdup();
3854 vrott(3);
3855 /* load destination, mask and or with source */
3856 if (dbt == VT_LLONG)
3857 vpushll(~(mask << bit_pos));
3858 else
3859 vpushi(~((unsigned)mask << bit_pos));
3860 gen_op('&');
3861 gen_op('|');
3862 /* store result */
3863 vstore();
3864 /* ... and discard */
3865 vpop();
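/* Editor's note (illustrative): the sequence above is the usual
   read-modify-write for a bit-field, i.e. with
       mask = (1ULL << bit_size) - 1
   it computes
       dest = (dest & ~(mask << bit_pos)) | ((src & mask) << bit_pos)
   and then stores the combined word back through the saved lvalue. */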
3867 } else if (dbt == VT_VOID) {
3868 --vtop;
3869 } else {
3870 /* optimize char/short casts */
3871 delayed_cast = 0;
3872 if ((dbt == VT_BYTE || dbt == VT_SHORT)
3873 && is_integer_btype(sbt)
3875 if ((vtop->r & VT_MUSTCAST)
3876 && btype_size(dbt) > btype_size(sbt)
3878 force_charshort_cast();
3879 delayed_cast = 1;
3880 } else {
3881 gen_cast(&vtop[-1].type);
3884 #ifdef CONFIG_TCC_BCHECK
3885 /* bound check case */
3886 if (vtop[-1].r & VT_MUSTBOUND) {
3887 vswap();
3888 gbound();
3889 vswap();
3891 #endif
3892 gv(RC_TYPE(dbt)); /* generate value */
3894 if (delayed_cast) {
3895 vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
3896 //tcc_warning("deley cast %x -> %x", sbt, dbt);
3897 vtop->type.t = ft & VT_TYPE;
3900 /* if lvalue was saved on stack, must read it */
3901 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3902 SValue sv;
3903 r = get_reg(RC_INT);
3904 sv.type.t = VT_PTRDIFF_T;
3905 sv.r = VT_LOCAL | VT_LVAL;
3906 sv.c.i = vtop[-1].c.i;
3907 load(r, &sv);
3908 vtop[-1].r = r | VT_LVAL;
3911 r = vtop->r & VT_VALMASK;
3912 /* two word case handling :
3913 store second register at word + 4 (or +8 for x86-64) */
3914 if (USING_TWO_WORDS(dbt)) {
3915 int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
3916 vtop[-1].type.t = load_type;
3917 store(r, vtop - 1);
3918 vswap();
3919 /* convert to int to increment easily */
3920 vtop->type.t = VT_PTRDIFF_T;
3921 gaddrof();
3922 vpushs(PTR_SIZE);
3923 gen_op('+');
3924 vtop->r |= VT_LVAL;
3925 vswap();
3926 vtop[-1].type.t = load_type;
3927 /* XXX: it works because r2 is spilled last ! */
3928 store(vtop->r2, vtop - 1);
3929 } else {
3930 /* single word */
3931 store(r, vtop - 1);
3933 vswap();
3934 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
3938 /* 'post' selects post or pre increment/decrement; c is the token ++ or -- */
3939 ST_FUNC void inc(int post, int c)
3941 test_lvalue();
3942 vdup(); /* save lvalue */
3943 if (post) {
3944 gv_dup(); /* duplicate value */
3945 vrotb(3);
3946 vrotb(3);
3948 /* add constant */
3949 vpushi(c - TOK_MID);
3950 gen_op('+');
3951 vstore(); /* store value */
3952 if (post)
3953 vpop(); /* if post op, return saved value */
3956 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
3958 /* read the string */
3959 if (tok != TOK_STR)
3960 expect(msg);
3961 cstr_new(astr);
3962 while (tok == TOK_STR) {
3963 /* XXX: add \0 handling too ? */
3964 cstr_cat(astr, tokc.str.data, -1);
3965 next();
3967 cstr_ccat(astr, '\0');
3970 /* If I is >= 1 and a power of two, returns log2(i)+1.
3971 If I is 0 returns 0. */
3972 static int exact_log2p1(int i)
3974 int ret;
3975 if (!i)
3976 return 0;
3977 for (ret = 1; i >= 1 << 8; ret += 8)
3978 i >>= 8;
3979 if (i >= 1 << 4)
3980 ret += 4, i >>= 4;
3981 if (i >= 1 << 2)
3982 ret += 2, i >>= 2;
3983 if (i >= 1 << 1)
3984 ret++;
3985 return ret;
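/* Editor's examples (illustrative): exact_log2p1(1) == 1,
   exact_log2p1(8) == 4, exact_log2p1(16) == 5 and exact_log2p1(0) == 0;
   parse_attribute() below uses this to encode __attribute__((aligned(n)))
   as log2(n) + 1 in a small bit-field. */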
3988 /* Parse __attribute__((...)) GNUC extension. */
3989 static void parse_attribute(AttributeDef *ad)
3991 int t, n;
3992 CString astr;
3994 redo:
3995 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
3996 return;
3997 next();
3998 skip('(');
3999 skip('(');
4000 while (tok != ')') {
4001 if (tok < TOK_IDENT)
4002 expect("attribute name");
4003 t = tok;
4004 next();
4005 switch(t) {
4006 case TOK_CLEANUP1:
4007 case TOK_CLEANUP2:
4009 Sym *s;
4011 skip('(');
4012 s = sym_find(tok);
4013 if (!s) {
4014 tcc_warning("implicit declaration of function '%s'",
4015 get_tok_str(tok, &tokc));
4016 s = external_global_sym(tok, &func_old_type);
4018 else if ((s->type.t & VT_BTYPE) == VT_FUNC) {
4019 ad->cleanup_func = s;
4021 else {
4022 tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
4024 ad->cleanup_func = s;
4025 next();
4026 skip(')');
4027 break;
4029 case TOK_CONSTRUCTOR1:
4030 case TOK_CONSTRUCTOR2:
4031 ad->f.func_ctor = 1;
4032 break;
4033 case TOK_DESTRUCTOR1:
4034 case TOK_DESTRUCTOR2:
4035 ad->f.func_dtor = 1;
4036 break;
4037 case TOK_SECTION1:
4038 case TOK_SECTION2:
4039 skip('(');
4040 parse_mult_str(&astr, "section name");
4041 ad->section = find_section(tcc_state, (char *)astr.data);
4042 skip(')');
4043 cstr_free(&astr);
4044 break;
4045 case TOK_ALIAS1:
4046 case TOK_ALIAS2:
4047 skip('(');
4048 parse_mult_str(&astr, "alias(\"target\")");
4049 ad->alias_target = /* save string as token, for later */
4050 tok_alloc((char*)astr.data, astr.size-1)->tok;
4051 skip(')');
4052 cstr_free(&astr);
4053 break;
4054 case TOK_VISIBILITY1:
4055 case TOK_VISIBILITY2:
4056 skip('(');
4057 parse_mult_str(&astr,
4058 "visibility(\"default|hidden|internal|protected\")");
4059 if (!strcmp (astr.data, "default"))
4060 ad->a.visibility = STV_DEFAULT;
4061 else if (!strcmp (astr.data, "hidden"))
4062 ad->a.visibility = STV_HIDDEN;
4063 else if (!strcmp (astr.data, "internal"))
4064 ad->a.visibility = STV_INTERNAL;
4065 else if (!strcmp (astr.data, "protected"))
4066 ad->a.visibility = STV_PROTECTED;
4067 else
4068 expect("visibility(\"default|hidden|internal|protected\")");
4069 skip(')');
4070 cstr_free(&astr);
4071 break;
4072 case TOK_ALIGNED1:
4073 case TOK_ALIGNED2:
4074 if (tok == '(') {
4075 next();
4076 n = expr_const();
4077 if (n <= 0 || (n & (n - 1)) != 0)
4078 tcc_error("alignment must be a positive power of two");
4079 skip(')');
4080 } else {
4081 n = MAX_ALIGN;
4083 ad->a.aligned = exact_log2p1(n);
4084 if (n != 1 << (ad->a.aligned - 1))
4085 tcc_error("alignment of %d is larger than implemented", n);
4086 break;
4087 case TOK_PACKED1:
4088 case TOK_PACKED2:
4089 ad->a.packed = 1;
4090 break;
4091 case TOK_WEAK1:
4092 case TOK_WEAK2:
4093 ad->a.weak = 1;
4094 break;
4095 case TOK_UNUSED1:
4096 case TOK_UNUSED2:
4097 /* currently, no need to handle it because tcc does not
4098 track unused objects */
4099 break;
4100 case TOK_NORETURN1:
4101 case TOK_NORETURN2:
4102 ad->f.func_noreturn = 1;
4103 break;
4104 case TOK_CDECL1:
4105 case TOK_CDECL2:
4106 case TOK_CDECL3:
4107 ad->f.func_call = FUNC_CDECL;
4108 break;
4109 case TOK_STDCALL1:
4110 case TOK_STDCALL2:
4111 case TOK_STDCALL3:
4112 ad->f.func_call = FUNC_STDCALL;
4113 break;
4114 #ifdef TCC_TARGET_I386
4115 case TOK_REGPARM1:
4116 case TOK_REGPARM2:
4117 skip('(');
4118 n = expr_const();
4119 if (n > 3)
4120 n = 3;
4121 else if (n < 0)
4122 n = 0;
4123 if (n > 0)
4124 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
4125 skip(')');
4126 break;
4127 case TOK_FASTCALL1:
4128 case TOK_FASTCALL2:
4129 case TOK_FASTCALL3:
4130 ad->f.func_call = FUNC_FASTCALLW;
4131 break;
4132 #endif
4133 case TOK_MODE:
4134 skip('(');
4135 switch(tok) {
4136 case TOK_MODE_DI:
4137 ad->attr_mode = VT_LLONG + 1;
4138 break;
4139 case TOK_MODE_QI:
4140 ad->attr_mode = VT_BYTE + 1;
4141 break;
4142 case TOK_MODE_HI:
4143 ad->attr_mode = VT_SHORT + 1;
4144 break;
4145 case TOK_MODE_SI:
4146 case TOK_MODE_word:
4147 ad->attr_mode = VT_INT + 1;
4148 break;
4149 default:
4150 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
4151 break;
4153 next();
4154 skip(')');
4155 break;
4156 case TOK_DLLEXPORT:
4157 ad->a.dllexport = 1;
4158 break;
4159 case TOK_NODECORATE:
4160 ad->a.nodecorate = 1;
4161 break;
4162 case TOK_DLLIMPORT:
4163 ad->a.dllimport = 1;
4164 break;
4165 default:
4166 if (tcc_state->warn_unsupported)
4167 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
4168 /* skip parameters */
4169 if (tok == '(') {
4170 int parenthesis = 0;
4171 do {
4172 if (tok == '(')
4173 parenthesis++;
4174 else if (tok == ')')
4175 parenthesis--;
4176 next();
4177 } while (parenthesis && tok != -1);
4179 break;
4181 if (tok != ',')
4182 break;
4183 next();
4185 skip(')');
4186 skip(')');
4187 goto redo;
4190 static Sym * find_field (CType *type, int v, int *cumofs)
4192 Sym *s = type->ref;
4193 v |= SYM_FIELD;
4194 while ((s = s->next) != NULL) {
4195 if ((s->v & SYM_FIELD) &&
4196 (s->type.t & VT_BTYPE) == VT_STRUCT &&
4197 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
4198 Sym *ret = find_field (&s->type, v, cumofs);
4199 if (ret) {
4200 *cumofs += s->c;
4201 return ret;
4204 if (s->v == v)
4205 break;
4207 return s;
4210 static void struct_layout(CType *type, AttributeDef *ad)
4212 int size, align, maxalign, offset, c, bit_pos, bit_size;
4213 int packed, a, bt, prevbt, prev_bit_size;
4214 int pcc = !tcc_state->ms_bitfields;
4215 int pragma_pack = *tcc_state->pack_stack_ptr;
4216 Sym *f;
4218 maxalign = 1;
4219 offset = 0;
4220 c = 0;
4221 bit_pos = 0;
4222 prevbt = VT_STRUCT; /* make it never match */
4223 prev_bit_size = 0;
4225 //#define BF_DEBUG
4227 for (f = type->ref->next; f; f = f->next) {
4228 if (f->type.t & VT_BITFIELD)
4229 bit_size = BIT_SIZE(f->type.t);
4230 else
4231 bit_size = -1;
4232 size = type_size(&f->type, &align);
4233 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
4234 packed = 0;
4236 if (pcc && bit_size == 0) {
4237 /* in pcc mode, packing does not affect zero-width bitfields */
4239 } else {
4240 /* in pcc mode, attribute packed overrides if set. */
4241 if (pcc && (f->a.packed || ad->a.packed))
4242 align = packed = 1;
4244 /* pragma pack overrides align if it is smaller, and always packs bitfields */
4245 if (pragma_pack) {
4246 packed = 1;
4247 if (pragma_pack < align)
4248 align = pragma_pack;
4249 /* in pcc mode pragma pack also overrides individual align */
4250 if (pcc && pragma_pack < a)
4251 a = 0;
4254 /* some individual align was specified */
4255 if (a)
4256 align = a;
4258 if (type->ref->type.t == VT_UNION) {
4259 if (pcc && bit_size >= 0)
4260 size = (bit_size + 7) >> 3;
4261 offset = 0;
4262 if (size > c)
4263 c = size;
4265 } else if (bit_size < 0) {
4266 if (pcc)
4267 c += (bit_pos + 7) >> 3;
4268 c = (c + align - 1) & -align;
4269 offset = c;
4270 if (size > 0)
4271 c += size;
4272 bit_pos = 0;
4273 prevbt = VT_STRUCT;
4274 prev_bit_size = 0;
4276 } else {
4277 /* A bit-field. Layout is more complicated. There are two
4278 options: PCC (GCC) compatible and MS compatible */
4279 if (pcc) {
4280 /* In PCC layout a bit-field is placed adjacent to the
4281 preceding bit-fields, except if:
4282 - it has zero-width
4283 - an individual alignment was given
4284 - it would overflow its base type container and
4285 there is no packing */
4286 if (bit_size == 0) {
4287 new_field:
4288 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
4289 bit_pos = 0;
4290 } else if (f->a.aligned) {
4291 goto new_field;
4292 } else if (!packed) {
4293 int a8 = align * 8;
4294 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
4295 if (ofs > size / align)
4296 goto new_field;
4299 /* in pcc mode, long long bitfields have type int if they fit */
4300 if (size == 8 && bit_size <= 32)
4301 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
4303 while (bit_pos >= align * 8)
4304 c += align, bit_pos -= align * 8;
4305 offset = c;
4307 /* In PCC layout named bit-fields influence the alignment
4308 of the containing struct using the base type's alignment,
4309 except for packed fields (which here have correct align). */
4310 if (f->v & SYM_FIRST_ANOM
4311 // && bit_size // ??? gcc on ARM/rpi does that
4313 align = 1;
4315 } else {
4316 bt = f->type.t & VT_BTYPE;
4317 if ((bit_pos + bit_size > size * 8)
4318 || (bit_size > 0) == (bt != prevbt)
4320 c = (c + align - 1) & -align;
4321 offset = c;
4322 bit_pos = 0;
4323 /* In MS bitfield mode a bit-field run always uses
4324 at least as many bits as the underlying type.
4325 To start a new run it's also required that this
4326 or the last bit-field had non-zero width. */
4327 if (bit_size || prev_bit_size)
4328 c += size;
4330 /* In MS layout the record's alignment is normally
4331 influenced by the field, except for a zero-width
4332 field at the start of a run (but by further zero-width
4333 fields it is again). */
4334 if (bit_size == 0 && prevbt != bt)
4335 align = 1;
4336 prevbt = bt;
4337 prev_bit_size = bit_size;
4340 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4341 | (bit_pos << VT_STRUCT_SHIFT);
4342 bit_pos += bit_size;
4344 if (align > maxalign)
4345 maxalign = align;
4347 #ifdef BF_DEBUG
4348 printf("set field %s offset %-2d size %-2d align %-2d",
4349 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
4350 if (f->type.t & VT_BITFIELD) {
4351 printf(" pos %-2d bits %-2d",
4352 BIT_POS(f->type.t),
4353 BIT_SIZE(f->type.t)
4356 printf("\n");
4357 #endif
4359 f->c = offset;
4360 f->r = 0;
4363 if (pcc)
4364 c += (bit_pos + 7) >> 3;
4366 /* store size and alignment */
4367 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4368 if (a < maxalign)
4369 a = maxalign;
4370 type->ref->r = a;
4371 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4372 /* can happen if individual align for some member was given. In
4373 this case MSVC ignores maxalign when aligning the size */
4374 a = pragma_pack;
4375 if (a < bt)
4376 a = bt;
4378 c = (c + a - 1) & -a;
4379 type->ref->c = c;
4381 #ifdef BF_DEBUG
4382 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4383 #endif
4385 /* check whether we can access bitfields by their type */
4386 for (f = type->ref->next; f; f = f->next) {
4387 int s, px, cx, c0;
4388 CType t;
4390 if (0 == (f->type.t & VT_BITFIELD))
4391 continue;
4392 f->type.ref = f;
4393 f->auxtype = -1;
4394 bit_size = BIT_SIZE(f->type.t);
4395 if (bit_size == 0)
4396 continue;
4397 bit_pos = BIT_POS(f->type.t);
4398 size = type_size(&f->type, &align);
4399 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
4400 continue;
4402 /* try to access the field using a different type */
4403 c0 = -1, s = align = 1;
4404 for (;;) {
4405 px = f->c * 8 + bit_pos;
4406 cx = (px >> 3) & -align;
4407 px = px - (cx << 3);
4408 if (c0 == cx)
4409 break;
4410 s = (px + bit_size + 7) >> 3;
4411 if (s > 4) {
4412 t.t = VT_LLONG;
4413 } else if (s > 2) {
4414 t.t = VT_INT;
4415 } else if (s > 1) {
4416 t.t = VT_SHORT;
4417 } else {
4418 t.t = VT_BYTE;
4420 s = type_size(&t, &align);
4421 c0 = cx;
4424 if (px + bit_size <= s * 8 && cx + s <= c) {
4425 /* update offset and bit position */
4426 f->c = cx;
4427 bit_pos = px;
4428 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4429 | (bit_pos << VT_STRUCT_SHIFT);
4430 if (s != size)
4431 f->auxtype = t.t;
4432 #ifdef BF_DEBUG
4433 printf("FIX field %s offset %-2d size %-2d align %-2d "
4434 "pos %-2d bits %-2d\n",
4435 get_tok_str(f->v & ~SYM_FIELD, NULL),
4436 cx, s, align, px, bit_size);
4437 #endif
4438 } else {
4439 /* fall back to load/store single-byte wise */
4440 f->auxtype = VT_STRUCT;
4441 #ifdef BF_DEBUG
4442 printf("FIX field %s : load byte-wise\n",
4443 get_tok_str(f->v & ~SYM_FIELD, NULL));
4444 #endif
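/* Added illustrative sketch (assumption, not in the original source): the same
   declaration laid out by the two modes handled in struct_layout() above. */
#if 0
struct bf {
    char a : 3;
    int  b : 5;
};
/* pcc (GCC-compatible) layout: b is packed next to a inside one int
   container, so sizeof(struct bf) is typically 4.
   MS (-mms-bitfields) layout: the change of base type starts a new run,
   so the struct typically grows to 8 bytes. */
#endif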
4449 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4450 static void struct_decl(CType *type, int u)
4452 int v, c, size, align, flexible;
4453 int bit_size, bsize, bt;
4454 Sym *s, *ss, **ps;
4455 AttributeDef ad, ad1;
4456 CType type1, btype;
4458 memset(&ad, 0, sizeof ad);
4459 next();
4460 parse_attribute(&ad);
4461 if (tok != '{') {
4462 v = tok;
4463 next();
4464 /* struct already defined ? return it */
4465 if (v < TOK_IDENT)
4466 expect("struct/union/enum name");
4467 s = struct_find(v);
4468 if (s && (s->sym_scope == local_scope || tok != '{')) {
4469 if (u == s->type.t)
4470 goto do_decl;
4471 if (u == VT_ENUM && IS_ENUM(s->type.t))
4472 goto do_decl;
4473 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4475 } else {
4476 v = anon_sym++;
4478 /* Record the original enum/struct/union token. */
4479 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4480 type1.ref = NULL;
4481 /* we put an undefined size for struct/union */
4482 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4483 s->r = 0; /* default alignment is zero, as in gcc */
4484 do_decl:
4485 type->t = s->type.t;
4486 type->ref = s;
4488 if (tok == '{') {
4489 next();
4490 if (s->c != -1)
4491 tcc_error("struct/union/enum already defined");
4492 s->c = -2;
4493 /* the body cannot be empty */
4494 /* (in particular, empty enums are not allowed) */
4495 ps = &s->next;
4496 if (u == VT_ENUM) {
4497 long long ll = 0, pl = 0, nl = 0;
4498 CType t;
4499 t.ref = s;
4500 /* enum symbols have static storage */
4501 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4502 for(;;) {
4503 v = tok;
4504 if (v < TOK_UIDENT)
4505 expect("identifier");
4506 ss = sym_find(v);
4507 if (ss && !local_stack)
4508 tcc_error("redefinition of enumerator '%s'",
4509 get_tok_str(v, NULL));
4510 next();
4511 if (tok == '=') {
4512 next();
4513 ll = expr_const64();
4515 ss = sym_push(v, &t, VT_CONST, 0);
4516 ss->enum_val = ll;
4517 *ps = ss, ps = &ss->next;
4518 if (ll < nl)
4519 nl = ll;
4520 if (ll > pl)
4521 pl = ll;
4522 if (tok != ',')
4523 break;
4524 next();
4525 ll++;
4526 /* NOTE: we accept a trailing comma */
4527 if (tok == '}')
4528 break;
4530 skip('}');
4531 /* set integral type of the enum */
4532 t.t = VT_INT;
4533 if (nl >= 0) {
4534 if (pl != (unsigned)pl)
4535 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4536 t.t |= VT_UNSIGNED;
4537 } else if (pl != (int)pl || nl != (int)nl)
4538 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4539 s->type.t = type->t = t.t | VT_ENUM;
4540 s->c = 0;
4541 /* set type for enum members */
4542 for (ss = s->next; ss; ss = ss->next) {
4543 ll = ss->enum_val;
4544 if (ll == (int)ll) /* default is int if it fits */
4545 continue;
4546 if (t.t & VT_UNSIGNED) {
4547 ss->type.t |= VT_UNSIGNED;
4548 if (ll == (unsigned)ll)
4549 continue;
4551 ss->type.t = (ss->type.t & ~VT_BTYPE)
4552 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4554 } else {
4555 c = 0;
4556 flexible = 0;
4557 while (tok != '}') {
4558 if (!parse_btype(&btype, &ad1)) {
4559 skip(';');
4560 continue;
4562 while (1) {
4563 if (flexible)
4564 tcc_error("flexible array member '%s' not at the end of struct",
4565 get_tok_str(v, NULL));
4566 bit_size = -1;
4567 v = 0;
4568 type1 = btype;
4569 if (tok != ':') {
4570 if (tok != ';')
4571 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4572 if (v == 0) {
4573 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4574 expect("identifier");
4575 else {
4576 int v = btype.ref->v;
4577 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4578 if (tcc_state->ms_extensions == 0)
4579 expect("identifier");
4583 if (type_size(&type1, &align) < 0) {
4584 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4585 flexible = 1;
4586 else
4587 tcc_error("field '%s' has incomplete type",
4588 get_tok_str(v, NULL));
4590 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4591 (type1.t & VT_BTYPE) == VT_VOID ||
4592 (type1.t & VT_STORAGE))
4593 tcc_error("invalid type for '%s'",
4594 get_tok_str(v, NULL));
4596 if (tok == ':') {
4597 next();
4598 bit_size = expr_const();
4599 /* XXX: handle v = 0 case for messages */
4600 if (bit_size < 0)
4601 tcc_error("negative width in bit-field '%s'",
4602 get_tok_str(v, NULL));
4603 if (v && bit_size == 0)
4604 tcc_error("zero width for bit-field '%s'",
4605 get_tok_str(v, NULL));
4606 parse_attribute(&ad1);
4608 size = type_size(&type1, &align);
4609 if (bit_size >= 0) {
4610 bt = type1.t & VT_BTYPE;
4611 if (bt != VT_INT &&
4612 bt != VT_BYTE &&
4613 bt != VT_SHORT &&
4614 bt != VT_BOOL &&
4615 bt != VT_LLONG)
4616 tcc_error("bitfields must have scalar type");
4617 bsize = size * 8;
4618 if (bit_size > bsize) {
4619 tcc_error("width of '%s' exceeds its type",
4620 get_tok_str(v, NULL));
4621 } else if (bit_size == bsize
4622 && !ad.a.packed && !ad1.a.packed) {
4623 /* no need for bit fields */
4625 } else if (bit_size == 64) {
4626 tcc_error("field width 64 not implemented");
4627 } else {
4628 type1.t = (type1.t & ~VT_STRUCT_MASK)
4629 | VT_BITFIELD
4630 | (bit_size << (VT_STRUCT_SHIFT + 6));
4633 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4634 /* Remember we've seen a real field to check
4635 for placement of flexible array member. */
4636 c = 1;
4638 /* If member is a struct or bit-field, enforce
4639 placing into the struct (as anonymous). */
4640 if (v == 0 &&
4641 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4642 bit_size >= 0)) {
4643 v = anon_sym++;
4645 if (v) {
4646 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4647 ss->a = ad1.a;
4648 *ps = ss;
4649 ps = &ss->next;
4651 if (tok == ';' || tok == TOK_EOF)
4652 break;
4653 skip(',');
4655 skip(';');
4657 skip('}');
4658 parse_attribute(&ad);
4659 if (ad.cleanup_func) {
4660 tcc_warning("attribute '__cleanup__' ignored on type");
4662 struct_layout(type, &ad);
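/* Added illustrative sketch (assumed examples): how the enum code above picks
   the integral type from the smallest (nl) and largest (pl) enumerator values. */
#if 0
enum a { A = 1 };                 /* no negative value           -> unsigned int  */
enum b { B = -1 };                /* negative value, fits in int -> int           */
enum c { C = 0x100000000LL };     /* exceeds 32 bits             -> 64-bit type   */
/* Individual enumerators still default to int when their own value fits in int. */
#endif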
4667 static void sym_to_attr(AttributeDef *ad, Sym *s)
4669 merge_symattr(&ad->a, &s->a);
4670 merge_funcattr(&ad->f, &s->f);
4673 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4674 are added to the element type, copied because it could be a typedef. */
4675 static void parse_btype_qualify(CType *type, int qualifiers)
4677 while (type->t & VT_ARRAY) {
4678 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4679 type = &type->ref->type;
4681 type->t |= qualifiers;
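/* Added illustrative sketch (assumed example) of the array case handled by
   parse_btype_qualify() above: qualifying an array typedef must qualify the
   element type, and on a copy so the typedef itself stays unqualified. */
#if 0
typedef int A[4];
const A x;     /* x has type 'const int [4]'; A remains plain 'int [4]' */
#endif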
4684 /* return 0 if no type declaration. otherwise, return the basic type
4685 and skip it.
4687 static int parse_btype(CType *type, AttributeDef *ad)
4689 int t, u, bt, st, type_found, typespec_found, g, n;
4690 Sym *s;
4691 CType type1;
4693 memset(ad, 0, sizeof(AttributeDef));
4694 type_found = 0;
4695 typespec_found = 0;
4696 t = VT_INT;
4697 bt = st = -1;
4698 type->ref = NULL;
4700 while(1) {
4701 switch(tok) {
4702 case TOK_EXTENSION:
4703 /* currently, we really ignore extension */
4704 next();
4705 continue;
4707 /* basic types */
4708 case TOK_CHAR:
4709 u = VT_BYTE;
4710 basic_type:
4711 next();
4712 basic_type1:
4713 if (u == VT_SHORT || u == VT_LONG) {
4714 if (st != -1 || (bt != -1 && bt != VT_INT))
4715 tmbt: tcc_error("too many basic types");
4716 st = u;
4717 } else {
4718 if (bt != -1 || (st != -1 && u != VT_INT))
4719 goto tmbt;
4720 bt = u;
4722 if (u != VT_INT)
4723 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4724 typespec_found = 1;
4725 break;
4726 case TOK_VOID:
4727 u = VT_VOID;
4728 goto basic_type;
4729 case TOK_SHORT:
4730 u = VT_SHORT;
4731 goto basic_type;
4732 case TOK_INT:
4733 u = VT_INT;
4734 goto basic_type;
4735 case TOK_ALIGNAS:
4736 { int n;
4737 AttributeDef ad1;
4738 next();
4739 skip('(');
4740 memset(&ad1, 0, sizeof(AttributeDef));
4741 if (parse_btype(&type1, &ad1)) {
4742 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4743 if (ad1.a.aligned)
4744 n = 1 << (ad1.a.aligned - 1);
4745 else
4746 type_size(&type1, &n);
4747 } else {
4748 n = expr_const();
4749 if (n <= 0 || (n & (n - 1)) != 0)
4750 tcc_error("alignment must be a positive power of two");
4752 skip(')');
4753 ad->a.aligned = exact_log2p1(n);
4755 continue;
4756 case TOK_LONG:
4757 if ((t & VT_BTYPE) == VT_DOUBLE) {
4758 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4759 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4760 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4761 } else {
4762 u = VT_LONG;
4763 goto basic_type;
4765 next();
4766 break;
4767 #ifdef TCC_TARGET_ARM64
4768 case TOK_UINT128:
4769 /* GCC's __uint128_t appears in some Linux header files. Make it a
4770 synonym for long double to get the size and alignment right. */
4771 u = VT_LDOUBLE;
4772 goto basic_type;
4773 #endif
4774 case TOK_BOOL:
4775 u = VT_BOOL;
4776 goto basic_type;
4777 case TOK_FLOAT:
4778 u = VT_FLOAT;
4779 goto basic_type;
4780 case TOK_DOUBLE:
4781 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4782 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4783 } else {
4784 u = VT_DOUBLE;
4785 goto basic_type;
4787 next();
4788 break;
4789 case TOK_ENUM:
4790 struct_decl(&type1, VT_ENUM);
4791 basic_type2:
4792 u = type1.t;
4793 type->ref = type1.ref;
4794 goto basic_type1;
4795 case TOK_STRUCT:
4796 struct_decl(&type1, VT_STRUCT);
4797 goto basic_type2;
4798 case TOK_UNION:
4799 struct_decl(&type1, VT_UNION);
4800 goto basic_type2;
4802 /* type modifiers */
4803 case TOK_CONST1:
4804 case TOK_CONST2:
4805 case TOK_CONST3:
4806 type->t = t;
4807 parse_btype_qualify(type, VT_CONSTANT);
4808 t = type->t;
4809 next();
4810 break;
4811 case TOK_VOLATILE1:
4812 case TOK_VOLATILE2:
4813 case TOK_VOLATILE3:
4814 type->t = t;
4815 parse_btype_qualify(type, VT_VOLATILE);
4816 t = type->t;
4817 next();
4818 break;
4819 case TOK_SIGNED1:
4820 case TOK_SIGNED2:
4821 case TOK_SIGNED3:
4822 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4823 tcc_error("signed and unsigned modifier");
4824 t |= VT_DEFSIGN;
4825 next();
4826 typespec_found = 1;
4827 break;
4828 case TOK_REGISTER:
4829 case TOK_AUTO:
4830 case TOK_RESTRICT1:
4831 case TOK_RESTRICT2:
4832 case TOK_RESTRICT3:
4833 next();
4834 break;
4835 case TOK_UNSIGNED:
4836 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4837 tcc_error("signed and unsigned modifier");
4838 t |= VT_DEFSIGN | VT_UNSIGNED;
4839 next();
4840 typespec_found = 1;
4841 break;
4843 /* storage */
4844 case TOK_EXTERN:
4845 g = VT_EXTERN;
4846 goto storage;
4847 case TOK_STATIC:
4848 g = VT_STATIC;
4849 goto storage;
4850 case TOK_TYPEDEF:
4851 g = VT_TYPEDEF;
4852 goto storage;
4853 storage:
4854 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4855 tcc_error("multiple storage classes");
4856 t |= g;
4857 next();
4858 break;
4859 case TOK_INLINE1:
4860 case TOK_INLINE2:
4861 case TOK_INLINE3:
4862 t |= VT_INLINE;
4863 next();
4864 break;
4865 case TOK_NORETURN3:
4866 next();
4867 ad->f.func_noreturn = 1;
4868 break;
4869 /* GNUC attribute */
4870 case TOK_ATTRIBUTE1:
4871 case TOK_ATTRIBUTE2:
4872 parse_attribute(ad);
4873 if (ad->attr_mode) {
4874 u = ad->attr_mode -1;
4875 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4877 continue;
4878 /* GNUC typeof */
4879 case TOK_TYPEOF1:
4880 case TOK_TYPEOF2:
4881 case TOK_TYPEOF3:
4882 next();
4883 parse_expr_type(&type1);
4884 /* remove all storage modifiers except typedef */
4885 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4886 if (type1.ref)
4887 sym_to_attr(ad, type1.ref);
4888 goto basic_type2;
4889 default:
4890 if (typespec_found)
4891 goto the_end;
4892 s = sym_find(tok);
4893 if (!s || !(s->type.t & VT_TYPEDEF))
4894 goto the_end;
4896 n = tok, next();
4897 if (tok == ':' && !in_generic) {
4898 /* ignore if it's a label */
4899 unget_tok(n);
4900 goto the_end;
4903 t &= ~(VT_BTYPE|VT_LONG);
4904 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4905 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4906 type->ref = s->type.ref;
4907 if (t)
4908 parse_btype_qualify(type, t);
4909 t = type->t;
4910 /* get attributes from typedef */
4911 sym_to_attr(ad, s);
4912 typespec_found = 1;
4913 st = bt = -2;
4914 break;
4916 type_found = 1;
4918 the_end:
4919 if (tcc_state->char_is_unsigned) {
4920 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4921 t |= VT_UNSIGNED;
4923 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4924 bt = t & (VT_BTYPE|VT_LONG);
4925 if (bt == VT_LONG)
4926 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4927 #ifdef TCC_TARGET_PE
4928 if (bt == VT_LDOUBLE)
4929 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
4930 #endif
4931 type->t = t;
4932 return type_found;
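/* Added illustrative sketch (assumed examples): specifier combinations as
   accepted or rejected by parse_btype() above. */
#if 0
unsigned long long int x;   /* ok: 'long long' accumulates via st, 'int' via bt */
long double d;              /* ok: TOK_LONG upgrades VT_DOUBLE to VT_LDOUBLE    */
short char c;               /* rejected: "too many basic types" (the tmbt label) */
#endif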
4935 /* convert a function parameter type (array to pointer and function to
4936 function pointer) */
4937 static inline void convert_parameter_type(CType *pt)
4939 /* remove const and volatile qualifiers (XXX: const could be used
4940 to indicate a const function parameter) */
4941 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4942 /* array must be transformed to pointer according to ANSI C */
4943 pt->t &= ~VT_ARRAY;
4944 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4945 mk_pointer(pt);
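/* Added illustrative sketch (assumed example): the parameter adjustment done
   by convert_parameter_type() above - both declarations end up identical. */
#if 0
void f(int a[10], int g(void));   /* array and function parameters...         */
void f(int *a, int (*g)(void));   /* ...decay to pointers, qualifiers dropped */
#endif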
4949 ST_FUNC void parse_asm_str(CString *astr)
4951 skip('(');
4952 parse_mult_str(astr, "string constant");
4955 /* Parse an asm label and return the token */
4956 static int asm_label_instr(void)
4958 int v;
4959 CString astr;
4961 next();
4962 parse_asm_str(&astr);
4963 skip(')');
4964 #ifdef ASM_DEBUG
4965 printf("asm_alias: \"%s\"\n", (char *)astr.data);
4966 #endif
4967 v = tok_alloc(astr.data, astr.size - 1)->tok;
4968 cstr_free(&astr);
4969 return v;
4972 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
4974 int n, l, t1, arg_size, align, unused_align;
4975 Sym **plast, *s, *first;
4976 AttributeDef ad1;
4977 CType pt;
4979 if (tok == '(') {
4980 /* function type, or recursive declarator (return if so) */
4981 next();
4982 if (td && !(td & TYPE_ABSTRACT))
4983 return 0;
4984 if (tok == ')')
4985 l = 0;
4986 else if (parse_btype(&pt, &ad1))
4987 l = FUNC_NEW;
4988 else if (td) {
4989 merge_attr (ad, &ad1);
4990 return 0;
4991 } else
4992 l = FUNC_OLD;
4993 first = NULL;
4994 plast = &first;
4995 arg_size = 0;
4996 if (l) {
4997 for(;;) {
4998 /* read param name and compute offset */
4999 if (l != FUNC_OLD) {
5000 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
5001 break;
5002 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
5003 if ((pt.t & VT_BTYPE) == VT_VOID)
5004 tcc_error("parameter declared as void");
5005 } else {
5006 n = tok;
5007 if (n < TOK_UIDENT)
5008 expect("identifier");
5009 pt.t = VT_VOID; /* invalid type */
5010 pt.ref = NULL;
5011 next();
5013 convert_parameter_type(&pt);
5014 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
5015 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
5016 *plast = s;
5017 plast = &s->next;
5018 if (tok == ')')
5019 break;
5020 skip(',');
5021 if (l == FUNC_NEW && tok == TOK_DOTS) {
5022 l = FUNC_ELLIPSIS;
5023 next();
5024 break;
5026 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
5027 tcc_error("invalid type");
5029 } else
5030 /* if no parameters, then old type prototype */
5031 l = FUNC_OLD;
5032 skip(')');
5033 /* NOTE: const is ignored in returned type as it has a special
5034 meaning in gcc / C++ */
5035 type->t &= ~VT_CONSTANT;
5036 /* some ancient pre-K&R C allows a function to return an array
5037 and the array brackets to be put after the arguments, such
5038 that "int c()[]" means something like "int[] c()" */
5039 if (tok == '[') {
5040 next();
5041 skip(']'); /* only handle simple "[]" */
5042 mk_pointer(type);
5044 /* we push an anonymous symbol which will contain the function prototype */
5045 ad->f.func_args = arg_size;
5046 ad->f.func_type = l;
5047 s = sym_push(SYM_FIELD, type, 0, 0);
5048 s->a = ad->a;
5049 s->f = ad->f;
5050 s->next = first;
5051 type->t = VT_FUNC;
5052 type->ref = s;
5053 } else if (tok == '[') {
5054 int saved_nocode_wanted = nocode_wanted;
5055 /* array definition */
5056 next();
5057 while (1) {
5058 /* XXX The optional type-quals and static should only be accepted
5059 in parameter decls. The '*' as well, and then even only
5060 in prototypes (not function defs). */
5061 switch (tok) {
5062 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
5063 case TOK_CONST1:
5064 case TOK_VOLATILE1:
5065 case TOK_STATIC:
5066 case '*':
5067 next();
5068 continue;
5069 default:
5070 break;
5072 break;
5074 n = -1;
5075 t1 = 0;
5076 if (tok != ']') {
5077 if (!local_stack || (storage & VT_STATIC))
5078 vpushi(expr_const());
5079 else {
5080 /* For VLAs (which can only happen with local_stack && !VT_STATIC)
5081 the length must always be evaluated, even under nocode_wanted,
5082 so that its size slot is initialized (e.g. under sizeof
5083 or typeof). */
5084 nocode_wanted = 0;
5085 gexpr();
5087 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5088 n = vtop->c.i;
5089 if (n < 0)
5090 tcc_error("invalid array size");
5091 } else {
5092 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5093 tcc_error("size of variable length array should be an integer");
5094 n = 0;
5095 t1 = VT_VLA;
5098 skip(']');
5099 /* parse next post type */
5100 post_type(type, ad, storage, 0);
5102 if ((type->t & VT_BTYPE) == VT_FUNC)
5103 tcc_error("declaration of an array of functions");
5104 if ((type->t & VT_BTYPE) == VT_VOID
5105 || type_size(type, &unused_align) < 0)
5106 tcc_error("declaration of an array of incomplete type elements");
5108 t1 |= type->t & VT_VLA;
5110 if (t1 & VT_VLA) {
5111 if (n < 0)
5112 tcc_error("need explicit inner array size in VLAs");
5113 loc -= type_size(&int_type, &align);
5114 loc &= -align;
5115 n = loc;
5117 vla_runtime_type_size(type, &align);
5118 gen_op('*');
5119 vset(&int_type, VT_LOCAL|VT_LVAL, n);
5120 vswap();
5121 vstore();
5123 if (n != -1)
5124 vpop();
5125 nocode_wanted = saved_nocode_wanted;
5127 /* we push an anonymous symbol which will contain the array
5128 element type */
5129 s = sym_push(SYM_FIELD, type, 0, n);
5130 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
5131 type->ref = s;
5133 return 1;
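/* Added illustrative sketch (assumed examples): declarators handled by
   post_type() above.  Note that a VLA length is evaluated even under sizeof
   so that its runtime size slot gets initialized. */
#if 0
int n = 4;
int a[n];                     /* VT_VLA: size computed and stored on the stack */
int (*fp)(int, ...);          /* FUNC_ELLIPSIS prototype                       */
unsigned s = sizeof(int[n]);  /* length expression still evaluated             */
#endif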
5136 /* Parse a type declarator (except basic type), and return the type
5137 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5138 expected. 'type' should contain the basic type. 'ad' is the
5139 attribute definition of the basic type. It can be modified by
5140 type_decl(). If this (possibly abstract) declarator is a pointer chain
5141 it returns the innermost pointed to type (equals *type, but is a different
5142 pointer), otherwise returns type itself, that's used for recursive calls. */
5143 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
5145 CType *post, *ret;
5146 int qualifiers, storage;
5148 /* recursive type, remove storage bits first, apply them later again */
5149 storage = type->t & VT_STORAGE;
5150 type->t &= ~VT_STORAGE;
5151 post = ret = type;
5153 while (tok == '*') {
5154 qualifiers = 0;
5155 redo:
5156 next();
5157 switch(tok) {
5158 case TOK_CONST1:
5159 case TOK_CONST2:
5160 case TOK_CONST3:
5161 qualifiers |= VT_CONSTANT;
5162 goto redo;
5163 case TOK_VOLATILE1:
5164 case TOK_VOLATILE2:
5165 case TOK_VOLATILE3:
5166 qualifiers |= VT_VOLATILE;
5167 goto redo;
5168 case TOK_RESTRICT1:
5169 case TOK_RESTRICT2:
5170 case TOK_RESTRICT3:
5171 goto redo;
5172 /* XXX: clarify attribute handling */
5173 case TOK_ATTRIBUTE1:
5174 case TOK_ATTRIBUTE2:
5175 parse_attribute(ad);
5176 break;
5178 mk_pointer(type);
5179 type->t |= qualifiers;
5180 if (ret == type)
5181 /* innermost pointed to type is the one for the first derivation */
5182 ret = pointed_type(type);
5185 if (tok == '(') {
5186 /* This is possibly a parameter type list for abstract declarators
5187 ('int ()'), use post_type for testing this. */
5188 if (!post_type(type, ad, 0, td)) {
5189 /* It's not, so it's a nested declarator, and the post operations
5190 apply to the innermost pointed to type (if any). */
5191 /* XXX: it is not correct to modify 'ad' at this point, but
5192 the syntax is not clear */
5193 parse_attribute(ad);
5194 post = type_decl(type, ad, v, td);
5195 skip(')');
5196 } else
5197 goto abstract;
5198 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5199 /* type identifier */
5200 *v = tok;
5201 next();
5202 } else {
5203 abstract:
5204 if (!(td & TYPE_ABSTRACT))
5205 expect("identifier");
5206 *v = 0;
5208 post_type(post, ad, storage, 0);
5209 parse_attribute(ad);
5210 type->t |= storage;
5211 return ret;
5214 /* indirection with full error checking and bound check */
5215 ST_FUNC void indir(void)
5217 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5218 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5219 return;
5220 expect("pointer");
5222 if (vtop->r & VT_LVAL)
5223 gv(RC_INT);
5224 vtop->type = *pointed_type(&vtop->type);
5225 /* Arrays and functions are never lvalues */
5226 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5227 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5228 vtop->r |= VT_LVAL;
5229 /* if bound checking, the referenced pointer must be checked */
5230 #ifdef CONFIG_TCC_BCHECK
5231 if (tcc_state->do_bounds_check)
5232 vtop->r |= VT_MUSTBOUND;
5233 #endif
5237 /* pass a parameter to a function and do type checking and casting */
5238 static void gfunc_param_typed(Sym *func, Sym *arg)
5240 int func_type;
5241 CType type;
5243 func_type = func->f.func_type;
5244 if (func_type == FUNC_OLD ||
5245 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5246 /* default casting: only need to convert float to double */
5247 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5248 gen_cast_s(VT_DOUBLE);
5249 } else if (vtop->type.t & VT_BITFIELD) {
5250 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5251 type.ref = vtop->type.ref;
5252 gen_cast(&type);
5253 } else if (vtop->r & VT_MUSTCAST) {
5254 force_charshort_cast();
5256 } else if (arg == NULL) {
5257 tcc_error("too many arguments to function");
5258 } else {
5259 type = arg->type;
5260 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5261 gen_assign_cast(&type);
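/* Added illustrative sketch (assumed example): the default promotion applied
   by gfunc_param_typed() above when an argument is not covered by a
   prototype (old-style call or the '...' part of a variadic one). */
#if 0
int printf(const char *, ...);
float f = 1.5f;
/* printf("%f", f): f falls under the ellipsis, so it goes through
   gen_cast_s(VT_DOUBLE) before being passed. */
#endif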
5265 /* parse an expression and return its type without any side effect. */
5266 static void expr_type(CType *type, void (*expr_fn)(void))
5268 nocode_wanted++;
5269 expr_fn();
5270 *type = vtop->type;
5271 vpop();
5272 nocode_wanted--;
5275 /* parse an expression of the form '(type)' or '(expr)' and return its
5276 type */
5277 static void parse_expr_type(CType *type)
5279 int n;
5280 AttributeDef ad;
5282 skip('(');
5283 if (parse_btype(type, &ad)) {
5284 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5285 } else {
5286 expr_type(type, gexpr);
5288 skip(')');
5291 static void parse_type(CType *type)
5293 AttributeDef ad;
5294 int n;
5296 if (!parse_btype(type, &ad)) {
5297 expect("type");
5299 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5302 static void parse_builtin_params(int nc, const char *args)
5304 char c, sep = '(';
5305 CType t;
5306 if (nc)
5307 nocode_wanted++;
5308 next();
5309 while ((c = *args++)) {
5310 skip(sep);
5311 sep = ',';
5312 switch (c) {
5313 case 'e': expr_eq(); continue;
5314 case 't': parse_type(&t); vpush(&t); continue;
5315 default: tcc_error("internal error"); break;
5318 skip(')');
5319 if (nc)
5320 nocode_wanted--;
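/* Added illustrative sketch (assumed examples): argument patterns matching the
   format strings handled by parse_builtin_params() above ('e' = expression,
   't' = type; nc != 0 parses under nocode_wanted). */
#if 0
n  = __builtin_expect(n, 1);                       /* "ee"          */
ok = __builtin_types_compatible_p(int, unsigned);  /* "tt"          */
c  = __builtin_constant_p(3 * 7);                  /* "e", nc == 1  */
#endif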
5323 ST_FUNC void unary(void)
5325 int n, t, align, size, r, sizeof_caller;
5326 CType type;
5327 Sym *s;
5328 AttributeDef ad;
5330 /* generate line number info */
5331 if (tcc_state->do_debug)
5332 tcc_debug_line(tcc_state);
5334 sizeof_caller = in_sizeof;
5335 in_sizeof = 0;
5336 type.ref = NULL;
5337 /* XXX: GCC 2.95.3 does not generate a table although it should be
5338 better here */
5339 tok_next:
5340 switch(tok) {
5341 case TOK_EXTENSION:
5342 next();
5343 goto tok_next;
5344 case TOK_LCHAR:
5345 #ifdef TCC_TARGET_PE
5346 t = VT_SHORT|VT_UNSIGNED;
5347 goto push_tokc;
5348 #endif
5349 case TOK_CINT:
5350 case TOK_CCHAR:
5351 t = VT_INT;
5352 push_tokc:
5353 type.t = t;
5354 vsetc(&type, VT_CONST, &tokc);
5355 next();
5356 break;
5357 case TOK_CUINT:
5358 t = VT_INT | VT_UNSIGNED;
5359 goto push_tokc;
5360 case TOK_CLLONG:
5361 t = VT_LLONG;
5362 goto push_tokc;
5363 case TOK_CULLONG:
5364 t = VT_LLONG | VT_UNSIGNED;
5365 goto push_tokc;
5366 case TOK_CFLOAT:
5367 t = VT_FLOAT;
5368 goto push_tokc;
5369 case TOK_CDOUBLE:
5370 t = VT_DOUBLE;
5371 goto push_tokc;
5372 case TOK_CLDOUBLE:
5373 t = VT_LDOUBLE;
5374 goto push_tokc;
5375 case TOK_CLONG:
5376 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5377 goto push_tokc;
5378 case TOK_CULONG:
5379 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5380 goto push_tokc;
5381 case TOK___FUNCTION__:
5382 if (!gnu_ext)
5383 goto tok_identifier;
5384 /* fall thru */
5385 case TOK___FUNC__:
5387 void *ptr;
5388 int len;
5389 /* special function name identifier */
5390 len = strlen(funcname) + 1;
5391 /* generate char[len] type */
5392 type.t = VT_BYTE;
5393 mk_pointer(&type);
5394 type.t |= VT_ARRAY;
5395 type.ref->c = len;
5396 vpush_ref(&type, data_section, data_section->data_offset, len);
5397 if (!NODATA_WANTED) {
5398 ptr = section_ptr_add(data_section, len);
5399 memcpy(ptr, funcname, len);
5401 next();
5403 break;
5404 case TOK_LSTR:
5405 #ifdef TCC_TARGET_PE
5406 t = VT_SHORT | VT_UNSIGNED;
5407 #else
5408 t = VT_INT;
5409 #endif
5410 goto str_init;
5411 case TOK_STR:
5412 /* string parsing */
5413 t = VT_BYTE;
5414 if (tcc_state->char_is_unsigned)
5415 t = VT_BYTE | VT_UNSIGNED;
5416 str_init:
5417 if (tcc_state->warn_write_strings)
5418 t |= VT_CONSTANT;
5419 type.t = t;
5420 mk_pointer(&type);
5421 type.t |= VT_ARRAY;
5422 memset(&ad, 0, sizeof(AttributeDef));
5423 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5424 break;
5425 case '(':
5426 next();
5427 /* cast ? */
5428 if (parse_btype(&type, &ad)) {
5429 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5430 skip(')');
5431 /* check ISOC99 compound literal */
5432 if (tok == '{') {
5433 /* data is allocated locally by default */
5434 if (global_expr)
5435 r = VT_CONST;
5436 else
5437 r = VT_LOCAL;
5438 /* all except arrays are lvalues */
5439 if (!(type.t & VT_ARRAY))
5440 r |= VT_LVAL;
5441 memset(&ad, 0, sizeof(AttributeDef));
5442 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5443 } else {
5444 if (sizeof_caller) {
5445 vpush(&type);
5446 return;
5448 unary();
5449 gen_cast(&type);
5451 } else if (tok == '{') {
5452 int saved_nocode_wanted = nocode_wanted;
5453 if (const_wanted && !(nocode_wanted & unevalmask))
5454 tcc_error("expected constant");
5455 /* save all registers */
5456 save_regs(0);
5457 /* statement expression : we do not accept break/continue
5458 inside as GCC does. We do retain the nocode_wanted state,
5459 as statement expressions can't ever be entered from the
5460 outside, so any reactivation of code emission (from labels
5461 or loop heads) can be disabled again after the end of it. */
5462 block(1);
5463 nocode_wanted = saved_nocode_wanted;
5464 skip(')');
5465 } else {
5466 gexpr();
5467 skip(')');
5469 break;
5470 case '*':
5471 next();
5472 unary();
5473 indir();
5474 break;
5475 case '&':
5476 next();
5477 unary();
5478 /* function names must be treated as function pointers,
5479 except for unary '&' and sizeof. Since we consider that
5480 functions are not lvalues, we only have to handle it
5481 there and in function calls. */
5482 /* arrays can also be used although they are not lvalues */
5483 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5484 !(vtop->type.t & VT_ARRAY))
5485 test_lvalue();
5486 if (vtop->sym)
5487 vtop->sym->a.addrtaken = 1;
5488 mk_pointer(&vtop->type);
5489 gaddrof();
5490 break;
5491 case '!':
5492 next();
5493 unary();
5494 gen_test_zero(TOK_EQ);
5495 break;
5496 case '~':
5497 next();
5498 unary();
5499 vpushi(-1);
5500 gen_op('^');
5501 break;
5502 case '+':
5503 next();
5504 unary();
5505 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5506 tcc_error("pointer not accepted for unary plus");
5507 /* In order to force cast, we add zero, except for floating point
5508 where we really need a noop (otherwise -0.0 will be transformed
5509 into +0.0). */
5510 if (!is_float(vtop->type.t)) {
5511 vpushi(0);
5512 gen_op('+');
5514 break;
5515 case TOK_SIZEOF:
5516 case TOK_ALIGNOF1:
5517 case TOK_ALIGNOF2:
5518 case TOK_ALIGNOF3:
5519 t = tok;
5520 next();
5521 in_sizeof++;
5522 expr_type(&type, unary); /* this unary() call also resets in_sizeof to 0 */
5523 s = NULL;
5524 if (vtop[1].r & VT_SYM)
5525 s = vtop[1].sym; /* hack: accessing previous vtop */
5526 size = type_size(&type, &align);
5527 if (s && s->a.aligned)
5528 align = 1 << (s->a.aligned - 1);
5529 if (t == TOK_SIZEOF) {
5530 if (!(type.t & VT_VLA)) {
5531 if (size < 0)
5532 tcc_error("sizeof applied to an incomplete type");
5533 vpushs(size);
5534 } else {
5535 vla_runtime_type_size(&type, &align);
5537 } else {
5538 vpushs(align);
5540 vtop->type.t |= VT_UNSIGNED;
5541 break;
5543 case TOK_builtin_expect:
5544 /* __builtin_expect is a no-op for now */
5545 parse_builtin_params(0, "ee");
5546 vpop();
5547 break;
5548 case TOK_builtin_types_compatible_p:
5549 parse_builtin_params(0, "tt");
5550 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5551 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5552 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5553 vtop -= 2;
5554 vpushi(n);
5555 break;
5556 case TOK_builtin_choose_expr:
5558 int64_t c;
5559 next();
5560 skip('(');
5561 c = expr_const64();
5562 skip(',');
5563 if (!c) {
5564 nocode_wanted++;
5566 expr_eq();
5567 if (!c) {
5568 vpop();
5569 nocode_wanted--;
5571 skip(',');
5572 if (c) {
5573 nocode_wanted++;
5575 expr_eq();
5576 if (c) {
5577 vpop();
5578 nocode_wanted--;
5580 skip(')');
5582 break;
5583 case TOK_builtin_constant_p:
5584 parse_builtin_params(1, "e");
5585 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5586 vtop--;
5587 vpushi(n);
5588 break;
5589 case TOK_builtin_frame_address:
5590 case TOK_builtin_return_address:
5592 int tok1 = tok;
5593 int level;
5594 next();
5595 skip('(');
5596 if (tok != TOK_CINT) {
5597 tcc_error("%s only takes positive integers",
5598 tok1 == TOK_builtin_return_address ?
5599 "__builtin_return_address" :
5600 "__builtin_frame_address");
5602 level = (uint32_t)tokc.i;
5603 next();
5604 skip(')');
5605 type.t = VT_VOID;
5606 mk_pointer(&type);
5607 vset(&type, VT_LOCAL, 0); /* local frame */
5608 while (level--) {
5609 mk_pointer(&vtop->type);
5610 indir(); /* -> parent frame */
5612 if (tok1 == TOK_builtin_return_address) {
5613 // assume return address is just above frame pointer on stack
5614 vpushi(PTR_SIZE);
5615 gen_op('+');
5616 mk_pointer(&vtop->type);
5617 indir();
5620 break;
5621 #ifdef TCC_TARGET_RISCV64
5622 case TOK_builtin_va_start:
5623 parse_builtin_params(0, "ee");
5624 r = vtop->r & VT_VALMASK;
5625 if (r == VT_LLOCAL)
5626 r = VT_LOCAL;
5627 if (r != VT_LOCAL)
5628 tcc_error("__builtin_va_start expects a local variable");
5629 gen_va_start();
5630 vstore();
5631 break;
5632 #endif
5633 #ifdef TCC_TARGET_X86_64
5634 #ifdef TCC_TARGET_PE
5635 case TOK_builtin_va_start:
5636 parse_builtin_params(0, "ee");
5637 r = vtop->r & VT_VALMASK;
5638 if (r == VT_LLOCAL)
5639 r = VT_LOCAL;
5640 if (r != VT_LOCAL)
5641 tcc_error("__builtin_va_start expects a local variable");
5642 vtop->r = r;
5643 vtop->type = char_pointer_type;
5644 vtop->c.i += 8;
5645 vstore();
5646 break;
5647 #else
5648 case TOK_builtin_va_arg_types:
5649 parse_builtin_params(0, "t");
5650 vpushi(classify_x86_64_va_arg(&vtop->type));
5651 vswap();
5652 vpop();
5653 break;
5654 #endif
5655 #endif
5657 #ifdef TCC_TARGET_ARM64
5658 case TOK_builtin_va_start: {
5659 parse_builtin_params(0, "ee");
5660 //xx check types
5661 gen_va_start();
5662 vpushi(0);
5663 vtop->type.t = VT_VOID;
5664 break;
5666 case TOK_builtin_va_arg: {
5667 parse_builtin_params(0, "et");
5668 type = vtop->type;
5669 vpop();
5670 //xx check types
5671 gen_va_arg(&type);
5672 vtop->type = type;
5673 break;
5675 case TOK___arm64_clear_cache: {
5676 parse_builtin_params(0, "ee");
5677 gen_clear_cache();
5678 vpushi(0);
5679 vtop->type.t = VT_VOID;
5680 break;
5682 #endif
5683 /* pre operations */
5684 case TOK_INC:
5685 case TOK_DEC:
5686 t = tok;
5687 next();
5688 unary();
5689 inc(0, t);
5690 break;
5691 case '-':
5692 next();
5693 unary();
5694 t = vtop->type.t & VT_BTYPE;
5695 if (is_float(t)) {
5696 /* In IEEE negate(x) isn't subtract(0,x), but rather
5697 subtract(-0, x). */
5698 vpush(&vtop->type);
5699 if (t == VT_FLOAT)
5700 vtop->c.f = -1.0 * 0.0;
5701 else if (t == VT_DOUBLE)
5702 vtop->c.d = -1.0 * 0.0;
5703 else
5704 vtop->c.ld = -1.0 * 0.0;
5705 } else
5706 vpushi(0);
5707 vswap();
5708 gen_op('-');
5709 break;
5710 case TOK_LAND:
5711 if (!gnu_ext)
5712 goto tok_identifier;
5713 next();
5714 /* allow taking the address of a label */
5715 if (tok < TOK_UIDENT)
5716 expect("label identifier");
5717 s = label_find(tok);
5718 if (!s) {
5719 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5720 } else {
5721 if (s->r == LABEL_DECLARED)
5722 s->r = LABEL_FORWARD;
5724 if (!s->type.t) {
5725 s->type.t = VT_VOID;
5726 mk_pointer(&s->type);
5727 s->type.t |= VT_STATIC;
5729 vpushsym(&s->type, s);
5730 next();
5731 break;
5733 case TOK_GENERIC:
5735 CType controlling_type;
5736 int has_default = 0;
5737 int has_match = 0;
5738 int learn = 0;
5739 TokenString *str = NULL;
5740 int saved_const_wanted = const_wanted;
5742 next();
5743 skip('(');
5744 const_wanted = 0;
5745 expr_type(&controlling_type, expr_eq);
5746 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5747 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5748 mk_pointer(&controlling_type);
5749 const_wanted = saved_const_wanted;
5750 for (;;) {
5751 learn = 0;
5752 skip(',');
5753 if (tok == TOK_DEFAULT) {
5754 if (has_default)
5755 tcc_error("too many 'default'");
5756 has_default = 1;
5757 if (!has_match)
5758 learn = 1;
5759 next();
5760 } else {
5761 AttributeDef ad_tmp;
5762 int itmp;
5763 CType cur_type;
5765 in_generic++;
5766 parse_btype(&cur_type, &ad_tmp);
5767 in_generic--;
5769 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5770 if (compare_types(&controlling_type, &cur_type, 0)) {
5771 if (has_match) {
5772 tcc_error("type match twice");
5774 has_match = 1;
5775 learn = 1;
5778 skip(':');
5779 if (learn) {
5780 if (str)
5781 tok_str_free(str);
5782 skip_or_save_block(&str);
5783 } else {
5784 skip_or_save_block(NULL);
5786 if (tok == ')')
5787 break;
5789 if (!str) {
5790 char buf[60];
5791 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5792 tcc_error("type '%s' does not match any association", buf);
5794 begin_macro(str, 1);
5795 next();
5796 expr_eq();
5797 if (tok != TOK_EOF)
5798 expect(",");
5799 end_macro();
5800 next();
5801 break;
5803 // special qnan, snan and infinity values
5804 case TOK___NAN__:
5805 n = 0x7fc00000;
5806 special_math_val:
5807 vpushi(n);
5808 vtop->type.t = VT_FLOAT;
5809 next();
5810 break;
5811 case TOK___SNAN__:
5812 n = 0x7f800001;
5813 goto special_math_val;
5814 case TOK___INF__:
5815 n = 0x7f800000;
5816 goto special_math_val;
5818 default:
5819 tok_identifier:
5820 t = tok;
5821 next();
5822 if (t < TOK_UIDENT)
5823 expect("identifier");
5824 s = sym_find(t);
5825 if (!s || IS_ASM_SYM(s)) {
5826 const char *name = get_tok_str(t, NULL);
5827 if (tok != '(')
5828 tcc_error("'%s' undeclared", name);
5829 /* for simple function calls, we tolerate undeclared
5830 external references to an int() function */
5831 if (tcc_state->warn_implicit_function_declaration
5832 #ifdef TCC_TARGET_PE
5833 /* people must be warned about using undeclared WINAPI functions
5834 (which usually start with an uppercase letter) */
5835 || (name[0] >= 'A' && name[0] <= 'Z')
5836 #endif
5838 tcc_warning("implicit declaration of function '%s'", name);
5839 s = external_global_sym(t, &func_old_type);
5842 r = s->r;
5843 /* A symbol that has a register is a local register variable,
5844 which starts out as a VT_LOCAL value. */
5845 if ((r & VT_VALMASK) < VT_CONST)
5846 r = (r & ~VT_VALMASK) | VT_LOCAL;
5848 vset(&s->type, r, s->c);
5849 /* Point to s as backpointer (even without r&VT_SYM).
5850 Will be used by at least the x86 inline asm parser for
5851 regvars. */
5852 vtop->sym = s;
5854 if (r & VT_SYM) {
5855 vtop->c.i = 0;
5856 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5857 vtop->c.i = s->enum_val;
5859 break;
5862 /* post operations */
5863 while (1) {
5864 if (tok == TOK_INC || tok == TOK_DEC) {
5865 inc(1, tok);
5866 next();
5867 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5868 int qualifiers, cumofs = 0;
5869 /* field */
5870 if (tok == TOK_ARROW)
5871 indir();
5872 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5873 test_lvalue();
5874 gaddrof();
5875 /* expect a pointer to a structure */
5876 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT) {
5877 char got[256];
5878 type_to_str(got, sizeof got, &vtop->type, NULL);
5879 tcc_error("expected struct or union but not '%s'", got);
5881 if (tok == TOK_CDOUBLE)
5882 expect("field name");
5883 next();
5884 if (tok == TOK_CINT || tok == TOK_CUINT)
5885 expect("field name");
5886 s = find_field(&vtop->type, tok, &cumofs);
5887 if (!s)
5888 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5889 /* add field offset to pointer */
5890 vtop->type = char_pointer_type; /* change type to 'char *' */
5891 vpushi(cumofs + s->c);
5892 gen_op('+');
5893 /* change type to field type, and set to lvalue */
5894 vtop->type = s->type;
5895 vtop->type.t |= qualifiers;
5896 /* an array is never an lvalue */
5897 if (!(vtop->type.t & VT_ARRAY)) {
5898 vtop->r |= VT_LVAL;
5899 #ifdef CONFIG_TCC_BCHECK
5900 /* if bound checking, the referenced pointer must be checked */
5901 if (tcc_state->do_bounds_check)
5902 vtop->r |= VT_MUSTBOUND;
5903 #endif
5905 next();
5906 } else if (tok == '[') {
5907 next();
5908 gexpr();
5909 gen_op('+');
5910 indir();
5911 skip(']');
5912 } else if (tok == '(') {
5913 SValue ret;
5914 Sym *sa;
5915 int nb_args, ret_nregs, ret_align, regsize, variadic;
5917 #ifdef CONFIG_TCC_BCHECK
5918 tcc_state->alloca_vla_used |= tcc_state->do_bounds_check &&
5919 (vtop->r & VT_SYM) &&
5920 vtop->sym->v == TOK_alloca;
5921 #endif
5922 /* function call */
5923 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5924 /* pointer test (no array accepted) */
5925 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5926 vtop->type = *pointed_type(&vtop->type);
5927 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5928 goto error_func;
5929 } else {
5930 error_func:
5931 expect("function pointer");
5933 } else {
5934 vtop->r &= ~VT_LVAL; /* no lvalue */
5936 /* get return type */
5937 s = vtop->type.ref;
5938 next();
5939 sa = s->next; /* first parameter */
5940 nb_args = regsize = 0;
5941 ret.r2 = VT_CONST;
5942 /* compute first implicit argument if a structure is returned */
5943 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5944 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5945 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
5946 &ret_align, &regsize);
5947 if (ret_nregs <= 0) {
5948 /* get some space for the returned structure */
5949 size = type_size(&s->type, &align);
5950 #ifdef TCC_TARGET_ARM64
5951 /* On arm64, a small struct is returned in registers.
5952 It is much easier to write it to memory if we know
5953 that we are allowed to write some extra bytes, so
5954 round the allocated space up to a power of 2: */
5955 if (size < 16)
5956 while (size & (size - 1))
5957 size = (size | (size - 1)) + 1;
5958 #endif
5959 loc = (loc - size) & -align;
5960 ret.type = s->type;
5961 ret.r = VT_LOCAL | VT_LVAL;
5962 /* pass it as 'int' to avoid structure arg passing
5963 problems */
5964 vseti(VT_LOCAL, loc);
5965 ret.c = vtop->c;
5966 if (ret_nregs < 0)
5967 vtop--;
5968 else
5969 nb_args++;
5971 } else {
5972 ret_nregs = 1;
5973 ret.type = s->type;
5976 if (ret_nregs > 0) {
5977 /* return in register */
5978 ret.c.i = 0;
5979 PUT_R_RET(&ret, ret.type.t);
5981 if (tok != ')') {
5982 for(;;) {
5983 expr_eq();
5984 gfunc_param_typed(s, sa);
5985 nb_args++;
5986 if (sa)
5987 sa = sa->next;
5988 if (tok == ')')
5989 break;
5990 skip(',');
5993 if (sa)
5994 tcc_error("too few arguments to function");
5995 skip(')');
5996 gfunc_call(nb_args);
5998 if (ret_nregs < 0) {
5999 vsetc(&ret.type, ret.r, &ret.c);
6000 #ifdef TCC_TARGET_RISCV64
6001 arch_transfer_ret_regs(1);
6002 #endif
6003 } else {
6004 /* return value */
6005 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
6006 vsetc(&ret.type, r, &ret.c);
6007 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
6010 /* handle packed struct return */
6011 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6012 int addr, offset;
6014 size = type_size(&s->type, &align);
6015 /* We're writing whole regs often, make sure there's enough
6016 space. Assume register size is power of 2. */
6017 if (regsize > align)
6018 align = regsize;
6019 loc = (loc - size) & -align;
6020 addr = loc;
6021 offset = 0;
6022 for (;;) {
6023 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6024 vswap();
6025 vstore();
6026 vtop--;
6027 if (--ret_nregs == 0)
6028 break;
6029 offset += regsize;
6031 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
6034 /* Promote char/short return values. This matters only
6035 for calling functions that were not compiled by TCC and
6036 only on some architectures. For those where it doesn't
6037 matter we expect things to be already promoted to int,
6038 but not larger. */
6039 t = s->type.t & VT_BTYPE;
6040 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6041 #ifdef PROMOTE_RET
6042 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6043 #else
6044 vtop->type.t = VT_INT;
6045 #endif
6048 if (s->f.func_noreturn)
6049 CODE_OFF();
6050 } else {
6051 break;
6056 #ifndef precedence_parser /* original top-down parser */
6058 static void expr_prod(void)
6060 int t;
6062 unary();
6063 while ((t = tok) == '*' || t == '/' || t == '%') {
6064 next();
6065 unary();
6066 gen_op(t);
6070 static void expr_sum(void)
6072 int t;
6074 expr_prod();
6075 while ((t = tok) == '+' || t == '-') {
6076 next();
6077 expr_prod();
6078 gen_op(t);
6082 static void expr_shift(void)
6084 int t;
6086 expr_sum();
6087 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6088 next();
6089 expr_sum();
6090 gen_op(t);
6094 static void expr_cmp(void)
6096 int t;
6098 expr_shift();
6099 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6100 t == TOK_ULT || t == TOK_UGE) {
6101 next();
6102 expr_shift();
6103 gen_op(t);
6107 static void expr_cmpeq(void)
6109 int t;
6111 expr_cmp();
6112 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6113 next();
6114 expr_cmp();
6115 gen_op(t);
6119 static void expr_and(void)
6121 expr_cmpeq();
6122 while (tok == '&') {
6123 next();
6124 expr_cmpeq();
6125 gen_op('&');
6129 static void expr_xor(void)
6131 expr_and();
6132 while (tok == '^') {
6133 next();
6134 expr_and();
6135 gen_op('^');
6139 static void expr_or(void)
6141 expr_xor();
6142 while (tok == '|') {
6143 next();
6144 expr_xor();
6145 gen_op('|');
6149 static void expr_landor(int op);
6151 static void expr_land(void)
6153 expr_or();
6154 if (tok == TOK_LAND)
6155 expr_landor(tok);
6158 static void expr_lor(void)
6160 expr_land();
6161 if (tok == TOK_LOR)
6162 expr_landor(tok);
6165 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6166 #else /* defined precedence_parser */
6167 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6168 # define expr_lor() unary(), expr_infix(1)
6170 static int precedence(int tok)
6172 switch (tok) {
6173 case TOK_LOR: return 1;
6174 case TOK_LAND: return 2;
6175 case '|': return 3;
6176 case '^': return 4;
6177 case '&': return 5;
6178 case TOK_EQ: case TOK_NE: return 6;
6179 relat: case TOK_ULT: case TOK_UGE: return 7;
6180 case TOK_SHL: case TOK_SAR: return 8;
6181 case '+': case '-': return 9;
6182 case '*': case '/': case '%': return 10;
6183 default:
6184 if (tok >= TOK_ULE && tok <= TOK_GT)
6185 goto relat;
6186 return 0;
6189 static unsigned char prec[256];
6190 static void init_prec(void)
6192 int i;
6193 for (i = 0; i < 256; i++)
6194 prec[i] = precedence(i);
6196 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6198 static void expr_landor(int op);
6200 static void expr_infix(int p)
6202 int t = tok, p2;
6203 while ((p2 = precedence(t)) >= p) {
6204 if (t == TOK_LOR || t == TOK_LAND) {
6205 expr_landor(t);
6206 } else {
6207 next();
6208 unary();
6209 if (precedence(tok) > p2)
6210 expr_infix(p2 + 1);
6211 gen_op(t);
6213 t = tok;
6216 #endif
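/* Added minimal standalone sketch (not part of tcc) of the precedence-climbing
   scheme used by expr_infix() above, for a toy grammar of single digits and
   + - * /.  All toy_* names are made up for the illustration. */
#if 0
#include <stdio.h>
static const char *toy_p;
static int toy_prec(char op) { return op=='+'||op=='-' ? 1 : op=='*'||op=='/' ? 2 : 0; }
static int toy_unary(void) { return *toy_p++ - '0'; }
static int toy_infix(int v, int minp)
{
    while (toy_prec(*toy_p) >= minp) {
        char op = *toy_p++;
        int rhs = toy_unary();
        while (toy_prec(*toy_p) > toy_prec(op))   /* right side binds tighter */
            rhs = toy_infix(rhs, toy_prec(op) + 1);
        switch (op) {
        case '+': v += rhs; break;
        case '-': v -= rhs; break;
        case '*': v *= rhs; break;
        case '/': v /= rhs; break;
        }
    }
    return v;
}
int main(void) { toy_p = "1+2*3-4"; printf("%d\n", toy_infix(toy_unary(), 1)); return 0; }
#endif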
6218 /* Assuming vtop is a value used in a conditional context
6219 (i.e. compared with zero) return 0 if it's false, 1 if
6220 true and -1 if it can't be statically determined. */
6221 static int condition_3way(void)
6223 int c = -1;
6224 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6225 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6226 vdup();
6227 gen_cast_s(VT_BOOL);
6228 c = vtop->c.i;
6229 vpop();
6231 return c;
6234 static void expr_landor(int op)
6236 int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
6237 for(;;) {
6238 c = f ? i : condition_3way();
6239 if (c < 0)
6240 save_regs(1), cc = 0;
6241 else if (c != i)
6242 nocode_wanted++, f = 1;
6243 if (tok != op)
6244 break;
6245 if (c < 0)
6246 t = gvtst(i, t);
6247 else
6248 vpop();
6249 next();
6250 expr_landor_next(op);
6252 if (cc || f) {
6253 vpop();
6254 vpushi(i ^ f);
6255 gsym(t);
6256 nocode_wanted -= f;
6257 } else {
6258 gvtst_set(i, t);
6262 static int is_cond_bool(SValue *sv)
6264 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6265 && (sv->type.t & VT_BTYPE) == VT_INT)
6266 return (unsigned)sv->c.i < 2;
6267 if (sv->r == VT_CMP)
6268 return 1;
6269 return 0;
6272 static void expr_cond(void)
6274 int tt, u, r1, r2, rc, t1, t2, islv, c, g;
6275 SValue sv;
6276 CType type;
6277 int ncw_prev;
6279 expr_lor();
6280 if (tok == '?') {
6281 next();
6282 c = condition_3way();
6283 g = (tok == ':' && gnu_ext);
6284 tt = 0;
6285 if (!g) {
6286 if (c < 0) {
6287 save_regs(1);
6288 tt = gvtst(1, 0);
6289 } else {
6290 vpop();
6292 } else if (c < 0) {
6293 /* needed to avoid having different registers saved in
6294 each branch */
6295 save_regs(1);
6296 gv_dup();
6297 tt = gvtst(0, 0);
6300 ncw_prev = nocode_wanted;
6301 if (c == 0)
6302 nocode_wanted++;
6303 if (!g)
6304 gexpr();
6306 if (c < 0 && vtop->r == VT_CMP) {
6307 t1 = gvtst(0, 0);
6308 vpushi(0);
6309 gvtst_set(0, t1);
6310 gv(RC_INT);
6313 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6314 mk_pointer(&vtop->type);
6315 sv = *vtop; /* save value to handle it later */
6316 vtop--; /* no vpop so that FP stack is not flushed */
6318 if (g) {
6319 u = tt;
6320 } else if (c < 0) {
6321 u = gjmp(0);
6322 gsym(tt);
6323 } else
6324 u = 0;
6326 nocode_wanted = ncw_prev;
6327 if (c == 1)
6328 nocode_wanted++;
6329 skip(':');
6330 expr_cond();
6332 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
6333 if (sv.r == VT_CMP) {
6334 t1 = sv.jtrue;
6335 t2 = u;
6336 } else {
6337 t1 = gvtst(0, 0);
6338 t2 = gjmp(0);
6339 gsym(u);
6340 vpushv(&sv);
6342 gvtst_set(0, t1);
6343 gvtst_set(1, t2);
6344 nocode_wanted = ncw_prev;
6345 // tcc_warning("two conditions expr_cond");
6346 return;
6349 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6350 mk_pointer(&vtop->type);
6352 /* cast operands to correct type according to ISOC rules */
6353 if (!combine_types(&type, &sv, vtop, '?'))
6354 type_incompatibility_error(&sv.type, &vtop->type,
6355 "type mismatch in conditional expression (have '%s' and '%s')");
6356 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6357 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6358 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
6360 /* now we convert second operand */
6361 if (c != 1) {
6362 gen_cast(&type);
6363 if (islv) {
6364 mk_pointer(&vtop->type);
6365 gaddrof();
6366 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6367 gaddrof();
6370 rc = RC_TYPE(type.t);
6371 /* for long longs, we use fixed registers to avoid having
6372 to handle a complicated move */
6373 if (USING_TWO_WORDS(type.t))
6374 rc = RC_RET(type.t);
6376 tt = r2 = 0;
6377 if (c < 0) {
6378 r2 = gv(rc);
6379 tt = gjmp(0);
6381 gsym(u);
6382 nocode_wanted = ncw_prev;
6384 /* this is horrible, but we must also convert the first
6385 operand */
6386 if (c != 0) {
6387 *vtop = sv;
6388 gen_cast(&type);
6389 if (islv) {
6390 mk_pointer(&vtop->type);
6391 gaddrof();
6392 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6393 gaddrof();
6396 if (c < 0) {
6397 r1 = gv(rc);
6398 move_reg(r2, r1, islv ? VT_PTR : type.t);
6399 vtop->r = r2;
6400 gsym(tt);
6403 if (islv)
6404 indir();
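/* Added illustrative sketch (assumed example): the struct-lvalue case the
   comment above describes. */
#if 0
struct S { int mem; } a, b;
int pick(int c) { return (c ? a : b).mem; }   /* compiled as (*(c ? &a : &b)).mem */
#endif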
6408 static void expr_eq(void)
6410 int t;
6412 expr_cond();
6413 if ((t = tok) == '='
6414 || (t >= TOK_A_MOD && t <= TOK_A_DIV)
6415 || t == TOK_A_XOR || t == TOK_A_OR
6416 || t == TOK_A_SHL || t == TOK_A_SAR) {
6417 test_lvalue();
6418 next();
6419 if (t == '=') {
6420 expr_eq();
6421 } else {
6422 vdup();
6423 expr_eq();
6424 gen_op(t & 0x7f);
6426 vstore();
6430 ST_FUNC void gexpr(void)
6432 while (1) {
6433 expr_eq();
6434 if (tok != ',')
6435 break;
6436 vpop();
6437 next();
6441 /* parse a constant expression and return the value in vtop. */
6442 static void expr_const1(void)
6444 const_wanted++;
6445 nocode_wanted += unevalmask + 1;
6446 expr_cond();
6447 nocode_wanted -= unevalmask + 1;
6448 const_wanted--;
6451 /* parse an integer constant and return its value. */
6452 static inline int64_t expr_const64(void)
6454 int64_t c;
6455 expr_const1();
6456 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6457 expect("constant expression");
6458 c = vtop->c.i;
6459 vpop();
6460 return c;
6463 /* parse an integer constant and return its value.
6464 Complain if it doesn't fit in 32 bits (signed or unsigned). */
6465 ST_FUNC int expr_const(void)
6467 int c;
6468 int64_t wc = expr_const64();
6469 c = wc;
6470 if (c != wc && (unsigned)c != wc)
6471 tcc_error("constant exceeds 32 bit");
6472 return c;
6475 /* ------------------------------------------------------------------------- */
6476 /* return from function */
6478 #ifndef TCC_TARGET_ARM64
6479 static void gfunc_return(CType *func_type)
6481 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6482 CType type, ret_type;
6483 int ret_align, ret_nregs, regsize;
6484 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6485 &ret_align, &regsize);
6486 if (ret_nregs < 0) {
6487 #ifdef TCC_TARGET_RISCV64
6488 arch_transfer_ret_regs(0);
6489 #endif
6490 } else if (0 == ret_nregs) {
6491 /* if returning structure, must copy it to implicit
6492 first pointer arg location */
6493 type = *func_type;
6494 mk_pointer(&type);
6495 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6496 indir();
6497 vswap();
6498 /* copy structure value to pointer */
6499 vstore();
6500 } else {
6501 /* returning structure packed into registers */
6502 int size, addr, align, rc;
6503 size = type_size(func_type,&align);
6504 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6505 (vtop->c.i & (ret_align-1)))
6506 && (align & (ret_align-1))) {
6507 loc = (loc - size) & -ret_align;
6508 addr = loc;
6509 type = *func_type;
6510 vset(&type, VT_LOCAL | VT_LVAL, addr);
6511 vswap();
6512 vstore();
6513 vpop();
6514 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6516 vtop->type = ret_type;
6517 rc = RC_RET(ret_type.t);
6518 if (ret_nregs == 1)
6519 gv(rc);
6520 else {
6521 for (;;) {
6522 vdup();
6523 gv(rc);
6524 vpop();
6525 if (--ret_nregs == 0)
6526 break;
6527 /* We assume that when a structure is returned in multiple
6528 registers, their classes are consecutive values of the
6529 sequence s(n) = 2^n */
6530 rc <<= 1;
6531 vtop->c.i += regsize;
6535 } else {
6536 gv(RC_RET(func_type->t));
6538 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6540 #endif
6542 static void check_func_return(void)
6544 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6545 return;
6546 if (!strcmp (funcname, "main")
6547 && (func_vt.t & VT_BTYPE) == VT_INT) {
6548 /* main returns 0 by default */
6549 vpushi(0);
6550 gen_assign_cast(&func_vt);
6551 gfunc_return(&func_vt);
6552 } else {
6553 tcc_warning("function might return no value: '%s'", funcname);
6557 /* ------------------------------------------------------------------------- */
6558 /* switch/case */
6560 static int case_cmp(const void *pa, const void *pb)
6562 int64_t a = (*(struct case_t**) pa)->v1;
6563 int64_t b = (*(struct case_t**) pb)->v1;
6564 return a < b ? -1 : a > b;
6567 static void gtst_addr(int t, int a)
6569 gsym_addr(gvtst(0, t), a);
6572 static void gcase(struct case_t **base, int len, int *bsym)
6574 struct case_t *p;
6575 int e;
6576 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6577 while (len > 8) {
6578 /* binary search */
6579 p = base[len/2];
6580 vdup();
6581 if (ll)
6582 vpushll(p->v2);
6583 else
6584 vpushi(p->v2);
6585 gen_op(TOK_LE);
6586 e = gvtst(1, 0);
6587 vdup();
6588 if (ll)
6589 vpushll(p->v1);
6590 else
6591 vpushi(p->v1);
6592 gen_op(TOK_GE);
6593 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6594 /* x < v1 */
6595 gcase(base, len/2, bsym);
6596 /* x > v2 */
6597 gsym(e);
6598 e = len/2 + 1;
6599 base += e; len -= e;
6601 /* linear scan */
6602 while (len--) {
6603 p = *base++;
6604 vdup();
6605 if (ll)
6606 vpushll(p->v2);
6607 else
6608 vpushi(p->v2);
6609 if (p->v1 == p->v2) {
6610 gen_op(TOK_EQ);
6611 gtst_addr(0, p->sym);
6612 } else {
6613 gen_op(TOK_LE);
6614 e = gvtst(1, 0);
6615 vdup();
6616 if (ll)
6617 vpushll(p->v1);
6618 else
6619 vpushi(p->v1);
6620 gen_op(TOK_GE);
6621 gtst_addr(0, p->sym);
6622 gsym(e);
6625 *bsym = gjmp(*bsym);
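/* Added illustrative sketch (assumed example): the sorted case list gcase()
   above dispatches over; v1/v2 ranges come from the GNU 'case lo ... hi:'
   extension. */
#if 0
switch (x) {
case 1:           /* v1 == v2: a single equality test              */
    break;
case 10 ... 19:   /* v1 = 10, v2 = 19: two comparisons (GE and LE) */
    break;
default:
    break;
}
#endif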
6628 /* ------------------------------------------------------------------------- */
6629 /* __attribute__((cleanup(fn))) */
6631 static void try_call_scope_cleanup(Sym *stop)
6633 Sym *cls = cur_scope->cl.s;
6635 for (; cls != stop; cls = cls->ncl) {
6636 Sym *fs = cls->next;
6637 Sym *vs = cls->prev_tok;
6639 vpushsym(&fs->type, fs);
6640 vset(&vs->type, vs->r, vs->c);
6641 vtop->sym = vs;
6642 mk_pointer(&vtop->type);
6643 gaddrof();
6644 gfunc_call(1);
6648 static void try_call_cleanup_goto(Sym *cleanupstate)
6650 Sym *oc, *cc;
6651 int ocd, ccd;
6653 if (!cur_scope->cl.s)
6654 return;
6656 /* search the nearest common ancestor (NCA) of both cleanup chains, given their parents and initial depths */
6657 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6658 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6660 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6662 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6665 try_call_scope_cleanup(cc);
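/* Illustrative example for try_call_cleanup_goto(): only cleanups below the
   nearest common ancestor of the goto site and the label are run.

       {
           __attribute__((cleanup(f1))) int a = 0;
       again:
           {
               __attribute__((cleanup(f2))) int b = 0;
               if (cond())
                   goto again;   -- emits f2(&b) only; 'a' stays live
           }
       }

   (f1, f2, cond are hypothetical; forward gotos take the pending-goto path
   handled by block_cleanup() below instead.) */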
6668 /* call 'func' for each __attribute__((cleanup(func))) */
6669 static void block_cleanup(struct scope *o)
6671 int jmp = 0;
6672 Sym *g, **pg;
6673 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6674 if (g->prev_tok->r & LABEL_FORWARD) {
6675 Sym *pcl = g->next;
6676 if (!jmp)
6677 jmp = gjmp(0);
6678 gsym(pcl->jnext);
6679 try_call_scope_cleanup(o->cl.s);
6680 pcl->jnext = gjmp(0);
6681 if (!o->cl.n)
6682 goto remove_pending;
6683 g->c = o->cl.n;
6684 pg = &g->prev;
6685 } else {
6686 remove_pending:
6687 *pg = g->prev;
6688 sym_free(g);
6691 gsym(jmp);
6692 try_call_scope_cleanup(o->cl.s);
6695 /* ------------------------------------------------------------------------- */
6696 /* VLA */
6698 static void vla_restore(int loc)
6700 if (loc)
6701 gen_vla_sp_restore(loc);
6704 static void vla_leave(struct scope *o)
6706 if (o->vla.num < cur_scope->vla.num)
6707 vla_restore(o->vla.loc);
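/* Illustrative example of what these helpers manage:

       void f(int n)
       {
           {
               int a[n];    (stack pointer saved before the first VLA of the
                             function, then adjusted by gen_vla_alloc)
           }                (leaving the block restores the saved stack
                             pointer via vla_leave -> vla_restore)
       }
*/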
6710 /* ------------------------------------------------------------------------- */
6711 /* local scopes */
6713 void new_scope(struct scope *o)
6715 /* copy and link previous scope */
6716 *o = *cur_scope;
6717 o->prev = cur_scope;
6718 cur_scope = o;
6720 /* record local declaration stack position */
6721 o->lstk = local_stack;
6722 o->llstk = local_label_stack;
6724 ++local_scope;
6725 tcc_debug_stabn(N_LBRAC, ind - func_ind);
6728 void prev_scope(struct scope *o, int is_expr)
6730 vla_leave(o->prev);
6732 if (o->cl.s != o->prev->cl.s)
6733 block_cleanup(o->prev);
6735 /* pop locally defined labels */
6736 label_pop(&local_label_stack, o->llstk, is_expr);
6738 /* In the is_expr case (a statement expression is finished here),
6739 vtop might refer to symbols on the local_stack. Either via the
6740 type or via vtop->sym. We can't pop those nor any that in turn
6741 might be referred to. To make it easier we don't roll back
6742 any symbols in that case; some upper level call to block() will
6743 do that. We do have to remove such symbols from the lookup
6744 tables, though. sym_pop will do that. */
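/* Illustrative example of the is_expr case described above (GNU statement
   expression):

       int y = ({ struct S s = make(); s.x; });

   the value left on vtop refers to 's' (and to 'struct S' declared inside
   the braces), so those Syms cannot be freed yet. */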
6746 /* pop locally defined symbols */
6747 pop_local_syms(0, &local_stack, o->lstk, is_expr, 0);
6749 tcc_debug_stabn(N_RBRAC, ind - func_ind);
6751 cur_scope = o->prev;
6752 --local_scope;
6755 /* leave a scope via break/continue(/goto) */
6756 void leave_scope(struct scope *o)
6758 if (!o)
6759 return;
6760 try_call_scope_cleanup(o->cl.s);
6761 vla_leave(o);
6764 /* ------------------------------------------------------------------------- */
6765 /* call block from 'for do while' loops */
6767 static void lblock(int *bsym, int *csym)
6769 struct scope *lo = loop_scope, *co = cur_scope;
6770 int *b = co->bsym, *c = co->csym;
6771 if (csym) {
6772 co->csym = csym;
6773 loop_scope = co;
6775 co->bsym = bsym;
6776 block(0);
6777 co->bsym = b;
6778 if (csym) {
6779 co->csym = c;
6780 loop_scope = lo;
6784 static void block(int is_expr)
6786 int a, b, c, d, e, t;
6787 struct scope o;
6788 Sym *s;
6790 if (is_expr) {
6791 /* default return value is (void) */
6792 vpushi(0);
6793 vtop->type.t = VT_VOID;
6796 again:
6797 t = tok, next();
6799 if (t == TOK_IF) {
6800 skip('(');
6801 gexpr();
6802 skip(')');
6803 a = gvtst(1, 0);
6804 block(0);
6805 if (tok == TOK_ELSE) {
6806 d = gjmp(0);
6807 gsym(a);
6808 next();
6809 block(0);
6810 gsym(d); /* patch else jmp */
6811 } else {
6812 gsym(a);
6815 } else if (t == TOK_WHILE) {
6816 d = gind();
6817 skip('(');
6818 gexpr();
6819 skip(')');
6820 a = gvtst(1, 0);
6821 b = 0;
6822 lblock(&a, &b);
6823 gjmp_addr(d);
6824 gsym_addr(b, d);
6825 gsym(a);
6827 } else if (t == '{') {
6828 new_scope(&o);
6830 /* handle local labels declarations */
6831 while (tok == TOK_LABEL) {
6832 do {
6833 next();
6834 if (tok < TOK_UIDENT)
6835 expect("label identifier");
6836 label_push(&local_label_stack, tok, LABEL_DECLARED);
6837 next();
6838 } while (tok == ',');
6839 skip(';');
6842 while (tok != '}') {
6843 decl(VT_LOCAL);
6844 if (tok != '}') {
6845 if (is_expr)
6846 vpop();
6847 block(is_expr);
6851 prev_scope(&o, is_expr);
6852 if (local_scope)
6853 next();
6854 else if (!nocode_wanted)
6855 check_func_return();
6857 } else if (t == TOK_RETURN) {
6858 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6859 if (tok != ';') {
6860 gexpr();
6861 if (b) {
6862 gen_assign_cast(&func_vt);
6863 } else {
6864 if (vtop->type.t != VT_VOID)
6865 tcc_warning("void function returns a value");
6866 vtop--;
6868 } else if (b) {
6869 tcc_warning("'return' with no value");
6870 b = 0;
6872 leave_scope(root_scope);
6873 if (b)
6874 gfunc_return(&func_vt);
6875 skip(';');
6876 /* jump unless last stmt in top-level block */
6877 if (tok != '}' || local_scope != 1)
6878 rsym = gjmp(rsym);
6879 CODE_OFF();
6881 } else if (t == TOK_BREAK) {
6882 /* compute jump */
6883 if (!cur_scope->bsym)
6884 tcc_error("cannot break");
6885 if (cur_switch && cur_scope->bsym == cur_switch->bsym)
6886 leave_scope(cur_switch->scope);
6887 else
6888 leave_scope(loop_scope);
6889 *cur_scope->bsym = gjmp(*cur_scope->bsym);
6890 skip(';');
6892 } else if (t == TOK_CONTINUE) {
6893 /* compute jump */
6894 if (!cur_scope->csym)
6895 tcc_error("cannot continue");
6896 leave_scope(loop_scope);
6897 *cur_scope->csym = gjmp(*cur_scope->csym);
6898 skip(';');
6900 } else if (t == TOK_FOR) {
6901 new_scope(&o);
6903 skip('(');
6904 if (tok != ';') {
6905 /* c99 for-loop init decl? */
6906 if (!decl0(VT_LOCAL, 1, NULL)) {
6907 /* no, regular for-loop init expr */
6908 gexpr();
6909 vpop();
6912 skip(';');
6913 a = b = 0;
6914 c = d = gind();
6915 if (tok != ';') {
6916 gexpr();
6917 a = gvtst(1, 0);
6919 skip(';');
6920 if (tok != ')') {
6921 e = gjmp(0);
6922 d = gind();
6923 gexpr();
6924 vpop();
6925 gjmp_addr(c);
6926 gsym(e);
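/* Rough layout of the code emitted for 'for (init; cond; incr) body'
   (illustrative; the body and the backward jumps come from lblock and
   gjmp_addr below):

       init
   c:  cond; jump-if-false -> break chain 'a'
       jmp body                (the forward jump 'e' resolved just above)
   d:  incr
       jmp c
   body: ...                   (continue -> d, break -> a)
       jmp d
   a:  ...

   Without an incr expression, d stays equal to c and the body jumps
   straight back to the condition. */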
6928 skip(')');
6929 lblock(&a, &b);
6930 gjmp_addr(d);
6931 gsym_addr(b, d);
6932 gsym(a);
6933 prev_scope(&o, 0);
6935 } else if (t == TOK_DO) {
6936 a = b = 0;
6937 d = gind();
6938 lblock(&a, &b);
6939 gsym(b);
6940 skip(TOK_WHILE);
6941 skip('(');
6942 gexpr();
6943 skip(')');
6944 skip(';');
6945 c = gvtst(0, 0);
6946 gsym_addr(c, d);
6947 gsym(a);
6949 } else if (t == TOK_SWITCH) {
6950 struct switch_t *sw;
6952 sw = tcc_mallocz(sizeof *sw);
6953 sw->bsym = &a;
6954 sw->scope = cur_scope;
6955 sw->prev = cur_switch;
6956 cur_switch = sw;
6958 skip('(');
6959 gexpr();
6960 skip(')');
6961 sw->sv = *vtop--; /* save switch value */
6963 a = 0;
6964 b = gjmp(0); /* jump to first case */
6965 lblock(&a, NULL);
6966 a = gjmp(a); /* add implicit break */
6967 /* case lookup */
6968 gsym(b);
6970 qsort(sw->p, sw->n, sizeof(void*), case_cmp);
6971 for (b = 1; b < sw->n; b++)
6972 if (sw->p[b - 1]->v2 >= sw->p[b]->v1)
6973 tcc_error("duplicate case value");
6975 /* Our switch table sorting is signed, so the compared
6976 value needs to be as well when it's 64bit. */
6977 vpushv(&sw->sv);
6978 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
6979 vtop->type.t &= ~VT_UNSIGNED;
6980 gv(RC_INT);
6981 d = 0, gcase(sw->p, sw->n, &d);
6982 vpop();
6983 if (sw->def_sym)
6984 gsym_addr(d, sw->def_sym);
6985 else
6986 gsym(d);
6987 /* break label */
6988 gsym(a);
6990 dynarray_reset(&sw->p, &sw->n);
6991 cur_switch = sw->prev;
6992 tcc_free(sw);
6994 } else if (t == TOK_CASE) {
6995 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
6996 if (!cur_switch)
6997 expect("switch");
6998 cr->v1 = cr->v2 = expr_const64();
6999 if (gnu_ext && tok == TOK_DOTS) {
7000 next();
7001 cr->v2 = expr_const64();
7002 if (cr->v2 < cr->v1)
7003 tcc_warning("empty case range");
7005 cr->sym = gind();
7006 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
7007 skip(':');
7008 is_expr = 0;
7009 goto block_after_label;
7011 } else if (t == TOK_DEFAULT) {
7012 if (!cur_switch)
7013 expect("switch");
7014 if (cur_switch->def_sym)
7015 tcc_error("too many 'default'");
7016 cur_switch->def_sym = gind();
7017 skip(':');
7018 is_expr = 0;
7019 goto block_after_label;
7021 } else if (t == TOK_GOTO) {
7022 vla_restore(root_scope->vla.loc);
7023 if (tok == '*' && gnu_ext) {
7024 /* computed goto */
7025 next();
7026 gexpr();
7027 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
7028 expect("pointer");
7029 ggoto();
7031 } else if (tok >= TOK_UIDENT) {
7032 s = label_find(tok);
7033 /* put forward definition if needed */
7034 if (!s)
7035 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
7036 else if (s->r == LABEL_DECLARED)
7037 s->r = LABEL_FORWARD;
7039 if (s->r & LABEL_FORWARD) {
7040 /* start new goto chain for cleanups, linked via label->next */
7041 if (cur_scope->cl.s && !nocode_wanted) {
7042 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
7043 pending_gotos->prev_tok = s;
7044 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
7045 pending_gotos->next = s;
7047 s->jnext = gjmp(s->jnext);
7048 } else {
7049 try_call_cleanup_goto(s->cleanupstate);
7050 gjmp_addr(s->jnext);
7052 next();
7054 } else {
7055 expect("label identifier");
7057 skip(';');
7059 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
7060 asm_instr();
7062 } else {
7063 if (tok == ':' && t >= TOK_UIDENT) {
7064 /* label case */
7065 next();
7066 s = label_find(t);
7067 if (s) {
7068 if (s->r == LABEL_DEFINED)
7069 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
7070 s->r = LABEL_DEFINED;
7071 if (s->next) {
7072 Sym *pcl; /* pending cleanup goto */
7073 for (pcl = s->next; pcl; pcl = pcl->prev)
7074 gsym(pcl->jnext);
7075 sym_pop(&s->next, NULL, 0);
7076 } else
7077 gsym(s->jnext);
7078 } else {
7079 s = label_push(&global_label_stack, t, LABEL_DEFINED);
7081 s->jnext = gind();
7082 s->cleanupstate = cur_scope->cl.s;
7084 block_after_label:
7085 vla_restore(cur_scope->vla.loc);
7086 /* we accept this, but it is a mistake */
7087 if (tok == '}') {
7088 tcc_warning("deprecated use of label at end of compound statement");
7089 } else {
7090 goto again;
7093 } else {
7094 /* expression case */
7095 if (t != ';') {
7096 unget_tok(t);
7097 if (is_expr) {
7098 vpop();
7099 gexpr();
7100 } else {
7101 gexpr();
7102 vpop();
7104 skip(';');
7110 /* This skips over a stream of tokens containing balanced {} and ()
7111 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7112 with a '{'). If STR is non-NULL, the skipped tokens are allocated and stored
7113 in *STR. This doesn't check if () and {} are nested correctly,
7114 i.e. "({)}" is accepted. */
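/* Illustrative examples: starting at the '{' of

       { .x = 1, .y = (2, 3) }

   everything up to and including the matching '}' is consumed (or saved),
   while scanning an unbraced expression such as

       f(a, b) + 1, g()

   stops at the first ',' that is not inside parentheses.  This is used
   below to save initializer tokens and inline function bodies for later
   re-parsing. */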
7115 static void skip_or_save_block(TokenString **str)
7117 int braces = tok == '{';
7118 int level = 0;
7119 if (str)
7120 *str = tok_str_alloc();
7122 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
7123 int t;
7124 if (tok == TOK_EOF) {
7125 if (str || level > 0)
7126 tcc_error("unexpected end of file");
7127 else
7128 break;
7130 if (str)
7131 tok_str_add_tok(*str);
7132 t = tok;
7133 next();
7134 if (t == '{' || t == '(') {
7135 level++;
7136 } else if (t == '}' || t == ')') {
7137 level--;
7138 if (level == 0 && braces && t == '}')
7139 break;
7142 if (str) {
7143 tok_str_add(*str, -1);
7144 tok_str_add(*str, 0);
7148 #define EXPR_CONST 1
7149 #define EXPR_ANY 2
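/* Illustrative examples of the two modes used below (EXPR_CONST is used
   when writing into a Section, i.e. static storage):

       static int  a = 1 + 2;     -- ok, constant expression
       static int *p = &a;        -- ok, address constant (VT_SYM)
       static int  b = rand();    -- rejected: "initializer element is not
                                     constant"

   Inside functions (EXPR_ANY) any assignment expression is accepted, and
   compound literals in constant initializers are forced to global
   allocation via global_expr. */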
7151 static void parse_init_elem(int expr_type)
7153 int saved_global_expr;
7154 switch(expr_type) {
7155 case EXPR_CONST:
7156 /* compound literals must be allocated globally in this case */
7157 saved_global_expr = global_expr;
7158 global_expr = 1;
7159 expr_const1();
7160 global_expr = saved_global_expr;
7161 /* NOTE: symbols are accepted, as well as lvalues on anonymous symbols
7162 (compound literals). */
7163 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
7164 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
7165 || vtop->sym->v < SYM_FIRST_ANOM))
7166 #ifdef TCC_TARGET_PE
7167 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
7168 #endif
7170 tcc_error("initializer element is not constant");
7171 break;
7172 case EXPR_ANY:
7173 expr_eq();
7174 break;
7178 /* put zeros for variable based init */
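/* Illustrative example: for a local

       int a[8] = { 1 };

   the 28 bytes after the first element (assuming a 4-byte int) are cleared
   by the memset() call emitted below; for static storage (sec != NULL)
   nothing is emitted because .data/.bss is zero-initialized already. */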
7179 static void init_putz(Section *sec, unsigned long c, int size)
7181 if (sec) {
7182 /* nothing to do because globals are already set to zero */
7183 } else {
7184 vpush_global_sym(&func_old_type, TOK_memset);
7185 vseti(VT_LOCAL, c);
7186 #ifdef TCC_TARGET_ARM
7187 vpushs(size);
7188 vpushi(0);
7189 #else
7190 vpushi(0);
7191 vpushs(size);
7192 #endif
7193 gfunc_call(3);
7197 #define DIF_FIRST 1
7198 #define DIF_SIZE_ONLY 2
7199 #define DIF_HAVE_ELEM 4
7201 /* 'type' is the array or struct type. 'c' is the array or struct
7202 address. cur_field is the pointer to the current
7203 field, for arrays the 'c' member contains the current start
7204 index. 'flags' is as in decl_initializer.
7205 'al' contains the already initialized length of the
7206 current container (starting at c). This returns the new length of that. */
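/* Illustrative examples of the designators handled here:

       struct P { int x, y; };
       int      a[10] = { [2] = 1, [5 ... 7] = 2 };   (GNU range designator)
       struct P p     = { .y = 3 };

   A range like [5 ... 7] is only supported as the last designator; the
   element is initialized once and then replicated nb_elems times (vstore
   loop for locals, memcpy for section data, see the end of this
   function). */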
7207 static int decl_designator(CType *type, Section *sec, unsigned long c,
7208 Sym **cur_field, int flags, int al)
7210 Sym *s, *f;
7211 int index, index_last, align, l, nb_elems, elem_size;
7212 unsigned long corig = c;
7214 elem_size = 0;
7215 nb_elems = 1;
7217 if (flags & DIF_HAVE_ELEM)
7218 goto no_designator;
7220 if (gnu_ext && tok >= TOK_UIDENT) {
7221 l = tok, next();
7222 if (tok == ':')
7223 goto struct_field;
7224 unget_tok(l);
7227 /* NOTE: we only support ranges for last designator */
7228 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
7229 if (tok == '[') {
7230 if (!(type->t & VT_ARRAY))
7231 expect("array type");
7232 next();
7233 index = index_last = expr_const();
7234 if (tok == TOK_DOTS && gnu_ext) {
7235 next();
7236 index_last = expr_const();
7238 skip(']');
7239 s = type->ref;
7240 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
7241 index_last < index)
7242 tcc_error("invalid index");
7243 if (cur_field)
7244 (*cur_field)->c = index_last;
7245 type = pointed_type(type);
7246 elem_size = type_size(type, &align);
7247 c += index * elem_size;
7248 nb_elems = index_last - index + 1;
7249 } else {
7250 int cumofs;
7251 next();
7252 l = tok;
7253 struct_field:
7254 next();
7255 if ((type->t & VT_BTYPE) != VT_STRUCT)
7256 expect("struct/union type");
7257 cumofs = 0;
7258 f = find_field(type, l, &cumofs);
7259 if (!f)
7260 expect("field");
7261 if (cur_field)
7262 *cur_field = f;
7263 type = &f->type;
7264 c += cumofs + f->c;
7266 cur_field = NULL;
7268 if (!cur_field) {
7269 if (tok == '=') {
7270 next();
7271 } else if (!gnu_ext) {
7272 expect("=");
7274 } else {
7275 no_designator:
7276 if (type->t & VT_ARRAY) {
7277 index = (*cur_field)->c;
7278 if (type->ref->c >= 0 && index >= type->ref->c)
7279 tcc_error("index too large");
7280 type = pointed_type(type);
7281 c += index * type_size(type, &align);
7282 } else {
7283 f = *cur_field;
7284 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
7285 *cur_field = f = f->next;
7286 if (!f)
7287 tcc_error("too many field init");
7288 type = &f->type;
7289 c += f->c;
7292 /* must put zero in holes (note that doing it that way
7293 ensures that it even works with designators) */
7294 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
7295 init_putz(sec, corig + al, c - corig - al);
7296 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
7298 /* XXX: make it more general */
7299 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
7300 unsigned long c_end;
7301 uint8_t *src, *dst;
7302 int i;
7304 if (!sec) {
7305 vset(type, VT_LOCAL|VT_LVAL, c);
7306 for (i = 1; i < nb_elems; i++) {
7307 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
7308 vswap();
7309 vstore();
7311 vpop();
7312 } else if (!NODATA_WANTED) {
7313 c_end = c + nb_elems * elem_size;
7314 if (c_end > sec->data_allocated)
7315 section_realloc(sec, c_end);
7316 src = sec->data + c;
7317 dst = src;
7318 for(i = 1; i < nb_elems; i++) {
7319 dst += elem_size;
7320 memcpy(dst, src, elem_size);
7324 c += nb_elems * type_size(type, &align);
7325 if (c - corig > al)
7326 al = c - corig;
7327 return al;
7330 /* store a value or an expression directly in global data or in local array */
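/* Illustrative example for the section path below: at file scope

       struct S { int *p; };
       static int x;
       static struct S s = (struct S){ &x };

   the compound literal is first materialized as an anonymous global; its
   bytes and its relocations (here the one created for '&x') are then
   copied into 's' by the VT_SYM|VT_CONST branch below. */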
7331 static void init_putv(CType *type, Section *sec, unsigned long c)
7333 int bt;
7334 void *ptr;
7335 CType dtype;
7337 dtype = *type;
7338 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7340 if (sec) {
7341 int size, align;
7342 /* XXX: not portable */
7343 /* XXX: generate error if incorrect relocation */
7344 gen_assign_cast(&dtype);
7345 bt = type->t & VT_BTYPE;
7347 if ((vtop->r & VT_SYM)
7348 && bt != VT_PTR
7349 && bt != VT_FUNC
7350 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7351 || (type->t & VT_BITFIELD))
7352 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7354 tcc_error("initializer element is not computable at load time");
7356 if (NODATA_WANTED) {
7357 vtop--;
7358 return;
7361 size = type_size(type, &align);
7362 section_reserve(sec, c + size);
7363 ptr = sec->data + c;
7365 /* XXX: make code faster ? */
7366 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7367 vtop->sym->v >= SYM_FIRST_ANOM &&
7368 /* XXX This rejects compound literals like
7369 '(void *){ptr}'. The problem is that '&sym' is
7370 represented the same way, which would be ruled out
7371 by the SYM_FIRST_ANOM check above, but also '"string"'
7372 in 'char *p = "string"' is represented the same
7373 with the type being VT_PTR and the symbol being an
7374 anonymous one. That is, there's no difference in vtop
7375 between '(void *){x}' and '&(void *){x}'. Ignore
7376 pointer typed entities here. Hopefully no real code
7377 will ever use compound literals with scalar type. */
7378 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7379 /* These come from compound literals, memcpy stuff over. */
7380 Section *ssec;
7381 ElfSym *esym;
7382 ElfW_Rel *rel;
7383 esym = elfsym(vtop->sym);
7384 ssec = tcc_state->sections[esym->st_shndx];
7385 memmove (ptr, ssec->data + esym->st_value, size);
7386 if (ssec->reloc) {
7387 /* We need to copy over all memory contents, and that
7388 includes relocations. Use the fact that relocs are
7389 created in order, so look from the end of relocs
7390 until we hit one before the copied region. */
7391 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
7392 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
7393 while (num_relocs--) {
7394 rel--;
7395 if (rel->r_offset >= esym->st_value + size)
7396 continue;
7397 if (rel->r_offset < esym->st_value)
7398 break;
7399 /* Note: if the same fields are initialized multiple
7400 times (possible with designators) then we possibly
7401 add multiple relocations for the same offset here.
7402 That would lead to wrong code, the last reloc needs
7403 to win. We clean this up later after the whole
7404 initializer is parsed. */
7405 put_elf_reloca(symtab_section, sec,
7406 c + rel->r_offset - esym->st_value,
7407 ELFW(R_TYPE)(rel->r_info),
7408 ELFW(R_SYM)(rel->r_info),
7409 #if PTR_SIZE == 8
7410 rel->r_addend
7411 #else
7413 #endif
7417 } else {
7418 if (type->t & VT_BITFIELD) {
7419 int bit_pos, bit_size, bits, n;
7420 unsigned char *p, v, m;
7421 bit_pos = BIT_POS(vtop->type.t);
7422 bit_size = BIT_SIZE(vtop->type.t);
7423 p = (unsigned char*)ptr + (bit_pos >> 3);
7424 bit_pos &= 7, bits = 0;
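/* Worked example (illustrative): storing the value 85 (binary 1010101)
   into a field with bit_pos = 3, bit_size = 7:
     byte 0: n = 5, v = (unsigned char)(85 << 3) = 0xA8, m = 0x1F << 3 = 0xF8,
             so byte 0 = (byte 0 & ~0xF8) | 0xA8
     byte 1: n = 2, v = 85 >> 5 = 0x02, m = 0x03,
             so byte 1 = (byte 1 & ~0x03) | 0x02 */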
7425 while (bit_size) {
7426 n = 8 - bit_pos;
7427 if (n > bit_size)
7428 n = bit_size;
7429 v = vtop->c.i >> bits << bit_pos;
7430 m = ((1 << n) - 1) << bit_pos;
7431 *p = (*p & ~m) | (v & m);
7432 bits += n, bit_size -= n, bit_pos = 0, ++p;
7434 } else
7435 switch(bt) {
7436 /* XXX: when cross-compiling we assume that each type has the
7437 same representation on host and target, which is likely to
7438 be wrong in the case of long double */
7439 case VT_BOOL:
7440 vtop->c.i = vtop->c.i != 0;
7441 case VT_BYTE:
7442 *(char *)ptr |= vtop->c.i;
7443 break;
7444 case VT_SHORT:
7445 *(short *)ptr |= vtop->c.i;
7446 break;
7447 case VT_FLOAT:
7448 *(float*)ptr = vtop->c.f;
7449 break;
7450 case VT_DOUBLE:
7451 *(double *)ptr = vtop->c.d;
7452 break;
7453 case VT_LDOUBLE:
7454 #if defined TCC_IS_NATIVE_387
7455 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7456 memcpy(ptr, &vtop->c.ld, 10);
7457 #ifdef __TINYC__
7458 else if (sizeof (long double) == sizeof (double))
7459 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7460 #endif
7461 else if (vtop->c.ld == 0.0)
7463 else
7464 #endif
7465 if (sizeof(long double) == LDOUBLE_SIZE)
7466 *(long double*)ptr = vtop->c.ld;
7467 else if (sizeof(double) == LDOUBLE_SIZE)
7468 *(double *)ptr = (double)vtop->c.ld;
7469 else
7470 tcc_error("can't cross compile long double constants");
7471 break;
7472 #if PTR_SIZE != 8
7473 case VT_LLONG:
7474 *(long long *)ptr |= vtop->c.i;
7475 break;
7476 #else
7477 case VT_LLONG:
7478 #endif
7479 case VT_PTR:
7481 addr_t val = vtop->c.i;
7482 #if PTR_SIZE == 8
7483 if (vtop->r & VT_SYM)
7484 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7485 else
7486 *(addr_t *)ptr |= val;
7487 #else
7488 if (vtop->r & VT_SYM)
7489 greloc(sec, vtop->sym, c, R_DATA_PTR);
7490 *(addr_t *)ptr |= val;
7491 #endif
7492 break;
7494 default:
7496 int val = vtop->c.i;
7497 #if PTR_SIZE == 8
7498 if (vtop->r & VT_SYM)
7499 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7500 else
7501 *(int *)ptr |= val;
7502 #else
7503 if (vtop->r & VT_SYM)
7504 greloc(sec, vtop->sym, c, R_DATA_PTR);
7505 *(int *)ptr |= val;
7506 #endif
7507 break;
7511 vtop--;
7512 } else {
7513 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7514 vswap();
7515 vstore();
7516 vpop();
7520 /* 'type' contains the type and storage info. 'c' is the offset of the
7521 object in section 'sec'. If 'sec' is NULL, it means stack based
7522 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7523 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7524 size only evaluation is wanted (only for arrays). */
7525 static void decl_initializer(CType *type, Section *sec, unsigned long c,
7526 int flags)
7528 int len, n, no_oblock, nb, i;
7529 int size1, align1;
7530 Sym *s, *f;
7531 Sym indexsym;
7532 CType *t1;
7534 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7535 /* In case of strings we have special handling for arrays, so
7536 don't consume them as initializer value (which would commit them
7537 to some anonymous symbol). */
7538 tok != TOK_LSTR && tok != TOK_STR &&
7539 !(flags & DIF_SIZE_ONLY)) {
7540 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7541 flags |= DIF_HAVE_ELEM;
7544 if ((flags & DIF_HAVE_ELEM) &&
7545 !(type->t & VT_ARRAY) &&
7546 /* Use is_compatible_unqualified_types to ignore toplevel qualifiers.
7547 The source type might have VT_CONSTANT set, which is
7548 of course assignable to non-const elements. */
7549 is_compatible_unqualified_types(type, &vtop->type)) {
7550 init_putv(type, sec, c);
7551 } else if (type->t & VT_ARRAY) {
7552 s = type->ref;
7553 n = s->c;
7554 t1 = pointed_type(type);
7555 size1 = type_size(t1, &align1);
7557 no_oblock = 1;
7558 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7559 tok == '{') {
7560 if (tok != '{')
7561 tcc_error("character array initializer must be a literal,"
7562 " optionally enclosed in braces");
7563 skip('{');
7564 no_oblock = 0;
7567 /* only parse strings here if correct type (otherwise: handle
7568 them as ((w)char *) expressions) */
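/* Illustrative examples of the string cases below:

       char a[]  = "hi";     -- n < 0: size becomes 3 (trailing NUL added)
       char b[2] = "hi";     -- fits exactly, NUL dropped, no warning
       char c[2] = "hip";    -- warning: initializer-string ... too long
*/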
7569 if ((tok == TOK_LSTR &&
7570 #ifdef TCC_TARGET_PE
7571 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7572 #else
7573 (t1->t & VT_BTYPE) == VT_INT
7574 #endif
7575 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7576 len = 0;
7577 while (tok == TOK_STR || tok == TOK_LSTR) {
7578 int cstr_len, ch;
7580 /* compute maximum number of chars wanted */
7581 if (tok == TOK_STR)
7582 cstr_len = tokc.str.size;
7583 else
7584 cstr_len = tokc.str.size / sizeof(nwchar_t);
7585 cstr_len--;
7586 nb = cstr_len;
7587 if (n >= 0 && nb > (n - len))
7588 nb = n - len;
7589 if (!(flags & DIF_SIZE_ONLY)) {
7590 if (cstr_len > nb)
7591 tcc_warning("initializer-string for array is too long");
7592 /* in order to go faster for the common case (char
7593 string in a global variable), we handle it
7594 specially */
7595 if (sec && tok == TOK_STR && size1 == 1) {
7596 if (!NODATA_WANTED)
7597 memcpy(sec->data + c + len, tokc.str.data, nb);
7598 } else {
7599 for(i=0;i<nb;i++) {
7600 if (tok == TOK_STR)
7601 ch = ((unsigned char *)tokc.str.data)[i];
7602 else
7603 ch = ((nwchar_t *)tokc.str.data)[i];
7604 vpushi(ch);
7605 init_putv(t1, sec, c + (len + i) * size1);
7609 len += nb;
7610 next();
7612 /* only add trailing zero if enough storage (no
7613 warning in this case since it is standard) */
7614 if (n < 0 || len < n) {
7615 if (!(flags & DIF_SIZE_ONLY)) {
7616 vpushi(0);
7617 init_putv(t1, sec, c + (len * size1));
7619 len++;
7621 len *= size1;
7622 } else {
7623 indexsym.c = 0;
7624 f = &indexsym;
7626 do_init_list:
7627 len = 0;
7628 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7629 len = decl_designator(type, sec, c, &f, flags, len);
7630 flags &= ~DIF_HAVE_ELEM;
7631 if (type->t & VT_ARRAY) {
7632 ++indexsym.c;
7633 /* special test for multi dimensional arrays (may not
7634 be strictly correct if designators are used at the
7635 same time) */
7636 if (no_oblock && len >= n*size1)
7637 break;
7638 } else {
7639 if (s->type.t == VT_UNION)
7640 f = NULL;
7641 else
7642 f = f->next;
7643 if (no_oblock && f == NULL)
7644 break;
7647 if (tok == '}')
7648 break;
7649 skip(',');
7652 /* put zeros at the end */
7653 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7654 init_putz(sec, c + len, n*size1 - len);
7655 if (!no_oblock)
7656 skip('}');
7657 /* patch type size if needed, which happens only for array types */
7658 if (n < 0)
7659 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7660 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7661 size1 = 1;
7662 no_oblock = 1;
7663 if ((flags & DIF_FIRST) || tok == '{') {
7664 skip('{');
7665 no_oblock = 0;
7667 s = type->ref;
7668 f = s->next;
7669 n = s->c;
7670 goto do_init_list;
7671 } else if (tok == '{') {
7672 if (flags & DIF_HAVE_ELEM)
7673 skip(';');
7674 next();
7675 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7676 skip('}');
7677 } else if ((flags & DIF_SIZE_ONLY)) {
7678 /* If we supported only ISO C we wouldn't have to accept calling
7679 this on anything other than an array if DIF_SIZE_ONLY (and even then
7680 only on the outermost level, so no recursion would be needed),
7681 because initializing a flex array member isn't supported.
7682 But GNU C supports it, so we need to recurse even into
7683 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7684 /* just skip expression */
7685 skip_or_save_block(NULL);
7686 } else {
7687 if (!(flags & DIF_HAVE_ELEM)) {
7688 /* This should happen only when we haven't parsed
7689 the init element above for fear of committing a
7690 string constant to memory too early. */
7691 if (tok != TOK_STR && tok != TOK_LSTR)
7692 expect("string constant");
7693 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7695 init_putv(type, sec, c);
7699 /* parse an initializer for 'type' if 'has_init' is non-zero, and
7700 allocate space in local or global data space ('r' is either
7701 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7702 variable 'v' of scope 'scope' is declared before initializers
7703 are parsed. If 'v' is zero, then a reference to the new object
7704 is put in the value stack. If 'has_init' is 2, a special parsing
7705 is done to handle string constants. */
7706 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7707 int has_init, int v, int scope)
7709 int size, align, addr;
7710 TokenString *init_str = NULL;
7712 Section *sec;
7713 Sym *flexible_array;
7714 Sym *sym = NULL;
7715 int saved_nocode_wanted = nocode_wanted;
7716 #ifdef CONFIG_TCC_BCHECK
7717 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7718 #endif
7720 /* Always allocate static or global variables */
7721 if (v && (r & VT_VALMASK) == VT_CONST)
7722 nocode_wanted |= 0x80000000;
7724 flexible_array = NULL;
7725 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7726 Sym *field = type->ref->next;
7727 if (field) {
7728 while (field->next)
7729 field = field->next;
7730 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7731 flexible_array = field;
7735 size = type_size(type, &align);
7736 /* If unknown size, we must evaluate it before
7737 evaluating initializers because
7738 initializers can generate global data too
7739 (e.g. string pointers or ISO C99 compound
7740 literals). It also simplifies local
7741 initializers handling */
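/* Illustrative example of the two-pass scheme below: for

       int a[] = { 1, 2, 3 };

   the initializer tokens are saved (skip_or_save_block), parsed once with
   DIF_SIZE_ONLY to learn that the array has 3 elements, and then re-parsed
   from the saved token string to emit the actual data.  An initialized
   flexible array member is handled the same way. */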
7742 if (size < 0 || (flexible_array && has_init)) {
7743 if (!has_init)
7744 tcc_error("unknown type size");
7745 /* get all init string */
7746 if (has_init == 2) {
7747 init_str = tok_str_alloc();
7748 /* only get strings */
7749 while (tok == TOK_STR || tok == TOK_LSTR) {
7750 tok_str_add_tok(init_str);
7751 next();
7753 tok_str_add(init_str, -1);
7754 tok_str_add(init_str, 0);
7755 } else {
7756 skip_or_save_block(&init_str);
7758 unget_tok(0);
7760 /* compute size */
7761 begin_macro(init_str, 1);
7762 next();
7763 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7764 /* prepare second initializer parsing */
7765 macro_ptr = init_str->str;
7766 next();
7768 /* if still unknown size, error */
7769 size = type_size(type, &align);
7770 if (size < 0)
7771 tcc_error("unknown type size");
7773 /* If there's a flexible array member and it was used in the
7774 initializer, adjust the size. */
7775 if (flexible_array &&
7776 flexible_array->type.ref->c > 0)
7777 size += flexible_array->type.ref->c
7778 * pointed_size(&flexible_array->type);
7779 /* take into account specified alignment if bigger */
7780 if (ad->a.aligned) {
7781 int speca = 1 << (ad->a.aligned - 1);
7782 if (speca > align)
7783 align = speca;
7784 } else if (ad->a.packed) {
7785 align = 1;
7788 if (!v && NODATA_WANTED)
7789 size = 0, align = 1;
7791 if ((r & VT_VALMASK) == VT_LOCAL) {
7792 sec = NULL;
7793 #ifdef CONFIG_TCC_BCHECK
7794 if (bcheck && v) {
7795 /* add padding between stack variables for bound checking */
7796 loc--;
7798 #endif
7799 loc = (loc - size) & -align;
7800 addr = loc;
7801 #ifdef CONFIG_TCC_BCHECK
7802 if (bcheck && v) {
7803 /* add padding between stack variables for bound checking */
7804 loc--;
7806 #endif
7807 if (v) {
7808 /* local variable */
7809 #ifdef CONFIG_TCC_ASM
7810 if (ad->asm_label) {
7811 int reg = asm_parse_regvar(ad->asm_label);
7812 if (reg >= 0)
7813 r = (r & ~VT_VALMASK) | reg;
7815 #endif
7816 sym = sym_push(v, type, r, addr);
7817 if (ad->cleanup_func) {
7818 Sym *cls = sym_push2(&all_cleanups,
7819 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7820 cls->prev_tok = sym;
7821 cls->next = ad->cleanup_func;
7822 cls->ncl = cur_scope->cl.s;
7823 cur_scope->cl.s = cls;
7826 sym->a = ad->a;
7827 } else {
7828 /* push local reference */
7829 vset(type, r, addr);
7831 } else {
7832 if (v && scope == VT_CONST) {
7833 /* see if the symbol was already defined */
7834 sym = sym_find(v);
7835 if (sym) {
7836 patch_storage(sym, ad, type);
7837 /* we accept several definitions of the same global variable. */
7838 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7839 goto no_alloc;
7843 /* allocate symbol in corresponding section */
7844 sec = ad->section;
7845 if (!sec) {
7846 if (has_init)
7847 sec = data_section;
7848 else if (tcc_state->nocommon)
7849 sec = bss_section;
7852 if (sec) {
7853 addr = section_add(sec, size, align);
7854 #ifdef CONFIG_TCC_BCHECK
7855 /* add padding if bound check */
7856 if (bcheck)
7857 section_add(sec, 1, 1);
7858 #endif
7859 } else {
7860 addr = align; /* SHN_COMMON is special, symbol value is align */
7861 sec = common_section;
7864 if (v) {
7865 if (!sym) {
7866 sym = sym_push(v, type, r | VT_SYM, 0);
7867 patch_storage(sym, ad, NULL);
7869 /* update symbol definition */
7870 put_extern_sym(sym, sec, addr, size);
7871 } else {
7872 /* push global reference */
7873 vpush_ref(type, sec, addr, size);
7874 sym = vtop->sym;
7875 vtop->r |= r;
7878 #ifdef CONFIG_TCC_BCHECK
7879 /* handle bounds now because the symbol must be defined
7880 before the relocation can reference it */
7881 if (bcheck) {
7882 addr_t *bounds_ptr;
7884 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7885 /* then add global bound info */
7886 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7887 bounds_ptr[0] = 0; /* relocated */
7888 bounds_ptr[1] = size;
7890 #endif
7893 if (type->t & VT_VLA) {
7894 int a;
7896 if (NODATA_WANTED)
7897 goto no_alloc;
7899 /* save current stack pointer */
7900 if (root_scope->vla.loc == 0) {
7901 struct scope *v = cur_scope;
7902 gen_vla_sp_save(loc -= PTR_SIZE);
7903 do v->vla.loc = loc; while ((v = v->prev));
7906 vla_runtime_type_size(type, &a);
7907 gen_vla_alloc(type, a);
7908 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7909 /* on _WIN64, because of the function args scratch area, the
7910 result of alloca differs from RSP and is returned in RAX. */
7911 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7912 #endif
7913 gen_vla_sp_save(addr);
7914 cur_scope->vla.loc = addr;
7915 cur_scope->vla.num++;
7916 #ifdef CONFIG_TCC_BCHECK
7917 tcc_state->alloca_vla_used |= bcheck;
7918 #endif
7919 } else if (has_init) {
7920 size_t oldreloc_offset = 0;
7921 if (sec && sec->reloc)
7922 oldreloc_offset = sec->reloc->data_offset;
7923 decl_initializer(type, sec, addr, DIF_FIRST);
7924 if (sec && sec->reloc)
7925 squeeze_multi_relocs(sec, oldreloc_offset);
7926 /* patch flexible array member size back to -1, */
7927 /* for possible subsequent similar declarations */
7928 if (flexible_array)
7929 flexible_array->type.ref->c = -1;
7932 no_alloc:
7933 /* restore parse state if needed */
7934 if (init_str) {
7935 end_macro();
7936 next();
7939 nocode_wanted = saved_nocode_wanted;
7942 /* parse a function defined by symbol 'sym' and generate its code in
7943 'cur_text_section' */
7944 static void gen_function(Sym *sym)
7946 /* Initialize VLA state */
7947 struct scope f = { 0 };
7948 cur_scope = root_scope = &f;
7950 nocode_wanted = 0;
7951 #ifdef CONFIG_TCC_BCHECK
7952 tcc_state->alloca_vla_used = 0;
7953 #endif
7954 ind = cur_text_section->data_offset;
7955 if (sym->a.aligned) {
7956 size_t newoff = section_add(cur_text_section, 0,
7957 1 << (sym->a.aligned - 1));
7958 gen_fill_nops(newoff - ind);
7960 /* NOTE: we patch the symbol size later */
7961 put_extern_sym(sym, cur_text_section, ind, 0);
7962 if (sym->type.ref->f.func_ctor)
7963 add_array (tcc_state, ".init_array", sym->c);
7964 if (sym->type.ref->f.func_dtor)
7965 add_array (tcc_state, ".fini_array", sym->c);
7966 funcname = get_tok_str(sym->v, NULL);
7967 func_ind = ind;
7968 /* put debug symbol */
7969 tcc_debug_funcstart(tcc_state, sym);
7970 /* push a dummy symbol to enable local sym storage */
7971 sym_push2(&local_stack, SYM_FIELD, 0, 0);
7972 local_scope = 1; /* for function parameters */
7973 gfunc_prolog(sym);
7974 local_scope = 0;
7975 rsym = 0;
7976 clear_temp_local_var_list();
7977 block(0);
7978 gsym(rsym);
7979 nocode_wanted = 0;
7980 /* reset local stack */
7981 pop_local_syms(1, &local_stack, NULL, 0,
7982 sym->type.ref->f.func_type == FUNC_ELLIPSIS);
7983 gfunc_epilog();
7984 cur_text_section->data_offset = ind;
7985 local_scope = 0;
7986 label_pop(&global_label_stack, NULL, 0);
7987 sym_pop(&all_cleanups, NULL, 0);
7988 /* patch symbol size */
7989 elfsym(sym)->st_size = ind - func_ind;
7990 /* end of function */
7991 tcc_debug_funcend(tcc_state, ind - func_ind);
7992 /* It's better to crash than to generate wrong code */
7993 cur_text_section = NULL;
7994 funcname = ""; /* for safety */
7995 func_vt.t = VT_VOID; /* for safety */
7996 func_var = 0; /* for safety */
7997 ind = 0; /* for safety */
7998 nocode_wanted = 0x80000000;
7999 check_vstack();
8000 /* do this after funcend debug info */
8001 next();
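/* Illustrative example for the inline machinery: a function such as

       static inline int sq(int x) { return x * x; }

   is not compiled when parsed; decl0() saves its body as a token string
   (see the VT_INLINE case there).  It is compiled here only if it was
   referenced (sym->c set) or lost its internal linkage; otherwise its
   tokens are freed in free_inline_functions(). */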
8004 static void gen_inline_functions(TCCState *s)
8006 Sym *sym;
8007 int inline_generated, i;
8008 struct InlineFunc *fn;
8010 tcc_open_bf(s, ":inline:", 0);
8011 /* iterate while inline functions are referenced */
8012 do {
8013 inline_generated = 0;
8014 for (i = 0; i < s->nb_inline_fns; ++i) {
8015 fn = s->inline_fns[i];
8016 sym = fn->sym;
8017 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8018 /* the function was used or forced (and then not internal):
8019 generate its code and convert it to a normal function */
8020 fn->sym = NULL;
8021 tcc_debug_putfile(s, fn->filename);
8022 begin_macro(fn->func_str, 1);
8023 next();
8024 cur_text_section = text_section;
8025 gen_function(sym);
8026 end_macro();
8028 inline_generated = 1;
8031 } while (inline_generated);
8032 tcc_close();
8035 static void free_inline_functions(TCCState *s)
8037 int i;
8038 /* free tokens of unused inline functions */
8039 for (i = 0; i < s->nb_inline_fns; ++i) {
8040 struct InlineFunc *fn = s->inline_fns[i];
8041 if (fn->sym)
8042 tok_str_free(fn->func_str);
8044 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8047 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8048 if parsing old style parameter decl list (and FUNC_SYM is set then) */
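/* Illustrative example of the old-style prototype path (l == VT_CMP):

       int add(a, b)
           int a, b;
       {
           return a + b;
       }

   after parsing 'int add(a, b)', decl0(VT_CMP, 0, sym) is re-entered to
   read the 'int a, b;' declaration list and patch the parameter types
   (see the 'found:' code further down). */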
8049 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
8051 int v, has_init, r;
8052 CType type, btype;
8053 Sym *sym;
8054 AttributeDef ad, adbase;
8056 while (1) {
8057 if (tok == TOK_STATIC_ASSERT) {
8058 CString error_str;
8059 int c;
8061 next();
8062 skip('(');
8063 c = expr_const();
8065 if (tok == ')') {
8066 if (!c)
8067 tcc_error("_Static_assert fail");
8068 next();
8069 goto static_assert_out;
8072 skip(',');
8073 parse_mult_str(&error_str, "string constant");
8074 if (c == 0)
8075 tcc_error("%s", (char *)error_str.data);
8076 cstr_free(&error_str);
8077 skip(')');
8078 static_assert_out:
8079 skip(';');
8080 continue;
8082 if (!parse_btype(&btype, &adbase)) {
8083 if (is_for_loop_init)
8084 return 0;
8085 /* skip redundant ';' if not in old parameter decl scope */
8086 if (tok == ';' && l != VT_CMP) {
8087 next();
8088 continue;
8090 if (l != VT_CONST)
8091 break;
8092 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8093 /* global asm block */
8094 asm_global_instr();
8095 continue;
8097 if (tok >= TOK_UIDENT) {
8098 /* special test for old K&R protos without explicit int
8099 type. Only accepted when defining global data */
8100 btype.t = VT_INT;
8101 } else {
8102 if (tok != TOK_EOF)
8103 expect("declaration");
8104 break;
8107 if (tok == ';') {
8108 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8109 int v = btype.ref->v;
8110 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8111 tcc_warning("unnamed struct/union that defines no instances");
8112 next();
8113 continue;
8115 if (IS_ENUM(btype.t)) {
8116 next();
8117 continue;
8120 while (1) { /* iterate thru each declaration */
8121 type = btype;
8122 /* If the base type itself was an array type of unspecified
8123 size (like in 'typedef int arr[]; arr x = {1};') then
8124 we will overwrite the unknown size by the real one for
8125 this decl. We need to unshare the ref symbol holding
8126 that size. */
8127 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
8128 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
8130 ad = adbase;
8131 type_decl(&type, &ad, &v, TYPE_DIRECT);
8132 #if 0
8134 char buf[500];
8135 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8136 printf("type = '%s'\n", buf);
8138 #endif
8139 if ((type.t & VT_BTYPE) == VT_FUNC) {
8140 if ((type.t & VT_STATIC) && (l == VT_LOCAL))
8141 tcc_error("function without file scope cannot be static");
8142 /* if old style function prototype, we accept a
8143 declaration list */
8144 sym = type.ref;
8145 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
8146 decl0(VT_CMP, 0, sym);
8147 /* always compile 'extern inline' */
8148 if (type.t & VT_EXTERN)
8149 type.t &= ~VT_INLINE;
8152 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8153 ad.asm_label = asm_label_instr();
8154 /* parse one last attribute list, after asm label */
8155 parse_attribute(&ad);
8156 #if 0
8157 /* gcc does not allow __asm__("label") with function definition,
8158 but why not ... */
8159 if (tok == '{')
8160 expect(";");
8161 #endif
8164 #ifdef TCC_TARGET_PE
8165 if (ad.a.dllimport || ad.a.dllexport) {
8166 if (type.t & VT_STATIC)
8167 tcc_error("cannot have dll linkage with static");
8168 if (type.t & VT_TYPEDEF) {
8169 tcc_warning("'%s' attribute ignored for typedef",
8170 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8171 (ad.a.dllexport = 0, "dllexport"));
8172 } else if (ad.a.dllimport) {
8173 if ((type.t & VT_BTYPE) == VT_FUNC)
8174 ad.a.dllimport = 0;
8175 else
8176 type.t |= VT_EXTERN;
8179 #endif
8180 if (tok == '{') {
8181 if (l != VT_CONST)
8182 tcc_error("cannot use local functions");
8183 if ((type.t & VT_BTYPE) != VT_FUNC)
8184 expect("function definition");
8186 /* reject abstract declarators in function definition
8187 make old style params without decl have int type */
8188 sym = type.ref;
8189 while ((sym = sym->next) != NULL) {
8190 if (!(sym->v & ~SYM_FIELD))
8191 expect("identifier");
8192 if (sym->type.t == VT_VOID)
8193 sym->type = int_type;
8196 /* put function symbol */
8197 type.t &= ~VT_EXTERN;
8198 sym = external_sym(v, &type, 0, &ad);
8199 /* static inline functions are just recorded as a kind
8200 of macro. Their code will be emitted at the end of
8201 the compilation unit only if they are used */
8202 if (sym->type.t & VT_INLINE) {
8203 struct InlineFunc *fn;
8204 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8205 strcpy(fn->filename, file->filename);
8206 fn->sym = sym;
8207 skip_or_save_block(&fn->func_str);
8208 dynarray_add(&tcc_state->inline_fns,
8209 &tcc_state->nb_inline_fns, fn);
8210 } else {
8211 /* compute text section */
8212 cur_text_section = ad.section;
8213 if (!cur_text_section)
8214 cur_text_section = text_section;
8215 gen_function(sym);
8217 break;
8218 } else {
8219 if (l == VT_CMP) {
8220 /* find parameter in function parameter list */
8221 for (sym = func_sym->next; sym; sym = sym->next)
8222 if ((sym->v & ~SYM_FIELD) == v)
8223 goto found;
8224 tcc_error("declaration for parameter '%s' but no such parameter",
8225 get_tok_str(v, NULL));
8226 found:
8227 if (type.t & VT_STORAGE) /* 'register' is okay */
8228 tcc_error("storage class specified for '%s'",
8229 get_tok_str(v, NULL));
8230 if (sym->type.t != VT_VOID)
8231 tcc_error("redefinition of parameter '%s'",
8232 get_tok_str(v, NULL));
8233 convert_parameter_type(&type);
8234 sym->type = type;
8235 } else if (type.t & VT_TYPEDEF) {
8236 /* save typedefed type */
8237 /* XXX: test storage specifiers ? */
8238 sym = sym_find(v);
8239 if (sym && sym->sym_scope == local_scope) {
8240 if (!is_compatible_types(&sym->type, &type)
8241 || !(sym->type.t & VT_TYPEDEF))
8242 tcc_error("incompatible redefinition of '%s'",
8243 get_tok_str(v, NULL));
8244 sym->type = type;
8245 } else {
8246 sym = sym_push(v, &type, 0, 0);
8248 sym->a = ad.a;
8249 sym->f = ad.f;
8250 } else if ((type.t & VT_BTYPE) == VT_VOID
8251 && !(type.t & VT_EXTERN)) {
8252 tcc_error("declaration of void object");
8253 } else {
8254 r = 0;
8255 if ((type.t & VT_BTYPE) == VT_FUNC) {
8256 /* external function definition */
8257 /* specific case for func_call attribute */
8258 type.ref->f = ad.f;
8259 } else if (!(type.t & VT_ARRAY)) {
8260 /* not lvalue if array */
8261 r |= VT_LVAL;
8263 has_init = (tok == '=');
8264 if (has_init && (type.t & VT_VLA))
8265 tcc_error("variable length array cannot be initialized");
8266 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8267 || (type.t & VT_BTYPE) == VT_FUNC
8268 /* as with GCC, uninitialized global arrays with no size
8269 are considered extern: */
8270 || ((type.t & VT_ARRAY) && !has_init
8271 && l == VT_CONST && type.ref->c < 0)
8273 /* external variable or function */
8274 type.t |= VT_EXTERN;
8275 sym = external_sym(v, &type, r, &ad);
8276 if (ad.alias_target) {
8277 ElfSym *esym;
8278 Sym *alias_target;
8279 alias_target = sym_find(ad.alias_target);
8280 esym = elfsym(alias_target);
8281 if (!esym)
8282 tcc_error("unsupported forward __alias__ attribute");
8283 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
8285 } else {
8286 if (type.t & VT_STATIC)
8287 r |= VT_CONST;
8288 else
8289 r |= l;
8290 if (has_init)
8291 next();
8292 else if (l == VT_CONST)
8293 /* uninitialized global variables may be overridden */
8294 type.t |= VT_EXTERN;
8295 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
8298 if (tok != ',') {
8299 if (is_for_loop_init)
8300 return 1;
8301 skip(';');
8302 break;
8304 next();
8308 return 0;
8311 static void decl(int l)
8313 decl0(l, 0, NULL);
8316 /* ------------------------------------------------------------------------- */
8317 #undef gjmp_addr
8318 #undef gjmp
8319 /* ------------------------------------------------------------------------- */