OpenBSD: use portable strtoll instead of strtonum to allow cross-compilation test
[tinycc.git] / tccgen.c
blob 5f83bdc31594978ecde63a82f78d92ed3e316efd
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int in_generic;
48 static int section_sym;
50 ST_DATA SValue *vtop;
51 static SValue _vstack[1 + VSTACK_SIZE];
52 #define vstack (_vstack + 1)
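/* _vstack[0] is a sentinel slot: tccgen_init() sets vtop = vstack - 1, so
   an empty value stack is recognized as vtop == vstack - 1 (check_vstack) */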
54 ST_DATA int const_wanted; /* true if constant wanted */
55 ST_DATA int nocode_wanted; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
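/* rough layout of the bits of 'nocode_wanted' as used in this file:
   - bits 0..15 (unevalmask): nesting of unevaluated sub-expressions
   - 0x20000000: set by CODE_OFF() after unconditional jumps, cleared by
     CODE_ON()/gsym()/gind() when a reachable label is emitted
   - 0x80000000: set while compiling at file scope (see tccgen_compile()),
     so only static data may be output (STATIC_DATA_WANTED) */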
60 /* Automagical code suppression ----> */
61 #define CODE_OFF() (nocode_wanted |= 0x20000000)
62 #define CODE_ON() (nocode_wanted &= ~0x20000000)
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
66 static int gind(void) { CODE_ON(); return ind; }
68 /* Set 'nocode_wanted' after unconditional jumps */
69 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
70 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
74 #define gjmp gjmp_acs
75 /* <---- */
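/* sketch of the usual pattern: 'int t = gjmp(0);' emits a forward jump and
   calls CODE_OFF(); a later 'gsym(t);' patches the jump to the current
   output index 'ind' and calls CODE_ON() so that code generation resumes */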
77 ST_DATA int global_expr;  /* true if compound literals must be allocated globally (used during initializer parsing) */
78 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
79 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
80 ST_DATA int func_vc;
81 static int last_line_num, new_file, func_ind; /* debug info control */
82 ST_DATA const char *funcname;
83 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
84 static CString initstr;
86 #if PTR_SIZE == 4
87 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
88 #define VT_PTRDIFF_T VT_INT
89 #elif LONG_SIZE == 4
90 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_LLONG
92 #else
93 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
95 #endif
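/* i.e. 32-bit pointers: (unsigned) int; 64-bit pointers with a 32-bit
   'long' (e.g. Win64): (unsigned) long long; LP64: the 'long long'
   representation additionally tagged with VT_LONG */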
97 ST_DATA struct switch_t {
98 struct case_t {
99 int64_t v1, v2;
100 int sym;
101 } **p; int n; /* list of case ranges */
102 int def_sym; /* default symbol */
103 int *bsym;
104 struct scope *scope;
105 struct switch_t *prev;
106 SValue sv;
107 } *cur_switch; /* current switch */
109 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
110 /* list of temporary local variables on the stack in the current function */
111 ST_DATA struct temp_local_variable {
112 int location; // offset on the stack (SValue.c.i)
113 short size;
114 short align;
115 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
116 short nb_temp_local_vars;
118 static struct scope {
119 struct scope *prev;
120 struct { int loc, num; } vla;
121 struct { Sym *s; int n; } cl;
122 int *bsym, *csym;
123 Sym *lstk, *llstk;
124 } *cur_scope, *loop_scope, *root_scope;
126 typedef struct {
127 Section *sec;
128 int local_offset;
129 Sym *flex_array_ref;
130 } init_params;
132 /********************************************************/
133 /* stab debug support */
135 static const struct {
136 int type;
137 const char *name;
138 } default_debug[] = {
139 { VT_INT, "int:t1=r1;-2147483648;2147483647;" },
140 { VT_BYTE, "char:t2=r2;0;127;" },
141 #if LONG_SIZE == 4
142 { VT_LONG | VT_INT, "long int:t3=r3;-2147483648;2147483647;" },
143 #else
144 { VT_LLONG | VT_LONG, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
145 #endif
146 { VT_INT | VT_UNSIGNED, "unsigned int:t4=r4;0;037777777777;" },
147 #if LONG_SIZE == 4
148 { VT_LONG | VT_INT | VT_UNSIGNED, "long unsigned int:t5=r5;0;037777777777;" },
149 #else
150 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
151 { VT_LLONG | VT_LONG | VT_UNSIGNED, "long unsigned int:t5=r5;0;01777777777777777777777;" },
152 #endif
153 { VT_QLONG, "__int128:t6=r6;0;-1;" },
154 { VT_QLONG | VT_UNSIGNED, "__int128 unsigned:t7=r7;0;-1;" },
155 { VT_LLONG, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
156 { VT_LLONG | VT_UNSIGNED, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
157 { VT_SHORT, "short int:t10=r10;-32768;32767;" },
158 { VT_SHORT | VT_UNSIGNED, "short unsigned int:t11=r11;0;65535;" },
159 { VT_BYTE | VT_DEFSIGN, "signed char:t12=r12;-128;127;" },
160 { VT_BYTE | VT_DEFSIGN | VT_UNSIGNED, "unsigned char:t13=r13;0;255;" },
161 { VT_FLOAT, "float:t14=r1;4;0;" },
162 { VT_DOUBLE, "double:t15=r1;8;0;" },
163 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
164 { VT_DOUBLE | VT_LONG, "long double:t16=r1;8;0;" },
165 #else
166 { VT_LDOUBLE, "long double:t16=r1;16;0;" },
167 #endif
168 { -1, "_Float32:t17=r1;4;0;" },
169 { -1, "_Float64:t18=r1;8;0;" },
170 { -1, "_Float128:t19=r1;16;0;" },
171 { -1, "_Float32x:t20=r1;8;0;" },
172 { -1, "_Float64x:t21=r1;16;0;" },
173 { -1, "_Decimal32:t22=r1;4;0;" },
174 { -1, "_Decimal64:t23=r1;8;0;" },
175 { -1, "_Decimal128:t24=r1;16;0;" },
176 /* if default char is unsigned */
177 { VT_BYTE | VT_UNSIGNED, "unsigned char:t25=r25;0;255;" },
178 /* boolean type */
179 { VT_BOOL, "bool:t26=r26;0;255;" },
180 { VT_VOID, "void:t27=27" },
183 static int debug_next_type;
185 static struct debug_hash {
186 int debug_type;
187 Sym *type;
188 } *debug_hash;
190 static int n_debug_hash;
192 static struct debug_info {
193 int start;
194 int end;
195 int n_sym;
196 struct debug_sym {
197 int type;
198 unsigned long value;
199 char *str;
200 Section *sec;
201 int sym_index;
202 } *sym;
203 struct debug_info *child, *next, *last, *parent;
204 } *debug_info, *debug_info_root;
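/* debug_info nodes form a tree of lexical blocks: tcc_debug_stabn() opens
   a child on N_LBRAC and returns to the parent on N_RBRAC, and
   tcc_debug_finish() emits the collected stabs recursively */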
206 /********************************************************/
207 #if 1
208 #define precedence_parser
209 static void init_prec(void);
210 #endif
211 /********************************************************/
212 #ifndef CONFIG_TCC_ASM
213 ST_FUNC void asm_instr(void)
215 tcc_error("inline asm() not supported");
217 ST_FUNC void asm_global_instr(void)
219 tcc_error("inline asm() not supported");
221 #endif
223 /* ------------------------------------------------------------------------- */
224 static void gen_cast(CType *type);
225 static void gen_cast_s(int t);
226 static inline CType *pointed_type(CType *type);
227 static int is_compatible_types(CType *type1, CType *type2);
228 static int parse_btype(CType *type, AttributeDef *ad);
229 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
230 static void parse_expr_type(CType *type);
231 static void init_putv(init_params *p, CType *type, unsigned long c);
232 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
233 static void block(int is_expr);
234 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
235 static void decl(int l);
236 static int decl0(int l, int is_for_loop_init, Sym *);
237 static void expr_eq(void);
238 static void vla_runtime_type_size(CType *type, int *a);
239 static int is_compatible_unqualified_types(CType *type1, CType *type2);
240 static inline int64_t expr_const64(void);
241 static void vpush64(int ty, unsigned long long v);
242 static void vpush(CType *type);
243 static int gvtst(int inv, int t);
244 static void gen_inline_functions(TCCState *s);
245 static void free_inline_functions(TCCState *s);
246 static void skip_or_save_block(TokenString **str);
247 static void gv_dup(void);
248 static int get_temp_local_var(int size,int align);
249 static void clear_temp_local_var_list();
250 static void cast_error(CType *st, CType *dt);
252 ST_INLN int is_float(int t)
254 int bt = t & VT_BTYPE;
255 return bt == VT_LDOUBLE
256 || bt == VT_DOUBLE
257 || bt == VT_FLOAT
258 || bt == VT_QFLOAT;
261 static inline int is_integer_btype(int bt)
263 return bt == VT_BYTE
264 || bt == VT_BOOL
265 || bt == VT_SHORT
266 || bt == VT_INT
267 || bt == VT_LLONG;
270 static int btype_size(int bt)
272 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
273 bt == VT_SHORT ? 2 :
274 bt == VT_INT ? 4 :
275 bt == VT_LLONG ? 8 :
276 bt == VT_PTR ? PTR_SIZE : 0;
279 /* returns function return register from type */
280 static int R_RET(int t)
282 if (!is_float(t))
283 return REG_IRET;
284 #ifdef TCC_TARGET_X86_64
285 if ((t & VT_BTYPE) == VT_LDOUBLE)
286 return TREG_ST0;
287 #elif defined TCC_TARGET_RISCV64
288 if ((t & VT_BTYPE) == VT_LDOUBLE)
289 return REG_IRET;
290 #endif
291 return REG_FRET;
294 /* returns 2nd function return register, if any */
295 static int R2_RET(int t)
297 t &= VT_BTYPE;
298 #if PTR_SIZE == 4
299 if (t == VT_LLONG)
300 return REG_IRE2;
301 #elif defined TCC_TARGET_X86_64
302 if (t == VT_QLONG)
303 return REG_IRE2;
304 if (t == VT_QFLOAT)
305 return REG_FRE2;
306 #elif defined TCC_TARGET_RISCV64
307 if (t == VT_LDOUBLE)
308 return REG_IRE2;
309 #endif
310 return VT_CONST;
313 /* returns true for two-word types */
314 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
316 /* put function return registers to stack value */
317 static void PUT_R_RET(SValue *sv, int t)
319 sv->r = R_RET(t), sv->r2 = R2_RET(t);
322 /* returns function return register class for type t */
323 static int RC_RET(int t)
325 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
328 /* returns generic register class for type t */
329 static int RC_TYPE(int t)
331 if (!is_float(t))
332 return RC_INT;
333 #ifdef TCC_TARGET_X86_64
334 if ((t & VT_BTYPE) == VT_LDOUBLE)
335 return RC_ST0;
336 if ((t & VT_BTYPE) == VT_QFLOAT)
337 return RC_FRET;
338 #elif defined TCC_TARGET_RISCV64
339 if ((t & VT_BTYPE) == VT_LDOUBLE)
340 return RC_INT;
341 #endif
342 return RC_FLOAT;
345 /* returns 2nd register class corresponding to t and rc */
346 static int RC2_TYPE(int t, int rc)
348 if (!USING_TWO_WORDS(t))
349 return 0;
350 #ifdef RC_IRE2
351 if (rc == RC_IRET)
352 return RC_IRE2;
353 #endif
354 #ifdef RC_FRE2
355 if (rc == RC_FRET)
356 return RC_FRE2;
357 #endif
358 if (rc & RC_FLOAT)
359 return RC_FLOAT;
360 return RC_INT;
363 /* we use our own 'finite' function to avoid potential problems with
364 non-standard math libs */
365 /* XXX: endianness dependent */
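/* (on a little-endian host p[1] is the high word of the double; after
   OR-ing in the sign and mantissa bits, adding 1 propagates a carry into
   bit 31 only when the exponent is all ones, so the shifted result is 0
   exactly for infinities and NaNs and 1 for finite values) */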
366 ST_FUNC int ieee_finite(double d)
368 int p[4];
369 memcpy(p, &d, sizeof(double));
370 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
373 /* compiling intel long double natively */
374 #if (defined __i386__ || defined __x86_64__) \
375 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
376 # define TCC_IS_NATIVE_387
377 #endif
379 ST_FUNC void test_lvalue(void)
381 if (!(vtop->r & VT_LVAL))
382 expect("lvalue");
385 ST_FUNC void check_vstack(void)
387 if (vtop != vstack - 1)
388 tcc_error("internal compiler error: vstack leak (%d)",
389 (int)(vtop - vstack + 1));
392 /* ------------------------------------------------------------------------- */
393 /* vstack debugging aid */
395 #if 0
396 void pv (const char *lbl, int a, int b)
398 int i;
399 for (i = a; i < a + b; ++i) {
400 SValue *p = &vtop[-i];
401 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
402 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
405 #endif
407 /* ------------------------------------------------------------------------- */
408 /* start of translation unit info */
409 ST_FUNC void tcc_debug_start(TCCState *s1)
411 if (s1->do_debug) {
412 int i;
413 char buf[512];
415 /* file info: full path + filename */
416 section_sym = put_elf_sym(symtab_section, 0, 0,
417 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
418 text_section->sh_num, NULL);
419 getcwd(buf, sizeof(buf));
420 #ifdef _WIN32
421 normalize_slashes(buf);
422 #endif
423 pstrcat(buf, sizeof(buf), "/");
424 put_stabs_r(s1, buf, N_SO, 0, 0,
425 text_section->data_offset, text_section, section_sym);
426 put_stabs_r(s1, file->prev ? file->prev->filename : file->filename,
427 N_SO, 0, 0,
428 text_section->data_offset, text_section, section_sym);
429 for (i = 0; i < sizeof (default_debug) / sizeof (default_debug[0]); i++)
430 put_stabs(s1, default_debug[i].name, N_LSYM, 0, 0, 0);
432 new_file = last_line_num = 0;
433 func_ind = -1;
434 debug_next_type = sizeof(default_debug) / sizeof(default_debug[0]);
435 debug_hash = NULL;
436 n_debug_hash = 0;
438 /* we're currently 'including' the <command line> */
439 tcc_debug_bincl(s1);
442 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
443 symbols can be safely used */
444 put_elf_sym(symtab_section, 0, 0,
445 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
446 SHN_ABS, file->filename);
449 static void tcc_debug_stabs (TCCState *s1, const char *str, int type, unsigned long value,
450 Section *sec, int sym_index)
452 struct debug_sym *s;
454 if (debug_info) {
455 debug_info->sym =
456 (struct debug_sym *)tcc_realloc (debug_info->sym,
457 sizeof(struct debug_sym) *
458 (debug_info->n_sym + 1));
459 s = debug_info->sym + debug_info->n_sym++;
460 s->type = type;
461 s->value = value;
462 s->str = tcc_strdup(str);
463 s->sec = sec;
464 s->sym_index = sym_index;
466 else if (sec)
467 put_stabs_r (s1, str, type, 0, 0, value, sec, sym_index);
468 else
469 put_stabs (s1, str, type, 0, 0, value);
472 static void tcc_debug_stabn(int type, int value)
474 if (type == N_LBRAC) {
475 struct debug_info *info =
476 (struct debug_info *) tcc_mallocz(sizeof (*info));
478 info->start = value;
479 info->parent = debug_info;
480 if (debug_info) {
481 if (debug_info->child) {
482 if (debug_info->child->last)
483 debug_info->child->last->next = info;
484 else
485 debug_info->child->next = info;
486 debug_info->child->last = info;
488 else
489 debug_info->child = info;
491 else
492 debug_info_root = info;
493 debug_info = info;
495 else {
496 debug_info->end = value;
497 debug_info = debug_info->parent;
501 static void tcc_get_debug_info(TCCState *s1, Sym *s, CString *result)
503 int type;
504 int n = 0;
505 int debug_type = -1;
506 Sym *t = s;
507 CString str;
509 for (;;) {
510 type = t->type.t & ~(VT_STORAGE | VT_CONSTANT | VT_VOLATILE);
511 if ((type & VT_BTYPE) != VT_BYTE)
512 type &= ~VT_DEFSIGN;
513 if (type == VT_PTR || type == (VT_PTR | VT_ARRAY))
514 n++, t = t->type.ref;
515 else
516 break;
518 if ((type & VT_BTYPE) == VT_STRUCT) {
519 int i;
521 t = t->type.ref;
522 for (i = 0; i < n_debug_hash; i++) {
523 if (t == debug_hash[i].type) {
524 debug_type = debug_hash[i].debug_type;
525 break;
528 if (debug_type == -1) {
529 debug_type = ++debug_next_type;
530 debug_hash = (struct debug_hash *)
531 tcc_realloc (debug_hash,
532 (n_debug_hash + 1) * sizeof(*debug_hash));
533 debug_hash[n_debug_hash].debug_type = debug_type;
534 debug_hash[n_debug_hash++].type = t;
535 cstr_new (&str);
536 cstr_printf (&str, "%s:T%d=%c%d",
537 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
538 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
539 debug_type,
540 IS_UNION (t->type.t) ? 'u' : 's',
541 t->c);
542 while (t->next) {
543 int pos, size, align;
545 t = t->next;
546 cstr_printf (&str, "%s:",
547 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
548 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
549 tcc_get_debug_info (s1, t, &str);
550 if (t->type.t & VT_BITFIELD) {
551 pos = t->c * 8 + BIT_POS(t->type.t);
552 size = BIT_SIZE(t->type.t);
554 else {
555 pos = t->c * 8;
556 size = type_size(&t->type, &align) * 8;
558 cstr_printf (&str, ",%d,%d;", pos, size);
560 cstr_printf (&str, ";");
561 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
562 cstr_free (&str);
565 else if (IS_ENUM(type)) {
566 Sym *e = t = t->type.ref;
568 debug_type = ++debug_next_type;
569 cstr_new (&str);
570 cstr_printf (&str, "%s:T%d=e",
571 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
572 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
573 debug_type);
574 while (t->next) {
575 t = t->next;
576 cstr_printf (&str, "%s:",
577 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
578 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
579 cstr_printf (&str, e->type.t & VT_UNSIGNED ? "%u," : "%d,",
580 (int)t->enum_val);
582 cstr_printf (&str, ";");
583 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
584 cstr_free (&str);
586 else if ((type & VT_BTYPE) != VT_FUNC) {
587 type &= ~VT_STRUCT_MASK;
588 for (debug_type = 1;
589 debug_type <= sizeof(default_debug) / sizeof(default_debug[0]);
590 debug_type++)
591 if (default_debug[debug_type - 1].type == type)
592 break;
593 if (debug_type > sizeof(default_debug) / sizeof(default_debug[0]))
594 return;
596 if (n > 0)
597 cstr_printf (result, "%d=", ++debug_next_type);
598 t = s;
599 for (;;) {
600 type = t->type.t & ~(VT_STORAGE | VT_CONSTANT | VT_VOLATILE);
601 if ((type & VT_BTYPE) != VT_BYTE)
602 type &= ~VT_DEFSIGN;
603 if (type == VT_PTR)
604 cstr_printf (result, "%d=*", ++debug_next_type);
605 else if (type == (VT_PTR | VT_ARRAY))
606 cstr_printf (result, "%d=ar1;0;%d;",
607 ++debug_next_type, t->type.ref->c - 1);
608 else if (type == VT_FUNC) {
609 cstr_printf (result, "%d=f", ++debug_next_type);
610 tcc_get_debug_info (s1, t->type.ref, result);
611 return;
613 else
614 break;
615 t = t->type.ref;
617 cstr_printf (result, "%d", debug_type);
620 static void tcc_debug_finish (TCCState *s1, struct debug_info *cur)
622 while (cur) {
623 int i;
624 struct debug_info *next = cur->next;
626 for (i = 0; i < cur->n_sym; i++) {
627 struct debug_sym *s = &cur->sym[i];
629 if (s->sec)
630 put_stabs_r(s1, s->str, s->type, 0, 0, s->value,
631 s->sec, s->sym_index);
632 else
633 put_stabs(s1, s->str, s->type, 0, 0, s->value);
634 tcc_free (s->str);
636 tcc_free (cur->sym);
637 put_stabn(s1, N_LBRAC, 0, 0, cur->start);
638 tcc_debug_finish (s1, cur->child);
639 put_stabn(s1, N_RBRAC, 0, 0, cur->end);
640 tcc_free (cur);
641 cur = next;
645 static void tcc_add_debug_info(TCCState *s1, int param, Sym *s, Sym *e)
647 CString debug_str;
648 cstr_new (&debug_str);
649 for (; s != e; s = s->prev) {
650 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
651 continue;
652 cstr_reset (&debug_str);
653 cstr_printf (&debug_str, "%s:%s", get_tok_str(s->v, NULL), param ? "p" : "");
654 tcc_get_debug_info(s1, s, &debug_str);
655 tcc_debug_stabs(s1, debug_str.data, param ? N_PSYM : N_LSYM, s->c, NULL, 0);
657 cstr_free (&debug_str);
660 static void tcc_debug_extern_sym(TCCState *s1, Sym *sym, int sh_num, int sym_bind)
662 Section *s = s1->sections[sh_num];
663 CString str;
665 cstr_new (&str);
666 cstr_printf (&str, "%s:%c",
667 get_tok_str(sym->v, NULL),
668 sym_bind == STB_GLOBAL ? 'G' : local_scope ? 'V' : 'S'
670 tcc_get_debug_info(s1, sym, &str);
671 if (sym_bind == STB_GLOBAL)
672 tcc_debug_stabs(s1, str.data, N_GSYM, 0, NULL, 0);
673 else
674 tcc_debug_stabs(s1, str.data,
675 (sym->type.t & VT_STATIC) && data_section == s
676 ? N_STSYM : N_LCSYM, 0, s, sym->c);
677 cstr_free (&str);
680 static void tcc_debug_typedef(TCCState *s1, Sym *sym)
682 CString str;
684 cstr_new (&str);
685 cstr_printf (&str, "%s:t",
686 (sym->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
687 ? "" : get_tok_str(sym->v & ~SYM_FIELD, NULL));
688 tcc_get_debug_info(s1, sym, &str);
689 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
690 cstr_free (&str);
693 /* put end of translation unit info */
694 ST_FUNC void tcc_debug_end(TCCState *s1)
696 if (!s1->do_debug)
697 return;
698 put_stabs_r(s1, NULL, N_SO, 0, 0,
699 text_section->data_offset, text_section, section_sym);
700 tcc_free(debug_hash);
703 static BufferedFile* put_new_file(TCCState *s1)
705 BufferedFile *f = file;
706 /* use upper file if from inline ":asm:" */
707 if (f->filename[0] == ':')
708 f = f->prev;
709 if (f && new_file) {
710 put_stabs_r(s1, f->filename, N_SOL, 0, 0, ind, text_section, section_sym);
711 new_file = last_line_num = 0;
713 return f;
716 /* generate line number info */
717 ST_FUNC void tcc_debug_line(TCCState *s1)
719 BufferedFile *f;
720 if (!s1->do_debug
721 || cur_text_section != text_section
722 || !(f = put_new_file(s1))
723 || last_line_num == f->line_num)
724 return;
725 if (func_ind != -1) {
726 put_stabn(s1, N_SLINE, 0, f->line_num, ind - func_ind);
727 } else {
728 /* from tcc_assemble */
729 put_stabs_r(s1, NULL, N_SLINE, 0, f->line_num, ind, text_section, section_sym);
731 last_line_num = f->line_num;
734 /* put function symbol */
735 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
737 CString debug_str;
738 BufferedFile *f;
739 if (!s1->do_debug)
740 return;
741 debug_info_root = NULL;
742 debug_info = NULL;
743 tcc_debug_stabn(N_LBRAC, ind - func_ind);
744 if (!(f = put_new_file(s1)))
745 return;
746 cstr_new (&debug_str);
747 cstr_printf(&debug_str, "%s:%c", funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
748 tcc_get_debug_info(s1, sym->type.ref, &debug_str);
749 put_stabs_r(s1, debug_str.data, N_FUN, 0, f->line_num, 0, cur_text_section, sym->c);
750 cstr_free (&debug_str);
752 tcc_debug_line(s1);
755 /* put function size */
756 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
758 if (!s1->do_debug)
759 return;
760 tcc_debug_stabn(N_RBRAC, size);
761 tcc_debug_finish (s1, debug_info_root);
764 /* put alternative filename */
765 ST_FUNC void tcc_debug_putfile(TCCState *s1, const char *filename)
767 if (0 == strcmp(file->filename, filename))
768 return;
769 pstrcpy(file->filename, sizeof(file->filename), filename);
770 new_file = 1;
773 /* beginning of #include */
774 ST_FUNC void tcc_debug_bincl(TCCState *s1)
776 if (!s1->do_debug)
777 return;
778 put_stabs(s1, file->filename, N_BINCL, 0, 0, 0);
779 new_file = 1;
782 /* end of #include */
783 ST_FUNC void tcc_debug_eincl(TCCState *s1)
785 if (!s1->do_debug)
786 return;
787 put_stabn(s1, N_EINCL, 0, 0, 0);
788 new_file = 1;
791 /* ------------------------------------------------------------------------- */
792 /* initialize vstack and types. This must also be done for tcc -E */
793 ST_FUNC void tccgen_init(TCCState *s1)
795 vtop = vstack - 1;
796 memset(vtop, 0, sizeof *vtop);
798 /* define some often used types */
799 int_type.t = VT_INT;
801 char_type.t = VT_BYTE;
802 if (s1->char_is_unsigned)
803 char_type.t |= VT_UNSIGNED;
804 char_pointer_type = char_type;
805 mk_pointer(&char_pointer_type);
807 func_old_type.t = VT_FUNC;
808 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
809 func_old_type.ref->f.func_call = FUNC_CDECL;
810 func_old_type.ref->f.func_type = FUNC_OLD;
811 #ifdef precedence_parser
812 init_prec();
813 #endif
814 cstr_new(&initstr);
817 ST_FUNC int tccgen_compile(TCCState *s1)
819 cur_text_section = NULL;
820 funcname = "";
821 anon_sym = SYM_FIRST_ANOM;
822 section_sym = 0;
823 const_wanted = 0;
824 nocode_wanted = 0x80000000;
825 local_scope = 0;
827 tcc_debug_start(s1);
828 #ifdef TCC_TARGET_ARM
829 arm_init(s1);
830 #endif
831 #ifdef INC_DEBUG
832 printf("%s: **** new file\n", file->filename);
833 #endif
834 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
835 next();
836 decl(VT_CONST);
837 gen_inline_functions(s1);
838 check_vstack();
839 /* end of translation unit info */
840 tcc_debug_end(s1);
841 return 0;
844 ST_FUNC void tccgen_finish(TCCState *s1)
846 cstr_free(&initstr);
847 free_inline_functions(s1);
848 sym_pop(&global_stack, NULL, 0);
849 sym_pop(&local_stack, NULL, 0);
850 /* free preprocessor macros */
851 free_defines(NULL);
852 /* free sym_pools */
853 dynarray_reset(&sym_pools, &nb_sym_pools);
854 sym_free_first = NULL;
857 /* ------------------------------------------------------------------------- */
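/* return the ELF symbol associated with 's', or NULL if none has been
   allocated yet (s->c is its index in symtab_section) */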
858 ST_FUNC ElfSym *elfsym(Sym *s)
860 if (!s || !s->c)
861 return NULL;
862 return &((ElfSym *)symtab_section->data)[s->c];
865 /* apply storage attributes to Elf symbol */
866 ST_FUNC void update_storage(Sym *sym)
868 ElfSym *esym;
869 int sym_bind, old_sym_bind;
871 esym = elfsym(sym);
872 if (!esym)
873 return;
875 if (sym->a.visibility)
876 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
877 | sym->a.visibility;
879 if (sym->type.t & (VT_STATIC | VT_INLINE))
880 sym_bind = STB_LOCAL;
881 else if (sym->a.weak)
882 sym_bind = STB_WEAK;
883 else
884 sym_bind = STB_GLOBAL;
885 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
886 if (sym_bind != old_sym_bind) {
887 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
890 #ifdef TCC_TARGET_PE
891 if (sym->a.dllimport)
892 esym->st_other |= ST_PE_IMPORT;
893 if (sym->a.dllexport)
894 esym->st_other |= ST_PE_EXPORT;
895 #endif
897 #if 0
898 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
899 get_tok_str(sym->v, NULL),
900 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
901 sym->a.visibility,
902 sym->a.dllexport,
903 sym->a.dllimport
905 #endif
908 /* ------------------------------------------------------------------------- */
909 /* update sym->c so that it points to an external symbol in section
910 'section' with value 'value' */
912 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
913 addr_t value, unsigned long size,
914 int can_add_underscore)
916 int sym_type, sym_bind, info, other, t;
917 ElfSym *esym;
918 const char *name;
919 char buf1[256];
921 if (!sym->c) {
922 name = get_tok_str(sym->v, NULL);
923 t = sym->type.t;
924 if ((t & VT_BTYPE) == VT_FUNC) {
925 sym_type = STT_FUNC;
926 } else if ((t & VT_BTYPE) == VT_VOID) {
927 sym_type = STT_NOTYPE;
928 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
929 sym_type = STT_FUNC;
930 } else {
931 sym_type = STT_OBJECT;
933 if (t & (VT_STATIC | VT_INLINE))
934 sym_bind = STB_LOCAL;
935 else
936 sym_bind = STB_GLOBAL;
937 other = 0;
939 #ifdef TCC_TARGET_PE
940 if (sym_type == STT_FUNC && sym->type.ref) {
941 Sym *ref = sym->type.ref;
942 if (ref->a.nodecorate) {
943 can_add_underscore = 0;
945 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
946 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
947 name = buf1;
948 other |= ST_PE_STDCALL;
949 can_add_underscore = 0;
952 #endif
954 if (sym->asm_label) {
955 name = get_tok_str(sym->asm_label, NULL);
956 can_add_underscore = 0;
959 if (tcc_state->leading_underscore && can_add_underscore) {
960 buf1[0] = '_';
961 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
962 name = buf1;
965 info = ELFW(ST_INFO)(sym_bind, sym_type);
966 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
968 if (tcc_state->do_debug
969 && sym_type != STT_FUNC
970 && sym->v < SYM_FIRST_ANOM)
971 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind);
973 } else {
974 esym = elfsym(sym);
975 esym->st_value = value;
976 esym->st_size = size;
977 esym->st_shndx = sh_num;
979 update_storage(sym);
982 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
983 addr_t value, unsigned long size)
985 int sh_num = section ? section->sh_num : SHN_UNDEF;
986 put_extern_sym2(sym, sh_num, value, size, 1);
989 /* add a new relocation entry to symbol 'sym' in section 's' */
990 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
991 addr_t addend)
993 int c = 0;
995 if (nocode_wanted && s == cur_text_section)
996 return;
998 if (sym) {
999 if (0 == sym->c)
1000 put_extern_sym(sym, NULL, 0, 0);
1001 c = sym->c;
1004 /* now we can add ELF relocation info */
1005 put_elf_reloca(symtab_section, s, offset, type, c, addend);
1008 #if PTR_SIZE == 4
1009 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
1011 greloca(s, sym, offset, type, 0);
1013 #endif
1015 /* ------------------------------------------------------------------------- */
1016 /* symbol allocator */
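/* allocate a new pool of SYM_POOL_NB symbols, thread them onto the free
   list and return the head (used by sym_malloc() when the list is empty) */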
1017 static Sym *__sym_malloc(void)
1019 Sym *sym_pool, *sym, *last_sym;
1020 int i;
1022 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
1023 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
1025 last_sym = sym_free_first;
1026 sym = sym_pool;
1027 for(i = 0; i < SYM_POOL_NB; i++) {
1028 sym->next = last_sym;
1029 last_sym = sym;
1030 sym++;
1032 sym_free_first = last_sym;
1033 return last_sym;
1036 static inline Sym *sym_malloc(void)
1038 Sym *sym;
1039 #ifndef SYM_DEBUG
1040 sym = sym_free_first;
1041 if (!sym)
1042 sym = __sym_malloc();
1043 sym_free_first = sym->next;
1044 return sym;
1045 #else
1046 sym = tcc_malloc(sizeof(Sym));
1047 return sym;
1048 #endif
1051 ST_INLN void sym_free(Sym *sym)
1053 #ifndef SYM_DEBUG
1054 sym->next = sym_free_first;
1055 sym_free_first = sym;
1056 #else
1057 tcc_free(sym);
1058 #endif
1061 /* push, without hashing */
1062 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
1064 Sym *s;
1066 s = sym_malloc();
1067 memset(s, 0, sizeof *s);
1068 s->v = v;
1069 s->type.t = t;
1070 s->c = c;
1071 /* add in stack */
1072 s->prev = *ps;
1073 *ps = s;
1074 return s;
1077 /* find a symbol and return its associated structure. 's' is the top
1078 of the symbol stack */
1079 ST_FUNC Sym *sym_find2(Sym *s, int v)
1081 while (s) {
1082 if (s->v == v)
1083 return s;
1084 else if (s->v == -1)
1085 return NULL;
1086 s = s->prev;
1088 return NULL;
1091 /* structure lookup */
1092 ST_INLN Sym *struct_find(int v)
1094 v -= TOK_IDENT;
1095 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1096 return NULL;
1097 return table_ident[v]->sym_struct;
1100 /* find an identifier */
1101 ST_INLN Sym *sym_find(int v)
1103 v -= TOK_IDENT;
1104 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1105 return NULL;
1106 return table_ident[v]->sym_identifier;
1109 static int sym_scope(Sym *s)
1111 if (IS_ENUM_VAL (s->type.t))
1112 return s->type.ref->sym_scope;
1113 else
1114 return s->sym_scope;
1117 /* push a given symbol on the symbol stack */
1118 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
1120 Sym *s, **ps;
1121 TokenSym *ts;
1123 if (local_stack)
1124 ps = &local_stack;
1125 else
1126 ps = &global_stack;
1127 s = sym_push2(ps, v, type->t, c);
1128 s->type.ref = type->ref;
1129 s->r = r;
1130 /* don't record fields or anonymous symbols */
1131 /* XXX: simplify */
1132 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
1133 /* record symbol in token array */
1134 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
1135 if (v & SYM_STRUCT)
1136 ps = &ts->sym_struct;
1137 else
1138 ps = &ts->sym_identifier;
1139 s->prev_tok = *ps;
1140 *ps = s;
1141 s->sym_scope = local_scope;
1142 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
1143 tcc_error("redeclaration of '%s'",
1144 get_tok_str(v & ~SYM_STRUCT, NULL));
1146 return s;
1149 /* push a global identifier */
1150 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
1152 Sym *s, **ps;
1153 s = sym_push2(&global_stack, v, t, c);
1154 s->r = VT_CONST | VT_SYM;
1155 /* don't record anonymous symbol */
1156 if (v < SYM_FIRST_ANOM) {
1157 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
1158 /* modify the top most local identifier, so that sym_identifier will
1159 point to 's' when popped; happens when called from inline asm */
1160 while (*ps != NULL && (*ps)->sym_scope)
1161 ps = &(*ps)->prev_tok;
1162 s->prev_tok = *ps;
1163 *ps = s;
1165 return s;
1168 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1169 pop them yet from the list, but do remove them from the token array. */
1170 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
1172 Sym *s, *ss, **ps;
1173 TokenSym *ts;
1174 int v;
1176 s = *ptop;
1177 while(s != b) {
1178 ss = s->prev;
1179 v = s->v;
1180 /* remove symbol in token array */
1181 /* XXX: simplify */
1182 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
1183 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
1184 if (v & SYM_STRUCT)
1185 ps = &ts->sym_struct;
1186 else
1187 ps = &ts->sym_identifier;
1188 *ps = s->prev_tok;
1190 if (!keep)
1191 sym_free(s);
1192 s = ss;
1194 if (!keep)
1195 *ptop = b;
1198 /* ------------------------------------------------------------------------- */
1199 static void vcheck_cmp(void)
1201 /* cannot leave the cpu flags set if other instructions are generated. Also
1202 avoid leaving VT_JMP anywhere except on the top of the stack
1203 because it would complicate the code generator.
1205 Don't do this when nocode_wanted. vtop might come from
1206 !nocode_wanted regions (see 88_codeopt.c) and transforming
1207 it to a register without actually generating code is wrong
1208 as their value might still be used for real. All values
1209 we push under nocode_wanted will eventually be popped
1210 again, so that the VT_CMP/VT_JMP value will be in vtop
1211 when code is unsuppressed again. */
1213 if (vtop->r == VT_CMP && !nocode_wanted)
1214 gv(RC_INT);
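/* push a new value on the value stack with type 'type', register/flags 'r'
   and constant 'vc'; r2 is cleared and no symbol is attached */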
1217 static void vsetc(CType *type, int r, CValue *vc)
1219 if (vtop >= vstack + (VSTACK_SIZE - 1))
1220 tcc_error("memory full (vstack)");
1221 vcheck_cmp();
1222 vtop++;
1223 vtop->type = *type;
1224 vtop->r = r;
1225 vtop->r2 = VT_CONST;
1226 vtop->c = *vc;
1227 vtop->sym = NULL;
1230 ST_FUNC void vswap(void)
1232 SValue tmp;
1234 vcheck_cmp();
1235 tmp = vtop[0];
1236 vtop[0] = vtop[-1];
1237 vtop[-1] = tmp;
1240 /* pop stack value */
1241 ST_FUNC void vpop(void)
1243 int v;
1244 v = vtop->r & VT_VALMASK;
1245 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1246 /* for x86, we need to pop the FP stack */
1247 if (v == TREG_ST0) {
1248 o(0xd8dd); /* fstp %st(0) */
1249 } else
1250 #endif
1251 if (v == VT_CMP) {
1252 /* need to put correct jump if && or || without test */
1253 gsym(vtop->jtrue);
1254 gsym(vtop->jfalse);
1256 vtop--;
1259 /* push constant of type "type" with useless value */
1260 static void vpush(CType *type)
1262 vset(type, VT_CONST, 0);
1265 /* push arbitrary 64bit constant */
1266 static void vpush64(int ty, unsigned long long v)
1268 CValue cval;
1269 CType ctype;
1270 ctype.t = ty;
1271 ctype.ref = NULL;
1272 cval.i = v;
1273 vsetc(&ctype, VT_CONST, &cval);
1276 /* push integer constant */
1277 ST_FUNC void vpushi(int v)
1279 vpush64(VT_INT, v);
1282 /* push a pointer sized constant */
1283 static void vpushs(addr_t v)
1285 vpush64(VT_SIZE_T, v);
1288 /* push long long constant */
1289 static inline void vpushll(long long v)
1291 vpush64(VT_LLONG, v);
1294 ST_FUNC void vset(CType *type, int r, int v)
1296 CValue cval;
1297 cval.i = v;
1298 vsetc(type, r, &cval);
1301 static void vseti(int r, int v)
1303 CType type;
1304 type.t = VT_INT;
1305 type.ref = NULL;
1306 vset(&type, r, v);
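/* push a copy of the stack value '*v' */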
1309 ST_FUNC void vpushv(SValue *v)
1311 if (vtop >= vstack + (VSTACK_SIZE - 1))
1312 tcc_error("memory full (vstack)");
1313 vtop++;
1314 *vtop = *v;
1317 static void vdup(void)
1319 vpushv(vtop);
1322 /* rotate n first stack elements to the bottom
1323 I1 ... In -> I2 ... In I1 [top is right]
1325 ST_FUNC void vrotb(int n)
1327 int i;
1328 SValue tmp;
1330 vcheck_cmp();
1331 tmp = vtop[-n + 1];
1332 for(i=-n+1;i!=0;i++)
1333 vtop[i] = vtop[i+1];
1334 vtop[0] = tmp;
1337 /* rotate the n elements before entry e towards the top
1338 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1340 ST_FUNC void vrote(SValue *e, int n)
1342 int i;
1343 SValue tmp;
1345 vcheck_cmp();
1346 tmp = *e;
1347 for(i = 0;i < n - 1; i++)
1348 e[-i] = e[-i - 1];
1349 e[-n + 1] = tmp;
1352 /* rotate n first stack elements to the top
1353 I1 ... In -> In I1 ... I(n-1) [top is right]
1355 ST_FUNC void vrott(int n)
1357 vrote(vtop, n);
1360 /* ------------------------------------------------------------------------- */
1361 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1363 /* called from generators to set the result from relational ops */
1364 ST_FUNC void vset_VT_CMP(int op)
1366 vtop->r = VT_CMP;
1367 vtop->cmp_op = op;
1368 vtop->jfalse = 0;
1369 vtop->jtrue = 0;
1372 /* called once before asking generators to load VT_CMP to a register */
1373 static void vset_VT_JMP(void)
1375 int op = vtop->cmp_op;
1377 if (vtop->jtrue || vtop->jfalse) {
1378 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1379 int inv = op & (op < 2); /* small optimization */
1380 vseti(VT_JMP+inv, gvtst(inv, 0));
1381 } else {
1382 /* otherwise convert flags (resp. 0/1) to register */
1383 vtop->c.i = op;
1384 if (op < 2) /* doesn't seem to happen */
1385 vtop->r = VT_CONST;
1389 /* Set CPU Flags, doesn't yet jump */
1390 static void gvtst_set(int inv, int t)
1392 int *p;
1394 if (vtop->r != VT_CMP) {
1395 vpushi(0);
1396 gen_op(TOK_NE);
1397 if (vtop->r != VT_CMP) /* must be VT_CONST then */
1398 vset_VT_CMP(vtop->c.i != 0);
1401 p = inv ? &vtop->jfalse : &vtop->jtrue;
1402 *p = gjmp_append(*p, t);
1405 /* Generate value test
1407 * Generate a test for any value (jump, comparison and integers) */
1408 static int gvtst(int inv, int t)
1410 int op, x, u;
1412 gvtst_set(inv, t);
1413 t = vtop->jtrue, u = vtop->jfalse;
1414 if (inv)
1415 x = u, u = t, t = x;
1416 op = vtop->cmp_op;
1418 /* jump to the wanted target */
1419 if (op > 1)
1420 t = gjmp_cond(op ^ inv, t);
1421 else if (op != inv)
1422 t = gjmp(t);
1423 /* resolve complementary jumps to here */
1424 gsym(u);
1426 vtop--;
1427 return t;
1430 /* generate a zero or non-zero test */
1431 static void gen_test_zero(int op)
1433 if (vtop->r == VT_CMP) {
1434 int j;
1435 if (op == TOK_EQ) {
1436 j = vtop->jfalse;
1437 vtop->jfalse = vtop->jtrue;
1438 vtop->jtrue = j;
1439 vtop->cmp_op ^= 1;
1441 } else {
1442 vpushi(0);
1443 gen_op(op);
1447 /* ------------------------------------------------------------------------- */
1448 /* push a symbol value of TYPE */
1449 ST_FUNC void vpushsym(CType *type, Sym *sym)
1451 CValue cval;
1452 cval.i = 0;
1453 vsetc(type, VT_CONST | VT_SYM, &cval);
1454 vtop->sym = sym;
1457 /* Return a static symbol pointing to a section */
1458 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1460 int v;
1461 Sym *sym;
1463 v = anon_sym++;
1464 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1465 sym->type.t |= VT_STATIC;
1466 put_extern_sym(sym, sec, offset, size);
1467 return sym;
1470 /* push a reference to a section offset by adding a dummy symbol */
1471 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1473 vpushsym(type, get_sym_ref(type, sec, offset, size));
1476 /* define a new external reference to a symbol 'v' of type 'u' */
1477 ST_FUNC Sym *external_global_sym(int v, CType *type)
1479 Sym *s;
1481 s = sym_find(v);
1482 if (!s) {
1483 /* push forward reference */
1484 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1485 s->type.ref = type->ref;
1486 } else if (IS_ASM_SYM(s)) {
1487 s->type.t = type->t | (s->type.t & VT_EXTERN);
1488 s->type.ref = type->ref;
1489 update_storage(s);
1491 return s;
1494 /* create an external reference with no specific type similar to asm labels.
1495 This avoids type conflicts if the symbol is used from C too */
1496 ST_FUNC Sym *external_helper_sym(int v)
1498 CType ct = { VT_ASM_FUNC, NULL };
1499 return external_global_sym(v, &ct);
1502 /* push a reference to a helper function (such as memmove) */
1503 ST_FUNC void vpush_helper_func(int v)
1505 vpushsym(&func_old_type, external_helper_sym(v));
1508 /* Merge symbol attributes. */
1509 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1511 if (sa1->aligned && !sa->aligned)
1512 sa->aligned = sa1->aligned;
1513 sa->packed |= sa1->packed;
1514 sa->weak |= sa1->weak;
1515 if (sa1->visibility != STV_DEFAULT) {
1516 int vis = sa->visibility;
1517 if (vis == STV_DEFAULT
1518 || vis > sa1->visibility)
1519 vis = sa1->visibility;
1520 sa->visibility = vis;
1522 sa->dllexport |= sa1->dllexport;
1523 sa->nodecorate |= sa1->nodecorate;
1524 sa->dllimport |= sa1->dllimport;
1527 /* Merge function attributes. */
1528 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1530 if (fa1->func_call && !fa->func_call)
1531 fa->func_call = fa1->func_call;
1532 if (fa1->func_type && !fa->func_type)
1533 fa->func_type = fa1->func_type;
1534 if (fa1->func_args && !fa->func_args)
1535 fa->func_args = fa1->func_args;
1536 if (fa1->func_noreturn)
1537 fa->func_noreturn = 1;
1538 if (fa1->func_ctor)
1539 fa->func_ctor = 1;
1540 if (fa1->func_dtor)
1541 fa->func_dtor = 1;
1544 /* Merge attributes. */
1545 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1547 merge_symattr(&ad->a, &ad1->a);
1548 merge_funcattr(&ad->f, &ad1->f);
1550 if (ad1->section)
1551 ad->section = ad1->section;
1552 if (ad1->alias_target)
1553 ad->alias_target = ad1->alias_target;
1554 if (ad1->asm_label)
1555 ad->asm_label = ad1->asm_label;
1556 if (ad1->attr_mode)
1557 ad->attr_mode = ad1->attr_mode;
1560 /* Merge some type attributes. */
1561 static void patch_type(Sym *sym, CType *type)
1563 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1564 if (!(sym->type.t & VT_EXTERN))
1565 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1566 sym->type.t &= ~VT_EXTERN;
1569 if (IS_ASM_SYM(sym)) {
1570 /* stay static if both are static */
1571 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1572 sym->type.ref = type->ref;
1575 if (!is_compatible_types(&sym->type, type)) {
1576 tcc_error("incompatible types for redefinition of '%s'",
1577 get_tok_str(sym->v, NULL));
1579 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1580 int static_proto = sym->type.t & VT_STATIC;
1581 /* warn if static follows non-static function declaration */
1582 if ((type->t & VT_STATIC) && !static_proto
1583 /* XXX this test for inline shouldn't be here. Until we
1584 implement gnu-inline mode again it silences a warning for
1585 mingw caused by our workarounds. */
1586 && !((type->t | sym->type.t) & VT_INLINE))
1587 tcc_warning("static storage ignored for redefinition of '%s'",
1588 get_tok_str(sym->v, NULL));
1590 /* set 'inline' if both agree or if one has static */
1591 if ((type->t | sym->type.t) & VT_INLINE) {
1592 if (!((type->t ^ sym->type.t) & VT_INLINE)
1593 || ((type->t | sym->type.t) & VT_STATIC))
1594 static_proto |= VT_INLINE;
1597 if (0 == (type->t & VT_EXTERN)) {
1598 struct FuncAttr f = sym->type.ref->f;
1599 /* put complete type, use static from prototype */
1600 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1601 sym->type.ref = type->ref;
1602 merge_funcattr(&sym->type.ref->f, &f);
1603 } else {
1604 sym->type.t &= ~VT_INLINE | static_proto;
1607 if (sym->type.ref->f.func_type == FUNC_OLD
1608 && type->ref->f.func_type != FUNC_OLD) {
1609 sym->type.ref = type->ref;
1612 } else {
1613 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1614 /* set array size if it was omitted in extern declaration */
1615 sym->type.ref->c = type->ref->c;
1617 if ((type->t ^ sym->type.t) & VT_STATIC)
1618 tcc_warning("storage mismatch for redefinition of '%s'",
1619 get_tok_str(sym->v, NULL));
1623 /* Merge some storage attributes. */
1624 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1626 if (type)
1627 patch_type(sym, type);
1629 #ifdef TCC_TARGET_PE
1630 if (sym->a.dllimport != ad->a.dllimport)
1631 tcc_error("incompatible dll linkage for redefinition of '%s'",
1632 get_tok_str(sym->v, NULL));
1633 #endif
1634 merge_symattr(&sym->a, &ad->a);
1635 if (ad->asm_label)
1636 sym->asm_label = ad->asm_label;
1637 update_storage(sym);
1640 /* copy sym to other stack */
1641 static Sym *sym_copy(Sym *s0, Sym **ps)
1643 Sym *s;
1644 s = sym_malloc(), *s = *s0;
1645 s->prev = *ps, *ps = s;
1646 if (s->v < SYM_FIRST_ANOM) {
1647 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1648 s->prev_tok = *ps, *ps = s;
1650 return s;
1653 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1654 static void sym_copy_ref(Sym *s, Sym **ps)
1656 int bt = s->type.t & VT_BTYPE;
1657 if (bt == VT_FUNC || bt == VT_PTR) {
1658 Sym **sp = &s->type.ref;
1659 for (s = *sp, *sp = NULL; s; s = s->next) {
1660 Sym *s2 = sym_copy(s, ps);
1661 sp = &(*sp = s2)->next;
1662 sym_copy_ref(s2, ps);
1667 /* define a new external reference to a symbol 'v' */
1668 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1670 Sym *s;
1672 /* look for global symbol */
1673 s = sym_find(v);
1674 while (s && s->sym_scope)
1675 s = s->prev_tok;
1677 if (!s) {
1678 /* push forward reference */
1679 s = global_identifier_push(v, type->t, 0);
1680 s->r |= r;
1681 s->a = ad->a;
1682 s->asm_label = ad->asm_label;
1683 s->type.ref = type->ref;
1684 /* copy type to the global stack */
1685 if (local_stack)
1686 sym_copy_ref(s, &global_stack);
1687 } else {
1688 patch_storage(s, ad, type);
1690 /* push variables on local_stack if any */
1691 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1692 s = sym_copy(s, &local_stack);
1693 return s;
1696 /* save registers up to (vtop - n) stack entry */
1697 ST_FUNC void save_regs(int n)
1699 SValue *p, *p1;
1700 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1701 save_reg(p->r);
1704 /* save r to the memory stack, and mark it as being free */
1705 ST_FUNC void save_reg(int r)
1707 save_reg_upstack(r, 0);
1710 /* save r to the memory stack, and mark it as being free,
1711 if seen up to (vtop - n) stack entry */
1712 ST_FUNC void save_reg_upstack(int r, int n)
1714 int l, size, align, bt;
1715 SValue *p, *p1, sv;
1717 if ((r &= VT_VALMASK) >= VT_CONST)
1718 return;
1719 if (nocode_wanted)
1720 return;
1721 l = 0;
1722 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1723 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1724 /* must save value on stack if not already done */
1725 if (!l) {
1726 bt = p->type.t & VT_BTYPE;
1727 if (bt == VT_VOID)
1728 continue;
1729 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1730 bt = VT_PTR;
1731 sv.type.t = bt;
1732 size = type_size(&sv.type, &align);
1733 l = get_temp_local_var(size,align);
1734 sv.r = VT_LOCAL | VT_LVAL;
1735 sv.c.i = l;
1736 store(p->r & VT_VALMASK, &sv);
1737 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1738 /* x86 specific: need to pop fp register ST0 if saved */
1739 if (r == TREG_ST0) {
1740 o(0xd8dd); /* fstp %st(0) */
1742 #endif
1743 /* special long long case */
1744 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1745 sv.c.i += PTR_SIZE;
1746 store(p->r2, &sv);
1749 /* mark that stack entry as being saved on the stack */
1750 if (p->r & VT_LVAL) {
1751 /* also clear the bounded flag because the
1752 relocation address of the function was stored in
1753 p->c.i */
1754 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1755 } else {
1756 p->r = VT_LVAL | VT_LOCAL;
1758 p->r2 = VT_CONST;
1759 p->c.i = l;
1764 #ifdef TCC_TARGET_ARM
1765 /* find a register of class 'rc2' with at most one reference on stack.
1766 * If none, call get_reg(rc) */
1767 ST_FUNC int get_reg_ex(int rc, int rc2)
1769 int r;
1770 SValue *p;
1772 for(r=0;r<NB_REGS;r++) {
1773 if (reg_classes[r] & rc2) {
1774 int n;
1775 n=0;
1776 for(p = vstack; p <= vtop; p++) {
1777 if ((p->r & VT_VALMASK) == r ||
1778 p->r2 == r)
1779 n++;
1781 if (n <= 1)
1782 return r;
1785 return get_reg(rc);
1787 #endif
1789 /* find a free register of class 'rc'. If none, save one register */
1790 ST_FUNC int get_reg(int rc)
1792 int r;
1793 SValue *p;
1795 /* find a free register */
1796 for(r=0;r<NB_REGS;r++) {
1797 if (reg_classes[r] & rc) {
1798 if (nocode_wanted)
1799 return r;
1800 for(p=vstack;p<=vtop;p++) {
1801 if ((p->r & VT_VALMASK) == r ||
1802 p->r2 == r)
1803 goto notfound;
1805 return r;
1807 notfound: ;
1810 /* no register left : free the first one on the stack (VERY
1811 IMPORTANT to start from the bottom to ensure that we don't
1812 spill registers used in gen_opi()) */
1813 for(p=vstack;p<=vtop;p++) {
1814 /* look at second register (if long long) */
1815 r = p->r2;
1816 if (r < VT_CONST && (reg_classes[r] & rc))
1817 goto save_found;
1818 r = p->r & VT_VALMASK;
1819 if (r < VT_CONST && (reg_classes[r] & rc)) {
1820 save_found:
1821 save_reg(r);
1822 return r;
1825 /* Should never come here */
1826 return -1;
1829 /* find a free temporary local variable matching the requested size and alignment and return its stack offset. If none is free, allocate a new one */
1830 static int get_temp_local_var(int size,int align){
1831 int i;
1832 struct temp_local_variable *temp_var;
1833 int found_var;
1834 SValue *p;
1835 int r;
1836 char free;
1837 char found;
1838 found=0;
1839 for(i=0;i<nb_temp_local_vars;i++){
1840 temp_var=&arr_temp_local_vars[i];
1841 if(temp_var->size<size||align!=temp_var->align){
1842 continue;
1844 /*check if temp_var is free*/
1845 free=1;
1846 for(p=vstack;p<=vtop;p++) {
1847 r=p->r&VT_VALMASK;
1848 if(r==VT_LOCAL||r==VT_LLOCAL){
1849 if(p->c.i==temp_var->location){
1850 free=0;
1851 break;
1855 if(free){
1856 found_var=temp_var->location;
1857 found=1;
1858 break;
1861 if(!found){
1862 loc = (loc - size) & -align;
1863 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
1864 temp_var=&arr_temp_local_vars[i];
1865 temp_var->location=loc;
1866 temp_var->size=size;
1867 temp_var->align=align;
1868 nb_temp_local_vars++;
1870 found_var=loc;
1872 return found_var;
1875 static void clear_temp_local_var_list(){
1876 nb_temp_local_vars=0;
1879 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1880 if needed */
1881 static void move_reg(int r, int s, int t)
1883 SValue sv;
1885 if (r != s) {
1886 save_reg(r);
1887 sv.type.t = t;
1888 sv.type.ref = NULL;
1889 sv.r = s;
1890 sv.c.i = 0;
1891 load(r, &sv);
1895 /* get address of vtop (vtop MUST BE an lvalue) */
1896 ST_FUNC void gaddrof(void)
1898 vtop->r &= ~VT_LVAL;
1899 /* tricky: if saved lvalue, then we can go back to lvalue */
1900 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1901 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
1904 #ifdef CONFIG_TCC_BCHECK
1905 /* generate a bounded pointer addition */
1906 static void gen_bounded_ptr_add(void)
1908 int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
1909 if (save) {
1910 vpushv(&vtop[-1]);
1911 vrott(3);
1913 vpush_helper_func(TOK___bound_ptr_add);
1914 vrott(3);
1915 gfunc_call(2);
1916 vtop -= save;
1917 vpushi(0);
1918 /* returned pointer is in REG_IRET */
1919 vtop->r = REG_IRET | VT_BOUNDED;
1920 if (nocode_wanted)
1921 return;
1922 /* relocation offset of the bounding function call point */
1923 vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
1926 /* patch pointer addition in vtop so that pointer dereferencing is
1927 also tested */
1928 static void gen_bounded_ptr_deref(void)
1930 addr_t func;
1931 int size, align;
1932 ElfW_Rel *rel;
1933 Sym *sym;
1935 if (nocode_wanted)
1936 return;
1938 size = type_size(&vtop->type, &align);
1939 switch(size) {
1940 case 1: func = TOK___bound_ptr_indir1; break;
1941 case 2: func = TOK___bound_ptr_indir2; break;
1942 case 4: func = TOK___bound_ptr_indir4; break;
1943 case 8: func = TOK___bound_ptr_indir8; break;
1944 case 12: func = TOK___bound_ptr_indir12; break;
1945 case 16: func = TOK___bound_ptr_indir16; break;
1946 default:
1947 /* may happen with struct member access */
1948 return;
1950 sym = external_helper_sym(func);
1951 if (!sym->c)
1952 put_extern_sym(sym, NULL, 0, 0);
1953 /* patch relocation */
1954 /* XXX: find a better solution ? */
1955 rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
1956 rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
1959 /* generate lvalue bound code */
1960 static void gbound(void)
1962 CType type1;
1964 vtop->r &= ~VT_MUSTBOUND;
1965 /* if lvalue, then use checking code before dereferencing */
1966 if (vtop->r & VT_LVAL) {
1967 /* if not VT_BOUNDED value, then make one */
1968 if (!(vtop->r & VT_BOUNDED)) {
1969 /* must save type because we must set it to int to get pointer */
1970 type1 = vtop->type;
1971 vtop->type.t = VT_PTR;
1972 gaddrof();
1973 vpushi(0);
1974 gen_bounded_ptr_add();
1975 vtop->r |= VT_LVAL;
1976 vtop->type = type1;
1978 /* then check for dereferencing */
1979 gen_bounded_ptr_deref();
1983 /* we need to call __bound_ptr_add before we start to load function
1984 args into registers */
1985 ST_FUNC void gbound_args(int nb_args)
1987 int i, v;
1988 SValue *sv;
1990 for (i = 1; i <= nb_args; ++i)
1991 if (vtop[1 - i].r & VT_MUSTBOUND) {
1992 vrotb(i);
1993 gbound();
1994 vrott(i);
1997 sv = vtop - nb_args;
1998 if (sv->r & VT_SYM) {
1999 v = sv->sym->v;
2000 if (v == TOK_setjmp
2001 || v == TOK__setjmp
2002 #ifndef TCC_TARGET_PE
2003 || v == TOK_sigsetjmp
2004 || v == TOK___sigsetjmp
2005 #endif
2007 vpush_helper_func(TOK___bound_setjmp);
2008 vpushv(sv + 1);
2009 gfunc_call(1);
2010 func_bound_add_epilog = 1;
2012 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
2013 if (v == TOK_alloca)
2014 func_bound_add_epilog = 1;
2015 #endif
2016 #if TARGETOS_NetBSD
2017 if (v == TOK_longjmp) /* undo rename to __longjmp14 */
2018 sv->sym->asm_label = TOK___bound_longjmp;
2019 #endif
2023 /* Add bounds for local symbols from S to E (via ->prev) */
2024 static void add_local_bounds(Sym *s, Sym *e)
2026 for (; s != e; s = s->prev) {
2027 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
2028 continue;
2029 /* Add arrays/structs/unions because we always take address */
2030 if ((s->type.t & VT_ARRAY)
2031 || (s->type.t & VT_BTYPE) == VT_STRUCT
2032 || s->a.addrtaken) {
2033 /* add local bound info */
2034 int align, size = type_size(&s->type, &align);
2035 addr_t *bounds_ptr = section_ptr_add(lbounds_section,
2036 2 * sizeof(addr_t));
2037 bounds_ptr[0] = s->c;
2038 bounds_ptr[1] = size;
2042 #endif
2044 /* Wrapper around sym_pop, that potentially also registers local bounds. */
2045 static void pop_local_syms(Sym **ptop, Sym *b, int keep, int ellipsis)
2047 #ifdef CONFIG_TCC_BCHECK
2048 if (tcc_state->do_bounds_check && !ellipsis && !keep)
2049 add_local_bounds(*ptop, b);
2050 #endif
2051 if (tcc_state->do_debug)
2052 tcc_add_debug_info (tcc_state, !local_scope, *ptop, b);
2053 sym_pop(ptop, b, keep);
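/* helper for packed bitfield access: advance the address on vtop by 'o'
   bytes and turn it into an unsigned byte lvalue */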
2056 static void incr_bf_adr(int o)
2058 vtop->type = char_pointer_type;
2059 gaddrof();
2060 vpushs(o);
2061 gen_op('+');
2062 vtop->type.t = VT_BYTE | VT_UNSIGNED;
2063 vtop->r |= VT_LVAL;
2066 /* single-byte load mode for packed or otherwise unaligned bitfields */
2067 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
2069 int n, o, bits;
2070 save_reg_upstack(vtop->r, 1);
2071 vpush64(type->t & VT_BTYPE, 0); // B X
2072 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
2073 do {
2074 vswap(); // X B
2075 incr_bf_adr(o);
2076 vdup(); // X B B
2077 n = 8 - bit_pos;
2078 if (n > bit_size)
2079 n = bit_size;
2080 if (bit_pos)
2081 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
2082 if (n < 8)
2083 vpushi((1 << n) - 1), gen_op('&');
2084 gen_cast(type);
2085 if (bits)
2086 vpushi(bits), gen_op(TOK_SHL);
2087 vrotb(3); // B Y X
2088 gen_op('|'); // B X
2089 bits += n, bit_size -= n, o = 1;
2090 } while (bit_size);
2091 vswap(), vpop();
2092 if (!(type->t & VT_UNSIGNED)) {
2093 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
2094 vpushi(n), gen_op(TOK_SHL);
2095 vpushi(n), gen_op(TOK_SAR);
2099 /* single-byte store mode for packed or otherwise unaligned bitfields */
2100 static void store_packed_bf(int bit_pos, int bit_size)
2102 int bits, n, o, m, c;
2104 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2105 vswap(); // X B
2106 save_reg_upstack(vtop->r, 1);
2107 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
2108 do {
2109 incr_bf_adr(o); // X B
2110 vswap(); //B X
2111 c ? vdup() : gv_dup(); // B V X
2112 vrott(3); // X B V
2113 if (bits)
2114 vpushi(bits), gen_op(TOK_SHR);
2115 if (bit_pos)
2116 vpushi(bit_pos), gen_op(TOK_SHL);
2117 n = 8 - bit_pos;
2118 if (n > bit_size)
2119 n = bit_size;
2120 if (n < 8) {
2121 m = ((1 << n) - 1) << bit_pos;
2122 vpushi(m), gen_op('&'); // X B V1
2123 vpushv(vtop-1); // X B V1 B
2124 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
2125 gen_op('&'); // X B V1 B1
2126 gen_op('|'); // X B V2
2128 vdup(), vtop[-1] = vtop[-2]; // X B B V2
2129 vstore(), vpop(); // X B
2130 bits += n, bit_size -= n, bit_pos = 0, o = 1;
2131 } while (bit_size);
2132 vpop(), vpop();
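/* bitfield access helper: returns the auxiliary type stored in
   type.ref->auxtype (0 if there is no ref); VT_STRUCT selects the packed
   single-byte path in gv(), any other value except -1 replaces the basic
   type so the field can be accessed directly */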
2135 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
2137 int t;
2138 if (0 == sv->type.ref)
2139 return 0;
2140 t = sv->type.ref->auxtype;
2141 if (t != -1 && t != VT_STRUCT) {
2142 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
2143 sv->r |= VT_LVAL;
2145 return t;
2148 /* store vtop in a register belonging to class 'rc'. lvalues are
2149 converted to values. Cannot be used if the value cannot be
2150 converted to a register value (such as structures). */
2151 ST_FUNC int gv(int rc)
2153 int r, r2, r_ok, r2_ok, rc2, bt;
2154 int bit_pos, bit_size, size, align;
2156 /* NOTE: get_reg can modify vstack[] */
2157 if (vtop->type.t & VT_BITFIELD) {
2158 CType type;
2160 bit_pos = BIT_POS(vtop->type.t);
2161 bit_size = BIT_SIZE(vtop->type.t);
2162 /* remove bit field info to avoid loops */
2163 vtop->type.t &= ~VT_STRUCT_MASK;
2165 type.ref = NULL;
2166 type.t = vtop->type.t & VT_UNSIGNED;
2167 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
2168 type.t |= VT_UNSIGNED;
2170 r = adjust_bf(vtop, bit_pos, bit_size);
2172 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2173 type.t |= VT_LLONG;
2174 else
2175 type.t |= VT_INT;
2177 if (r == VT_STRUCT) {
2178 load_packed_bf(&type, bit_pos, bit_size);
2179 } else {
2180 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
2181 /* cast to int to propagate signedness in following ops */
2182 gen_cast(&type);
2183 /* generate shifts */
2184 vpushi(bits - (bit_pos + bit_size));
2185 gen_op(TOK_SHL);
2186 vpushi(bits - bit_size);
2187 /* NOTE: transformed to SHR if unsigned */
2188 gen_op(TOK_SAR);
2190 r = gv(rc);
2191 } else {
2192 if (is_float(vtop->type.t) &&
2193 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
2194 /* CPUs usually cannot use float constants, so we store them
2195 generically in data segment */
2196 init_params p = { data_section };
2197 unsigned long offset;
2198 size = type_size(&vtop->type, &align);
2199 if (NODATA_WANTED)
2200 size = 0, align = 1;
2201 offset = section_add(p.sec, size, align);
2202 vpush_ref(&vtop->type, p.sec, offset, size);
2203 vswap();
2204 init_putv(&p, &vtop->type, offset);
2205 vtop->r |= VT_LVAL;
2207 #ifdef CONFIG_TCC_BCHECK
2208 if (vtop->r & VT_MUSTBOUND)
2209 gbound();
2210 #endif
2212 bt = vtop->type.t & VT_BTYPE;
2214 #ifdef TCC_TARGET_RISCV64
2215 /* XXX mega hack */
2216 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
2217 rc = RC_INT;
2218 #endif
2219 rc2 = RC2_TYPE(bt, rc);
2221 /* need to reload if:
2222 - constant
2223 - lvalue (need to dereference pointer)
2224 - already a register, but not in the right class */
2225 r = vtop->r & VT_VALMASK;
2226 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
2227 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
2229 if (!r_ok || !r2_ok) {
2230 if (!r_ok)
2231 r = get_reg(rc);
2232 if (rc2) {
2233 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
2234 int original_type = vtop->type.t;
2236 /* two-register type load:
2237 expand to two words temporarily */
2238 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
2239 /* load constant */
2240 unsigned long long ll = vtop->c.i;
2241 vtop->c.i = ll; /* first word */
2242 load(r, vtop);
2243 vtop->r = r; /* save register value */
2244 vpushi(ll >> 32); /* second word */
2245 } else if (vtop->r & VT_LVAL) {
2246 /* We do not want to modify the long long pointer here,
2247 so we save any other instances further down the stack */
2248 save_reg_upstack(vtop->r, 1);
2249 /* load from memory */
2250 vtop->type.t = load_type;
2251 load(r, vtop);
2252 vdup();
2253 vtop[-1].r = r; /* save register value */
2254 /* increment pointer to get second word */
2255 vtop->type.t = VT_PTRDIFF_T;
2256 gaddrof();
2257 vpushs(PTR_SIZE);
2258 gen_op('+');
2259 vtop->r |= VT_LVAL;
2260 vtop->type.t = load_type;
2261 } else {
2262 /* move registers */
2263 if (!r_ok)
2264 load(r, vtop);
2265 if (r2_ok && vtop->r2 < VT_CONST)
2266 goto done;
2267 vdup();
2268 vtop[-1].r = r; /* save register value */
2269 vtop->r = vtop[-1].r2;
2271 /* Allocate second register. Here we rely on the fact that
2272 get_reg() tries first to free r2 of an SValue. */
2273 r2 = get_reg(rc2);
2274 load(r2, vtop);
2275 vpop();
2276 /* write second register */
2277 vtop->r2 = r2;
2278 done:
2279 vtop->type.t = original_type;
2280 } else {
2281 if (vtop->r == VT_CMP)
2282 vset_VT_JMP();
2283 /* one register type load */
2284 load(r, vtop);
2287 vtop->r = r;
2288 #ifdef TCC_TARGET_C67
2289 /* uses register pairs for doubles */
2290 if (bt == VT_DOUBLE)
2291 vtop->r2 = r+1;
2292 #endif
2294 return r;
2297 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2298 ST_FUNC void gv2(int rc1, int rc2)
2300 /* generate more generic register first. But VT_JMP or VT_CMP
2301 values must be generated first in all cases to avoid possible
2302 reload errors */
2303 if (vtop->r != VT_CMP && rc1 <= rc2) {
2304 vswap();
2305 gv(rc1);
2306 vswap();
2307 gv(rc2);
2308 /* test if reload is needed for first register */
2309 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
2310 vswap();
2311 gv(rc1);
2312 vswap();
2314 } else {
2315 gv(rc2);
2316 vswap();
2317 gv(rc1);
2318 vswap();
2319 /* test if reload is needed for first register */
2320 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
2321 gv(rc2);
2326 #if PTR_SIZE == 4
2327 /* expand 64bit on stack in two ints */
2328 ST_FUNC void lexpand(void)
2330 int u, v;
2331 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
2332 v = vtop->r & (VT_VALMASK | VT_LVAL);
2333 if (v == VT_CONST) {
2334 vdup();
2335 vtop[0].c.i >>= 32;
2336 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
2337 vdup();
2338 vtop[0].c.i += 4;
2339 } else {
2340 gv(RC_INT);
2341 vdup();
2342 vtop[0].r = vtop[-1].r2;
2343 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
2345 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
2347 #endif
2349 #if PTR_SIZE == 4
2350 /* build a long long from two ints */
2351 static void lbuild(int t)
2353 gv2(RC_INT, RC_INT);
2354 vtop[-1].r2 = vtop[0].r;
2355 vtop[-1].type.t = t;
2356 vpop();
2358 #endif
2360 /* convert stack entry to register and duplicate its value in another
2361 register */
2362 static void gv_dup(void)
2364 int t, rc, r;
2366 t = vtop->type.t;
2367 #if PTR_SIZE == 4
2368 if ((t & VT_BTYPE) == VT_LLONG) {
2369 if (t & VT_BITFIELD) {
2370 gv(RC_INT);
2371 t = vtop->type.t;
2373 lexpand();
2374 gv_dup();
2375 vswap();
2376 vrotb(3);
2377 gv_dup();
2378 vrotb(4);
2379 /* stack: H L L1 H1 */
2380 lbuild(t);
2381 vrotb(3);
2382 vrotb(3);
2383 vswap();
2384 lbuild(t);
2385 vswap();
2386 return;
2388 #endif
2389 /* duplicate value */
2390 rc = RC_TYPE(t);
2391 gv(rc);
2392 r = get_reg(rc);
2393 vdup();
2394 load(r, vtop);
2395 vtop->r = r;
2398 #if PTR_SIZE == 4
2399 /* generate CPU independent (unsigned) long long operations */
2400 static void gen_opl(int op)
2402 int t, a, b, op1, c, i;
2403 int func;
2404 unsigned short reg_iret = REG_IRET;
2405 unsigned short reg_lret = REG_IRE2;
2406 SValue tmp;
2408 switch(op) {
2409 case '/':
2410 case TOK_PDIV:
2411 func = TOK___divdi3;
2412 goto gen_func;
2413 case TOK_UDIV:
2414 func = TOK___udivdi3;
2415 goto gen_func;
2416 case '%':
2417 func = TOK___moddi3;
2418 goto gen_mod_func;
2419 case TOK_UMOD:
2420 func = TOK___umoddi3;
2421 gen_mod_func:
2422 #ifdef TCC_ARM_EABI
2423 reg_iret = TREG_R2;
2424 reg_lret = TREG_R3;
2425 #endif
2426 gen_func:
2427 /* call generic long long function */
2428 vpush_helper_func(func);
2429 vrott(3);
2430 gfunc_call(2);
2431 vpushi(0);
2432 vtop->r = reg_iret;
2433 vtop->r2 = reg_lret;
2434 break;
2435 case '^':
2436 case '&':
2437 case '|':
2438 case '*':
2439 case '+':
2440 case '-':
2441 //pv("gen_opl A",0,2);
2442 t = vtop->type.t;
2443 vswap();
2444 lexpand();
2445 vrotb(3);
2446 lexpand();
2447 /* stack: L1 H1 L2 H2 */
2448 tmp = vtop[0];
2449 vtop[0] = vtop[-3];
2450 vtop[-3] = tmp;
2451 tmp = vtop[-2];
2452 vtop[-2] = vtop[-3];
2453 vtop[-3] = tmp;
2454 vswap();
2455 /* stack: H1 H2 L1 L2 */
2456 //pv("gen_opl B",0,4);
2457 if (op == '*') {
2458 vpushv(vtop - 1);
2459 vpushv(vtop - 1);
2460 gen_op(TOK_UMULL);
2461 lexpand();
2462 /* stack: H1 H2 L1 L2 ML MH */
2463 for(i=0;i<4;i++)
2464 vrotb(6);
2465 /* stack: ML MH H1 H2 L1 L2 */
2466 tmp = vtop[0];
2467 vtop[0] = vtop[-2];
2468 vtop[-2] = tmp;
2469 /* stack: ML MH H1 L2 H2 L1 */
2470 gen_op('*');
2471 vrotb(3);
2472 vrotb(3);
2473 gen_op('*');
2474 /* stack: ML MH M1 M2 */
2475 gen_op('+');
2476 gen_op('+');
2477 } else if (op == '+' || op == '-') {
2478 /* XXX: add non carry method too (for MIPS or alpha) */
2479 if (op == '+')
2480 op1 = TOK_ADDC1;
2481 else
2482 op1 = TOK_SUBC1;
2483 gen_op(op1);
2484 /* stack: H1 H2 (L1 op L2) */
2485 vrotb(3);
2486 vrotb(3);
2487 gen_op(op1 + 1); /* TOK_xxxC2 */
2488 } else {
2489 gen_op(op);
2490 /* stack: H1 H2 (L1 op L2) */
2491 vrotb(3);
2492 vrotb(3);
2493 /* stack: (L1 op L2) H1 H2 */
2494 gen_op(op);
2495 /* stack: (L1 op L2) (H1 op H2) */
2497 /* stack: L H */
2498 lbuild(t);
2499 break;
2500 case TOK_SAR:
2501 case TOK_SHR:
2502 case TOK_SHL:
2503 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
2504 t = vtop[-1].type.t;
2505 vswap();
2506 lexpand();
2507 vrotb(3);
2508 /* stack: L H shift */
2509 c = (int)vtop->c.i;
2510 /* constant: simpler */
2511 /* NOTE: all comments below are for SHL. The other cases are
2512 done by swapping words */
2513 vpop();
2514 if (op != TOK_SHL)
2515 vswap();
2516 if (c >= 32) {
2517 /* stack: L H */
2518 vpop();
2519 if (c > 32) {
2520 vpushi(c - 32);
2521 gen_op(op);
2523 if (op != TOK_SAR) {
2524 vpushi(0);
2525 } else {
2526 gv_dup();
2527 vpushi(31);
2528 gen_op(TOK_SAR);
2530 vswap();
2531 } else {
2532 vswap();
2533 gv_dup();
2534 /* stack: H L L */
2535 vpushi(c);
2536 gen_op(op);
2537 vswap();
2538 vpushi(32 - c);
2539 if (op == TOK_SHL)
2540 gen_op(TOK_SHR);
2541 else
2542 gen_op(TOK_SHL);
2543 vrotb(3);
2544 /* stack: L L H */
2545 vpushi(c);
2546 if (op == TOK_SHL)
2547 gen_op(TOK_SHL);
2548 else
2549 gen_op(TOK_SHR);
2550 gen_op('|');
2552 if (op != TOK_SHL)
2553 vswap();
2554 lbuild(t);
2555 } else {
2556 /* XXX: should provide a faster fallback on x86 ? */
2557 switch(op) {
2558 case TOK_SAR:
2559 func = TOK___ashrdi3;
2560 goto gen_func;
2561 case TOK_SHR:
2562 func = TOK___lshrdi3;
2563 goto gen_func;
2564 case TOK_SHL:
2565 func = TOK___ashldi3;
2566 goto gen_func;
2569 break;
2570 default:
2571 /* compare operations */
2572 t = vtop->type.t;
2573 vswap();
2574 lexpand();
2575 vrotb(3);
2576 lexpand();
2577 /* stack: L1 H1 L2 H2 */
2578 tmp = vtop[-1];
2579 vtop[-1] = vtop[-2];
2580 vtop[-2] = tmp;
2581 /* stack: L1 L2 H1 H2 */
2582 save_regs(4);
2583 /* compare high */
2584 op1 = op;
2585 /* when the values are equal, we need to compare the low words. Since
2586 the jump is inverted, we invert the test too. */
2587 if (op1 == TOK_LT)
2588 op1 = TOK_LE;
2589 else if (op1 == TOK_GT)
2590 op1 = TOK_GE;
2591 else if (op1 == TOK_ULT)
2592 op1 = TOK_ULE;
2593 else if (op1 == TOK_UGT)
2594 op1 = TOK_UGE;
2595 a = 0;
2596 b = 0;
2597 gen_op(op1);
2598 if (op == TOK_NE) {
2599 b = gvtst(0, 0);
2600 } else {
2601 a = gvtst(1, 0);
2602 if (op != TOK_EQ) {
2603 /* generate non equal test */
2604 vpushi(0);
2605 vset_VT_CMP(TOK_NE);
2606 b = gvtst(0, 0);
2609 /* compare low. Always unsigned */
2610 op1 = op;
2611 if (op1 == TOK_LT)
2612 op1 = TOK_ULT;
2613 else if (op1 == TOK_LE)
2614 op1 = TOK_ULE;
2615 else if (op1 == TOK_GT)
2616 op1 = TOK_UGT;
2617 else if (op1 == TOK_GE)
2618 op1 = TOK_UGE;
2619 gen_op(op1);
2620 #if 0//def TCC_TARGET_I386
2621 if (op == TOK_NE) { gsym(b); break; }
2622 if (op == TOK_EQ) { gsym(a); break; }
2623 #endif
2624 gvtst_set(1, a);
2625 gvtst_set(0, b);
2626 break;
2629 #endif
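/* Illustrative example (not from the tcc sources): source code that
   exercises gen_opl() on a 32-bit target, where a long long lives in a
   register pair (r holds the low word, r2 the high word):

       unsigned long long f(unsigned long long a, unsigned long long b)
       {
           return (a + b) | (a << 3) | (a / b);
       }

   The '+' is lowered to TOK_ADDC1/TOK_ADDC2 (an add followed by an
   add-with-carry on the high words), the constant shift is split into
   word shifts combined with '|', and the division becomes a call to the
   helper __udivdi3 whose result comes back in REG_IRET/REG_IRE2. */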
2631 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2633 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2634 return (a ^ b) >> 63 ? -x : x;
2637 static int gen_opic_lt(uint64_t a, uint64_t b)
2639 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
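/* Worked example (illustrative): gen_opic_lt() implements a signed
   64-bit '<' with the host's unsigned comparison by flipping the sign
   bits, since x ^ (1ULL << 63) maps the signed range onto the unsigned
   range while preserving order:

       -1 (0xffffffffffffffff) ^ bit63 = 0x7fffffffffffffff
        1 (0x0000000000000001) ^ bit63 = 0x8000000000000001

   so gen_opic_lt(-1, 1) compares 0x7fff... < 0x8000...0001 and yields
   1, as a signed comparison should. */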
2642 /* handle integer constant optimizations and various machine
2643 independent opt */
2644 static void gen_opic(int op)
2646 SValue *v1 = vtop - 1;
2647 SValue *v2 = vtop;
2648 int t1 = v1->type.t & VT_BTYPE;
2649 int t2 = v2->type.t & VT_BTYPE;
2650 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2651 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2652 uint64_t l1 = c1 ? v1->c.i : 0;
2653 uint64_t l2 = c2 ? v2->c.i : 0;
2654 int shm = (t1 == VT_LLONG) ? 63 : 31;
2656 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2657 l1 = ((uint32_t)l1 |
2658 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2659 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2660 l2 = ((uint32_t)l2 |
2661 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2663 if (c1 && c2) {
2664 switch(op) {
2665 case '+': l1 += l2; break;
2666 case '-': l1 -= l2; break;
2667 case '&': l1 &= l2; break;
2668 case '^': l1 ^= l2; break;
2669 case '|': l1 |= l2; break;
2670 case '*': l1 *= l2; break;
2672 case TOK_PDIV:
2673 case '/':
2674 case '%':
2675 case TOK_UDIV:
2676 case TOK_UMOD:
2677 /* if division by zero, generate explicit division */
2678 if (l2 == 0) {
2679 if (const_wanted && !(nocode_wanted & unevalmask))
2680 tcc_error("division by zero in constant");
2681 goto general_case;
2683 switch(op) {
2684 default: l1 = gen_opic_sdiv(l1, l2); break;
2685 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2686 case TOK_UDIV: l1 = l1 / l2; break;
2687 case TOK_UMOD: l1 = l1 % l2; break;
2689 break;
2690 case TOK_SHL: l1 <<= (l2 & shm); break;
2691 case TOK_SHR: l1 >>= (l2 & shm); break;
2692 case TOK_SAR:
2693 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2694 break;
2695 /* tests */
2696 case TOK_ULT: l1 = l1 < l2; break;
2697 case TOK_UGE: l1 = l1 >= l2; break;
2698 case TOK_EQ: l1 = l1 == l2; break;
2699 case TOK_NE: l1 = l1 != l2; break;
2700 case TOK_ULE: l1 = l1 <= l2; break;
2701 case TOK_UGT: l1 = l1 > l2; break;
2702 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2703 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2704 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2705 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2706 /* logical */
2707 case TOK_LAND: l1 = l1 && l2; break;
2708 case TOK_LOR: l1 = l1 || l2; break;
2709 default:
2710 goto general_case;
2712 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2713 l1 = ((uint32_t)l1 |
2714 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2715 v1->c.i = l1;
2716 vtop--;
2717 } else {
2718 /* if commutative ops, put c2 as constant */
2719 if (c1 && (op == '+' || op == '&' || op == '^' ||
2720 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2721 vswap();
2722 c2 = c1; //c = c1, c1 = c2, c2 = c;
2723 l2 = l1; //l = l1, l1 = l2, l2 = l;
2725 if (!const_wanted &&
2726 c1 && ((l1 == 0 &&
2727 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2728 (l1 == -1 && op == TOK_SAR))) {
2729 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2730 vtop--;
2731 } else if (!const_wanted &&
2732 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2733 (op == '|' &&
2734 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2735 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2736 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2737 if (l2 == 1)
2738 vtop->c.i = 0;
2739 vswap();
2740 vtop--;
2741 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2742 op == TOK_PDIV) &&
2743 l2 == 1) ||
2744 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2745 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2746 l2 == 0) ||
2747 (op == '&' &&
2748 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2749 /* filter out NOP operations like x*1, x-0, x&-1... */
2750 vtop--;
2751 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2752 /* try to use shifts instead of muls or divs */
2753 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2754 int n = -1;
2755 while (l2) {
2756 l2 >>= 1;
2757 n++;
2759 vtop->c.i = n;
2760 if (op == '*')
2761 op = TOK_SHL;
2762 else if (op == TOK_PDIV)
2763 op = TOK_SAR;
2764 else
2765 op = TOK_SHR;
2767 goto general_case;
2768 } else if (c2 && (op == '+' || op == '-') &&
2769 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2770 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2771 /* symbol + constant case */
2772 if (op == '-')
2773 l2 = -l2;
2774 l2 += vtop[-1].c.i;
2775 /* The backends can't always deal with addends to symbols
2776 larger than +-1<<31. Don't construct such. */
2777 if ((int)l2 != l2)
2778 goto general_case;
2779 vtop--;
2780 vtop->c.i = l2;
2781 } else {
2782 general_case:
2783 /* call low level op generator */
2784 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2785 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2786 gen_opl(op);
2787 else
2788 gen_opi(op);
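/* Illustrative examples (not from the tcc sources) of what gen_opic()
   folds or simplifies at compile time:

       int a = 6 * 7;        both operands constant: folded to 42
       int b = x + 0;        NOP operation: the '+ 0' is dropped
       int c = x * 8;        strength reduction: becomes x << 3
       int d = x % 1;        becomes the constant 0
       int e = 1 / 0;        in a constant expression this raises
                             "division by zero in constant"; at run
                             time an explicit division is generated

   Anything else falls through to general_case and is handed to the
   target code generator via gen_opi() or gen_opl(). */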
2793 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2794 # define gen_negf gen_opf
2795 #else
2796 /* XXX: implement in gen_opf() for other backends too */
2797 void gen_negf(int op)
2799 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2800 subtract(-0, x), but with them it's really a sign flip
2801 operation. We implement this with bit manipulation and have
2802 to do some type reinterpretation for this, which TCC can do
2803 only via memory. */
2805 int align, size, bt;
2807 size = type_size(&vtop->type, &align);
2808 bt = vtop->type.t & VT_BTYPE;
2809 save_reg(gv(RC_TYPE(bt)));
2810 vdup();
2811 incr_bf_adr(size - 1);
2812 vdup();
2813 vpushi(0x80); /* flip sign */
2814 gen_op('^');
2815 vstore();
2816 vpop();
2818 #endif
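/* Illustrative example (not from the tcc sources): for targets that use
   the generic gen_negf() above,

       double neg(double x) { return -x; }

   is compiled by spilling x to memory, reinterpreting its storage as
   bytes via incr_bf_adr(), and xor-ing 0x80 into the highest-addressed
   byte (the one holding the IEEE sign bit on little-endian targets),
   so that -0.0 and NaNs are negated correctly, unlike a 0 - x
   subtraction. */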
2820 /* generate a floating point operation with constant propagation */
2821 static void gen_opif(int op)
2823 int c1, c2;
2824 SValue *v1, *v2;
2825 #if defined _MSC_VER && defined __x86_64__
2826 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2827 volatile
2828 #endif
2829 long double f1, f2;
2831 v1 = vtop - 1;
2832 v2 = vtop;
2833 if (op == TOK_NEG)
2834 v1 = v2;
2836 /* currently, we cannot do computations with forward symbols */
2837 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2838 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2839 if (c1 && c2) {
2840 if (v1->type.t == VT_FLOAT) {
2841 f1 = v1->c.f;
2842 f2 = v2->c.f;
2843 } else if (v1->type.t == VT_DOUBLE) {
2844 f1 = v1->c.d;
2845 f2 = v2->c.d;
2846 } else {
2847 f1 = v1->c.ld;
2848 f2 = v2->c.ld;
2850 /* NOTE: we only do constant propagation for finite numbers (not
2851 NaN or infinity) (ANSI spec) */
2852 if (!(ieee_finite(f1) || !ieee_finite(f2)) && !const_wanted)
2853 goto general_case;
2854 switch(op) {
2855 case '+': f1 += f2; break;
2856 case '-': f1 -= f2; break;
2857 case '*': f1 *= f2; break;
2858 case '/':
2859 if (f2 == 0.0) {
2860 union { float f; unsigned u; } x1, x2, y;
2861 /* If not in initializer we need to potentially generate
2862 FP exceptions at runtime, otherwise we want to fold. */
2863 if (!const_wanted)
2864 goto general_case;
2865 /* the run-time result of 0.0/0.0 on x87 (and of other compilers,
2866 when used to compile the f1 /= f2 below) would be -nan */
2867 x1.f = f1, x2.f = f2;
2868 if (f1 == 0.0)
2869 y.u = 0x7fc00000; /* nan */
2870 else
2871 y.u = 0x7f800000; /* infinity */
2872 y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
2873 f1 = y.f;
2874 break;
2876 f1 /= f2;
2877 break;
2878 case TOK_NEG:
2879 f1 = -f1;
2880 goto unary_result;
2881 /* XXX: also handles tests ? */
2882 default:
2883 goto general_case;
2885 vtop--;
2886 unary_result:
2887 /* XXX: overflow test ? */
2888 if (v1->type.t == VT_FLOAT) {
2889 v1->c.f = f1;
2890 } else if (v1->type.t == VT_DOUBLE) {
2891 v1->c.d = f1;
2892 } else {
2893 v1->c.ld = f1;
2895 } else {
2896 general_case:
2897 if (op == TOK_NEG) {
2898 gen_negf(op);
2899 } else {
2900 gen_opf(op);
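/* Illustrative examples (not from the tcc sources) of gen_opif()
   constant folding:

       static double a = 1.5 * 2.0;    folded to 3.0
       static float  b = 1.0f / 0.0f;  folded to +infinity when the
                                       expression must be constant
                                       (const_wanted), with the sign
                                       taken from the operands
       double c(double x) { return x / 0.0; }
                                       not folded: code is emitted so
                                       that FP exception behaviour is
                                       preserved at run time */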
2905 /* print a type. If 'varstr' is not NULL, then the variable is also
2906 printed in the type */
2907 /* XXX: union */
2908 /* XXX: add array and function pointers */
2909 static void type_to_str(char *buf, int buf_size,
2910 CType *type, const char *varstr)
2912 int bt, v, t;
2913 Sym *s, *sa;
2914 char buf1[256];
2915 const char *tstr;
2917 t = type->t;
2918 bt = t & VT_BTYPE;
2919 buf[0] = '\0';
2921 if (t & VT_EXTERN)
2922 pstrcat(buf, buf_size, "extern ");
2923 if (t & VT_STATIC)
2924 pstrcat(buf, buf_size, "static ");
2925 if (t & VT_TYPEDEF)
2926 pstrcat(buf, buf_size, "typedef ");
2927 if (t & VT_INLINE)
2928 pstrcat(buf, buf_size, "inline ");
2929 if (t & VT_VOLATILE)
2930 pstrcat(buf, buf_size, "volatile ");
2931 if (t & VT_CONSTANT)
2932 pstrcat(buf, buf_size, "const ");
2934 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2935 || ((t & VT_UNSIGNED)
2936 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2937 && !IS_ENUM(t)
2939 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2941 buf_size -= strlen(buf);
2942 buf += strlen(buf);
2944 switch(bt) {
2945 case VT_VOID:
2946 tstr = "void";
2947 goto add_tstr;
2948 case VT_BOOL:
2949 tstr = "_Bool";
2950 goto add_tstr;
2951 case VT_BYTE:
2952 tstr = "char";
2953 goto add_tstr;
2954 case VT_SHORT:
2955 tstr = "short";
2956 goto add_tstr;
2957 case VT_INT:
2958 tstr = "int";
2959 goto maybe_long;
2960 case VT_LLONG:
2961 tstr = "long long";
2962 maybe_long:
2963 if (t & VT_LONG)
2964 tstr = "long";
2965 if (!IS_ENUM(t))
2966 goto add_tstr;
2967 tstr = "enum ";
2968 goto tstruct;
2969 case VT_FLOAT:
2970 tstr = "float";
2971 goto add_tstr;
2972 case VT_DOUBLE:
2973 tstr = "double";
2974 if (!(t & VT_LONG))
2975 goto add_tstr;
2976 case VT_LDOUBLE:
2977 tstr = "long double";
2978 add_tstr:
2979 pstrcat(buf, buf_size, tstr);
2980 break;
2981 case VT_STRUCT:
2982 tstr = "struct ";
2983 if (IS_UNION(t))
2984 tstr = "union ";
2985 tstruct:
2986 pstrcat(buf, buf_size, tstr);
2987 v = type->ref->v & ~SYM_STRUCT;
2988 if (v >= SYM_FIRST_ANOM)
2989 pstrcat(buf, buf_size, "<anonymous>");
2990 else
2991 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2992 break;
2993 case VT_FUNC:
2994 s = type->ref;
2995 buf1[0]=0;
2996 if (varstr && '*' == *varstr) {
2997 pstrcat(buf1, sizeof(buf1), "(");
2998 pstrcat(buf1, sizeof(buf1), varstr);
2999 pstrcat(buf1, sizeof(buf1), ")");
3001 pstrcat(buf1, sizeof(buf1), "(");
3002 sa = s->next;
3003 while (sa != NULL) {
3004 char buf2[256];
3005 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3006 pstrcat(buf1, sizeof(buf1), buf2);
3007 sa = sa->next;
3008 if (sa)
3009 pstrcat(buf1, sizeof(buf1), ", ");
3011 if (s->f.func_type == FUNC_ELLIPSIS)
3012 pstrcat(buf1, sizeof(buf1), ", ...");
3013 pstrcat(buf1, sizeof(buf1), ")");
3014 type_to_str(buf, buf_size, &s->type, buf1);
3015 goto no_var;
3016 case VT_PTR:
3017 s = type->ref;
3018 if (t & VT_ARRAY) {
3019 if (varstr && '*' == *varstr)
3020 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3021 else
3022 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3023 type_to_str(buf, buf_size, &s->type, buf1);
3024 goto no_var;
3026 pstrcpy(buf1, sizeof(buf1), "*");
3027 if (t & VT_CONSTANT)
3028 pstrcat(buf1, sizeof(buf1), "const ");
3029 if (t & VT_VOLATILE)
3030 pstrcat(buf1, sizeof(buf1), "volatile ");
3031 if (varstr)
3032 pstrcat(buf1, sizeof(buf1), varstr);
3033 type_to_str(buf, buf_size, &s->type, buf1);
3034 goto no_var;
3036 if (varstr) {
3037 pstrcat(buf, buf_size, " ");
3038 pstrcat(buf, buf_size, varstr);
3040 no_var: ;
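/* Illustrative examples (not from the tcc sources) of the strings
   produced by type_to_str() for a few declarations, with varstr = "x":

       unsigned int x;          ->  "unsigned int x"
       const char *x;           ->  "const char *x"
       char *const x;           ->  "char *const x"
       int x[10];               ->  "int x[10]"
       int (*x)(char, double);  ->  "int (*x)(char, double)"

   The type_incompatibility_* helpers below use it to format
   diagnostics such as "cannot convert 'char *' to 'int'". */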
3043 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
3045 char buf1[256], buf2[256];
3046 type_to_str(buf1, sizeof(buf1), st, NULL);
3047 type_to_str(buf2, sizeof(buf2), dt, NULL);
3048 tcc_error(fmt, buf1, buf2);
3051 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
3053 char buf1[256], buf2[256];
3054 type_to_str(buf1, sizeof(buf1), st, NULL);
3055 type_to_str(buf2, sizeof(buf2), dt, NULL);
3056 tcc_warning(fmt, buf1, buf2);
3059 static int pointed_size(CType *type)
3061 int align;
3062 return type_size(pointed_type(type), &align);
3065 static void vla_runtime_pointed_size(CType *type)
3067 int align;
3068 vla_runtime_type_size(pointed_type(type), &align);
3071 static inline int is_null_pointer(SValue *p)
3073 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
3074 return 0;
3075 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
3076 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
3077 ((p->type.t & VT_BTYPE) == VT_PTR &&
3078 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
3079 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
3080 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
3084 /* compare function types. OLD functions match any new functions */
3085 static int is_compatible_func(CType *type1, CType *type2)
3087 Sym *s1, *s2;
3089 s1 = type1->ref;
3090 s2 = type2->ref;
3091 if (s1->f.func_call != s2->f.func_call)
3092 return 0;
3093 if (s1->f.func_type != s2->f.func_type
3094 && s1->f.func_type != FUNC_OLD
3095 && s2->f.func_type != FUNC_OLD)
3096 return 0;
3097 for (;;) {
3098 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
3099 return 0;
3100 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
3101 return 1;
3102 s1 = s1->next;
3103 s2 = s2->next;
3104 if (!s1)
3105 return !s2;
3106 if (!s2)
3107 return 0;
3111 /* return true if type1 and type2 are the same. If unqualified is
3112 true, qualifiers on the types are ignored.
3114 static int compare_types(CType *type1, CType *type2, int unqualified)
3116 int bt1, t1, t2;
3118 t1 = type1->t & VT_TYPE;
3119 t2 = type2->t & VT_TYPE;
3120 if (unqualified) {
3121 /* strip qualifiers before comparing */
3122 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3123 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3126 /* Default vs. explicit signedness only matters for char */
3127 if ((t1 & VT_BTYPE) != VT_BYTE) {
3128 t1 &= ~VT_DEFSIGN;
3129 t2 &= ~VT_DEFSIGN;
3131 /* XXX: bitfields ? */
3132 if (t1 != t2)
3133 return 0;
3135 if ((t1 & VT_ARRAY)
3136 && !(type1->ref->c < 0
3137 || type2->ref->c < 0
3138 || type1->ref->c == type2->ref->c))
3139 return 0;
3141 /* test more complicated cases */
3142 bt1 = t1 & VT_BTYPE;
3143 if (bt1 == VT_PTR) {
3144 type1 = pointed_type(type1);
3145 type2 = pointed_type(type2);
3146 return is_compatible_types(type1, type2);
3147 } else if (bt1 == VT_STRUCT) {
3148 return (type1->ref == type2->ref);
3149 } else if (bt1 == VT_FUNC) {
3150 return is_compatible_func(type1, type2);
3151 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
3152 /* If both are enums then they must be the same, if only one is then
3153 t1 and t2 must be equal, which was checked above already. */
3154 return type1->ref == type2->ref;
3155 } else {
3156 return 1;
3160 /* Check if OP1 and OP2 can be "combined" with operation OP. The combined
3161 type is stored in DEST if non-null (except for pointer plus/minus). */
3162 static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
3164 CType *type1 = &op1->type, *type2 = &op2->type, type;
3165 int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
3166 int ret = 1;
3168 type.t = VT_VOID;
3169 type.ref = NULL;
3171 if (bt1 == VT_VOID || bt2 == VT_VOID) {
3172 ret = op == '?' ? 1 : 0;
3173 /* NOTE: as an extension, we accept void on only one side */
3174 type.t = VT_VOID;
3175 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3176 if (op == '+') ; /* Handled in caller */
3177 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3178 /* If one is a null ptr constant the result type is the other. */
3179 else if (is_null_pointer (op2)) type = *type1;
3180 else if (is_null_pointer (op1)) type = *type2;
3181 else if (bt1 != bt2) {
3182 /* accept comparison or cond-expr between pointer and integer
3183 with a warning */
3184 if ((op == '?' || TOK_ISCOND(op))
3185 && (is_integer_btype(bt1) || is_integer_btype(bt2)))
3186 tcc_warning("pointer/integer mismatch in %s",
3187 op == '?' ? "conditional expression" : "comparison");
3188 else if (op != '-' || !is_integer_btype(bt2))
3189 ret = 0;
3190 type = *(bt1 == VT_PTR ? type1 : type2);
3191 } else {
3192 CType *pt1 = pointed_type(type1);
3193 CType *pt2 = pointed_type(type2);
3194 int pbt1 = pt1->t & VT_BTYPE;
3195 int pbt2 = pt2->t & VT_BTYPE;
3196 int newquals, copied = 0;
3197 if (pbt1 != VT_VOID && pbt2 != VT_VOID
3198 && !compare_types(pt1, pt2, 1/*unqualif*/)) {
3199 if (op != '?' && !TOK_ISCOND(op))
3200 ret = 0;
3201 else
3202 type_incompatibility_warning(type1, type2,
3203 op == '?'
3204 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3205 : "pointer type mismatch in comparison('%s' and '%s')");
3207 if (op == '?') {
3208 /* pointers to void get preferred, otherwise the
3209 pointed to types minus qualifs should be compatible */
3210 type = *((pbt1 == VT_VOID) ? type1 : type2);
3211 /* combine qualifs */
3212 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
3213 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
3214 & newquals)
3216 /* copy the pointer target symbol */
3217 type.ref = sym_push(SYM_FIELD, &type.ref->type,
3218 0, type.ref->c);
3219 copied = 1;
3220 pointed_type(&type)->t |= newquals;
3222 /* pointers to incomplete arrays get converted to
3223 pointers to completed ones if possible */
3224 if (pt1->t & VT_ARRAY
3225 && pt2->t & VT_ARRAY
3226 && pointed_type(&type)->ref->c < 0
3227 && (pt1->ref->c > 0 || pt2->ref->c > 0))
3229 if (!copied)
3230 type.ref = sym_push(SYM_FIELD, &type.ref->type,
3231 0, type.ref->c);
3232 pointed_type(&type)->ref =
3233 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
3234 0, pointed_type(&type)->ref->c);
3235 pointed_type(&type)->ref->c =
3236 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
3240 if (TOK_ISCOND(op))
3241 type.t = VT_SIZE_T;
3242 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
3243 if (op != '?' || !compare_types(type1, type2, 1))
3244 ret = 0;
3245 type = *type1;
3246 } else if (is_float(bt1) || is_float(bt2)) {
3247 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
3248 type.t = VT_LDOUBLE;
3249 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
3250 type.t = VT_DOUBLE;
3251 } else {
3252 type.t = VT_FLOAT;
3254 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
3255 /* cast to biggest op */
3256 type.t = VT_LLONG | VT_LONG;
3257 if (bt1 == VT_LLONG)
3258 type.t &= t1;
3259 if (bt2 == VT_LLONG)
3260 type.t &= t2;
3261 /* convert to unsigned if it does not fit in a long long */
3262 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
3263 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
3264 type.t |= VT_UNSIGNED;
3265 } else {
3266 /* integer operations */
3267 type.t = VT_INT | (VT_LONG & (t1 | t2));
3268 /* convert to unsigned if it does not fit in an integer */
3269 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
3270 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
3271 type.t |= VT_UNSIGNED;
3273 if (dest)
3274 *dest = type;
3275 return ret;
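/* Illustrative examples (not from the tcc sources) of the combined
   types computed above for the conditional operator:

       int i; unsigned u; long long ll; double d;
       char *p; const char *cp; void *vp;

       c ? i : u;      result type: unsigned int
       c ? i : ll;     result type: long long
       c ? i : d;      result type: double
       c ? p : vp;     result type: void *  (pointer to void preferred)
       c ? cp : 0;     result type: const char *  (a null pointer
                                    constant takes the other operand's
                                    type)

   Mismatched pointer types only draw a warning here; a hard error
   (ret == 0) is reserved for combinations such as a struct against an
   int. */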
3278 /* generic gen_op: handles type problems */
3279 ST_FUNC void gen_op(int op)
3281 int u, t1, t2, bt1, bt2, t;
3282 CType type1, combtype;
3284 redo:
3285 t1 = vtop[-1].type.t;
3286 t2 = vtop[0].type.t;
3287 bt1 = t1 & VT_BTYPE;
3288 bt2 = t2 & VT_BTYPE;
3290 if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
3291 if (bt2 == VT_FUNC) {
3292 mk_pointer(&vtop->type);
3293 gaddrof();
3295 if (bt1 == VT_FUNC) {
3296 vswap();
3297 mk_pointer(&vtop->type);
3298 gaddrof();
3299 vswap();
3301 goto redo;
3302 } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
3303 tcc_error_noabort("invalid operand types for binary operation");
3304 vpop();
3305 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3306 /* at least one operand is a pointer */
3307 /* relational op: must be both pointers */
3308 if (TOK_ISCOND(op))
3309 goto std_op;
3310 /* if both pointers, then it must be the '-' op */
3311 if (bt1 == VT_PTR && bt2 == VT_PTR) {
3312 if (op != '-')
3313 tcc_error("cannot use pointers here");
3314 if (vtop[-1].type.t & VT_VLA) {
3315 vla_runtime_pointed_size(&vtop[-1].type);
3316 } else {
3317 vpushi(pointed_size(&vtop[-1].type));
3319 vrott(3);
3320 gen_opic(op);
3321 vtop->type.t = VT_PTRDIFF_T;
3322 vswap();
3323 gen_op(TOK_PDIV);
3324 } else {
3325 /* exactly one pointer : must be '+' or '-'. */
3326 if (op != '-' && op != '+')
3327 tcc_error("cannot use pointers here");
3328 /* Put pointer as first operand */
3329 if (bt2 == VT_PTR) {
3330 vswap();
3331 t = t1, t1 = t2, t2 = t;
3333 #if PTR_SIZE == 4
3334 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
3335 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3336 gen_cast_s(VT_INT);
3337 #endif
3338 type1 = vtop[-1].type;
3339 if (vtop[-1].type.t & VT_VLA)
3340 vla_runtime_pointed_size(&vtop[-1].type);
3341 else {
3342 u = pointed_size(&vtop[-1].type);
3343 if (u < 0)
3344 tcc_error("unknown array element size");
3345 #if PTR_SIZE == 8
3346 vpushll(u);
3347 #else
3348 /* XXX: cast to int ? (long long case) */
3349 vpushi(u);
3350 #endif
3352 gen_op('*');
3353 #ifdef CONFIG_TCC_BCHECK
3354 if (tcc_state->do_bounds_check && !const_wanted) {
3355 /* if bounded pointers, we generate a special code to
3356 test bounds */
3357 if (op == '-') {
3358 vpushi(0);
3359 vswap();
3360 gen_op('-');
3362 gen_bounded_ptr_add();
3363 } else
3364 #endif
3366 gen_opic(op);
3368 type1.t &= ~VT_ARRAY;
3369 /* restore the type in case gen_opic() swapped the operands */
3370 vtop->type = type1;
3372 } else {
3373 /* floats can only be used for a few operations */
3374 if (is_float(combtype.t)
3375 && op != '+' && op != '-' && op != '*' && op != '/'
3376 && !TOK_ISCOND(op))
3377 tcc_error("invalid operands for binary operation");
3378 else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
3379 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
3380 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
3381 t |= VT_UNSIGNED;
3382 t |= (VT_LONG & t1);
3383 combtype.t = t;
3385 std_op:
3386 t = t2 = combtype.t;
3387 /* XXX: currently, some unsigned operations are explicit, so
3388 we modify them here */
3389 if (t & VT_UNSIGNED) {
3390 if (op == TOK_SAR)
3391 op = TOK_SHR;
3392 else if (op == '/')
3393 op = TOK_UDIV;
3394 else if (op == '%')
3395 op = TOK_UMOD;
3396 else if (op == TOK_LT)
3397 op = TOK_ULT;
3398 else if (op == TOK_GT)
3399 op = TOK_UGT;
3400 else if (op == TOK_LE)
3401 op = TOK_ULE;
3402 else if (op == TOK_GE)
3403 op = TOK_UGE;
3405 vswap();
3406 gen_cast_s(t);
3407 vswap();
3408 /* special case for shifts and long long: we keep the shift as
3409 an integer */
3410 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
3411 t2 = VT_INT;
3412 gen_cast_s(t2);
3413 if (is_float(t))
3414 gen_opif(op);
3415 else
3416 gen_opic(op);
3417 if (TOK_ISCOND(op)) {
3418 /* relational op: the result is an int */
3419 vtop->type.t = VT_INT;
3420 } else {
3421 vtop->type.t = t;
3424 // Make sure that we have converted to an rvalue:
3425 if (vtop->r & VT_LVAL)
3426 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
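/* Illustrative example (not from the tcc sources) of the pointer
   scaling performed by gen_op():

       int *q(int *p, int i) { return p + i; }
       long d(int *a, int *b) { return a - b; }

   For p + i the integer operand is multiplied by sizeof(int) before
   the addition (with a bounds-checked variant when -b is active); for
   a - b the byte difference is computed first and then divided by
   sizeof(int) via TOK_PDIV, giving a result of type ptrdiff_t. */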
3429 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3430 #define gen_cvt_itof1 gen_cvt_itof
3431 #else
3432 /* generic itof for unsigned long long case */
3433 static void gen_cvt_itof1(int t)
3435 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
3436 (VT_LLONG | VT_UNSIGNED)) {
3438 if (t == VT_FLOAT)
3439 vpush_helper_func(TOK___floatundisf);
3440 #if LDOUBLE_SIZE != 8
3441 else if (t == VT_LDOUBLE)
3442 vpush_helper_func(TOK___floatundixf);
3443 #endif
3444 else
3445 vpush_helper_func(TOK___floatundidf);
3446 vrott(2);
3447 gfunc_call(1);
3448 vpushi(0);
3449 PUT_R_RET(vtop, t);
3450 } else {
3451 gen_cvt_itof(t);
3454 #endif
3456 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3457 #define gen_cvt_ftoi1 gen_cvt_ftoi
3458 #else
3459 /* generic ftoi for unsigned long long case */
3460 static void gen_cvt_ftoi1(int t)
3462 int st;
3463 if (t == (VT_LLONG | VT_UNSIGNED)) {
3464 /* not handled natively */
3465 st = vtop->type.t & VT_BTYPE;
3466 if (st == VT_FLOAT)
3467 vpush_helper_func(TOK___fixunssfdi);
3468 #if LDOUBLE_SIZE != 8
3469 else if (st == VT_LDOUBLE)
3470 vpush_helper_func(TOK___fixunsxfdi);
3471 #endif
3472 else
3473 vpush_helper_func(TOK___fixunsdfdi);
3474 vrott(2);
3475 gfunc_call(1);
3476 vpushi(0);
3477 PUT_R_RET(vtop, t);
3478 } else {
3479 gen_cvt_ftoi(t);
3482 #endif
3484 /* special delayed cast for char/short */
3485 static void force_charshort_cast(void)
3487 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3488 int dbt = vtop->type.t;
3489 vtop->r &= ~VT_MUSTCAST;
3490 vtop->type.t = sbt;
3491 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3492 vtop->type.t = dbt;
3495 static void gen_cast_s(int t)
3497 CType type;
3498 type.t = t;
3499 type.ref = NULL;
3500 gen_cast(&type);
3503 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3504 static void gen_cast(CType *type)
3506 int sbt, dbt, sf, df, c;
3507 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
3509 /* special delayed cast for char/short */
3510 if (vtop->r & VT_MUSTCAST)
3511 force_charshort_cast();
3513 /* bitfields first get cast to ints */
3514 if (vtop->type.t & VT_BITFIELD)
3515 gv(RC_INT);
3517 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
3518 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
3519 if (sbt == VT_FUNC)
3520 sbt = VT_PTR;
3522 again:
3523 if (sbt != dbt) {
3524 sf = is_float(sbt);
3525 df = is_float(dbt);
3526 dbt_bt = dbt & VT_BTYPE;
3527 sbt_bt = sbt & VT_BTYPE;
3529 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3530 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3531 c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
3532 #endif
3533 if (c) {
3534 /* constant case: we can do it now */
3535 /* XXX: in ISOC, cannot do it if error in convert */
3536 if (sbt == VT_FLOAT)
3537 vtop->c.ld = vtop->c.f;
3538 else if (sbt == VT_DOUBLE)
3539 vtop->c.ld = vtop->c.d;
3541 if (df) {
3542 if (sbt_bt == VT_LLONG) {
3543 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
3544 vtop->c.ld = vtop->c.i;
3545 else
3546 vtop->c.ld = -(long double)-vtop->c.i;
3547 } else if(!sf) {
3548 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
3549 vtop->c.ld = (uint32_t)vtop->c.i;
3550 else
3551 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
3554 if (dbt == VT_FLOAT)
3555 vtop->c.f = (float)vtop->c.ld;
3556 else if (dbt == VT_DOUBLE)
3557 vtop->c.d = (double)vtop->c.ld;
3558 } else if (sf && dbt == VT_BOOL) {
3559 vtop->c.i = (vtop->c.ld != 0);
3560 } else {
3561 if(sf)
3562 vtop->c.i = vtop->c.ld;
3563 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
3565 else if (sbt & VT_UNSIGNED)
3566 vtop->c.i = (uint32_t)vtop->c.i;
3567 else
3568 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
3570 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
3572 else if (dbt == VT_BOOL)
3573 vtop->c.i = (vtop->c.i != 0);
3574 else {
3575 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
3576 dbt_bt == VT_SHORT ? 0xffff :
3577 0xffffffff;
3578 vtop->c.i &= m;
3579 if (!(dbt & VT_UNSIGNED))
3580 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
3583 goto done;
3585 } else if (dbt == VT_BOOL
3586 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
3587 == (VT_CONST | VT_SYM)) {
3588 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3589 vtop->r = VT_CONST;
3590 vtop->c.i = 1;
3591 goto done;
3594 /* cannot generate code for global or static initializers */
3595 if (STATIC_DATA_WANTED)
3596 goto done;
3598 /* non constant case: generate code */
3599 if (dbt == VT_BOOL) {
3600 gen_test_zero(TOK_NE);
3601 goto done;
3604 if (sf || df) {
3605 if (sf && df) {
3606 /* convert from fp to fp */
3607 gen_cvt_ftof(dbt);
3608 } else if (df) {
3609 /* convert int to fp */
3610 gen_cvt_itof1(dbt);
3611 } else {
3612 /* convert fp to int */
3613 sbt = dbt;
3614 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
3615 sbt = VT_INT;
3616 gen_cvt_ftoi1(sbt);
3617 goto again; /* may need char/short cast */
3619 goto done;
3622 ds = btype_size(dbt_bt);
3623 ss = btype_size(sbt_bt);
3624 if (ds == 0 || ss == 0) {
3625 if (dbt_bt == VT_VOID)
3626 goto done;
3627 cast_error(&vtop->type, type);
3629 if (IS_ENUM(type->t) && type->ref->c < 0)
3630 tcc_error("cast to incomplete type");
3632 /* same size and no sign conversion needed */
3633 if (ds == ss && ds >= 4)
3634 goto done;
3635 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
3636 tcc_warning("cast between pointer and integer of different size");
3637 if (sbt_bt == VT_PTR) {
3638 /* put integer type to allow logical operations below */
3639 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
3643 /* processor allows { int a = 0, b = *(char*)&a; }
3644 That means that if we cast to a smaller width, we can just
3645 change the type and still read it later. */
3646 #define ALLOW_SUBTYPE_ACCESS 1
3648 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
3649 /* value still in memory */
3650 if (ds <= ss)
3651 goto done;
3652 /* ss <= 4 here */
3653 if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
3654 gv(RC_INT);
3655 goto done; /* no 64bit involved */
3658 gv(RC_INT);
3660 trunc = 0;
3661 #if PTR_SIZE == 4
3662 if (ds == 8) {
3663 /* generate high word */
3664 if (sbt & VT_UNSIGNED) {
3665 vpushi(0);
3666 gv(RC_INT);
3667 } else {
3668 gv_dup();
3669 vpushi(31);
3670 gen_op(TOK_SAR);
3672 lbuild(dbt);
3673 } else if (ss == 8) {
3674 /* from long long: just take low order word */
3675 lexpand();
3676 vpop();
3678 ss = 4;
3680 #elif PTR_SIZE == 8
3681 if (ds == 8) {
3682 /* need to convert from 32bit to 64bit */
3683 if (sbt & VT_UNSIGNED) {
3684 #if defined(TCC_TARGET_RISCV64)
3685 /* RISC-V keeps 32bit vals in registers sign-extended.
3686 So here we need a zero-extension. */
3687 trunc = 32;
3688 #else
3689 goto done;
3690 #endif
3691 } else {
3692 gen_cvt_sxtw();
3693 goto done;
3695 ss = ds, ds = 4, dbt = sbt;
3696 } else if (ss == 8) {
3697 /* RISC-V keeps 32bit vals in registers sign-extended.
3698 So here we need a sign-extension for signed types and a
3699 zero-extension for unsigned types. */
3700 #if !defined(TCC_TARGET_RISCV64)
3701 trunc = 32; /* zero upper 32 bits for non RISC-V targets */
3702 #endif
3703 } else {
3704 ss = 4;
3706 #endif
3708 if (ds >= ss)
3709 goto done;
3710 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3711 if (ss == 4) {
3712 gen_cvt_csti(dbt);
3713 goto done;
3715 #endif
3716 bits = (ss - ds) * 8;
3717 /* for unsigned, gen_op will convert SAR to SHR */
3718 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
3719 vpushi(bits);
3720 gen_op(TOK_SHL);
3721 vpushi(bits - trunc);
3722 gen_op(TOK_SAR);
3723 vpushi(trunc);
3724 gen_op(TOK_SHR);
3726 done:
3727 vtop->type = *type;
3728 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
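/* Illustrative example (not from the tcc sources) of the narrowing
   path at the end of gen_cast():

       int trunc8(int x) { return (signed char)x; }

   While the value is still in memory the cast is free: the type is
   simply changed and only the low byte is read later.  Once the value
   sits in a register, ds = 1 and ss = 4, so bits = 24 and, on targets
   that do not take the gen_cvt_csti() shortcut, the emitted code is a
   shift left by 24 followed by an arithmetic shift right by 24 (a
   logical shift for unsigned types), sign-extending the low byte. */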
3731 /* return type size as known at compile time. Put alignment at 'a' */
3732 ST_FUNC int type_size(CType *type, int *a)
3734 Sym *s;
3735 int bt;
3737 bt = type->t & VT_BTYPE;
3738 if (bt == VT_STRUCT) {
3739 /* struct/union */
3740 s = type->ref;
3741 *a = s->r;
3742 return s->c;
3743 } else if (bt == VT_PTR) {
3744 if (type->t & VT_ARRAY) {
3745 int ts;
3747 s = type->ref;
3748 ts = type_size(&s->type, a);
3750 if (ts < 0 && s->c < 0)
3751 ts = -ts;
3753 return ts * s->c;
3754 } else {
3755 *a = PTR_SIZE;
3756 return PTR_SIZE;
3758 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3759 return -1; /* incomplete enum */
3760 } else if (bt == VT_LDOUBLE) {
3761 *a = LDOUBLE_ALIGN;
3762 return LDOUBLE_SIZE;
3763 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
3764 #ifdef TCC_TARGET_I386
3765 #ifdef TCC_TARGET_PE
3766 *a = 8;
3767 #else
3768 *a = 4;
3769 #endif
3770 #elif defined(TCC_TARGET_ARM)
3771 #ifdef TCC_ARM_EABI
3772 *a = 8;
3773 #else
3774 *a = 4;
3775 #endif
3776 #else
3777 *a = 8;
3778 #endif
3779 return 8;
3780 } else if (bt == VT_INT || bt == VT_FLOAT) {
3781 *a = 4;
3782 return 4;
3783 } else if (bt == VT_SHORT) {
3784 *a = 2;
3785 return 2;
3786 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3787 *a = 8;
3788 return 16;
3789 } else {
3790 /* char, void, function, _Bool */
3791 *a = 1;
3792 return 1;
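/* Illustrative examples (not from the tcc sources), assuming a typical
   32-bit x86 Linux target (TCC_TARGET_I386 without TCC_TARGET_PE):

       type            size returned    alignment stored in *a
       short                2                    2
       int                  4                    4
       long long            8                    4   (i386 SysV ABI)
       double               8                    4
       int [10]            40                    4
       struct/union        s->c                 s->r  (precomputed by
                                                       struct_layout())

   Incomplete enums return -1 so that callers can diagnose uses of a
   type whose size is not yet known. */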
3796 /* push the type size as known at run time on top of the value stack. Put
3797 the alignment at 'a' */
3798 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
3800 if (type->t & VT_VLA) {
3801 type_size(&type->ref->type, a);
3802 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3803 } else {
3804 vpushi(type_size(type, a));
3808 /* return the pointed type of t */
3809 static inline CType *pointed_type(CType *type)
3811 return &type->ref->type;
3814 /* modify type so that it becomes a pointer to the original type. */
3815 ST_FUNC void mk_pointer(CType *type)
3817 Sym *s;
3818 s = sym_push(SYM_FIELD, type, 0, -1);
3819 type->t = VT_PTR | (type->t & VT_STORAGE);
3820 type->ref = s;
3823 /* return true if type1 and type2 are exactly the same (including
3824 qualifiers).
3826 static int is_compatible_types(CType *type1, CType *type2)
3828 return compare_types(type1,type2,0);
3831 /* return true if type1 and type2 are the same (ignoring qualifiers).
3833 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3835 return compare_types(type1,type2,1);
3838 static void cast_error(CType *st, CType *dt)
3840 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
3843 /* verify type compatibility to store vtop in 'dt' type */
3844 static void verify_assign_cast(CType *dt)
3846 CType *st, *type1, *type2;
3847 int dbt, sbt, qualwarn, lvl;
3849 st = &vtop->type; /* source type */
3850 dbt = dt->t & VT_BTYPE;
3851 sbt = st->t & VT_BTYPE;
3852 if (dt->t & VT_CONSTANT)
3853 tcc_warning("assignment of read-only location");
3854 switch(dbt) {
3855 case VT_VOID:
3856 if (sbt != dbt)
3857 tcc_error("assignment to void expression");
3858 break;
3859 case VT_PTR:
3860 /* special cases for pointers */
3861 /* '0' can also be a pointer */
3862 if (is_null_pointer(vtop))
3863 break;
3864 /* accept implicit pointer to integer cast with warning */
3865 if (is_integer_btype(sbt)) {
3866 tcc_warning("assignment makes pointer from integer without a cast");
3867 break;
3869 type1 = pointed_type(dt);
3870 if (sbt == VT_PTR)
3871 type2 = pointed_type(st);
3872 else if (sbt == VT_FUNC)
3873 type2 = st; /* a function is implicitly a function pointer */
3874 else
3875 goto error;
3876 if (is_compatible_types(type1, type2))
3877 break;
3878 for (qualwarn = lvl = 0;; ++lvl) {
3879 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3880 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3881 qualwarn = 1;
3882 dbt = type1->t & (VT_BTYPE|VT_LONG);
3883 sbt = type2->t & (VT_BTYPE|VT_LONG);
3884 if (dbt != VT_PTR || sbt != VT_PTR)
3885 break;
3886 type1 = pointed_type(type1);
3887 type2 = pointed_type(type2);
3889 if (!is_compatible_unqualified_types(type1, type2)) {
3890 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3891 /* void * can match anything */
3892 } else if (dbt == sbt
3893 && is_integer_btype(sbt & VT_BTYPE)
3894 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3895 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3896 /* Like GCC, don't warn by default for mere changes
3897 in pointer target signedness. Do warn for different
3898 base types, though, in particular for unsigned enums
3899 and signed int targets. */
3900 } else {
3901 tcc_warning("assignment from incompatible pointer type");
3902 break;
3905 if (qualwarn)
3906 tcc_warning("assignment discards qualifiers from pointer target type");
3907 break;
3908 case VT_BYTE:
3909 case VT_SHORT:
3910 case VT_INT:
3911 case VT_LLONG:
3912 if (sbt == VT_PTR || sbt == VT_FUNC) {
3913 tcc_warning("assignment makes integer from pointer without a cast");
3914 } else if (sbt == VT_STRUCT) {
3915 goto case_VT_STRUCT;
3917 /* XXX: more tests */
3918 break;
3919 case VT_STRUCT:
3920 case_VT_STRUCT:
3921 if (!is_compatible_unqualified_types(dt, st)) {
3922 error:
3923 cast_error(st, dt);
3925 break;
3929 static void gen_assign_cast(CType *dt)
3931 verify_assign_cast(dt);
3932 gen_cast(dt);
3935 /* store vtop in lvalue pushed on stack */
3936 ST_FUNC void vstore(void)
3938 int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;
3940 ft = vtop[-1].type.t;
3941 sbt = vtop->type.t & VT_BTYPE;
3942 dbt = ft & VT_BTYPE;
3944 verify_assign_cast(&vtop[-1].type);
3946 if (sbt == VT_STRUCT) {
3947 /* if structure, only generate pointer */
3948 /* structure assignment : generate memcpy */
3949 /* XXX: optimize if small size */
3950 size = type_size(&vtop->type, &align);
3952 /* destination */
3953 vswap();
3954 #ifdef CONFIG_TCC_BCHECK
3955 if (vtop->r & VT_MUSTBOUND)
3956 gbound(); /* check would be wrong after gaddrof() */
3957 #endif
3958 vtop->type.t = VT_PTR;
3959 gaddrof();
3961 /* address of memcpy() */
3962 #ifdef TCC_ARM_EABI
3963 if(!(align & 7))
3964 vpush_helper_func(TOK_memmove8);
3965 else if(!(align & 3))
3966 vpush_helper_func(TOK_memmove4);
3967 else
3968 #endif
3969 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3970 vpush_helper_func(TOK_memmove);
3972 vswap();
3973 /* source */
3974 vpushv(vtop - 2);
3975 #ifdef CONFIG_TCC_BCHECK
3976 if (vtop->r & VT_MUSTBOUND)
3977 gbound();
3978 #endif
3979 vtop->type.t = VT_PTR;
3980 gaddrof();
3981 /* type size */
3982 vpushi(size);
3983 gfunc_call(3);
3984 /* leave source on stack */
3986 } else if (ft & VT_BITFIELD) {
3987 /* bitfield store handling */
3989 /* save lvalue as expression result (example: s.b = s.a = n;) */
3990 vdup(), vtop[-1] = vtop[-2];
3992 bit_pos = BIT_POS(ft);
3993 bit_size = BIT_SIZE(ft);
3994 /* remove bit field info to avoid loops */
3995 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3997 if (dbt == VT_BOOL) {
3998 gen_cast(&vtop[-1].type);
3999 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
4001 r = adjust_bf(vtop - 1, bit_pos, bit_size);
4002 if (dbt != VT_BOOL) {
4003 gen_cast(&vtop[-1].type);
4004 dbt = vtop[-1].type.t & VT_BTYPE;
4006 if (r == VT_STRUCT) {
4007 store_packed_bf(bit_pos, bit_size);
4008 } else {
4009 unsigned long long mask = (1ULL << bit_size) - 1;
4010 if (dbt != VT_BOOL) {
4011 /* mask source */
4012 if (dbt == VT_LLONG)
4013 vpushll(mask);
4014 else
4015 vpushi((unsigned)mask);
4016 gen_op('&');
4018 /* shift source */
4019 vpushi(bit_pos);
4020 gen_op(TOK_SHL);
4021 vswap();
4022 /* duplicate destination */
4023 vdup();
4024 vrott(3);
4025 /* load destination, mask and or with source */
4026 if (dbt == VT_LLONG)
4027 vpushll(~(mask << bit_pos));
4028 else
4029 vpushi(~((unsigned)mask << bit_pos));
4030 gen_op('&');
4031 gen_op('|');
4032 /* store result */
4033 vstore();
4034 /* ... and discard */
4035 vpop();
4037 } else if (dbt == VT_VOID) {
4038 --vtop;
4039 } else {
4040 /* optimize char/short casts */
4041 delayed_cast = 0;
4042 if ((dbt == VT_BYTE || dbt == VT_SHORT)
4043 && is_integer_btype(sbt)
4045 if ((vtop->r & VT_MUSTCAST)
4046 && btype_size(dbt) > btype_size(sbt)
4048 force_charshort_cast();
4049 delayed_cast = 1;
4050 } else {
4051 gen_cast(&vtop[-1].type);
4054 #ifdef CONFIG_TCC_BCHECK
4055 /* bound check case */
4056 if (vtop[-1].r & VT_MUSTBOUND) {
4057 vswap();
4058 gbound();
4059 vswap();
4061 #endif
4062 gv(RC_TYPE(dbt)); /* generate value */
4064 if (delayed_cast) {
4065 vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
4066 //tcc_warning("delay cast %x -> %x", sbt, dbt);
4067 vtop->type.t = ft & VT_TYPE;
4070 /* if lvalue was saved on stack, must read it */
4071 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
4072 SValue sv;
4073 r = get_reg(RC_INT);
4074 sv.type.t = VT_PTRDIFF_T;
4075 sv.r = VT_LOCAL | VT_LVAL;
4076 sv.c.i = vtop[-1].c.i;
4077 load(r, &sv);
4078 vtop[-1].r = r | VT_LVAL;
4081 r = vtop->r & VT_VALMASK;
4082 /* two-word case handling:
4083 store second register at word + 4 (or +8 for x86-64) */
4084 if (USING_TWO_WORDS(dbt)) {
4085 int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
4086 vtop[-1].type.t = load_type;
4087 store(r, vtop - 1);
4088 vswap();
4089 /* convert to int to increment easily */
4090 vtop->type.t = VT_PTRDIFF_T;
4091 gaddrof();
4092 vpushs(PTR_SIZE);
4093 gen_op('+');
4094 vtop->r |= VT_LVAL;
4095 vswap();
4096 vtop[-1].type.t = load_type;
4097 /* XXX: it works because r2 is spilled last ! */
4098 store(vtop->r2, vtop - 1);
4099 } else {
4100 /* single word */
4101 store(r, vtop - 1);
4103 vswap();
4104 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
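/* Illustrative examples (not from the tcc sources) of the three main
   paths in vstore():

       struct S { char buf[64]; } x, y;
       void copy(void)      { x = y; }       struct case: becomes a call
                                             to memmove(&x, &y,
                                             sizeof(struct S))
       struct B { int f : 3; } b;
       void setf(int v)     { b.f = v; }     bitfield case: mask v,
                                             shift it into place and
                                             read-modify-write the
                                             containing word
       void seti(int *p, int v) { *p = v; }  plain case: a single store
                                             (two stores for two-word
                                             types on 32-bit targets) */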
4108 /* post: non-zero for a post increment/decrement. c is the token ++ or -- */
4109 ST_FUNC void inc(int post, int c)
4111 test_lvalue();
4112 vdup(); /* save lvalue */
4113 if (post) {
4114 gv_dup(); /* duplicate value */
4115 vrotb(3);
4116 vrotb(3);
4118 /* add constant */
4119 vpushi(c - TOK_MID);
4120 gen_op('+');
4121 vstore(); /* store value */
4122 if (post)
4123 vpop(); /* if post op, return saved value */
4126 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
4128 /* read the string */
4129 if (tok != TOK_STR)
4130 expect(msg);
4131 cstr_new(astr);
4132 while (tok == TOK_STR) {
4133 /* XXX: add \0 handling too ? */
4134 cstr_cat(astr, tokc.str.data, -1);
4135 next();
4137 cstr_ccat(astr, '\0');
4140 /* If I is >= 1 and a power of two, returns log2(i)+1.
4141 If I is 0 returns 0. */
4142 ST_FUNC int exact_log2p1(int i)
4144 int ret;
4145 if (!i)
4146 return 0;
4147 for (ret = 1; i >= 1 << 8; ret += 8)
4148 i >>= 8;
4149 if (i >= 1 << 4)
4150 ret += 4, i >>= 4;
4151 if (i >= 1 << 2)
4152 ret += 2, i >>= 2;
4153 if (i >= 1 << 1)
4154 ret++;
4155 return ret;
4158 /* Parse __attribute__((...)) GNUC extension. */
4159 static void parse_attribute(AttributeDef *ad)
4161 int t, n;
4162 CString astr;
4164 redo:
4165 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
4166 return;
4167 next();
4168 skip('(');
4169 skip('(');
4170 while (tok != ')') {
4171 if (tok < TOK_IDENT)
4172 expect("attribute name");
4173 t = tok;
4174 next();
4175 switch(t) {
4176 case TOK_CLEANUP1:
4177 case TOK_CLEANUP2:
4179 Sym *s;
4181 skip('(');
4182 s = sym_find(tok);
4183 if (!s) {
4184 tcc_warning("implicit declaration of function '%s'",
4185 get_tok_str(tok, &tokc));
4186 s = external_global_sym(tok, &func_old_type);
4187 } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
4188 tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
4189 ad->cleanup_func = s;
4190 next();
4191 skip(')');
4192 break;
4194 case TOK_CONSTRUCTOR1:
4195 case TOK_CONSTRUCTOR2:
4196 ad->f.func_ctor = 1;
4197 break;
4198 case TOK_DESTRUCTOR1:
4199 case TOK_DESTRUCTOR2:
4200 ad->f.func_dtor = 1;
4201 break;
4202 case TOK_ALWAYS_INLINE1:
4203 case TOK_ALWAYS_INLINE2:
4204 ad->f.func_alwinl = 1;
4205 break;
4206 case TOK_SECTION1:
4207 case TOK_SECTION2:
4208 skip('(');
4209 parse_mult_str(&astr, "section name");
4210 ad->section = find_section(tcc_state, (char *)astr.data);
4211 skip(')');
4212 cstr_free(&astr);
4213 break;
4214 case TOK_ALIAS1:
4215 case TOK_ALIAS2:
4216 skip('(');
4217 parse_mult_str(&astr, "alias(\"target\")");
4218 ad->alias_target = /* save string as token, for later */
4219 tok_alloc((char*)astr.data, astr.size-1)->tok;
4220 skip(')');
4221 cstr_free(&astr);
4222 break;
4223 case TOK_VISIBILITY1:
4224 case TOK_VISIBILITY2:
4225 skip('(');
4226 parse_mult_str(&astr,
4227 "visibility(\"default|hidden|internal|protected\")");
4228 if (!strcmp (astr.data, "default"))
4229 ad->a.visibility = STV_DEFAULT;
4230 else if (!strcmp (astr.data, "hidden"))
4231 ad->a.visibility = STV_HIDDEN;
4232 else if (!strcmp (astr.data, "internal"))
4233 ad->a.visibility = STV_INTERNAL;
4234 else if (!strcmp (astr.data, "protected"))
4235 ad->a.visibility = STV_PROTECTED;
4236 else
4237 expect("visibility(\"default|hidden|internal|protected\")");
4238 skip(')');
4239 cstr_free(&astr);
4240 break;
4241 case TOK_ALIGNED1:
4242 case TOK_ALIGNED2:
4243 if (tok == '(') {
4244 next();
4245 n = expr_const();
4246 if (n <= 0 || (n & (n - 1)) != 0)
4247 tcc_error("alignment must be a positive power of two");
4248 skip(')');
4249 } else {
4250 n = MAX_ALIGN;
4252 ad->a.aligned = exact_log2p1(n);
4253 if (n != 1 << (ad->a.aligned - 1))
4254 tcc_error("alignment of %d is larger than implemented", n);
4255 break;
4256 case TOK_PACKED1:
4257 case TOK_PACKED2:
4258 ad->a.packed = 1;
4259 break;
4260 case TOK_WEAK1:
4261 case TOK_WEAK2:
4262 ad->a.weak = 1;
4263 break;
4264 case TOK_UNUSED1:
4265 case TOK_UNUSED2:
4266 /* currently, no need to handle it because tcc does not
4267 track unused objects */
4268 break;
4269 case TOK_NORETURN1:
4270 case TOK_NORETURN2:
4271 ad->f.func_noreturn = 1;
4272 break;
4273 case TOK_CDECL1:
4274 case TOK_CDECL2:
4275 case TOK_CDECL3:
4276 ad->f.func_call = FUNC_CDECL;
4277 break;
4278 case TOK_STDCALL1:
4279 case TOK_STDCALL2:
4280 case TOK_STDCALL3:
4281 ad->f.func_call = FUNC_STDCALL;
4282 break;
4283 #ifdef TCC_TARGET_I386
4284 case TOK_REGPARM1:
4285 case TOK_REGPARM2:
4286 skip('(');
4287 n = expr_const();
4288 if (n > 3)
4289 n = 3;
4290 else if (n < 0)
4291 n = 0;
4292 if (n > 0)
4293 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
4294 skip(')');
4295 break;
4296 case TOK_FASTCALL1:
4297 case TOK_FASTCALL2:
4298 case TOK_FASTCALL3:
4299 ad->f.func_call = FUNC_FASTCALLW;
4300 break;
4301 #endif
4302 case TOK_MODE:
4303 skip('(');
4304 switch(tok) {
4305 case TOK_MODE_DI:
4306 ad->attr_mode = VT_LLONG + 1;
4307 break;
4308 case TOK_MODE_QI:
4309 ad->attr_mode = VT_BYTE + 1;
4310 break;
4311 case TOK_MODE_HI:
4312 ad->attr_mode = VT_SHORT + 1;
4313 break;
4314 case TOK_MODE_SI:
4315 case TOK_MODE_word:
4316 ad->attr_mode = VT_INT + 1;
4317 break;
4318 default:
4319 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
4320 break;
4322 next();
4323 skip(')');
4324 break;
4325 case TOK_DLLEXPORT:
4326 ad->a.dllexport = 1;
4327 break;
4328 case TOK_NODECORATE:
4329 ad->a.nodecorate = 1;
4330 break;
4331 case TOK_DLLIMPORT:
4332 ad->a.dllimport = 1;
4333 break;
4334 default:
4335 if (tcc_state->warn_unsupported)
4336 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
4337 /* skip parameters */
4338 if (tok == '(') {
4339 int parenthesis = 0;
4340 do {
4341 if (tok == '(')
4342 parenthesis++;
4343 else if (tok == ')')
4344 parenthesis--;
4345 next();
4346 } while (parenthesis && tok != -1);
4348 break;
4350 if (tok != ',')
4351 break;
4352 next();
4354 skip(')');
4355 skip(')');
4356 goto redo;
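/* Illustrative uses of the attribute syntax handled above (the names
   close_fd, init and ".mydata" are placeholders):
     int fd __attribute__((cleanup(close_fd)));
     static void init(void) __attribute__((constructor));
     char buf[32] __attribute__((aligned(16), section(".mydata")));
   Unknown attribute names reach the default case: a warning may be
   emitted and any parenthesized arguments are skipped. */

/* find_field: look up member 'v' in a struct/union, descending into
   anonymous struct/union members; the byte offsets of the enclosing
   anonymous members are accumulated into *cumofs (the caller passes a
   zero-initialized cumofs). Returns NULL if the field is not found. */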
4359 static Sym * find_field (CType *type, int v, int *cumofs)
4361 Sym *s = type->ref;
4362 v |= SYM_FIELD;
4363 while ((s = s->next) != NULL) {
4364 if ((s->v & SYM_FIELD) &&
4365 (s->type.t & VT_BTYPE) == VT_STRUCT &&
4366 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
4367 Sym *ret = find_field (&s->type, v, cumofs);
4368 if (ret) {
4369 *cumofs += s->c;
4370 return ret;
4373 if (s->v == v)
4374 break;
4376 return s;
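/* check_fields: detect duplicate member names, including names brought in
   through anonymous struct/union members. With check != 0 the SYM_FIELD
   bit of each member's TokenSym is toggled as a "seen" marker and an error
   is raised on a second occurrence; a following call with check == 0
   toggles the markers back. */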
4379 static void check_fields (CType *type, int check)
4381 Sym *s = type->ref;
4383 while ((s = s->next) != NULL) {
4384 int v = s->v & ~SYM_FIELD;
4385 if (v < SYM_FIRST_ANOM) {
4386 TokenSym *ts = table_ident[v - TOK_IDENT];
4387 if (check && (ts->tok & SYM_FIELD))
4388 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
4389 ts->tok ^= SYM_FIELD;
4390 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
4391 check_fields (&s->type, check);
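/* struct_layout: compute the offset (f->c), size and alignment of every
   member and the total size/alignment of the struct/union. Two bit-field
   models are supported: the PCC/GCC-compatible one (default) and the MS
   one (tcc_state->ms_bitfields); '#pragma pack' and the packed/aligned
   attributes are taken into account. */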
4395 static void struct_layout(CType *type, AttributeDef *ad)
4397 int size, align, maxalign, offset, c, bit_pos, bit_size;
4398 int packed, a, bt, prevbt, prev_bit_size;
4399 int pcc = !tcc_state->ms_bitfields;
4400 int pragma_pack = *tcc_state->pack_stack_ptr;
4401 Sym *f;
4403 maxalign = 1;
4404 offset = 0;
4405 c = 0;
4406 bit_pos = 0;
4407 prevbt = VT_STRUCT; /* make it never match */
4408 prev_bit_size = 0;
4410 //#define BF_DEBUG
4412 for (f = type->ref->next; f; f = f->next) {
4413 if (f->type.t & VT_BITFIELD)
4414 bit_size = BIT_SIZE(f->type.t);
4415 else
4416 bit_size = -1;
4417 size = type_size(&f->type, &align);
4418 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
4419 packed = 0;
4421 if (pcc && bit_size == 0) {
4422 /* in pcc mode, packing does not affect zero-width bitfields */
4424 } else {
4425 /* in pcc mode, attribute packed overrides if set. */
4426 if (pcc && (f->a.packed || ad->a.packed))
4427 align = packed = 1;
4429 /* pragma pack overrides align if it is smaller, and always packs bitfields */
4430 if (pragma_pack) {
4431 packed = 1;
4432 if (pragma_pack < align)
4433 align = pragma_pack;
4434 /* in pcc mode pragma pack also overrides individual align */
4435 if (pcc && pragma_pack < a)
4436 a = 0;
4439 /* some individual align was specified */
4440 if (a)
4441 align = a;
4443 if (type->ref->type.t == VT_UNION) {
4444 if (pcc && bit_size >= 0)
4445 size = (bit_size + 7) >> 3;
4446 offset = 0;
4447 if (size > c)
4448 c = size;
4450 } else if (bit_size < 0) {
4451 if (pcc)
4452 c += (bit_pos + 7) >> 3;
4453 c = (c + align - 1) & -align;
4454 offset = c;
4455 if (size > 0)
4456 c += size;
4457 bit_pos = 0;
4458 prevbt = VT_STRUCT;
4459 prev_bit_size = 0;
4461 } else {
4462 /* A bit-field. Layout is more complicated. There are two
4463 options: PCC (GCC) compatible and MS compatible */
4464 if (pcc) {
4465 /* In PCC layout a bit-field is placed adjacent to the
4466 preceding bit-fields, except if:
4467 - it has zero-width
4468 - an individual alignment was given
4469 - it would overflow its base type container and
4470 there is no packing */
4471 if (bit_size == 0) {
4472 new_field:
4473 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
4474 bit_pos = 0;
4475 } else if (f->a.aligned) {
4476 goto new_field;
4477 } else if (!packed) {
4478 int a8 = align * 8;
4479 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
4480 if (ofs > size / align)
4481 goto new_field;
4484 /* in pcc mode, long long bitfields have type int if they fit */
4485 if (size == 8 && bit_size <= 32)
4486 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
4488 while (bit_pos >= align * 8)
4489 c += align, bit_pos -= align * 8;
4490 offset = c;
4492 /* In PCC layout named bit-fields influence the alignment
4493 of the containing struct using the base type's alignment,
4494 except for packed fields (which here have correct align). */
4495 if (f->v & SYM_FIRST_ANOM
4496 // && bit_size // ??? gcc on ARM/rpi does that
4498 align = 1;
4500 } else {
4501 bt = f->type.t & VT_BTYPE;
4502 if ((bit_pos + bit_size > size * 8)
4503 || (bit_size > 0) == (bt != prevbt)
4505 c = (c + align - 1) & -align;
4506 offset = c;
4507 bit_pos = 0;
4508 /* In MS bitfield mode a bit-field run always uses
4509 at least as many bits as the underlying type.
4510 To start a new run it's also required that this
4511 or the last bit-field had non-zero width. */
4512 if (bit_size || prev_bit_size)
4513 c += size;
4515 /* In MS layout the record's alignment is normally
4516 influenced by the field, except for a zero-width
4517 field at the start of a run (but by further zero-width
4518 fields it is again). */
4519 if (bit_size == 0 && prevbt != bt)
4520 align = 1;
4521 prevbt = bt;
4522 prev_bit_size = bit_size;
4525 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4526 | (bit_pos << VT_STRUCT_SHIFT);
4527 bit_pos += bit_size;
4529 if (align > maxalign)
4530 maxalign = align;
4532 #ifdef BF_DEBUG
4533 printf("set field %s offset %-2d size %-2d align %-2d",
4534 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
4535 if (f->type.t & VT_BITFIELD) {
4536 printf(" pos %-2d bits %-2d",
4537 BIT_POS(f->type.t),
4538 BIT_SIZE(f->type.t)
4541 printf("\n");
4542 #endif
4544 f->c = offset;
4545 f->r = 0;
4548 if (pcc)
4549 c += (bit_pos + 7) >> 3;
4551 /* store size and alignment */
4552 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4553 if (a < maxalign)
4554 a = maxalign;
4555 type->ref->r = a;
4556 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4557 /* can happen if an individual align was given for some member. In
4558 this case MSVC ignores maxalign when aligning the size */
4559 a = pragma_pack;
4560 if (a < bt)
4561 a = bt;
4563 c = (c + a - 1) & -a;
4564 type->ref->c = c;
4566 #ifdef BF_DEBUG
4567 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4568 #endif
4570 /* check whether we can access bitfields by their type */
4571 for (f = type->ref->next; f; f = f->next) {
4572 int s, px, cx, c0;
4573 CType t;
4575 if (0 == (f->type.t & VT_BITFIELD))
4576 continue;
4577 f->type.ref = f;
4578 f->auxtype = -1;
4579 bit_size = BIT_SIZE(f->type.t);
4580 if (bit_size == 0)
4581 continue;
4582 bit_pos = BIT_POS(f->type.t);
4583 size = type_size(&f->type, &align);
4584 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
4585 continue;
4587 /* try to access the field using a different type */
4588 c0 = -1, s = align = 1;
4589 t.t = VT_BYTE;
4590 for (;;) {
4591 px = f->c * 8 + bit_pos;
4592 cx = (px >> 3) & -align;
4593 px = px - (cx << 3);
4594 if (c0 == cx)
4595 break;
4596 s = (px + bit_size + 7) >> 3;
4597 if (s > 4) {
4598 t.t = VT_LLONG;
4599 } else if (s > 2) {
4600 t.t = VT_INT;
4601 } else if (s > 1) {
4602 t.t = VT_SHORT;
4603 } else {
4604 t.t = VT_BYTE;
4606 s = type_size(&t, &align);
4607 c0 = cx;
4610 if (px + bit_size <= s * 8 && cx + s <= c) {
4611 /* update offset and bit position */
4612 f->c = cx;
4613 bit_pos = px;
4614 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4615 | (bit_pos << VT_STRUCT_SHIFT);
4616 if (s != size)
4617 f->auxtype = t.t;
4618 #ifdef BF_DEBUG
4619 printf("FIX field %s offset %-2d size %-2d align %-2d "
4620 "pos %-2d bits %-2d\n",
4621 get_tok_str(f->v & ~SYM_FIELD, NULL),
4622 cx, s, align, px, bit_size);
4623 #endif
4624 } else {
4625 /* fall back to load/store single-byte wise */
4626 f->auxtype = VT_STRUCT;
4627 #ifdef BF_DEBUG
4628 printf("FIX field %s : load byte-wise\n",
4629 get_tok_str(f->v & ~SYM_FIELD, NULL));
4630 #endif
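/* Rough illustration of the difference between the two bit-field models
   handled above (a sketch assuming a 4-byte int, not a specification):
     struct s { char c; int x : 4; int y : 20; };
   In PCC/GCC mode the bit-fields share the int-sized container that also
   covers 'c', so sizeof(struct s) is typically 4; in MS mode a bit-field
   run of base type int reserves a full, int-aligned int of its own, so
   the struct typically grows to 8 bytes. */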
4635 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4636 static void struct_decl(CType *type, int u)
4638 int v, c, size, align, flexible;
4639 int bit_size, bsize, bt;
4640 Sym *s, *ss, **ps;
4641 AttributeDef ad, ad1;
4642 CType type1, btype;
4644 memset(&ad, 0, sizeof ad);
4645 next();
4646 parse_attribute(&ad);
4647 if (tok != '{') {
4648 v = tok;
4649 next();
4650 /* struct already defined ? return it */
4651 if (v < TOK_IDENT)
4652 expect("struct/union/enum name");
4653 s = struct_find(v);
4654 if (s && (s->sym_scope == local_scope || tok != '{')) {
4655 if (u == s->type.t)
4656 goto do_decl;
4657 if (u == VT_ENUM && IS_ENUM(s->type.t))
4658 goto do_decl;
4659 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4661 } else {
4662 v = anon_sym++;
4664 /* Record the original enum/struct/union token. */
4665 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4666 type1.ref = NULL;
4667 /* we put an undefined size for struct/union */
4668 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4669 s->r = 0; /* default alignment is zero as gcc */
4670 do_decl:
4671 type->t = s->type.t;
4672 type->ref = s;
4674 if (tok == '{') {
4675 next();
4676 if (s->c != -1)
4677 tcc_error("struct/union/enum already defined");
4678 s->c = -2;
4679 /* the body cannot be empty; */
4680 /* in particular, empty enums are not allowed */
4681 ps = &s->next;
4682 if (u == VT_ENUM) {
4683 long long ll = 0, pl = 0, nl = 0;
4684 CType t;
4685 t.ref = s;
4686 /* enum symbols have static storage */
4687 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4688 for(;;) {
4689 v = tok;
4690 if (v < TOK_UIDENT)
4691 expect("identifier");
4692 ss = sym_find(v);
4693 if (ss && !local_stack)
4694 tcc_error("redefinition of enumerator '%s'",
4695 get_tok_str(v, NULL));
4696 next();
4697 if (tok == '=') {
4698 next();
4699 ll = expr_const64();
4701 ss = sym_push(v, &t, VT_CONST, 0);
4702 ss->enum_val = ll;
4703 *ps = ss, ps = &ss->next;
4704 if (ll < nl)
4705 nl = ll;
4706 if (ll > pl)
4707 pl = ll;
4708 if (tok != ',')
4709 break;
4710 next();
4711 ll++;
4712 /* NOTE: we accept a trailing comma */
4713 if (tok == '}')
4714 break;
4716 skip('}');
4717 /* set integral type of the enum */
4718 t.t = VT_INT;
4719 if (nl >= 0) {
4720 if (pl != (unsigned)pl)
4721 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4722 t.t |= VT_UNSIGNED;
4723 } else if (pl != (int)pl || nl != (int)nl)
4724 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4725 s->type.t = type->t = t.t | VT_ENUM;
4726 s->c = 0;
4727 /* set type for enum members */
4728 for (ss = s->next; ss; ss = ss->next) {
4729 ll = ss->enum_val;
4730 if (ll == (int)ll) /* default is int if it fits */
4731 continue;
4732 if (t.t & VT_UNSIGNED) {
4733 ss->type.t |= VT_UNSIGNED;
4734 if (ll == (unsigned)ll)
4735 continue;
4737 ss->type.t = (ss->type.t & ~VT_BTYPE)
4738 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4740 } else {
4741 c = 0;
4742 flexible = 0;
4743 while (tok != '}') {
4744 if (!parse_btype(&btype, &ad1)) {
4745 skip(';');
4746 continue;
4748 while (1) {
4749 if (flexible)
4750 tcc_error("flexible array member '%s' not at the end of struct",
4751 get_tok_str(v, NULL));
4752 bit_size = -1;
4753 v = 0;
4754 type1 = btype;
4755 if (tok != ':') {
4756 if (tok != ';')
4757 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4758 if (v == 0) {
4759 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4760 expect("identifier");
4761 else {
4762 int v = btype.ref->v;
4763 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4764 if (tcc_state->ms_extensions == 0)
4765 expect("identifier");
4769 if (type_size(&type1, &align) < 0) {
4770 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4771 flexible = 1;
4772 else
4773 tcc_error("field '%s' has incomplete type",
4774 get_tok_str(v, NULL));
4776 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4777 (type1.t & VT_BTYPE) == VT_VOID ||
4778 (type1.t & VT_STORAGE))
4779 tcc_error("invalid type for '%s'",
4780 get_tok_str(v, NULL));
4782 if (tok == ':') {
4783 next();
4784 bit_size = expr_const();
4785 /* XXX: handle v = 0 case for messages */
4786 if (bit_size < 0)
4787 tcc_error("negative width in bit-field '%s'",
4788 get_tok_str(v, NULL));
4789 if (v && bit_size == 0)
4790 tcc_error("zero width for bit-field '%s'",
4791 get_tok_str(v, NULL));
4792 parse_attribute(&ad1);
4794 size = type_size(&type1, &align);
4795 if (bit_size >= 0) {
4796 bt = type1.t & VT_BTYPE;
4797 if (bt != VT_INT &&
4798 bt != VT_BYTE &&
4799 bt != VT_SHORT &&
4800 bt != VT_BOOL &&
4801 bt != VT_LLONG)
4802 tcc_error("bitfields must have scalar type");
4803 bsize = size * 8;
4804 if (bit_size > bsize) {
4805 tcc_error("width of '%s' exceeds its type",
4806 get_tok_str(v, NULL));
4807 } else if (bit_size == bsize
4808 && !ad.a.packed && !ad1.a.packed) {
4809 /* no need for bit fields */
4811 } else if (bit_size == 64) {
4812 tcc_error("field width 64 not implemented");
4813 } else {
4814 type1.t = (type1.t & ~VT_STRUCT_MASK)
4815 | VT_BITFIELD
4816 | (bit_size << (VT_STRUCT_SHIFT + 6));
4819 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4820 /* Remember we've seen a real field to check
4821 for placement of flexible array member. */
4822 c = 1;
4824 /* If member is a struct or bit-field, enforce
4825 placing into the struct (as anonymous). */
4826 if (v == 0 &&
4827 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4828 bit_size >= 0)) {
4829 v = anon_sym++;
4831 if (v) {
4832 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4833 ss->a = ad1.a;
4834 *ps = ss;
4835 ps = &ss->next;
4837 if (tok == ';' || tok == TOK_EOF)
4838 break;
4839 skip(',');
4841 skip(';');
4843 skip('}');
4844 parse_attribute(&ad);
4845 if (ad.cleanup_func) {
4846 tcc_warning("attribute '__cleanup__' ignored on type");
4848 check_fields(type, 1);
4849 check_fields(type, 0);
4850 struct_layout(type, &ad);
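/* sym_to_attr: merge the symbol and function attributes stored on 's'
   into 'ad' (used e.g. to pick up attributes attached to a typedef). */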
4855 static void sym_to_attr(AttributeDef *ad, Sym *s)
4857 merge_symattr(&ad->a, &s->a);
4858 merge_funcattr(&ad->f, &s->f);
4861 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4862 are added to the element type, copied because it could be a typedef. */
4863 static void parse_btype_qualify(CType *type, int qualifiers)
4865 while (type->t & VT_ARRAY) {
4866 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4867 type = &type->ref->type;
4869 type->t |= qualifiers;
4872 /* return 0 if no type declaration. otherwise, return the basic type
4873 and skip it.
4875 static int parse_btype(CType *type, AttributeDef *ad)
4877 int t, u, bt, st, type_found, typespec_found, g, n;
4878 Sym *s;
4879 CType type1;
4881 memset(ad, 0, sizeof(AttributeDef));
4882 type_found = 0;
4883 typespec_found = 0;
4884 t = VT_INT;
4885 bt = st = -1;
4886 type->ref = NULL;
4888 while(1) {
4889 switch(tok) {
4890 case TOK_EXTENSION:
4891 /* currently, we really ignore extension */
4892 next();
4893 continue;
4895 /* basic types */
4896 case TOK_CHAR:
4897 u = VT_BYTE;
4898 basic_type:
4899 next();
4900 basic_type1:
4901 if (u == VT_SHORT || u == VT_LONG) {
4902 if (st != -1 || (bt != -1 && bt != VT_INT))
4903 tmbt: tcc_error("too many basic types");
4904 st = u;
4905 } else {
4906 if (bt != -1 || (st != -1 && u != VT_INT))
4907 goto tmbt;
4908 bt = u;
4910 if (u != VT_INT)
4911 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4912 typespec_found = 1;
4913 break;
4914 case TOK_VOID:
4915 u = VT_VOID;
4916 goto basic_type;
4917 case TOK_SHORT:
4918 u = VT_SHORT;
4919 goto basic_type;
4920 case TOK_INT:
4921 u = VT_INT;
4922 goto basic_type;
4923 case TOK_ALIGNAS:
4924 { int n;
4925 AttributeDef ad1;
4926 next();
4927 skip('(');
4928 memset(&ad1, 0, sizeof(AttributeDef));
4929 if (parse_btype(&type1, &ad1)) {
4930 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4931 if (ad1.a.aligned)
4932 n = 1 << (ad1.a.aligned - 1);
4933 else
4934 type_size(&type1, &n);
4935 } else {
4936 n = expr_const();
4937 if (n <= 0 || (n & (n - 1)) != 0)
4938 tcc_error("alignment must be a positive power of two");
4940 skip(')');
4941 ad->a.aligned = exact_log2p1(n);
4943 continue;
4944 case TOK_LONG:
4945 if ((t & VT_BTYPE) == VT_DOUBLE) {
4946 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4947 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4948 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4949 } else {
4950 u = VT_LONG;
4951 goto basic_type;
4953 next();
4954 break;
4955 #ifdef TCC_TARGET_ARM64
4956 case TOK_UINT128:
4957 /* GCC's __uint128_t appears in some Linux header files. Make it a
4958 synonym for long double to get the size and alignment right. */
4959 u = VT_LDOUBLE;
4960 goto basic_type;
4961 #endif
4962 case TOK_BOOL:
4963 u = VT_BOOL;
4964 goto basic_type;
4965 case TOK_FLOAT:
4966 u = VT_FLOAT;
4967 goto basic_type;
4968 case TOK_DOUBLE:
4969 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4970 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4971 } else {
4972 u = VT_DOUBLE;
4973 goto basic_type;
4975 next();
4976 break;
4977 case TOK_ENUM:
4978 struct_decl(&type1, VT_ENUM);
4979 basic_type2:
4980 u = type1.t;
4981 type->ref = type1.ref;
4982 goto basic_type1;
4983 case TOK_STRUCT:
4984 struct_decl(&type1, VT_STRUCT);
4985 goto basic_type2;
4986 case TOK_UNION:
4987 struct_decl(&type1, VT_UNION);
4988 goto basic_type2;
4990 /* type modifiers */
4991 case TOK_CONST1:
4992 case TOK_CONST2:
4993 case TOK_CONST3:
4994 type->t = t;
4995 parse_btype_qualify(type, VT_CONSTANT);
4996 t = type->t;
4997 next();
4998 break;
4999 case TOK_VOLATILE1:
5000 case TOK_VOLATILE2:
5001 case TOK_VOLATILE3:
5002 type->t = t;
5003 parse_btype_qualify(type, VT_VOLATILE);
5004 t = type->t;
5005 next();
5006 break;
5007 case TOK_SIGNED1:
5008 case TOK_SIGNED2:
5009 case TOK_SIGNED3:
5010 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
5011 tcc_error("signed and unsigned modifier");
5012 t |= VT_DEFSIGN;
5013 next();
5014 typespec_found = 1;
5015 break;
5016 case TOK_REGISTER:
5017 case TOK_AUTO:
5018 case TOK_RESTRICT1:
5019 case TOK_RESTRICT2:
5020 case TOK_RESTRICT3:
5021 next();
5022 break;
5023 case TOK_UNSIGNED:
5024 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
5025 tcc_error("signed and unsigned modifier");
5026 t |= VT_DEFSIGN | VT_UNSIGNED;
5027 next();
5028 typespec_found = 1;
5029 break;
5031 /* storage */
5032 case TOK_EXTERN:
5033 g = VT_EXTERN;
5034 goto storage;
5035 case TOK_STATIC:
5036 g = VT_STATIC;
5037 goto storage;
5038 case TOK_TYPEDEF:
5039 g = VT_TYPEDEF;
5040 goto storage;
5041 storage:
5042 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
5043 tcc_error("multiple storage classes");
5044 t |= g;
5045 next();
5046 break;
5047 case TOK_INLINE1:
5048 case TOK_INLINE2:
5049 case TOK_INLINE3:
5050 t |= VT_INLINE;
5051 next();
5052 break;
5053 case TOK_NORETURN3:
5054 next();
5055 ad->f.func_noreturn = 1;
5056 break;
5057 /* GNUC attribute */
5058 case TOK_ATTRIBUTE1:
5059 case TOK_ATTRIBUTE2:
5060 parse_attribute(ad);
5061 if (ad->attr_mode) {
5062 u = ad->attr_mode -1;
5063 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
5065 continue;
5066 /* GNUC typeof */
5067 case TOK_TYPEOF1:
5068 case TOK_TYPEOF2:
5069 case TOK_TYPEOF3:
5070 next();
5071 parse_expr_type(&type1);
5072 /* remove all storage modifiers except typedef */
5073 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
5074 if (type1.ref)
5075 sym_to_attr(ad, type1.ref);
5076 goto basic_type2;
5077 default:
5078 if (typespec_found)
5079 goto the_end;
5080 s = sym_find(tok);
5081 if (!s || !(s->type.t & VT_TYPEDEF))
5082 goto the_end;
5084 n = tok, next();
5085 if (tok == ':' && !in_generic) {
5086 /* ignore if it's a label */
5087 unget_tok(n);
5088 goto the_end;
5091 t &= ~(VT_BTYPE|VT_LONG);
5092 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
5093 type->t = (s->type.t & ~VT_TYPEDEF) | u;
5094 type->ref = s->type.ref;
5095 if (t)
5096 parse_btype_qualify(type, t);
5097 t = type->t;
5098 /* get attributes from typedef */
5099 sym_to_attr(ad, s);
5100 typespec_found = 1;
5101 st = bt = -2;
5102 break;
5104 type_found = 1;
5106 the_end:
5107 if (tcc_state->char_is_unsigned) {
5108 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
5109 t |= VT_UNSIGNED;
5111 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
5112 bt = t & (VT_BTYPE|VT_LONG);
5113 if (bt == VT_LONG)
5114 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
5115 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5116 if (bt == VT_LDOUBLE)
5117 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
5118 #endif
5119 type->t = t;
5120 return type_found;
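/* Illustrative declaration-specifier sequences accepted by parse_btype()
   (the declarator part is handled separately by type_decl()):
     unsigned long long x;   -> basic type VT_LLONG with VT_UNSIGNED set
     const volatile char c;  -> VT_BYTE qualified with VT_CONSTANT|VT_VOLATILE
     static inline int f();  -> VT_INT with VT_STATIC and VT_INLINE
   A single 'long' is first recorded via the VT_LONG modifier and resolved
   to VT_INT or VT_LLONG at 'the_end' depending on LONG_SIZE. */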
5123 /* convert a function parameter type (array to pointer and function to
5124 function pointer) */
5125 static inline void convert_parameter_type(CType *pt)
5127 /* remove const and volatile qualifiers (XXX: const could be used
5128 to indicate a const function parameter) */
5129 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
5130 /* array must be transformed to pointer according to ANSI C */
5131 pt->t &= ~VT_ARRAY;
5132 if ((pt->t & VT_BTYPE) == VT_FUNC) {
5133 mk_pointer(pt);
5137 ST_FUNC void parse_asm_str(CString *astr)
5139 skip('(');
5140 parse_mult_str(astr, "string constant");
5143 /* Parse an asm label and return the token */
5144 static int asm_label_instr(void)
5146 int v;
5147 CString astr;
5149 next();
5150 parse_asm_str(&astr);
5151 skip(')');
5152 #ifdef ASM_DEBUG
5153 printf("asm_alias: \"%s\"\n", (char *)astr.data);
5154 #endif
5155 v = tok_alloc(astr.data, astr.size - 1)->tok;
5156 cstr_free(&astr);
5157 return v;
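/* post_type: parse what follows a declarator: a function parameter list
   '(...)' or array dimensions '[...]', wrapping 'type' into a VT_FUNC,
   VT_ARRAY or VT_VLA type. Returns 0 only when an opening '(' turned out
   to introduce a nested declarator instead of a parameter list (decided
   via 'td'), 1 otherwise. */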
5160 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
5162 int n, l, t1, arg_size, align, unused_align;
5163 Sym **plast, *s, *first;
5164 AttributeDef ad1;
5165 CType pt;
5167 if (tok == '(') {
5168 /* function type, or recursive declarator (return if so) */
5169 next();
5170 if (td && !(td & TYPE_ABSTRACT))
5171 return 0;
5172 if (tok == ')')
5173 l = 0;
5174 else if (parse_btype(&pt, &ad1))
5175 l = FUNC_NEW;
5176 else if (td) {
5177 merge_attr (ad, &ad1);
5178 return 0;
5179 } else
5180 l = FUNC_OLD;
5181 first = NULL;
5182 plast = &first;
5183 arg_size = 0;
5184 if (l) {
5185 for(;;) {
5186 /* read param name and compute offset */
5187 if (l != FUNC_OLD) {
5188 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
5189 break;
5190 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
5191 if ((pt.t & VT_BTYPE) == VT_VOID)
5192 tcc_error("parameter declared as void");
5193 } else {
5194 n = tok;
5195 if (n < TOK_UIDENT)
5196 expect("identifier");
5197 pt.t = VT_VOID; /* invalid type */
5198 pt.ref = NULL;
5199 next();
5201 convert_parameter_type(&pt);
5202 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
5203 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
5204 *plast = s;
5205 plast = &s->next;
5206 if (tok == ')')
5207 break;
5208 skip(',');
5209 if (l == FUNC_NEW && tok == TOK_DOTS) {
5210 l = FUNC_ELLIPSIS;
5211 next();
5212 break;
5214 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
5215 tcc_error("invalid type");
5217 } else
5218 /* if no parameters, then old type prototype */
5219 l = FUNC_OLD;
5220 skip(')');
5221 /* NOTE: const is ignored in returned type as it has a special
5222 meaning in gcc / C++ */
5223 type->t &= ~VT_CONSTANT;
5224 /* some ancient pre-K&R C allows a function to return an array
5225 and the array brackets to be put after the arguments, such
5226 that "int c()[]" means something like "int[] c()" */
5227 if (tok == '[') {
5228 next();
5229 skip(']'); /* only handle simple "[]" */
5230 mk_pointer(type);
5232 /* we push an anonymous symbol which will contain the function prototype */
5233 ad->f.func_args = arg_size;
5234 ad->f.func_type = l;
5235 s = sym_push(SYM_FIELD, type, 0, 0);
5236 s->a = ad->a;
5237 s->f = ad->f;
5238 s->next = first;
5239 type->t = VT_FUNC;
5240 type->ref = s;
5241 } else if (tok == '[') {
5242 int saved_nocode_wanted = nocode_wanted;
5243 /* array definition */
5244 next();
5245 while (1) {
5246 /* XXX The optional type-quals and static should only be accepted
5247 in parameter decls. The '*' as well, and then even only
5248 in prototypes (not function defs). */
5249 switch (tok) {
5250 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
5251 case TOK_CONST1:
5252 case TOK_VOLATILE1:
5253 case TOK_STATIC:
5254 case '*':
5255 next();
5256 continue;
5257 default:
5258 break;
5260 break;
5262 n = -1;
5263 t1 = 0;
5264 if (tok != ']') {
5265 if (!local_stack || (storage & VT_STATIC))
5266 vpushi(expr_const());
5267 else {
5268 /* For VLAs (which can only occur with local_stack && !VT_STATIC)
5269 the length must always be evaluated, even under nocode_wanted,
5270 so that its size slot is initialized (e.g. under sizeof
5271 or typeof). */
5272 nocode_wanted = 0;
5273 gexpr();
5275 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5276 n = vtop->c.i;
5277 if (n < 0)
5278 tcc_error("invalid array size");
5279 } else {
5280 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5281 tcc_error("size of variable length array should be an integer");
5282 n = 0;
5283 t1 = VT_VLA;
5286 skip(']');
5287 /* parse next post type */
5288 post_type(type, ad, storage, 0);
5290 if ((type->t & VT_BTYPE) == VT_FUNC)
5291 tcc_error("declaration of an array of functions");
5292 if ((type->t & VT_BTYPE) == VT_VOID
5293 || type_size(type, &unused_align) < 0)
5294 tcc_error("declaration of an array of incomplete type elements");
5296 t1 |= type->t & VT_VLA;
5298 if (t1 & VT_VLA) {
5299 if (n < 0)
5300 tcc_error("need explicit inner array size in VLAs");
5301 loc -= type_size(&int_type, &align);
5302 loc &= -align;
5303 n = loc;
5305 vla_runtime_type_size(type, &align);
5306 gen_op('*');
5307 vset(&int_type, VT_LOCAL|VT_LVAL, n);
5308 vswap();
5309 vstore();
5311 if (n != -1)
5312 vpop();
5313 nocode_wanted = saved_nocode_wanted;
5315 /* we push an anonymous symbol which will contain the array
5316 element type */
5317 s = sym_push(SYM_FIELD, type, 0, n);
5318 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
5319 type->ref = s;
5321 return 1;
5324 /* Parse a type declarator (except basic type), and return the type
5325 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5326 expected. 'type' should contain the basic type. 'ad' is the
5327 attribute definition of the basic type. It can be modified by
5328 type_decl(). If this (possibly abstract) declarator is a pointer chain
5329 it returns the innermost pointed to type (equals *type, but is a different
5330 pointer), otherwise returns type itself, that's used for recursive calls. */
5331 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
5333 CType *post, *ret;
5334 int qualifiers, storage;
5336 /* recursive type, remove storage bits first, apply them later again */
5337 storage = type->t & VT_STORAGE;
5338 type->t &= ~VT_STORAGE;
5339 post = ret = type;
5341 while (tok == '*') {
5342 qualifiers = 0;
5343 redo:
5344 next();
5345 switch(tok) {
5346 case TOK_CONST1:
5347 case TOK_CONST2:
5348 case TOK_CONST3:
5349 qualifiers |= VT_CONSTANT;
5350 goto redo;
5351 case TOK_VOLATILE1:
5352 case TOK_VOLATILE2:
5353 case TOK_VOLATILE3:
5354 qualifiers |= VT_VOLATILE;
5355 goto redo;
5356 case TOK_RESTRICT1:
5357 case TOK_RESTRICT2:
5358 case TOK_RESTRICT3:
5359 goto redo;
5360 /* XXX: clarify attribute handling */
5361 case TOK_ATTRIBUTE1:
5362 case TOK_ATTRIBUTE2:
5363 parse_attribute(ad);
5364 break;
5366 mk_pointer(type);
5367 type->t |= qualifiers;
5368 if (ret == type)
5369 /* innermost pointed to type is the one for the first derivation */
5370 ret = pointed_type(type);
5373 if (tok == '(') {
5374 /* This is possibly a parameter type list for abstract declarators
5375 ('int ()'), use post_type for testing this. */
5376 if (!post_type(type, ad, 0, td)) {
5377 /* It's not, so it's a nested declarator, and the post operations
5378 apply to the innermost pointed to type (if any). */
5379 /* XXX: this is not correct to modify 'ad' at this point, but
5380 the syntax is not clear */
5381 parse_attribute(ad);
5382 post = type_decl(type, ad, v, td);
5383 skip(')');
5384 } else
5385 goto abstract;
5386 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5387 /* type identifier */
5388 *v = tok;
5389 next();
5390 } else {
5391 abstract:
5392 if (!(td & TYPE_ABSTRACT))
5393 expect("identifier");
5394 *v = 0;
5396 post_type(post, ad, storage, 0);
5397 parse_attribute(ad);
5398 type->t |= storage;
5399 return ret;
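/* Illustrative declarators and how type_decl()/post_type() handle them:
     int *p;           the leading '*' loop builds the pointer type
     int (*fp)(int);   the '(' is not a parameter list, so post_type()
                       returns 0 and type_decl() recurses on '*fp' before
                       applying the '(int)' part to the pointed-to type
     char buf[16][4];  post_type() recurses, so the rightmost dimension is
                       applied to the element type first */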
5402 /* indirection with full error checking and bound check */
5403 ST_FUNC void indir(void)
5405 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5406 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5407 return;
5408 expect("pointer");
5410 if (vtop->r & VT_LVAL)
5411 gv(RC_INT);
5412 vtop->type = *pointed_type(&vtop->type);
5413 /* Arrays and functions are never lvalues */
5414 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5415 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5416 vtop->r |= VT_LVAL;
5417 /* if bound checking, the referenced pointer must be checked */
5418 #ifdef CONFIG_TCC_BCHECK
5419 if (tcc_state->do_bounds_check)
5420 vtop->r |= VT_MUSTBOUND;
5421 #endif
5425 /* pass a parameter to a function and do type checking and casting */
5426 static void gfunc_param_typed(Sym *func, Sym *arg)
5428 int func_type;
5429 CType type;
5431 func_type = func->f.func_type;
5432 if (func_type == FUNC_OLD ||
5433 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5434 /* default casting : only need to convert float to double */
5435 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5436 gen_cast_s(VT_DOUBLE);
5437 } else if (vtop->type.t & VT_BITFIELD) {
5438 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5439 type.ref = vtop->type.ref;
5440 gen_cast(&type);
5441 } else if (vtop->r & VT_MUSTCAST) {
5442 force_charshort_cast();
5444 } else if (arg == NULL) {
5445 tcc_error("too many arguments to function");
5446 } else {
5447 type = arg->type;
5448 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5449 gen_assign_cast(&type);
5453 /* parse an expression and return its type without any side effect. */
5454 static void expr_type(CType *type, void (*expr_fn)(void))
5456 nocode_wanted++;
5457 expr_fn();
5458 *type = vtop->type;
5459 vpop();
5460 nocode_wanted--;
5463 /* parse an expression of the form '(type)' or '(expr)' and return its
5464 type */
5465 static void parse_expr_type(CType *type)
5467 int n;
5468 AttributeDef ad;
5470 skip('(');
5471 if (parse_btype(type, &ad)) {
5472 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5473 } else {
5474 expr_type(type, gexpr);
5476 skip(')');
5479 static void parse_type(CType *type)
5481 AttributeDef ad;
5482 int n;
5484 if (!parse_btype(type, &ad)) {
5485 expect("type");
5487 type_decl(type, &ad, &n, TYPE_ABSTRACT);
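/* parse_builtin_params: parse the argument list of a builtin according to
   the format string 'args': 'e' = plain expression, 't' = type name,
   'v'/'V' = cast to (const) void*, 's'/'S' = cast to (const) char*,
   'i' = cast to int, 'l' = cast to size_t. If 'nc' is set the arguments
   are parsed with code generation suppressed. */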
5490 static void parse_builtin_params(int nc, const char *args)
5492 char c, sep = '(';
5493 CType type;
5494 if (nc)
5495 nocode_wanted++;
5496 next();
5497 if (*args == 0)
5498 skip(sep);
5499 while ((c = *args++)) {
5500 skip(sep);
5501 sep = ',';
5502 if (c == 't') {
5503 parse_type(&type);
5504 vpush(&type);
5505 continue;
5507 expr_eq();
5508 type.ref = NULL;
5509 type.t = 0;
5510 switch (c) {
5511 case 'e':
5512 continue;
5513 case 'V':
5514 type.t = VT_CONSTANT;
5515 case 'v':
5516 type.t |= VT_VOID;
5517 mk_pointer (&type);
5518 break;
5519 case 'S':
5520 type.t = VT_CONSTANT;
5521 case 's':
5522 type.t |= char_type.t;
5523 mk_pointer (&type);
5524 break;
5525 case 'i':
5526 type.t = VT_INT;
5527 break;
5528 case 'l':
5529 type.t = VT_SIZE_T;
5530 break;
5531 default:
5532 break;
5534 gen_assign_cast(&type);
5536 skip(')');
5537 if (nc)
5538 nocode_wanted--;
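/* unary: parse a primary/unary expression (constants, strings, casts and
   compound literals, statement expressions, sizeof/_Alignof, the GNU
   builtins, _Generic, identifiers) followed by its postfix operators
   (++/--, . and ->, [] indexing, function calls), leaving the result on
   the value stack (vtop). */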
5541 ST_FUNC void unary(void)
5543 int n, t, align, size, r, sizeof_caller;
5544 CType type;
5545 Sym *s;
5546 AttributeDef ad;
5548 /* generate line number info */
5549 if (tcc_state->do_debug)
5550 tcc_debug_line(tcc_state);
5552 sizeof_caller = in_sizeof;
5553 in_sizeof = 0;
5554 type.ref = NULL;
5555 /* XXX: GCC 2.95.3 does not generate a jump table for this switch,
5556 although it would be better here */
5557 tok_next:
5558 switch(tok) {
5559 case TOK_EXTENSION:
5560 next();
5561 goto tok_next;
5562 case TOK_LCHAR:
5563 #ifdef TCC_TARGET_PE
5564 t = VT_SHORT|VT_UNSIGNED;
5565 goto push_tokc;
5566 #endif
5567 case TOK_CINT:
5568 case TOK_CCHAR:
5569 t = VT_INT;
5570 push_tokc:
5571 type.t = t;
5572 vsetc(&type, VT_CONST, &tokc);
5573 next();
5574 break;
5575 case TOK_CUINT:
5576 t = VT_INT | VT_UNSIGNED;
5577 goto push_tokc;
5578 case TOK_CLLONG:
5579 t = VT_LLONG;
5580 goto push_tokc;
5581 case TOK_CULLONG:
5582 t = VT_LLONG | VT_UNSIGNED;
5583 goto push_tokc;
5584 case TOK_CFLOAT:
5585 t = VT_FLOAT;
5586 goto push_tokc;
5587 case TOK_CDOUBLE:
5588 t = VT_DOUBLE;
5589 goto push_tokc;
5590 case TOK_CLDOUBLE:
5591 t = VT_LDOUBLE;
5592 goto push_tokc;
5593 case TOK_CLONG:
5594 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5595 goto push_tokc;
5596 case TOK_CULONG:
5597 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5598 goto push_tokc;
5599 case TOK___FUNCTION__:
5600 if (!gnu_ext)
5601 goto tok_identifier;
5602 /* fall thru */
5603 case TOK___FUNC__:
5605 void *ptr;
5606 int len;
5607 /* special function name identifier */
5608 len = strlen(funcname) + 1;
5609 /* generate char[len] type */
5610 type.t = VT_BYTE;
5611 mk_pointer(&type);
5612 type.t |= VT_ARRAY;
5613 type.ref->c = len;
5614 vpush_ref(&type, data_section, data_section->data_offset, len);
5615 if (!NODATA_WANTED) {
5616 ptr = section_ptr_add(data_section, len);
5617 memcpy(ptr, funcname, len);
5619 next();
5621 break;
5622 case TOK_LSTR:
5623 #ifdef TCC_TARGET_PE
5624 t = VT_SHORT | VT_UNSIGNED;
5625 #else
5626 t = VT_INT;
5627 #endif
5628 goto str_init;
5629 case TOK_STR:
5630 /* string parsing */
5631 t = VT_BYTE;
5632 if (tcc_state->char_is_unsigned)
5633 t = VT_BYTE | VT_UNSIGNED;
5634 str_init:
5635 if (tcc_state->warn_write_strings)
5636 t |= VT_CONSTANT;
5637 type.t = t;
5638 mk_pointer(&type);
5639 type.t |= VT_ARRAY;
5640 memset(&ad, 0, sizeof(AttributeDef));
5641 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5642 break;
5643 case '(':
5644 next();
5645 /* cast ? */
5646 if (parse_btype(&type, &ad)) {
5647 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5648 skip(')');
5649 /* check ISOC99 compound literal */
5650 if (tok == '{') {
5651 /* data is allocated locally by default */
5652 if (global_expr)
5653 r = VT_CONST;
5654 else
5655 r = VT_LOCAL;
5656 /* all except arrays are lvalues */
5657 if (!(type.t & VT_ARRAY))
5658 r |= VT_LVAL;
5659 memset(&ad, 0, sizeof(AttributeDef));
5660 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5661 } else {
5662 if (sizeof_caller) {
5663 vpush(&type);
5664 return;
5666 unary();
5667 gen_cast(&type);
5669 } else if (tok == '{') {
5670 int saved_nocode_wanted = nocode_wanted;
5671 if (const_wanted && !(nocode_wanted & unevalmask))
5672 expect("constant");
5673 if (0 == local_scope)
5674 tcc_error("statement expression outside of function");
5675 /* save all registers */
5676 save_regs(0);
5677 /* statement expression : we do not accept break/continue
5678 inside as GCC does. We do retain the nocode_wanted state,
5679 as statement expressions can't ever be entered from the
5680 outside, so any reactivation of code emission (from labels
5681 or loop heads) can be disabled again after the end of it. */
5682 block(1);
5683 nocode_wanted = saved_nocode_wanted;
5684 skip(')');
5685 } else {
5686 gexpr();
5687 skip(')');
5689 break;
5690 case '*':
5691 next();
5692 unary();
5693 indir();
5694 break;
5695 case '&':
5696 next();
5697 unary();
5698 /* function names must be treated as function pointers,
5699 except for unary '&' and sizeof. Since we consider that
5700 functions are not lvalues, we only have to handle it
5701 there and in function calls. */
5702 /* arrays can also be used although they are not lvalues */
5703 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5704 !(vtop->type.t & VT_ARRAY))
5705 test_lvalue();
5706 if (vtop->sym)
5707 vtop->sym->a.addrtaken = 1;
5708 mk_pointer(&vtop->type);
5709 gaddrof();
5710 break;
5711 case '!':
5712 next();
5713 unary();
5714 gen_test_zero(TOK_EQ);
5715 break;
5716 case '~':
5717 next();
5718 unary();
5719 vpushi(-1);
5720 gen_op('^');
5721 break;
5722 case '+':
5723 next();
5724 unary();
5725 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5726 tcc_error("pointer not accepted for unary plus");
5727 /* In order to force cast, we add zero, except for floating point
5728 where we really need a no-op (otherwise -0.0 will be transformed
5729 into +0.0). */
5730 if (!is_float(vtop->type.t)) {
5731 vpushi(0);
5732 gen_op('+');
5734 break;
5735 case TOK_SIZEOF:
5736 case TOK_ALIGNOF1:
5737 case TOK_ALIGNOF2:
5738 case TOK_ALIGNOF3:
5739 t = tok;
5740 next();
5741 in_sizeof++;
5742 expr_type(&type, unary); /* unary() resets in_sizeof to 0 */
5743 s = NULL;
5744 if (vtop[1].r & VT_SYM)
5745 s = vtop[1].sym; /* hack: accessing previous vtop */
5746 size = type_size(&type, &align);
5747 if (s && s->a.aligned)
5748 align = 1 << (s->a.aligned - 1);
5749 if (t == TOK_SIZEOF) {
5750 if (!(type.t & VT_VLA)) {
5751 if (size < 0)
5752 tcc_error("sizeof applied to an incomplete type");
5753 vpushs(size);
5754 } else {
5755 vla_runtime_type_size(&type, &align);
5757 } else {
5758 vpushs(align);
5760 vtop->type.t |= VT_UNSIGNED;
5761 break;
5763 case TOK_builtin_expect:
5764 /* __builtin_expect is a no-op for now */
5765 parse_builtin_params(0, "ee");
5766 vpop();
5767 break;
5768 case TOK_builtin_types_compatible_p:
5769 parse_builtin_params(0, "tt");
5770 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5771 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5772 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5773 vtop -= 2;
5774 vpushi(n);
5775 break;
5776 case TOK_builtin_choose_expr:
5778 int64_t c;
5779 next();
5780 skip('(');
5781 c = expr_const64();
5782 skip(',');
5783 if (!c) {
5784 nocode_wanted++;
5786 expr_eq();
5787 if (!c) {
5788 vpop();
5789 nocode_wanted--;
5791 skip(',');
5792 if (c) {
5793 nocode_wanted++;
5795 expr_eq();
5796 if (c) {
5797 vpop();
5798 nocode_wanted--;
5800 skip(')');
5802 break;
5803 case TOK_builtin_constant_p:
5804 parse_builtin_params(1, "e");
5805 n = (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
5806 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
5807 vtop--;
5808 vpushi(n);
5809 break;
5810 case TOK_builtin_frame_address:
5811 case TOK_builtin_return_address:
5813 int tok1 = tok;
5814 int level;
5815 next();
5816 skip('(');
5817 if (tok != TOK_CINT) {
5818 tcc_error("%s only takes positive integers",
5819 tok1 == TOK_builtin_return_address ?
5820 "__builtin_return_address" :
5821 "__builtin_frame_address");
5823 level = (uint32_t)tokc.i;
5824 next();
5825 skip(')');
5826 type.t = VT_VOID;
5827 mk_pointer(&type);
5828 vset(&type, VT_LOCAL, 0); /* local frame */
5829 while (level--) {
5830 #ifdef TCC_TARGET_RISCV64
5831 vpushi(2*PTR_SIZE);
5832 gen_op('-');
5833 #endif
5834 mk_pointer(&vtop->type);
5835 indir(); /* -> parent frame */
5837 if (tok1 == TOK_builtin_return_address) {
5838 // assume return address is just above frame pointer on stack
5839 #ifdef TCC_TARGET_ARM
5840 vpushi(2*PTR_SIZE);
5841 gen_op('+');
5842 #elif defined TCC_TARGET_RISCV64
5843 vpushi(PTR_SIZE);
5844 gen_op('-');
5845 #else
5846 vpushi(PTR_SIZE);
5847 gen_op('+');
5848 #endif
5849 mk_pointer(&vtop->type);
5850 indir();
5853 break;
5854 #ifdef TCC_TARGET_RISCV64
5855 case TOK_builtin_va_start:
5856 parse_builtin_params(0, "ee");
5857 r = vtop->r & VT_VALMASK;
5858 if (r == VT_LLOCAL)
5859 r = VT_LOCAL;
5860 if (r != VT_LOCAL)
5861 tcc_error("__builtin_va_start expects a local variable");
5862 gen_va_start();
5863 vstore();
5864 break;
5865 #endif
5866 #ifdef TCC_TARGET_X86_64
5867 #ifdef TCC_TARGET_PE
5868 case TOK_builtin_va_start:
5869 parse_builtin_params(0, "ee");
5870 r = vtop->r & VT_VALMASK;
5871 if (r == VT_LLOCAL)
5872 r = VT_LOCAL;
5873 if (r != VT_LOCAL)
5874 tcc_error("__builtin_va_start expects a local variable");
5875 vtop->r = r;
5876 vtop->type = char_pointer_type;
5877 vtop->c.i += 8;
5878 vstore();
5879 break;
5880 #else
5881 case TOK_builtin_va_arg_types:
5882 parse_builtin_params(0, "t");
5883 vpushi(classify_x86_64_va_arg(&vtop->type));
5884 vswap();
5885 vpop();
5886 break;
5887 #endif
5888 #endif
5890 #ifdef TCC_TARGET_ARM64
5891 case TOK_builtin_va_start: {
5892 parse_builtin_params(0, "ee");
5893 //xx check types
5894 gen_va_start();
5895 vpushi(0);
5896 vtop->type.t = VT_VOID;
5897 break;
5899 case TOK_builtin_va_arg: {
5900 parse_builtin_params(0, "et");
5901 type = vtop->type;
5902 vpop();
5903 //xx check types
5904 gen_va_arg(&type);
5905 vtop->type = type;
5906 break;
5908 case TOK___arm64_clear_cache: {
5909 parse_builtin_params(0, "ee");
5910 gen_clear_cache();
5911 vpushi(0);
5912 vtop->type.t = VT_VOID;
5913 break;
5915 #endif
5917 /* pre operations */
5918 case TOK_INC:
5919 case TOK_DEC:
5920 t = tok;
5921 next();
5922 unary();
5923 inc(0, t);
5924 break;
5925 case '-':
5926 next();
5927 unary();
5928 if (is_float(vtop->type.t)) {
5929 gen_opif(TOK_NEG);
5930 } else {
5931 vpushi(0);
5932 vswap();
5933 gen_op('-');
5935 break;
5936 case TOK_LAND:
5937 if (!gnu_ext)
5938 goto tok_identifier;
5939 next();
5940 /* allow taking the address of a label */
5941 if (tok < TOK_UIDENT)
5942 expect("label identifier");
5943 s = label_find(tok);
5944 if (!s) {
5945 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5946 } else {
5947 if (s->r == LABEL_DECLARED)
5948 s->r = LABEL_FORWARD;
5950 if (!s->type.t) {
5951 s->type.t = VT_VOID;
5952 mk_pointer(&s->type);
5953 s->type.t |= VT_STATIC;
5955 vpushsym(&s->type, s);
5956 next();
5957 break;
5959 case TOK_GENERIC:
5961 CType controlling_type;
5962 int has_default = 0;
5963 int has_match = 0;
5964 int learn = 0;
5965 TokenString *str = NULL;
5966 int saved_const_wanted = const_wanted;
5968 next();
5969 skip('(');
5970 const_wanted = 0;
5971 expr_type(&controlling_type, expr_eq);
5972 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5973 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5974 mk_pointer(&controlling_type);
5975 const_wanted = saved_const_wanted;
5976 for (;;) {
5977 learn = 0;
5978 skip(',');
5979 if (tok == TOK_DEFAULT) {
5980 if (has_default)
5981 tcc_error("too many 'default'");
5982 has_default = 1;
5983 if (!has_match)
5984 learn = 1;
5985 next();
5986 } else {
5987 AttributeDef ad_tmp;
5988 int itmp;
5989 CType cur_type;
5991 in_generic++;
5992 parse_btype(&cur_type, &ad_tmp);
5993 in_generic--;
5995 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5996 if (compare_types(&controlling_type, &cur_type, 0)) {
5997 if (has_match) {
5998 tcc_error("type match twice");
6000 has_match = 1;
6001 learn = 1;
6004 skip(':');
6005 if (learn) {
6006 if (str)
6007 tok_str_free(str);
6008 skip_or_save_block(&str);
6009 } else {
6010 skip_or_save_block(NULL);
6012 if (tok == ')')
6013 break;
6015 if (!str) {
6016 char buf[60];
6017 type_to_str(buf, sizeof buf, &controlling_type, NULL);
6018 tcc_error("type '%s' does not match any association", buf);
6020 begin_macro(str, 1);
6021 next();
6022 expr_eq();
6023 if (tok != TOK_EOF)
6024 expect(",");
6025 end_macro();
6026 next();
6027 break;
6029 // special qnan, snan and infinity values
6030 case TOK___NAN__:
6031 n = 0x7fc00000;
6032 special_math_val:
6033 vpushi(n);
6034 vtop->type.t = VT_FLOAT;
6035 next();
6036 break;
6037 case TOK___SNAN__:
6038 n = 0x7f800001;
6039 goto special_math_val;
6040 case TOK___INF__:
6041 n = 0x7f800000;
6042 goto special_math_val;
6044 default:
6045 tok_identifier:
6046 t = tok;
6047 next();
6048 if (t < TOK_UIDENT)
6049 expect("identifier");
6050 s = sym_find(t);
6051 if (!s || IS_ASM_SYM(s)) {
6052 const char *name = get_tok_str(t, NULL);
6053 if (tok != '(')
6054 tcc_error("'%s' undeclared", name);
6055 /* for simple function calls, we tolerate an undeclared
6056 external reference to an int() function */
6057 if (tcc_state->warn_implicit_function_declaration
6058 #ifdef TCC_TARGET_PE
6059 /* people must be warned about using undeclared WINAPI functions
6060 (which usually start with uppercase letter) */
6061 || (name[0] >= 'A' && name[0] <= 'Z')
6062 #endif
6064 tcc_warning("implicit declaration of function '%s'", name);
6065 s = external_global_sym(t, &func_old_type);
6068 r = s->r;
6069 /* A symbol that has a register is a local register variable,
6070 which starts out as a VT_LOCAL value. */
6071 if ((r & VT_VALMASK) < VT_CONST)
6072 r = (r & ~VT_VALMASK) | VT_LOCAL;
6074 vset(&s->type, r, s->c);
6075 /* Point to s as backpointer (even without r&VT_SYM).
6076 Will be used by at least the x86 inline asm parser for
6077 regvars. */
6078 vtop->sym = s;
6080 if (r & VT_SYM) {
6081 vtop->c.i = 0;
6082 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
6083 vtop->c.i = s->enum_val;
6085 break;
6088 /* post operations */
6089 while (1) {
6090 if (tok == TOK_INC || tok == TOK_DEC) {
6091 inc(1, tok);
6092 next();
6093 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
6094 int qualifiers, cumofs = 0;
6095 /* field */
6096 if (tok == TOK_ARROW)
6097 indir();
6098 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
6099 test_lvalue();
6100 gaddrof();
6101 /* expect pointer on structure */
6102 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
6103 expect("struct or union");
6104 if (tok == TOK_CDOUBLE)
6105 expect("field name");
6106 next();
6107 if (tok == TOK_CINT || tok == TOK_CUINT)
6108 expect("field name");
6109 s = find_field(&vtop->type, tok, &cumofs);
6110 if (!s)
6111 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
6112 /* add field offset to pointer */
6113 vtop->type = char_pointer_type; /* change type to 'char *' */
6114 vpushi(cumofs + s->c);
6115 gen_op('+');
6116 /* change type to field type, and set to lvalue */
6117 vtop->type = s->type;
6118 vtop->type.t |= qualifiers;
6119 /* an array is never an lvalue */
6120 if (!(vtop->type.t & VT_ARRAY)) {
6121 vtop->r |= VT_LVAL;
6122 #ifdef CONFIG_TCC_BCHECK
6123 /* if bound checking, the referenced pointer must be checked */
6124 if (tcc_state->do_bounds_check)
6125 vtop->r |= VT_MUSTBOUND;
6126 #endif
6128 next();
6129 } else if (tok == '[') {
6130 next();
6131 gexpr();
6132 gen_op('+');
6133 indir();
6134 skip(']');
6135 } else if (tok == '(') {
6136 SValue ret;
6137 Sym *sa;
6138 int nb_args, ret_nregs, ret_align, regsize, variadic;
6140 /* function call */
6141 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
6142 /* pointer test (no array accepted) */
6143 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
6144 vtop->type = *pointed_type(&vtop->type);
6145 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
6146 goto error_func;
6147 } else {
6148 error_func:
6149 expect("function pointer");
6151 } else {
6152 vtop->r &= ~VT_LVAL; /* no lvalue */
6154 /* get return type */
6155 s = vtop->type.ref;
6156 next();
6157 sa = s->next; /* first parameter */
6158 nb_args = regsize = 0;
6159 ret.r2 = VT_CONST;
6160 /* compute first implicit argument if a structure is returned */
6161 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
6162 variadic = (s->f.func_type == FUNC_ELLIPSIS);
6163 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
6164 &ret_align, &regsize);
6165 if (ret_nregs <= 0) {
6166 /* get some space for the returned structure */
6167 size = type_size(&s->type, &align);
6168 #ifdef TCC_TARGET_ARM64
6169 /* On arm64, a small struct is returned in registers.
6170 It is much easier to write it to memory if we know
6171 that we are allowed to write some extra bytes, so
6172 round the allocated space up to a power of 2: */
6173 if (size < 16)
6174 while (size & (size - 1))
6175 size = (size | (size - 1)) + 1;
6176 #endif
6177 loc = (loc - size) & -align;
6178 ret.type = s->type;
6179 ret.r = VT_LOCAL | VT_LVAL;
6180 /* pass it as 'int' to avoid structure arg passing
6181 problems */
6182 vseti(VT_LOCAL, loc);
6183 #ifdef CONFIG_TCC_BCHECK
6184 if (tcc_state->do_bounds_check)
6185 --loc;
6186 #endif
6187 ret.c = vtop->c;
6188 if (ret_nregs < 0)
6189 vtop--;
6190 else
6191 nb_args++;
6193 } else {
6194 ret_nregs = 1;
6195 ret.type = s->type;
6198 if (ret_nregs > 0) {
6199 /* return in register */
6200 ret.c.i = 0;
6201 PUT_R_RET(&ret, ret.type.t);
6203 if (tok != ')') {
6204 for(;;) {
6205 expr_eq();
6206 gfunc_param_typed(s, sa);
6207 nb_args++;
6208 if (sa)
6209 sa = sa->next;
6210 if (tok == ')')
6211 break;
6212 skip(',');
6215 if (sa)
6216 tcc_error("too few arguments to function");
6217 skip(')');
6218 gfunc_call(nb_args);
6220 if (ret_nregs < 0) {
6221 vsetc(&ret.type, ret.r, &ret.c);
6222 #ifdef TCC_TARGET_RISCV64
6223 arch_transfer_ret_regs(1);
6224 #endif
6225 } else {
6226 /* return value */
6227 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
6228 vsetc(&ret.type, r, &ret.c);
6229 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
6232 /* handle packed struct return */
6233 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6234 int addr, offset;
6236 size = type_size(&s->type, &align);
6237 /* We're writing whole regs often, make sure there's enough
6238 space. Assume register size is power of 2. */
6239 if (regsize > align)
6240 align = regsize;
6241 loc = (loc - size) & -align;
6242 addr = loc;
6243 offset = 0;
6244 for (;;) {
6245 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6246 vswap();
6247 vstore();
6248 vtop--;
6249 if (--ret_nregs == 0)
6250 break;
6251 offset += regsize;
6253 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
6256 /* Promote char/short return values. This matters only
6257 for calling functions that were not compiled by TCC and
6258 only on some architectures. For those where it doesn't
6259 matter we expect things to be already promoted to int,
6260 but not larger. */
6261 t = s->type.t & VT_BTYPE;
6262 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6263 #ifdef PROMOTE_RET
6264 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6265 #else
6266 vtop->type.t = VT_INT;
6267 #endif
6270 if (s->f.func_noreturn)
6271 CODE_OFF();
6272 } else {
6273 break;
6278 #ifndef precedence_parser /* original top-down parser */
6280 static void expr_prod(void)
6282 int t;
6284 unary();
6285 while ((t = tok) == '*' || t == '/' || t == '%') {
6286 next();
6287 unary();
6288 gen_op(t);
6292 static void expr_sum(void)
6294 int t;
6296 expr_prod();
6297 while ((t = tok) == '+' || t == '-') {
6298 next();
6299 expr_prod();
6300 gen_op(t);
6304 static void expr_shift(void)
6306 int t;
6308 expr_sum();
6309 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6310 next();
6311 expr_sum();
6312 gen_op(t);
6316 static void expr_cmp(void)
6318 int t;
6320 expr_shift();
6321 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6322 t == TOK_ULT || t == TOK_UGE) {
6323 next();
6324 expr_shift();
6325 gen_op(t);
6329 static void expr_cmpeq(void)
6331 int t;
6333 expr_cmp();
6334 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6335 next();
6336 expr_cmp();
6337 gen_op(t);
6341 static void expr_and(void)
6343 expr_cmpeq();
6344 while (tok == '&') {
6345 next();
6346 expr_cmpeq();
6347 gen_op('&');
6351 static void expr_xor(void)
6353 expr_and();
6354 while (tok == '^') {
6355 next();
6356 expr_and();
6357 gen_op('^');
6361 static void expr_or(void)
6363 expr_xor();
6364 while (tok == '|') {
6365 next();
6366 expr_xor();
6367 gen_op('|');
6371 static void expr_landor(int op);
6373 static void expr_land(void)
6375 expr_or();
6376 if (tok == TOK_LAND)
6377 expr_landor(tok);
6380 static void expr_lor(void)
6382 expr_land();
6383 if (tok == TOK_LOR)
6384 expr_landor(tok);
6387 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6388 #else /* defined precedence_parser */
6389 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6390 # define expr_lor() unary(), expr_infix(1)
6392 static int precedence(int tok)
6394 switch (tok) {
6395 case TOK_LOR: return 1;
6396 case TOK_LAND: return 2;
6397 case '|': return 3;
6398 case '^': return 4;
6399 case '&': return 5;
6400 case TOK_EQ: case TOK_NE: return 6;
6401 relat: case TOK_ULT: case TOK_UGE: return 7;
6402 case TOK_SHL: case TOK_SAR: return 8;
6403 case '+': case '-': return 9;
6404 case '*': case '/': case '%': return 10;
6405 default:
6406 if (tok >= TOK_ULE && tok <= TOK_GT)
6407 goto relat;
6408 return 0;
6411 static unsigned char prec[256];
6412 static void init_prec(void)
6414 int i;
6415 for (i = 0; i < 256; i++)
6416 prec[i] = precedence(i);
6418 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
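/* expr_infix() below implements precedence climbing over the 'prec' table:
   after unary() has pushed the left operand it keeps consuming operators
   whose precedence is at least 'p', recursing with p2 + 1 whenever the
   next operator binds tighter, so that e.g. in 1 + 2 * 3 the '*' is
   generated before the outer '+'. && and || are delegated to
   expr_landor() for short-circuit handling. */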
6420 static void expr_landor(int op);
6422 static void expr_infix(int p)
6424 int t = tok, p2;
6425 while ((p2 = precedence(t)) >= p) {
6426 if (t == TOK_LOR || t == TOK_LAND) {
6427 expr_landor(t);
6428 } else {
6429 next();
6430 unary();
6431 if (precedence(tok) > p2)
6432 expr_infix(p2 + 1);
6433 gen_op(t);
6435 t = tok;
6438 #endif
6440 /* Assuming vtop is a value used in a conditional context
6441 (i.e. compared with zero) return 0 if it's false, 1 if
6442 true and -1 if it can't be statically determined. */
6443 static int condition_3way(void)
6445 int c = -1;
6446 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6447 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6448 vdup();
6449 gen_cast_s(VT_BOOL);
6450 c = vtop->c.i;
6451 vpop();
6453 return c;
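/* expr_landor: parse a chain of && (op == TOK_LAND) or || (op == TOK_LOR)
   with short-circuit semantics. Once the overall result is statically
   known, the remaining operands are still parsed but with code generation
   disabled (nocode_wanted), and a constant 0/1 is pushed at the end. */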
6456 static void expr_landor(int op)
6458 int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
6459 for(;;) {
6460 c = f ? i : condition_3way();
6461 if (c < 0)
6462 save_regs(1), cc = 0;
6463 else if (c != i)
6464 nocode_wanted++, f = 1;
6465 if (tok != op)
6466 break;
6467 if (c < 0)
6468 t = gvtst(i, t);
6469 else
6470 vpop();
6471 next();
6472 expr_landor_next(op);
6474 if (cc || f) {
6475 vpop();
6476 vpushi(i ^ f);
6477 gsym(t);
6478 nocode_wanted -= f;
6479 } else {
6480 gvtst_set(i, t);
6484 static int is_cond_bool(SValue *sv)
6486 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6487 && (sv->type.t & VT_BTYPE) == VT_INT)
6488 return (unsigned)sv->c.i < 2;
6489 if (sv->r == VT_CMP)
6490 return 1;
6491 return 0;
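/* expr_cond: parse a conditional expression 'cond ? e1 : e2', including
   the GNU 'cond ?: e2' form when gnu_ext is set. When the condition is a
   compile-time constant only the selected branch generates code (the
   other one is parsed under nocode_wanted); otherwise both branches are
   converted to the common type computed by combine_types() and evaluated
   into a common register. */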
6494 static void expr_cond(void)
6496 int tt, u, r1, r2, rc, t1, t2, islv, c, g;
6497 SValue sv;
6498 CType type;
6499 int ncw_prev;
6501 expr_lor();
6502 if (tok == '?') {
6503 next();
6504 c = condition_3way();
6505 g = (tok == ':' && gnu_ext);
6506 tt = 0;
6507 if (!g) {
6508 if (c < 0) {
6509 save_regs(1);
6510 tt = gvtst(1, 0);
6511 } else {
6512 vpop();
6514 } else if (c < 0) {
6515 /* needed to avoid having different registers saved in
6516 each branch */
6517 save_regs(1);
6518 gv_dup();
6519 tt = gvtst(0, 0);
6522 ncw_prev = nocode_wanted;
6523 if (c == 0)
6524 nocode_wanted++;
6525 if (!g)
6526 gexpr();
6528 if (c < 0 && vtop->r == VT_CMP) {
6529 t1 = gvtst(0, 0);
6530 vpushi(0);
6531 gvtst_set(0, t1);
6532 gv(RC_INT);
6535 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6536 mk_pointer(&vtop->type);
6537 sv = *vtop; /* save value to handle it later */
6538 vtop--; /* no vpop so that FP stack is not flushed */
6540 if (g) {
6541 u = tt;
6542 } else if (c < 0) {
6543 u = gjmp(0);
6544 gsym(tt);
6545 } else
6546 u = 0;
6548 nocode_wanted = ncw_prev;
6549 if (c == 1)
6550 nocode_wanted++;
6551 skip(':');
6552 expr_cond();
6554 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
6555 if (sv.r == VT_CMP) {
6556 t1 = sv.jtrue;
6557 t2 = u;
6558 } else {
6559 t1 = gvtst(0, 0);
6560 t2 = gjmp(0);
6561 gsym(u);
6562 vpushv(&sv);
6564 gvtst_set(0, t1);
6565 gvtst_set(1, t2);
6566 nocode_wanted = ncw_prev;
6567 // tcc_warning("two conditions expr_cond");
6568 return;
6571 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6572 mk_pointer(&vtop->type);
6574 /* cast operands to correct type according to ISOC rules */
6575 if (!combine_types(&type, &sv, vtop, '?'))
6576 type_incompatibility_error(&sv.type, &vtop->type,
6577 "type mismatch in conditional expression (have '%s' and '%s')");
6578 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6579 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6580 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
6582 /* now we convert second operand */
6583 if (c != 1) {
6584 gen_cast(&type);
6585 if (islv) {
6586 mk_pointer(&vtop->type);
6587 gaddrof();
6588 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6589 gaddrof();
6592 rc = RC_TYPE(type.t);
6593 /* for long longs, we use fixed registers to avoid having
6594 to handle a complicated move */
6595 if (USING_TWO_WORDS(type.t))
6596 rc = RC_RET(type.t);
6598 tt = r2 = 0;
6599 if (c < 0) {
6600 r2 = gv(rc);
6601 tt = gjmp(0);
6603 gsym(u);
6604 nocode_wanted = ncw_prev;
6606 /* this is horrible, but we must also convert first
6607 operand */
6608 if (c != 0) {
6609 *vtop = sv;
6610 gen_cast(&type);
6611 if (islv) {
6612 mk_pointer(&vtop->type);
6613 gaddrof();
6614 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6615 gaddrof();
6618 if (c < 0) {
6619 r1 = gv(rc);
6620 move_reg(r2, r1, islv ? VT_PTR : type.t);
6621 vtop->r = r2;
6622 gsym(tt);
6625 if (islv)
6626 indir();
6630 static void expr_eq(void)
6632 int t;
6634 expr_cond();
6635 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6636 test_lvalue();
6637 next();
6638 if (t == '=') {
6639 expr_eq();
6640 } else {
6641 vdup();
6642 expr_eq();
6643 gen_op(TOK_ASSIGN_OP(t));
6645 vstore();
6649 ST_FUNC void gexpr(void)
6651 while (1) {
6652 expr_eq();
6653 if (tok != ',')
6654 break;
6655 vpop();
6656 next();
6660 /* parse a constant expression and return value in vtop. */
6661 static void expr_const1(void)
6663 const_wanted++;
6664 nocode_wanted += unevalmask + 1;
6665 expr_cond();
6666 nocode_wanted -= unevalmask + 1;
6667 const_wanted--;
6670 /* parse an integer constant and return its value. */
6671 static inline int64_t expr_const64(void)
6673 int64_t c;
6674 expr_const1();
6675 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6676 expect("constant expression");
6677 c = vtop->c.i;
6678 vpop();
6679 return c;
6682 /* parse an integer constant and return its value.
6683 Complain if it doesn't fit in 32 bits (signed or unsigned). */
6684 ST_FUNC int expr_const(void)
6686 int c;
6687 int64_t wc = expr_const64();
6688 c = wc;
6689 if (c != wc && (unsigned)c != wc)
6690 tcc_error("constant exceeds 32 bit");
6691 return c;
6694 /* ------------------------------------------------------------------------- */
6695 /* return from function */
6697 #ifndef TCC_TARGET_ARM64
6698 static void gfunc_return(CType *func_type)
6700 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6701 CType type, ret_type;
6702 int ret_align, ret_nregs, regsize;
6703 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6704 &ret_align, &regsize);
6705 if (ret_nregs < 0) {
6706 #ifdef TCC_TARGET_RISCV64
6707 arch_transfer_ret_regs(0);
6708 #endif
6709 } else if (0 == ret_nregs) {
6710 /* if returning structure, must copy it to implicit
6711 first pointer arg location */
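/* Illustrative sketch (assuming a typical ABI where large structs are
   returned via a hidden pointer): for
       struct Big { long a, b, c, d; };
       struct Big f(void) { struct Big x = {0}; return x; }
   gfunc_sret() reports 0 registers, so the value is stored through the
   caller-provided pointer kept in func_vc; small structs that fit in
   registers take the ret_nregs > 0 path below instead. */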
6712 type = *func_type;
6713 mk_pointer(&type);
6714 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6715 indir();
6716 vswap();
6717 /* copy structure value to pointer */
6718 vstore();
6719 } else {
6720 /* returning structure packed into registers */
6721 int size, addr, align, rc;
6722 size = type_size(func_type,&align);
6723 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6724 (vtop->c.i & (ret_align-1)))
6725 && (align & (ret_align-1))) {
6726 loc = (loc - size) & -ret_align;
6727 addr = loc;
6728 type = *func_type;
6729 vset(&type, VT_LOCAL | VT_LVAL, addr);
6730 vswap();
6731 vstore();
6732 vpop();
6733 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6735 vtop->type = ret_type;
6736 rc = RC_RET(ret_type.t);
6737 if (ret_nregs == 1)
6738 gv(rc);
6739 else {
6740 for (;;) {
6741 vdup();
6742 gv(rc);
6743 vpop();
6744 if (--ret_nregs == 0)
6745 break;
6746 /* We assume that when a structure is returned in multiple
6747 registers, their classes are consecutive values of the
6748 sequence s(n) = 2^n */
6749 rc <<= 1;
6750 vtop->c.i += regsize;
6754 } else {
6755 gv(RC_RET(func_type->t));
6757 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6759 #endif
6761 static void check_func_return(void)
6763 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6764 return;
6765 if (!strcmp (funcname, "main")
6766 && (func_vt.t & VT_BTYPE) == VT_INT) {
6767 /* main returns 0 by default */
6768 vpushi(0);
6769 gen_assign_cast(&func_vt);
6770 gfunc_return(&func_vt);
6771 } else {
6772 tcc_warning("function might return no value: '%s'", funcname);
6776 /* ------------------------------------------------------------------------- */
6777 /* switch/case */
6779 static int case_cmpi(const void *pa, const void *pb)
6781 int64_t a = (*(struct case_t**) pa)->v1;
6782 int64_t b = (*(struct case_t**) pb)->v1;
6783 return a < b ? -1 : a > b;
6786 static int case_cmpu(const void *pa, const void *pb)
6788 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
6789 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
6790 return a < b ? -1 : a > b;
6793 static void gtst_addr(int t, int a)
6795 gsym_addr(gvtst(0, t), a);
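/* Illustrative input for gcase() (not from the original source):
       switch (x) {
       case 1 ... 4: break;    // GNU case range, v1 = 1, v2 = 4
       case 10:      break;    // v1 == v2
       default:      break;
       }
   The sorted case array is searched with a binary search while more than
   8 entries remain, then finished with a linear scan. */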
6798 static void gcase(struct case_t **base, int len, int *bsym)
6800 struct case_t *p;
6801 int e;
6802 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6803 while (len > 8) {
6804 /* binary search */
6805 p = base[len/2];
6806 vdup();
6807 if (ll)
6808 vpushll(p->v2);
6809 else
6810 vpushi(p->v2);
6811 gen_op(TOK_LE);
6812 e = gvtst(1, 0);
6813 vdup();
6814 if (ll)
6815 vpushll(p->v1);
6816 else
6817 vpushi(p->v1);
6818 gen_op(TOK_GE);
6819 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6820 /* x < v1 */
6821 gcase(base, len/2, bsym);
6822 /* x > v2 */
6823 gsym(e);
6824 e = len/2 + 1;
6825 base += e; len -= e;
6827 /* linear scan */
6828 while (len--) {
6829 p = *base++;
6830 vdup();
6831 if (ll)
6832 vpushll(p->v2);
6833 else
6834 vpushi(p->v2);
6835 if (p->v1 == p->v2) {
6836 gen_op(TOK_EQ);
6837 gtst_addr(0, p->sym);
6838 } else {
6839 gen_op(TOK_LE);
6840 e = gvtst(1, 0);
6841 vdup();
6842 if (ll)
6843 vpushll(p->v1);
6844 else
6845 vpushi(p->v1);
6846 gen_op(TOK_GE);
6847 gtst_addr(0, p->sym);
6848 gsym(e);
6851 *bsym = gjmp(*bsym);
6854 /* ------------------------------------------------------------------------- */
6855 /* __attribute__((cleanup(fn))) */
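/* Usage example for the attribute handled below (illustrative):
       void unlock(int *p) { ... }
       {
           __attribute__((cleanup(unlock))) int lock = 1;
           ...
       }   // unlock(&lock) is emitted when 'lock' goes out of scope
   try_call_scope_cleanup() walks the recorded cleanups of the current
   scope chain down to 'stop' and emits those calls. */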
6857 static void try_call_scope_cleanup(Sym *stop)
6859 Sym *cls = cur_scope->cl.s;
6861 for (; cls != stop; cls = cls->ncl) {
6862 Sym *fs = cls->next;
6863 Sym *vs = cls->prev_tok;
6865 vpushsym(&fs->type, fs);
6866 vset(&vs->type, vs->r, vs->c);
6867 vtop->sym = vs;
6868 mk_pointer(&vtop->type);
6869 gaddrof();
6870 gfunc_call(1);
6874 static void try_call_cleanup_goto(Sym *cleanupstate)
6876 Sym *oc, *cc;
6877 int ocd, ccd;
6879 if (!cur_scope->cl.s)
6880 return;
6882 /* search NCA of both cleanup chains given parents and initial depth */
6883 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6884 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6886 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6888 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6891 try_call_scope_cleanup(cc);
6894 /* call 'func' for each __attribute__((cleanup(func))) */
6895 static void block_cleanup(struct scope *o)
6897 int jmp = 0;
6898 Sym *g, **pg;
6899 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6900 if (g->prev_tok->r & LABEL_FORWARD) {
6901 Sym *pcl = g->next;
6902 if (!jmp)
6903 jmp = gjmp(0);
6904 gsym(pcl->jnext);
6905 try_call_scope_cleanup(o->cl.s);
6906 pcl->jnext = gjmp(0);
6907 if (!o->cl.n)
6908 goto remove_pending;
6909 g->c = o->cl.n;
6910 pg = &g->prev;
6911 } else {
6912 remove_pending:
6913 *pg = g->prev;
6914 sym_free(g);
6917 gsym(jmp);
6918 try_call_scope_cleanup(o->cl.s);
6921 /* ------------------------------------------------------------------------- */
6922 /* VLA */
6924 static void vla_restore(int loc)
6926 if (loc)
6927 gen_vla_sp_restore(loc);
6930 static void vla_leave(struct scope *o)
6932 if (o->vla.num < cur_scope->vla.num)
6933 vla_restore(o->vla.loc);
6936 /* ------------------------------------------------------------------------- */
6937 /* local scopes */
6939 void new_scope(struct scope *o)
6941 /* copy and link previous scope */
6942 *o = *cur_scope;
6943 o->prev = cur_scope;
6944 cur_scope = o;
6946 /* record local declaration stack position */
6947 o->lstk = local_stack;
6948 o->llstk = local_label_stack;
6950 ++local_scope;
6952 if (tcc_state->do_debug)
6953 tcc_debug_stabn(N_LBRAC, ind - func_ind);
6956 void prev_scope(struct scope *o, int is_expr)
6958 vla_leave(o->prev);
6960 if (o->cl.s != o->prev->cl.s)
6961 block_cleanup(o->prev);
6963 /* pop locally defined labels */
6964 label_pop(&local_label_stack, o->llstk, is_expr);
6966 /* In the is_expr case (a statement expression is finished here),
6967 vtop might refer to symbols on the local_stack. Either via the
6968 type or via vtop->sym. We can't pop those nor any that in turn
6969 might be referred to. To make it easier we don't roll back
6970 any symbols in that case; some upper level call to block() will
6971 do that. We do have to remove such symbols from the lookup
6972 tables, though. sym_pop will do that. */
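/* Illustrative statement expression (gnu_ext) that reaches this path:
       int x = ({ struct T t = make_t(); t.field; });
   vtop may still refer to 't' here, so only the lookup-table entries are
   removed; the symbol stack itself is rolled back by an outer block(). */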
6974 /* pop locally defined symbols */
6975 pop_local_syms(&local_stack, o->lstk, is_expr, 0);
6976 cur_scope = o->prev;
6977 --local_scope;
6979 if (tcc_state->do_debug)
6980 tcc_debug_stabn(N_RBRAC, ind - func_ind);
6983 /* leave a scope via break/continue(/goto) */
6984 void leave_scope(struct scope *o)
6986 if (!o)
6987 return;
6988 try_call_scope_cleanup(o->cl.s);
6989 vla_leave(o);
6992 /* ------------------------------------------------------------------------- */
6993 /* call block from 'for do while' loops */
6995 static void lblock(int *bsym, int *csym)
6997 struct scope *lo = loop_scope, *co = cur_scope;
6998 int *b = co->bsym, *c = co->csym;
6999 if (csym) {
7000 co->csym = csym;
7001 loop_scope = co;
7003 co->bsym = bsym;
7004 block(0);
7005 co->bsym = b;
7006 if (csym) {
7007 co->csym = c;
7008 loop_scope = lo;
7012 static void block(int is_expr)
7014 int a, b, c, d, e, t;
7015 struct scope o;
7016 Sym *s;
7018 if (is_expr) {
7019 /* default return value is (void) */
7020 vpushi(0);
7021 vtop->type.t = VT_VOID;
7024 again:
7025 t = tok;
7026 /* If the token carries a value, next() might destroy it. Only with
7027 invalid code such as f(){"123"4;} */
7028 if (TOK_HAS_VALUE(t))
7029 goto expr;
7030 next();
7032 if (t == TOK_IF) {
7033 skip('(');
7034 gexpr();
7035 skip(')');
7036 a = gvtst(1, 0);
7037 block(0);
7038 if (tok == TOK_ELSE) {
7039 d = gjmp(0);
7040 gsym(a);
7041 next();
7042 block(0);
7043 gsym(d); /* patch else jmp */
7044 } else {
7045 gsym(a);
7048 } else if (t == TOK_WHILE) {
7049 d = gind();
7050 skip('(');
7051 gexpr();
7052 skip(')');
7053 a = gvtst(1, 0);
7054 b = 0;
7055 lblock(&a, &b);
7056 gjmp_addr(d);
7057 gsym_addr(b, d);
7058 gsym(a);
7060 } else if (t == '{') {
7061 new_scope(&o);
7063 /* handle local label declarations */
7064 while (tok == TOK_LABEL) {
7065 do {
7066 next();
7067 if (tok < TOK_UIDENT)
7068 expect("label identifier");
7069 label_push(&local_label_stack, tok, LABEL_DECLARED);
7070 next();
7071 } while (tok == ',');
7072 skip(';');
7075 while (tok != '}') {
7076 decl(VT_LOCAL);
7077 if (tok != '}') {
7078 if (is_expr)
7079 vpop();
7080 block(is_expr);
7084 prev_scope(&o, is_expr);
7085 if (local_scope)
7086 next();
7087 else if (!nocode_wanted)
7088 check_func_return();
7090 } else if (t == TOK_RETURN) {
7091 b = (func_vt.t & VT_BTYPE) != VT_VOID;
7092 if (tok != ';') {
7093 gexpr();
7094 if (b) {
7095 gen_assign_cast(&func_vt);
7096 } else {
7097 if (vtop->type.t != VT_VOID)
7098 tcc_warning("void function returns a value");
7099 vtop--;
7101 } else if (b) {
7102 tcc_warning("'return' with no value");
7103 b = 0;
7105 leave_scope(root_scope);
7106 if (b)
7107 gfunc_return(&func_vt);
7108 skip(';');
7109 /* jump unless last stmt in top-level block */
7110 if (tok != '}' || local_scope != 1)
7111 rsym = gjmp(rsym);
7112 CODE_OFF();
7114 } else if (t == TOK_BREAK) {
7115 /* compute jump */
7116 if (!cur_scope->bsym)
7117 tcc_error("cannot break");
7118 if (cur_switch && cur_scope->bsym == cur_switch->bsym)
7119 leave_scope(cur_switch->scope);
7120 else
7121 leave_scope(loop_scope);
7122 *cur_scope->bsym = gjmp(*cur_scope->bsym);
7123 skip(';');
7125 } else if (t == TOK_CONTINUE) {
7126 /* compute jump */
7127 if (!cur_scope->csym)
7128 tcc_error("cannot continue");
7129 leave_scope(loop_scope);
7130 *cur_scope->csym = gjmp(*cur_scope->csym);
7131 skip(';');
7133 } else if (t == TOK_FOR) {
7134 new_scope(&o);
7136 skip('(');
7137 if (tok != ';') {
7138 /* c99 for-loop init decl? */
7139 if (!decl0(VT_LOCAL, 1, NULL)) {
7140 /* no, regular for-loop init expr */
7141 gexpr();
7142 vpop();
7145 skip(';');
7146 a = b = 0;
7147 c = d = gind();
7148 if (tok != ';') {
7149 gexpr();
7150 a = gvtst(1, 0);
7152 skip(';');
7153 if (tok != ')') {
7154 e = gjmp(0);
7155 d = gind();
7156 gexpr();
7157 vpop();
7158 gjmp_addr(c);
7159 gsym(e);
7161 skip(')');
7162 lblock(&a, &b);
7163 gjmp_addr(d);
7164 gsym_addr(b, d);
7165 gsym(a);
7166 prev_scope(&o, 0);
7168 } else if (t == TOK_DO) {
7169 a = b = 0;
7170 d = gind();
7171 lblock(&a, &b);
7172 gsym(b);
7173 skip(TOK_WHILE);
7174 skip('(');
7175 gexpr();
7176 skip(')');
7177 skip(';');
7178 c = gvtst(0, 0);
7179 gsym_addr(c, d);
7180 gsym(a);
7182 } else if (t == TOK_SWITCH) {
7183 struct switch_t *sw;
7185 sw = tcc_mallocz(sizeof *sw);
7186 sw->bsym = &a;
7187 sw->scope = cur_scope;
7188 sw->prev = cur_switch;
7189 cur_switch = sw;
7191 skip('(');
7192 gexpr();
7193 skip(')');
7194 sw->sv = *vtop--; /* save switch value */
7196 a = 0;
7197 b = gjmp(0); /* jump to first case */
7198 lblock(&a, NULL);
7199 a = gjmp(a); /* add implicit break */
7200 /* case lookup */
7201 gsym(b);
7203 if (sw->sv.type.t & VT_UNSIGNED)
7204 qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
7205 else
7206 qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
7208 for (b = 1; b < sw->n; b++)
7209 if (sw->sv.type.t & VT_UNSIGNED
7210 ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
7211 : sw->p[b - 1]->v2 >= sw->p[b]->v1)
7212 tcc_error("duplicate case value");
7214 vpushv(&sw->sv);
7215 gv(RC_INT);
7216 d = 0, gcase(sw->p, sw->n, &d);
7217 vpop();
7218 if (sw->def_sym)
7219 gsym_addr(d, sw->def_sym);
7220 else
7221 gsym(d);
7222 /* break label */
7223 gsym(a);
7225 dynarray_reset(&sw->p, &sw->n);
7226 cur_switch = sw->prev;
7227 tcc_free(sw);
7229 } else if (t == TOK_CASE) {
7230 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
7231 if (!cur_switch)
7232 expect("switch");
7233 cr->v1 = cr->v2 = expr_const64();
7234 if (gnu_ext && tok == TOK_DOTS) {
7235 next();
7236 cr->v2 = expr_const64();
7237 if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
7238 || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
7239 tcc_warning("empty case range");
7241 cr->sym = gind();
7242 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
7243 skip(':');
7244 is_expr = 0;
7245 goto block_after_label;
7247 } else if (t == TOK_DEFAULT) {
7248 if (!cur_switch)
7249 expect("switch");
7250 if (cur_switch->def_sym)
7251 tcc_error("too many 'default'");
7252 cur_switch->def_sym = gind();
7253 skip(':');
7254 is_expr = 0;
7255 goto block_after_label;
7257 } else if (t == TOK_GOTO) {
7258 vla_restore(root_scope->vla.loc);
7259 if (tok == '*' && gnu_ext) {
7260 /* computed goto */
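/* Illustrative (gnu_ext): void *p = &&out; ... goto *p; out: ; */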
7261 next();
7262 gexpr();
7263 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
7264 expect("pointer");
7265 ggoto();
7267 } else if (tok >= TOK_UIDENT) {
7268 s = label_find(tok);
7269 /* put forward definition if needed */
7270 if (!s)
7271 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
7272 else if (s->r == LABEL_DECLARED)
7273 s->r = LABEL_FORWARD;
7275 if (s->r & LABEL_FORWARD) {
7276 /* start new goto chain for cleanups, linked via label->next */
7277 if (cur_scope->cl.s && !nocode_wanted) {
7278 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
7279 pending_gotos->prev_tok = s;
7280 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
7281 pending_gotos->next = s;
7283 s->jnext = gjmp(s->jnext);
7284 } else {
7285 try_call_cleanup_goto(s->cleanupstate);
7286 gjmp_addr(s->jnext);
7288 next();
7290 } else {
7291 expect("label identifier");
7293 skip(';');
7295 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
7296 asm_instr();
7298 } else {
7299 if (tok == ':' && t >= TOK_UIDENT) {
7300 /* label case */
7301 next();
7302 s = label_find(t);
7303 if (s) {
7304 if (s->r == LABEL_DEFINED)
7305 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
7306 s->r = LABEL_DEFINED;
7307 if (s->next) {
7308 Sym *pcl; /* pending cleanup goto */
7309 for (pcl = s->next; pcl; pcl = pcl->prev)
7310 gsym(pcl->jnext);
7311 sym_pop(&s->next, NULL, 0);
7312 } else
7313 gsym(s->jnext);
7314 } else {
7315 s = label_push(&global_label_stack, t, LABEL_DEFINED);
7317 s->jnext = gind();
7318 s->cleanupstate = cur_scope->cl.s;
7320 block_after_label:
7321 vla_restore(cur_scope->vla.loc);
7322 /* we accept this, but it is a mistake */
7323 if (tok == '}') {
7324 tcc_warning("deprecated use of label at end of compound statement");
7325 } else {
7326 goto again;
7329 } else {
7330 /* expression case */
7331 if (t != ';') {
7332 unget_tok(t);
7333 expr:
7334 if (is_expr) {
7335 vpop();
7336 gexpr();
7337 } else {
7338 gexpr();
7339 vpop();
7341 skip(';');
7347 /* This skips over a stream of tokens containing balanced {} and ()
7348 pairs, stopping at an outer ',' ';' and '}' (or the matching '}' if we started
7349 with a '{'). If STR is non-NULL, the skipped tokens are allocated and stored
7350 in *STR. This doesn't check whether () and {} are nested correctly,
7351 i.e. "({)}" is accepted. */
7352 static void skip_or_save_block(TokenString **str)
7354 int braces = tok == '{';
7355 int level = 0;
7356 if (str)
7357 *str = tok_str_alloc();
7359 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
7360 int t;
7361 if (tok == TOK_EOF) {
7362 if (str || level > 0)
7363 tcc_error("unexpected end of file");
7364 else
7365 break;
7367 if (str)
7368 tok_str_add_tok(*str);
7369 t = tok;
7370 next();
7371 if (t == '{' || t == '(') {
7372 level++;
7373 } else if (t == '}' || t == ')') {
7374 level--;
7375 if (level == 0 && braces && t == '}')
7376 break;
7379 if (str) {
7380 tok_str_add(*str, -1);
7381 tok_str_add(*str, 0);
7385 #define EXPR_CONST 1
7386 #define EXPR_ANY 2
7388 static void parse_init_elem(int expr_type)
7390 int saved_global_expr;
7391 switch(expr_type) {
7392 case EXPR_CONST:
7393 /* compound literals must be allocated globally in this case */
7394 saved_global_expr = global_expr;
7395 global_expr = 1;
7396 expr_const1();
7397 global_expr = saved_global_expr;
7398 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7399 (compound literals). */
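/* Illustrative: with EXPR_CONST (static storage duration) an initializer like
       static int *p = (int[]){ 1, 2, 3 };
   is accepted because the compound literal is allocated globally above,
   while a value that is not an address constant hits the error below. */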
7400 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
7401 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
7402 || vtop->sym->v < SYM_FIRST_ANOM))
7403 #ifdef TCC_TARGET_PE
7404 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
7405 #endif
7407 tcc_error("initializer element is not constant");
7408 break;
7409 case EXPR_ANY:
7410 expr_eq();
7411 break;
7415 #if 1
7416 static void init_assert(init_params *p, int offset)
7418 if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
7419 : !nocode_wanted && offset > p->local_offset)
7420 tcc_internal_error("initializer overflow");
7422 #else
7423 #define init_assert(sec, offset)
7424 #endif
7426 /* put zeros for variable based init */
7427 static void init_putz(init_params *p, unsigned long c, int size)
7429 init_assert(p, c + size);
7430 if (p->sec) {
7431 /* nothing to do because globals are already set to zero */
7432 } else {
7433 vpush_helper_func(TOK_memset);
7434 vseti(VT_LOCAL, c);
7435 #ifdef TCC_TARGET_ARM
7436 vpushs(size);
7437 vpushi(0);
7438 #else
7439 vpushi(0);
7440 vpushs(size);
7441 #endif
7442 gfunc_call(3);
7446 #define DIF_FIRST 1
7447 #define DIF_SIZE_ONLY 2
7448 #define DIF_HAVE_ELEM 4
7449 #define DIF_CLEAR 8
7451 /* delete relocations for the specified range c ... c + size. Unfortunately
7452 in very special cases, relocations may occur unordered */
7453 static void decl_design_delrels(Section *sec, int c, int size)
7455 ElfW_Rel *rel, *rel2, *rel_end;
7456 if (!sec || !sec->reloc)
7457 return;
7458 rel = rel2 = (ElfW_Rel*)sec->reloc->data;
7459 rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
7460 while (rel < rel_end) {
7461 if (rel->r_offset >= c && rel->r_offset < c + size) {
7462 sec->reloc->data_offset -= sizeof *rel;
7463 } else {
7464 if (rel2 != rel)
7465 memcpy(rel2, rel, sizeof *rel);
7466 ++rel2;
7468 ++rel;
7472 static void decl_design_flex(init_params *p, Sym *ref, int index)
7474 if (ref == p->flex_array_ref) {
7475 if (index >= ref->c)
7476 ref->c = index + 1;
7477 } else if (ref->c < 0)
7478 tcc_error("flexible array has zero size in this context");
7481 /* t is the array or struct type. c is the array or struct
7482 address. cur_field is the pointer to the current
7483 field, for arrays the 'c' member contains the current start
7484 index. 'flags' is as in decl_initializer.
7485 'al' contains the already initialized length of the
7486 current container (starting at c). This returns the new length of that. */
7487 static int decl_designator(init_params *p, CType *type, unsigned long c,
7488 Sym **cur_field, int flags, int al)
7490 Sym *s, *f;
7491 int index, index_last, align, l, nb_elems, elem_size;
7492 unsigned long corig = c;
7494 elem_size = 0;
7495 nb_elems = 1;
7497 if (flags & DIF_HAVE_ELEM)
7498 goto no_designator;
7500 if (gnu_ext && tok >= TOK_UIDENT) {
7501 l = tok, next();
7502 if (tok == ':')
7503 goto struct_field;
7504 unget_tok(l);
7507 /* NOTE: we only support ranges for last designator */
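/* Designator examples accepted here (illustrative):
       int a[10]  = { [2] = 1, [4 ... 6] = 2 };   // GNU range designator
       struct P p = { .y = 3, .x = 1 };
   A range may only appear as the last designator of a chain. */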
7508 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
7509 if (tok == '[') {
7510 if (!(type->t & VT_ARRAY))
7511 expect("array type");
7512 next();
7513 index = index_last = expr_const();
7514 if (tok == TOK_DOTS && gnu_ext) {
7515 next();
7516 index_last = expr_const();
7518 skip(']');
7519 s = type->ref;
7520 decl_design_flex(p, s, index_last);
7521 if (index < 0 || index_last >= s->c || index_last < index)
7522 tcc_error("index exceeds array bounds or range is empty");
7523 if (cur_field)
7524 (*cur_field)->c = index_last;
7525 type = pointed_type(type);
7526 elem_size = type_size(type, &align);
7527 c += index * elem_size;
7528 nb_elems = index_last - index + 1;
7529 } else {
7530 int cumofs;
7531 next();
7532 l = tok;
7533 struct_field:
7534 next();
7535 if ((type->t & VT_BTYPE) != VT_STRUCT)
7536 expect("struct/union type");
7537 cumofs = 0;
7538 f = find_field(type, l, &cumofs);
7539 if (!f)
7540 expect("field");
7541 if (cur_field)
7542 *cur_field = f;
7543 type = &f->type;
7544 c += cumofs + f->c;
7546 cur_field = NULL;
7548 if (!cur_field) {
7549 if (tok == '=') {
7550 next();
7551 } else if (!gnu_ext) {
7552 expect("=");
7554 } else {
7555 no_designator:
7556 if (type->t & VT_ARRAY) {
7557 index = (*cur_field)->c;
7558 s = type->ref;
7559 decl_design_flex(p, s, index);
7560 if (index >= s->c)
7561 tcc_error("too many initializers");
7562 type = pointed_type(type);
7563 elem_size = type_size(type, &align);
7564 c += index * elem_size;
7565 } else {
7566 f = *cur_field;
7567 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
7568 *cur_field = f = f->next;
7569 if (!f)
7570 tcc_error("too many initializers");
7571 type = &f->type;
7572 c += f->c;
7576 if (!elem_size) /* for structs */
7577 elem_size = type_size(type, &align);
7579 /* Using designators, the same element can be initialized more
7580 than once. In that case we need to delete possibly already
7581 existing relocations. */
7582 if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
7583 decl_design_delrels(p->sec, c, elem_size * nb_elems);
7584 flags &= ~DIF_CLEAR; /* mark stack dirty too */
7587 decl_initializer(p, type, c, flags & ~DIF_FIRST);
7589 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
7590 Sym aref = {0};
7591 CType t1;
7592 int i;
7593 if (p->sec || (type->t & VT_ARRAY)) {
7594 /* make init_putv/vstore believe it is a struct */
7595 aref.c = elem_size;
7596 t1.t = VT_STRUCT, t1.ref = &aref;
7597 type = &t1;
7599 if (p->sec)
7600 vpush_ref(type, p->sec, c, elem_size);
7601 else
7602 vset(type, VT_LOCAL|VT_LVAL, c);
7603 for (i = 1; i < nb_elems; i++) {
7604 vdup();
7605 init_putv(p, type, c + elem_size * i);
7607 vpop();
7610 c += nb_elems * elem_size;
7611 if (c - corig > al)
7612 al = c - corig;
7613 return al;
7616 /* store a value or an expression directly in global data or in local array */
7617 static void init_putv(init_params *p, CType *type, unsigned long c)
7619 int bt;
7620 void *ptr;
7621 CType dtype;
7622 int size, align;
7623 Section *sec = p->sec;
7625 dtype = *type;
7626 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7628 size = type_size(type, &align);
7629 if (type->t & VT_BITFIELD)
7630 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7631 init_assert(p, c + size);
7633 if (sec) {
7634 /* XXX: not portable */
7635 /* XXX: generate error if incorrect relocation */
7636 gen_assign_cast(&dtype);
7637 bt = type->t & VT_BTYPE;
7639 if ((vtop->r & VT_SYM)
7640 && bt != VT_PTR
7641 && bt != VT_FUNC
7642 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7643 || (type->t & VT_BITFIELD))
7644 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7646 tcc_error("initializer element is not computable at load time");
7648 if (NODATA_WANTED) {
7649 vtop--;
7650 return;
7653 ptr = sec->data + c;
7655 /* XXX: make code faster ? */
7656 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7657 vtop->sym->v >= SYM_FIRST_ANOM &&
7658 /* XXX This rejects compound literals like
7659 '(void *){ptr}'. The problem is that '&sym' is
7660 represented the same way, which would be ruled out
7661 by the SYM_FIRST_ANOM check above, but also '"string"'
7662 in 'char *p = "string"' is represented the same
7663 with the type being VT_PTR and the symbol being an
7664 anonymous one. That is, there's no difference in vtop
7665 between '(void *){x}' and '&(void *){x}'. Ignore
7666 pointer typed entities here. Hopefully no real code
7667 will ever use compound literals with scalar type. */
7668 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7669 /* These come from compound literals, memcpy stuff over. */
7670 Section *ssec;
7671 ElfSym *esym;
7672 ElfW_Rel *rel;
7673 esym = elfsym(vtop->sym);
7674 ssec = tcc_state->sections[esym->st_shndx];
7675 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
7676 if (ssec->reloc) {
7677 /* We need to copy over all memory contents, and that
7678 includes relocations. Use the fact that relocs are
7679 created in order, so look from the end of relocs
7680 until we hit one before the copied region. */
7681 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
7682 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
7683 while (num_relocs--) {
7684 rel--;
7685 if (rel->r_offset >= esym->st_value + size)
7686 continue;
7687 if (rel->r_offset < esym->st_value)
7688 break;
7689 put_elf_reloca(symtab_section, sec,
7690 c + rel->r_offset - esym->st_value,
7691 ELFW(R_TYPE)(rel->r_info),
7692 ELFW(R_SYM)(rel->r_info),
7693 #if PTR_SIZE == 8
7694 rel->r_addend
7695 #else
7697 #endif
7701 } else {
7702 if (type->t & VT_BITFIELD) {
7703 int bit_pos, bit_size, bits, n;
7704 unsigned char *p, v, m;
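/* Worked example (illustrative): storing the value 5 into a 3-bit
   field starting at bit 6 writes two bytes: the low 2 bits of the
   value go into bits 6..7 of the first byte, the remaining bit into
   bit 0 of the next byte. */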
7705 bit_pos = BIT_POS(vtop->type.t);
7706 bit_size = BIT_SIZE(vtop->type.t);
7707 p = (unsigned char*)ptr + (bit_pos >> 3);
7708 bit_pos &= 7, bits = 0;
7709 while (bit_size) {
7710 n = 8 - bit_pos;
7711 if (n > bit_size)
7712 n = bit_size;
7713 v = vtop->c.i >> bits << bit_pos;
7714 m = ((1 << n) - 1) << bit_pos;
7715 *p = (*p & ~m) | (v & m);
7716 bits += n, bit_size -= n, bit_pos = 0, ++p;
7718 } else
7719 switch(bt) {
7720 /* XXX: when cross-compiling we assume that each type has the
7721 same representation on host and target, which is likely to
7722 be wrong in the case of long double */
7723 case VT_BOOL:
7724 vtop->c.i = vtop->c.i != 0;
7725 case VT_BYTE:
7726 *(char *)ptr = vtop->c.i;
7727 break;
7728 case VT_SHORT:
7729 *(short *)ptr = vtop->c.i;
7730 break;
7731 case VT_FLOAT:
7732 *(float*)ptr = vtop->c.f;
7733 break;
7734 case VT_DOUBLE:
7735 *(double *)ptr = vtop->c.d;
7736 break;
7737 case VT_LDOUBLE:
7738 #if defined TCC_IS_NATIVE_387
7739 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7740 memcpy(ptr, &vtop->c.ld, 10);
7741 #ifdef __TINYC__
7742 else if (sizeof (long double) == sizeof (double))
7743 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7744 #endif
7745 else if (vtop->c.ld == 0.0)
7747 else
7748 #endif
7749 if (sizeof(long double) == LDOUBLE_SIZE)
7750 *(long double*)ptr = vtop->c.ld;
7751 else if (sizeof(double) == LDOUBLE_SIZE)
7752 *(double *)ptr = (double)vtop->c.ld;
7753 #ifndef TCC_CROSS_TEST
7754 else
7755 tcc_error("can't cross compile long double constants");
7756 #endif
7757 break;
7758 #if PTR_SIZE != 8
7759 case VT_LLONG:
7760 *(long long *)ptr = vtop->c.i;
7761 break;
7762 #else
7763 case VT_LLONG:
7764 #endif
7765 case VT_PTR:
7767 addr_t val = vtop->c.i;
7768 #if PTR_SIZE == 8
7769 if (vtop->r & VT_SYM)
7770 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7771 else
7772 *(addr_t *)ptr = val;
7773 #else
7774 if (vtop->r & VT_SYM)
7775 greloc(sec, vtop->sym, c, R_DATA_PTR);
7776 *(addr_t *)ptr = val;
7777 #endif
7778 break;
7780 default:
7782 int val = vtop->c.i;
7783 #if PTR_SIZE == 8
7784 if (vtop->r & VT_SYM)
7785 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7786 else
7787 *(int *)ptr = val;
7788 #else
7789 if (vtop->r & VT_SYM)
7790 greloc(sec, vtop->sym, c, R_DATA_PTR);
7791 *(int *)ptr = val;
7792 #endif
7793 break;
7797 vtop--;
7798 } else {
7799 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7800 vswap();
7801 vstore();
7802 vpop();
7806 /* 't' contains the type and storage info. 'c' is the offset of the
7807 object in section 'sec'. If 'sec' is NULL, it means stack based
7808 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
7809 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
7810 size only evaluation is wanted (only for arrays). */
7811 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
7813 int len, n, no_oblock, i;
7814 int size1, align1;
7815 Sym *s, *f;
7816 Sym indexsym;
7817 CType *t1;
7819 /* generate line number info */
7820 if (!p->sec && tcc_state->do_debug)
7821 tcc_debug_line(tcc_state);
7823 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7824 /* In case of strings we have special handling for arrays, so
7825 don't consume them as initializer value (which would commit them
7826 to some anonymous symbol). */
7827 tok != TOK_LSTR && tok != TOK_STR &&
7828 !(flags & DIF_SIZE_ONLY)) {
7829 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7830 flags |= DIF_HAVE_ELEM;
7833 if ((flags & DIF_HAVE_ELEM) &&
7834 !(type->t & VT_ARRAY) &&
7835 /* Use i_c_parameter_t to strip toplevel qualifiers.
7836 The source type might have VT_CONSTANT set, which is
7837 of course assignable to non-const elements. */
7838 is_compatible_unqualified_types(type, &vtop->type)) {
7839 goto init_putv;
7841 } else if (type->t & VT_ARRAY) {
7842 no_oblock = 1;
7843 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7844 tok == '{') {
7845 skip('{');
7846 no_oblock = 0;
7849 s = type->ref;
7850 n = s->c;
7851 t1 = pointed_type(type);
7852 size1 = type_size(t1, &align1);
7854 /* only parse strings here if correct type (otherwise: handle
7855 them as ((w)char *) expressions) */
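/* Illustrative initializers taken by this branch:
       char    s[] = "ab" "cd";   // adjacent narrow literals are merged
       wchar_t w[] = L"wide";     // wide literal for a wide-char array
   The loop below concatenates adjacent string tokens into 'initstr'. */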
7856 if ((tok == TOK_LSTR &&
7857 #ifdef TCC_TARGET_PE
7858 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7859 #else
7860 (t1->t & VT_BTYPE) == VT_INT
7861 #endif
7862 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7863 len = 0;
7864 cstr_reset(&initstr);
7865 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
7866 tcc_error("unhandled string literal merging");
7867 while (tok == TOK_STR || tok == TOK_LSTR) {
7868 if (initstr.size)
7869 initstr.size -= size1;
7870 if (tok == TOK_STR)
7871 len += tokc.str.size;
7872 else
7873 len += tokc.str.size / sizeof(nwchar_t);
7874 len--;
7875 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
7876 next();
7878 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
7879 && tok != TOK_EOF) {
7880 /* Not a lone literal but part of a bigger expression. */
7881 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
7882 tokc.str.size = initstr.size;
7883 tokc.str.data = initstr.data;
7884 goto do_init_array;
7887 if (!(flags & DIF_SIZE_ONLY)) {
7888 int nb = n;
7889 if (len < nb)
7890 nb = len;
7891 if (len > nb)
7892 tcc_warning("initializer-string for array is too long");
7893 /* in order to go faster for the common case (char
7894 string in a global variable), we handle it
7895 specifically */
7896 if (p->sec && size1 == 1) {
7897 init_assert(p, c + nb);
7898 if (!NODATA_WANTED)
7899 memcpy(p->sec->data + c, initstr.data, nb);
7900 } else {
7901 for(i=0;i<n;i++) {
7902 if (i >= nb) {
7903 /* only add trailing zero if enough storage (no
7904 warning in this case since it is standard) */
7905 if (flags & DIF_CLEAR)
7906 break;
7907 if (n - i >= 4) {
7908 init_putz(p, c + i * size1, (n - i) * size1);
7909 break;
7911 ch = 0;
7912 } else if (size1 == 1)
7913 ch = ((unsigned char *)initstr.data)[i];
7914 else
7915 ch = ((nwchar_t *)initstr.data)[i];
7916 vpushi(ch);
7917 init_putv(p, t1, c + i * size1);
7920 } else {
7921 decl_design_flex(p, s, len);
7923 } else {
7925 do_init_array:
7926 indexsym.c = 0;
7927 f = &indexsym;
7929 do_init_list:
7930 /* zero memory once in advance */
7931 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
7932 init_putz(p, c, n*size1);
7933 flags |= DIF_CLEAR;
7936 len = 0;
7937 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7938 len = decl_designator(p, type, c, &f, flags, len);
7939 flags &= ~DIF_HAVE_ELEM;
7940 if (type->t & VT_ARRAY) {
7941 ++indexsym.c;
7942 /* special test for multi dimensional arrays (may not
7943 be strictly correct if designators are used at the
7944 same time) */
7945 if (no_oblock && len >= n*size1)
7946 break;
7947 } else {
7948 if (s->type.t == VT_UNION)
7949 f = NULL;
7950 else
7951 f = f->next;
7952 if (no_oblock && f == NULL)
7953 break;
7956 if (tok == '}')
7957 break;
7958 skip(',');
7961 if (!no_oblock)
7962 skip('}');
7963 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7964 no_oblock = 1;
7965 if ((flags & DIF_FIRST) || tok == '{') {
7966 skip('{');
7967 no_oblock = 0;
7969 s = type->ref;
7970 f = s->next;
7971 n = s->c;
7972 size1 = 1;
7973 goto do_init_list;
7974 } else if (tok == '{') {
7975 if (flags & DIF_HAVE_ELEM)
7976 skip(';');
7977 next();
7978 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
7979 skip('}');
7980 } else if ((flags & DIF_SIZE_ONLY)) {
7981 /* If we supported only ISO C we wouldn't have to accept calling
7982 this on anything other than an array if DIF_SIZE_ONLY (and even then
7983 only on the outermost level, so no recursion would be needed),
7984 because initializing a flex array member isn't supported.
7985 But GNU C supports it, so we need to recurse even into
7986 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7987 /* just skip expression */
7988 skip_or_save_block(NULL);
7989 } else {
7990 if (!(flags & DIF_HAVE_ELEM)) {
7991 /* This should happen only when we haven't parsed
7992 the init element above for fear of committing a
7993 string constant to memory too early. */
7994 if (tok != TOK_STR && tok != TOK_LSTR)
7995 expect("string constant");
7996 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
7998 init_putv:
7999 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
8000 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
8001 && vtop->c.i == 0
8002 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
8004 vpop();
8005 else
8006 init_putv(p, type, c);
8010 /* parse an initializer for type 'type' if 'has_init' is non-zero, and
8011 allocate space in local or global data space ('r' is either
8012 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
8013 variable 'v' of scope 'scope' is declared before initializers
8014 are parsed. If 'v' is zero, then a reference to the new object
8015 is put in the value stack. If 'has_init' is 2, a special parsing
8016 is done to handle string constants. */
8017 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
8018 int has_init, int v, int scope)
8020 int size, align, addr;
8021 TokenString *init_str = NULL;
8023 Section *sec;
8024 Sym *flexible_array;
8025 Sym *sym = NULL;
8026 int saved_nocode_wanted = nocode_wanted;
8027 #ifdef CONFIG_TCC_BCHECK
8028 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
8029 #endif
8030 init_params p = {0};
8032 /* Always allocate static or global variables */
8033 if (v && (r & VT_VALMASK) == VT_CONST)
8034 nocode_wanted |= 0x80000000;
8036 flexible_array = NULL;
8037 size = type_size(type, &align);
8039 /* exactly one flexible array may be initialized, either the
8040 toplevel array or the last member of the toplevel struct */
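/* Illustrative: the two accepted shapes are
       typedef int arr[];  arr x = { 1, 2, 3 };        // toplevel array
       struct S { int n; int data[]; }
           s = { 2, { 10, 20 } };                      // trailing flex member
   In both cases the final size is computed from the initializer. */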
8042 if (size < 0) {
8043 /* If the base type itself was an array type of unspecified size
8044 (like in 'typedef int arr[]; arr x = {1};') then we will
8045 overwrite the unknown size by the real one for this decl.
8046 We need to unshare the ref symbol holding that size. */
8047 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
8048 p.flex_array_ref = type->ref;
8050 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
8051 Sym *field = type->ref->next;
8052 if (field) {
8053 while (field->next)
8054 field = field->next;
8055 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
8056 flexible_array = field;
8057 p.flex_array_ref = field->type.ref;
8058 size = -1;
8063 if (size < 0) {
8064 /* If unknown size, do a dry-run 1st pass */
8065 if (!has_init)
8066 tcc_error("unknown type size");
8067 if (has_init == 2) {
8068 /* only get strings */
8069 init_str = tok_str_alloc();
8070 while (tok == TOK_STR || tok == TOK_LSTR) {
8071 tok_str_add_tok(init_str);
8072 next();
8074 tok_str_add(init_str, -1);
8075 tok_str_add(init_str, 0);
8076 } else
8077 skip_or_save_block(&init_str);
8078 unget_tok(0);
8080 /* compute size */
8081 begin_macro(init_str, 1);
8082 next();
8083 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
8084 /* prepare second initializer parsing */
8085 macro_ptr = init_str->str;
8086 next();
8088 /* if still unknown size, error */
8089 size = type_size(type, &align);
8090 if (size < 0)
8091 tcc_error("unknown type size");
8093 /* If there's a flex member and it was used in the initializer,
8094 adjust size. */
8095 if (flexible_array && flexible_array->type.ref->c > 0)
8096 size += flexible_array->type.ref->c
8097 * pointed_size(&flexible_array->type);
8100 /* take into account specified alignment if bigger */
8101 if (ad->a.aligned) {
8102 int speca = 1 << (ad->a.aligned - 1);
8103 if (speca > align)
8104 align = speca;
8105 } else if (ad->a.packed) {
8106 align = 1;
8109 if (!v && NODATA_WANTED)
8110 size = 0, align = 1;
8112 if ((r & VT_VALMASK) == VT_LOCAL) {
8113 sec = NULL;
8114 #ifdef CONFIG_TCC_BCHECK
8115 if (bcheck && v) {
8116 /* add padding between stack variables for bound checking */
8117 loc--;
8119 #endif
8120 loc = (loc - size) & -align;
8121 addr = loc;
8122 p.local_offset = addr + size;
8123 #ifdef CONFIG_TCC_BCHECK
8124 if (bcheck && v) {
8125 /* add padding between stack variables for bound checking */
8126 loc--;
8128 #endif
8129 if (v) {
8130 /* local variable */
8131 #ifdef CONFIG_TCC_ASM
8132 if (ad->asm_label) {
8133 int reg = asm_parse_regvar(ad->asm_label);
8134 if (reg >= 0)
8135 r = (r & ~VT_VALMASK) | reg;
8137 #endif
8138 sym = sym_push(v, type, r, addr);
8139 if (ad->cleanup_func) {
8140 Sym *cls = sym_push2(&all_cleanups,
8141 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
8142 cls->prev_tok = sym;
8143 cls->next = ad->cleanup_func;
8144 cls->ncl = cur_scope->cl.s;
8145 cur_scope->cl.s = cls;
8148 sym->a = ad->a;
8149 } else {
8150 /* push local reference */
8151 vset(type, r, addr);
8153 } else {
8154 if (v && scope == VT_CONST) {
8155 /* see if the symbol was already defined */
8156 sym = sym_find(v);
8157 if (sym) {
8158 patch_storage(sym, ad, type);
8159 /* we accept several definitions of the same global variable. */
8160 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8161 goto no_alloc;
8165 /* allocate symbol in corresponding section */
8166 sec = ad->section;
8167 if (!sec) {
8168 if (type->t & VT_CONSTANT)
8169 sec = data_ro_section;
8170 else if (has_init)
8171 sec = data_section;
8172 else if (tcc_state->nocommon)
8173 sec = bss_section;
8176 if (sec) {
8177 addr = section_add(sec, size, align);
8178 #ifdef CONFIG_TCC_BCHECK
8179 /* add padding if bound check */
8180 if (bcheck)
8181 section_add(sec, 1, 1);
8182 #endif
8183 } else {
8184 addr = align; /* SHN_COMMON is special, symbol value is align */
8185 sec = common_section;
8188 if (v) {
8189 if (!sym) {
8190 sym = sym_push(v, type, r | VT_SYM, 0);
8191 patch_storage(sym, ad, NULL);
8193 /* update symbol definition */
8194 put_extern_sym(sym, sec, addr, size);
8195 } else {
8196 /* push global reference */
8197 vpush_ref(type, sec, addr, size);
8198 sym = vtop->sym;
8199 vtop->r |= r;
8202 #ifdef CONFIG_TCC_BCHECK
8203 /* handle bounds now because the symbol must be defined
8204 before the relocation */
8205 if (bcheck) {
8206 addr_t *bounds_ptr;
8208 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8209 /* then add global bound info */
8210 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8211 bounds_ptr[0] = 0; /* relocated */
8212 bounds_ptr[1] = size;
8214 #endif
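/* Illustrative VLA declaration handled by the branch below:
       void f(int n) { int a[n]; ... }
   The stack pointer is saved once per function (root_scope) so that
   leaving a scope can restore it via vla_restore()/gen_vla_sp_restore(). */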
8217 if (type->t & VT_VLA) {
8218 int a;
8220 if (NODATA_WANTED)
8221 goto no_alloc;
8223 /* save current stack pointer */
8224 if (root_scope->vla.loc == 0) {
8225 struct scope *v = cur_scope;
8226 gen_vla_sp_save(loc -= PTR_SIZE);
8227 do v->vla.loc = loc; while ((v = v->prev));
8230 vla_runtime_type_size(type, &a);
8231 gen_vla_alloc(type, a);
8232 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8233 /* on _WIN64, because of the function args scratch area, the
8234 result of alloca differs from RSP and is returned in RAX. */
8235 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8236 #endif
8237 gen_vla_sp_save(addr);
8238 cur_scope->vla.loc = addr;
8239 cur_scope->vla.num++;
8240 } else if (has_init) {
8241 p.sec = sec;
8242 decl_initializer(&p, type, addr, DIF_FIRST);
8243 /* patch flexible array member size back to -1, */
8244 /* for possible subsequent similar declarations */
8245 if (flexible_array)
8246 flexible_array->type.ref->c = -1;
8249 no_alloc:
8250 /* restore parse state if needed */
8251 if (init_str) {
8252 end_macro();
8253 next();
8256 nocode_wanted = saved_nocode_wanted;
8259 /* parse a function defined by symbol 'sym' and generate its code in
8260 'cur_text_section' */
8261 static void gen_function(Sym *sym)
8263 struct scope f = { 0 };
8264 cur_scope = root_scope = &f;
8265 nocode_wanted = 0;
8266 ind = cur_text_section->data_offset;
8267 if (sym->a.aligned) {
8268 size_t newoff = section_add(cur_text_section, 0,
8269 1 << (sym->a.aligned - 1));
8270 gen_fill_nops(newoff - ind);
8272 /* NOTE: we patch the symbol size later */
8273 put_extern_sym(sym, cur_text_section, ind, 0);
8274 if (sym->type.ref->f.func_ctor)
8275 add_array (tcc_state, ".init_array", sym->c);
8276 if (sym->type.ref->f.func_dtor)
8277 add_array (tcc_state, ".fini_array", sym->c);
8279 funcname = get_tok_str(sym->v, NULL);
8280 func_ind = ind;
8281 func_vt = sym->type.ref->type;
8282 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8284 /* put debug symbol */
8285 tcc_debug_funcstart(tcc_state, sym);
8286 /* push a dummy symbol to enable local sym storage */
8287 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8288 local_scope = 1; /* for function parameters */
8289 gfunc_prolog(sym);
8290 local_scope = 0;
8291 rsym = 0;
8292 clear_temp_local_var_list();
8293 block(0);
8294 gsym(rsym);
8295 nocode_wanted = 0;
8296 /* reset local stack */
8297 pop_local_syms(&local_stack, NULL, 0, func_var);
8298 gfunc_epilog();
8299 cur_text_section->data_offset = ind;
8300 local_scope = 0;
8301 label_pop(&global_label_stack, NULL, 0);
8302 sym_pop(&all_cleanups, NULL, 0);
8303 /* patch symbol size */
8304 elfsym(sym)->st_size = ind - func_ind;
8305 /* end of function */
8306 tcc_debug_funcend(tcc_state, ind - func_ind);
8307 /* It's better to crash than to generate wrong code */
8308 cur_text_section = NULL;
8309 funcname = ""; /* for safety */
8310 func_vt.t = VT_VOID; /* for safety */
8311 func_var = 0; /* for safety */
8312 ind = 0; /* for safety */
8313 nocode_wanted = 0x80000000;
8314 check_vstack();
8315 /* do this after funcend debug info */
8316 next();
8319 static void gen_inline_functions(TCCState *s)
8321 Sym *sym;
8322 int inline_generated, i;
8323 struct InlineFunc *fn;
8325 tcc_open_bf(s, ":inline:", 0);
8326 /* iterate while inline functions are referenced */
8327 do {
8328 inline_generated = 0;
8329 for (i = 0; i < s->nb_inline_fns; ++i) {
8330 fn = s->inline_fns[i];
8331 sym = fn->sym;
8332 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8333 /* the function was used or forced (and then not internal):
8334 generate its code and convert it to a normal function */
8335 fn->sym = NULL;
8336 tcc_debug_putfile(s, fn->filename);
8337 begin_macro(fn->func_str, 1);
8338 next();
8339 cur_text_section = text_section;
8340 gen_function(sym);
8341 end_macro();
8343 inline_generated = 1;
8346 } while (inline_generated);
8347 tcc_close();
8350 static void free_inline_functions(TCCState *s)
8352 int i;
8353 /* free tokens of unused inline functions */
8354 for (i = 0; i < s->nb_inline_fns; ++i) {
8355 struct InlineFunc *fn = s->inline_fns[i];
8356 if (fn->sym)
8357 tok_str_free(fn->func_str);
8359 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8362 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8363 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8364 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
8366 int v, has_init, r, oldint;
8367 CType type, btype;
8368 Sym *sym;
8369 AttributeDef ad, adbase;
8371 while (1) {
8372 if (tok == TOK_STATIC_ASSERT) {
8373 CString error_str;
8374 int c;
8376 next();
8377 skip('(');
8378 c = expr_const();
8380 if (tok == ')') {
8381 if (!c)
8382 tcc_error("_Static_assert fail");
8383 next();
8384 goto static_assert_out;
8387 skip(',');
8388 parse_mult_str(&error_str, "string constant");
8389 if (c == 0)
8390 tcc_error("%s", (char *)error_str.data);
8391 cstr_free(&error_str);
8392 skip(')');
8393 static_assert_out:
8394 skip(';');
8395 continue;
8398 oldint = 0;
8399 if (!parse_btype(&btype, &adbase)) {
8400 if (is_for_loop_init)
8401 return 0;
8402 /* skip redundant ';' if not in old parameter decl scope */
8403 if (tok == ';' && l != VT_CMP) {
8404 next();
8405 continue;
8407 if (l != VT_CONST)
8408 break;
8409 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8410 /* global asm block */
8411 asm_global_instr();
8412 continue;
8414 if (tok >= TOK_UIDENT) {
8415 /* special test for old K&R protos without explicit int
8416 type. Only accepted when defining global data */
8417 btype.t = VT_INT;
8418 oldint = 1;
8419 } else {
8420 if (tok != TOK_EOF)
8421 expect("declaration");
8422 break;
8426 if (tok == ';') {
8427 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8428 v = btype.ref->v;
8429 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8430 tcc_warning("unnamed struct/union that defines no instances");
8431 next();
8432 continue;
8434 if (IS_ENUM(btype.t)) {
8435 next();
8436 continue;
8440 while (1) { /* iterate through each declaration */
8441 type = btype;
8442 ad = adbase;
8443 type_decl(&type, &ad, &v, TYPE_DIRECT);
8444 #if 0
8446 char buf[500];
8447 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8448 printf("type = '%s'\n", buf);
8450 #endif
8451 if ((type.t & VT_BTYPE) == VT_FUNC) {
8452 if ((type.t & VT_STATIC) && (l == VT_LOCAL))
8453 tcc_error("function without file scope cannot be static");
8454 /* if old style function prototype, we accept a
8455 declaration list */
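/* Illustrative old-style (K&R) definition accepted here:
       int f(a, b) int a; char *b; { return a; }
   The declarations between ')' and '{' are parsed by the recursive
   decl0(VT_CMP, 0, sym) call just below. */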
8456 sym = type.ref;
8457 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
8458 decl0(VT_CMP, 0, sym);
8459 #ifdef TCC_TARGET_MACHO
8460 if (sym->f.func_alwinl
8461 && ((type.t & (VT_EXTERN | VT_INLINE))
8462 == (VT_EXTERN | VT_INLINE))) {
8463 /* always_inline functions must be handled as if they
8464 don't generate multiple global defs, even if extern
8465 inline, i.e. GNU inline semantics for those. Rewrite
8466 them into static inline. */
8467 type.t &= ~VT_EXTERN;
8468 type.t |= VT_STATIC;
8470 #endif
8471 /* always compile 'extern inline' */
8472 if (type.t & VT_EXTERN)
8473 type.t &= ~VT_INLINE;
8475 } else if (oldint) {
8476 tcc_warning("type defaults to int");
8479 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8480 ad.asm_label = asm_label_instr();
8481 /* parse one last attribute list, after asm label */
8482 parse_attribute(&ad);
8483 #if 0
8484 /* gcc does not allow __asm__("label") with function definition,
8485 but why not ... */
8486 if (tok == '{')
8487 expect(";");
8488 #endif
8491 #ifdef TCC_TARGET_PE
8492 if (ad.a.dllimport || ad.a.dllexport) {
8493 if (type.t & VT_STATIC)
8494 tcc_error("cannot have dll linkage with static");
8495 if (type.t & VT_TYPEDEF) {
8496 tcc_warning("'%s' attribute ignored for typedef",
8497 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8498 (ad.a.dllexport = 0, "dllexport"));
8499 } else if (ad.a.dllimport) {
8500 if ((type.t & VT_BTYPE) == VT_FUNC)
8501 ad.a.dllimport = 0;
8502 else
8503 type.t |= VT_EXTERN;
8506 #endif
8507 if (tok == '{') {
8508 if (l != VT_CONST)
8509 tcc_error("cannot use local functions");
8510 if ((type.t & VT_BTYPE) != VT_FUNC)
8511 expect("function definition");
8513 /* reject abstract declarators in function definition;
8514 make old style params without decl have int type */
8515 sym = type.ref;
8516 while ((sym = sym->next) != NULL) {
8517 if (!(sym->v & ~SYM_FIELD))
8518 expect("identifier");
8519 if (sym->type.t == VT_VOID)
8520 sym->type = int_type;
8523 /* apply post-declaration attributes */
8524 merge_funcattr(&type.ref->f, &ad.f);
8526 /* put function symbol */
8527 type.t &= ~VT_EXTERN;
8528 sym = external_sym(v, &type, 0, &ad);
8530 /* static inline functions are just recorded as a kind
8531 of macro. Their code will be emitted at the end of
8532 the compilation unit only if they are used */
8533 if (sym->type.t & VT_INLINE) {
8534 struct InlineFunc *fn;
8535 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8536 strcpy(fn->filename, file->filename);
8537 fn->sym = sym;
8538 skip_or_save_block(&fn->func_str);
8539 dynarray_add(&tcc_state->inline_fns,
8540 &tcc_state->nb_inline_fns, fn);
8541 } else {
8542 /* compute text section */
8543 cur_text_section = ad.section;
8544 if (!cur_text_section)
8545 cur_text_section = text_section;
8546 gen_function(sym);
8548 break;
8549 } else {
8550 if (l == VT_CMP) {
8551 /* find parameter in function parameter list */
8552 for (sym = func_sym->next; sym; sym = sym->next)
8553 if ((sym->v & ~SYM_FIELD) == v)
8554 goto found;
8555 tcc_error("declaration for parameter '%s' but no such parameter",
8556 get_tok_str(v, NULL));
8557 found:
8558 if (type.t & VT_STORAGE) /* 'register' is okay */
8559 tcc_error("storage class specified for '%s'",
8560 get_tok_str(v, NULL));
8561 if (sym->type.t != VT_VOID)
8562 tcc_error("redefinition of parameter '%s'",
8563 get_tok_str(v, NULL));
8564 convert_parameter_type(&type);
8565 sym->type = type;
8566 } else if (type.t & VT_TYPEDEF) {
8567 /* save typedefed type */
8568 /* XXX: test storage specifiers ? */
8569 sym = sym_find(v);
8570 if (sym && sym->sym_scope == local_scope) {
8571 if (!is_compatible_types(&sym->type, &type)
8572 || !(sym->type.t & VT_TYPEDEF))
8573 tcc_error("incompatible redefinition of '%s'",
8574 get_tok_str(v, NULL));
8575 sym->type = type;
8576 } else {
8577 sym = sym_push(v, &type, 0, 0);
8579 sym->a = ad.a;
8580 sym->f = ad.f;
8581 if (tcc_state->do_debug)
8582 tcc_debug_typedef (tcc_state, sym);
8583 } else if ((type.t & VT_BTYPE) == VT_VOID
8584 && !(type.t & VT_EXTERN)) {
8585 tcc_error("declaration of void object");
8586 } else {
8587 r = 0;
8588 if ((type.t & VT_BTYPE) == VT_FUNC) {
8589 /* external function definition */
8590 /* specific case for func_call attribute */
8591 type.ref->f = ad.f;
8592 } else if (!(type.t & VT_ARRAY)) {
8593 /* not lvalue if array */
8594 r |= VT_LVAL;
8596 has_init = (tok == '=');
8597 if (has_init && (type.t & VT_VLA))
8598 tcc_error("variable length array cannot be initialized");
8599 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8600 || (type.t & VT_BTYPE) == VT_FUNC
8601 /* as with GCC, uninitialized global arrays with no size
8602 are considered extern: */
8603 || ((type.t & VT_ARRAY) && !has_init
8604 && l == VT_CONST && type.ref->c < 0)
8606 /* external variable or function */
8607 type.t |= VT_EXTERN;
8608 sym = external_sym(v, &type, r, &ad);
8609 if (ad.alias_target) {
8610 /* Aliases need to be emitted when their target
8611 symbol is emitted, even if perhaps unreferenced.
8612 We only support the case where the base is
8613 already defined, otherwise we would need to
8614 defer emitting the aliases until the end of
8615 the compilation unit. */
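/* Illustrative use of the attribute (target must already be defined):
       int real_impl(void) { return 1; }
       int alias_fn(void) __attribute__((alias("real_impl")));
*/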
8616 Sym *alias_target = sym_find(ad.alias_target);
8617 ElfSym *esym = elfsym(alias_target);
8618 if (!esym)
8619 tcc_error("unsupported forward __alias__ attribute");
8620 put_extern_sym2(sym, esym->st_shndx,
8621 esym->st_value, esym->st_size, 1);
8623 } else {
8624 if (type.t & VT_STATIC)
8625 r |= VT_CONST;
8626 else
8627 r |= l;
8628 if (has_init)
8629 next();
8630 else if (l == VT_CONST)
8631 /* uninitialized global variables may be overridden */
8632 type.t |= VT_EXTERN;
8633 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
8636 if (tok != ',') {
8637 if (is_for_loop_init)
8638 return 1;
8639 skip(';');
8640 break;
8642 next();
8646 return 0;
8649 static void decl(int l)
8651 decl0(l, 0, NULL);
8654 /* ------------------------------------------------------------------------- */
8655 #undef gjmp_addr
8656 #undef gjmp
8657 /* ------------------------------------------------------------------------- */