1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
/* loc : local variable index
   ind : output code index
   rsym: return symbol
   anon_sym: anonymous symbol index
*/
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int in_generic;
48 static int section_sym;
50 ST_DATA SValue *vtop;
51 static SValue _vstack[1 + VSTACK_SIZE];
52 #define vstack (_vstack + 1)
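/* _vstack[0] is a spare slot so that the empty-stack value
   'vtop = vstack - 1' still points at a valid, zeroed SValue
   (see tccgen_init and check_vstack) */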
54 ST_DATA int const_wanted; /* true if constant wanted */
55 ST_DATA int nocode_wanted; /* no code generation wanted */
56 #define unevalmask 0xffff /* unevaluated subexpression */
57 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
58 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
60 /* Automagical code suppression ----> */
61 #define CODE_OFF() (nocode_wanted |= 0x20000000)
62 #define CODE_ON() (nocode_wanted &= ~0x20000000)
64 /* Clear 'nocode_wanted' at label if it was used */
65 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
66 static int gind(void) { CODE_ON(); return ind; }
68 /* Set 'nocode_wanted' after unconditional jumps */
69 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
70 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
72 /* These are #undef'd at the end of this file */
73 #define gjmp_addr gjmp_addr_acs
74 #define gjmp gjmp_acs
75 /* <---- */
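/* e.g. for "if (0) f();" the unconditional jump around the dead branch
   sets the 0x20000000 bit (CODE_OFF), so f() is still parsed and
   type-checked but no code is emitted; resolving the jump target with
   gsym() re-enables generation via CODE_ON(). */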
ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializer parsing) */
78 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
79 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
80 ST_DATA int func_vc;
81 static int last_line_num, new_file, func_ind; /* debug info control */
82 ST_DATA const char *funcname;
83 ST_DATA CType int_type, func_old_type, char_pointer_type;
84 static CString initstr;
86 #if PTR_SIZE == 4
87 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
88 #define VT_PTRDIFF_T VT_INT
89 #elif LONG_SIZE == 4
90 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_LLONG
92 #else
93 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
95 #endif
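/* i.e. VT_SIZE_T matches the target's size_t: 'unsigned int' with
   32-bit pointers, 'unsigned long long' when long is 32-bit (e.g.
   Win64), and 'unsigned long' otherwise */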
97 ST_DATA struct switch_t {
98 struct case_t {
99 int64_t v1, v2;
100 int sym;
101 } **p; int n; /* list of case ranges */
102 int def_sym; /* default symbol */
103 int *bsym;
104 struct scope *scope;
105 struct switch_t *prev;
106 SValue sv;
107 } *cur_switch; /* current switch */
109 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
/* list of temporary local variables on the stack in the current function */
ST_DATA struct temp_local_variable {
	int location; /* offset on stack, stored in SValue.c.i */
113 short size;
114 short align;
115 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
116 short nb_temp_local_vars;
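/* block scopes: cur_scope is the innermost open scope, loop_scope the
   innermost loop (target of break/continue) and root_scope the
   function's outermost scope.  bsym/csym collect the forward jumps of
   break/continue, 'cl' the pending cleanups, 'vla' the VLA bookkeeping,
   and lstk/llstk save the local symbol and label stacks. */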
118 static struct scope {
119 struct scope *prev;
120 struct { int loc, num; } vla;
121 struct { Sym *s; int n; } cl;
122 int *bsym, *csym;
123 Sym *lstk, *llstk;
124 } *cur_scope, *loop_scope, *root_scope;
126 /********************************************************/
127 /* stab debug support */
129 static const struct {
130 int type;
131 const char *name;
132 } default_debug[] = {
133 { VT_INT, "int:t1=r1;-2147483648;2147483647;" },
134 { VT_BYTE, "char:t2=r2;0;127;" },
135 #if LONG_SIZE == 4
136 { VT_LONG | VT_INT, "long int:t3=r3;-2147483648;2147483647;" },
137 #else
138 { VT_LLONG | VT_LONG, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
139 #endif
140 { VT_INT | VT_UNSIGNED, "unsigned int:t4=r4;0;037777777777;" },
141 #if LONG_SIZE == 4
142 { VT_LONG | VT_INT | VT_UNSIGNED, "long unsigned int:t5=r5;0;037777777777;" },
143 #else
144 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
145 { VT_LLONG | VT_LONG | VT_UNSIGNED, "long unsigned int:t5=r5;0;01777777777777777777777;" },
146 #endif
147 { VT_QLONG, "__int128:t6=r6;0;-1;" },
148 { VT_QLONG | VT_UNSIGNED, "__int128 unsigned:t7=r7;0;-1;" },
149 { VT_LLONG, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
150 { VT_LLONG | VT_UNSIGNED, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
151 { VT_SHORT, "short int:t10=r10;-32768;32767;" },
152 { VT_SHORT | VT_UNSIGNED, "short unsigned int:t11=r11;0;65535;" },
153 { VT_BYTE | VT_DEFSIGN, "signed char:t12=r12;-128;127;" },
154 { VT_BYTE | VT_DEFSIGN | VT_UNSIGNED, "unsigned char:t13=r13;0;255;" },
155 { VT_FLOAT, "float:t14=r1;4;0;" },
156 { VT_DOUBLE, "double:t15=r1;8;0;" },
157 { VT_LDOUBLE, "long double:t16=r1;16;0;" },
158 { -1, "_Float32:t17=r1;4;0;" },
159 { -1, "_Float64:t18=r1;8;0;" },
160 { -1, "_Float128:t19=r1;16;0;" },
161 { -1, "_Float32x:t20=r1;8;0;" },
162 { -1, "_Float64x:t21=r1;16;0;" },
163 { -1, "_Decimal32:t22=r1;4;0;" },
164 { -1, "_Decimal64:t23=r1;8;0;" },
165 { -1, "_Decimal128:t24=r1;16;0;" },
166 /* if default char is unsigned */
167 { VT_BYTE | VT_UNSIGNED, "unsigned char:t25=r25;0;255;" },
    { VT_VOID, "void:t26=26" },
};
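/* each entry is a stabs string "NAME:tN=rM;LOW;HIGH;" registering type
   number N as a range of type M with the given bounds; this is how the
   basic types are described to stabs-based debuggers */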
171 static int debug_next_type;
173 static struct debug_hash {
174 int debug_type;
175 Sym *type;
176 } *debug_hash;
178 static int n_debug_hash;
180 static struct debug_info {
181 int start;
182 int end;
183 int n_sym;
184 struct debug_sym {
185 int type;
186 unsigned long value;
187 char *str;
188 Section *sec;
189 int sym_index;
190 } *sym;
191 struct debug_info *child, *next, *last, *parent;
192 } *debug_info, *debug_info_root;
194 /********************************************************/
195 #if 1
196 #define precedence_parser
197 static void init_prec(void);
198 #endif
199 /********************************************************/
200 #ifndef CONFIG_TCC_ASM
ST_FUNC void asm_instr(void)
{
    tcc_error("inline asm() not supported");
}
ST_FUNC void asm_global_instr(void)
{
    tcc_error("inline asm() not supported");
}
209 #endif
211 /* ------------------------------------------------------------------------- */
212 static void gen_cast(CType *type);
213 static void gen_cast_s(int t);
214 static inline CType *pointed_type(CType *type);
215 static int is_compatible_types(CType *type1, CType *type2);
216 static int parse_btype(CType *type, AttributeDef *ad);
217 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
218 static void parse_expr_type(CType *type);
219 static void init_putv(CType *type, Section *sec, unsigned long c);
220 static void decl_initializer(CType *type, Section *sec, unsigned long c, int flags);
221 static void block(int is_expr);
222 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
223 static void decl(int l);
224 static int decl0(int l, int is_for_loop_init, Sym *);
225 static void expr_eq(void);
226 static void vla_runtime_type_size(CType *type, int *a);
227 static int is_compatible_unqualified_types(CType *type1, CType *type2);
228 static inline int64_t expr_const64(void);
229 static void vpush64(int ty, unsigned long long v);
230 static void vpush(CType *type);
231 static int gvtst(int inv, int t);
232 static void gen_inline_functions(TCCState *s);
233 static void free_inline_functions(TCCState *s);
234 static void skip_or_save_block(TokenString **str);
235 static void gv_dup(void);
236 static int get_temp_local_var(int size,int align);
237 static void clear_temp_local_var_list();
238 static void cast_error(CType *st, CType *dt);
ST_INLN int is_float(int t)
{
    int bt = t & VT_BTYPE;
    return bt == VT_LDOUBLE
        || bt == VT_DOUBLE
        || bt == VT_FLOAT
        || bt == VT_QFLOAT;
}
static inline int is_integer_btype(int bt)
{
    return bt == VT_BYTE
        || bt == VT_BOOL
        || bt == VT_SHORT
        || bt == VT_INT
        || bt == VT_LLONG;
}
static int btype_size(int bt)
{
    return bt == VT_BYTE || bt == VT_BOOL ? 1 :
        bt == VT_SHORT ? 2 :
        bt == VT_INT ? 4 :
        bt == VT_LLONG ? 8 :
        bt == VT_PTR ? PTR_SIZE : 0;
}
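/* e.g. btype_size(VT_INT) == 4 and btype_size(VT_PTR) == PTR_SIZE;
   floating and aggregate types fall through to 0 */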
267 /* returns function return register from type */
268 static int R_RET(int t)
270 if (!is_float(t))
271 return REG_IRET;
272 #ifdef TCC_TARGET_X86_64
273 if ((t & VT_BTYPE) == VT_LDOUBLE)
274 return TREG_ST0;
275 #elif defined TCC_TARGET_RISCV64
276 if ((t & VT_BTYPE) == VT_LDOUBLE)
277 return REG_IRET;
278 #endif
279 return REG_FRET;
282 /* returns 2nd function return register, if any */
283 static int R2_RET(int t)
285 t &= VT_BTYPE;
286 #if PTR_SIZE == 4
287 if (t == VT_LLONG)
288 return REG_IRE2;
289 #elif defined TCC_TARGET_X86_64
290 if (t == VT_QLONG)
291 return REG_IRE2;
292 if (t == VT_QFLOAT)
293 return REG_FRE2;
294 #elif defined TCC_TARGET_RISCV64
295 if (t == VT_LDOUBLE)
296 return REG_IRE2;
297 #endif
298 return VT_CONST;
301 /* returns true for two-word types */
302 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
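/* e.g. on a 32-bit target a 'long long' result comes back in the
   REG_IRET/REG_IRE2 pair, so USING_TWO_WORDS(VT_LLONG) is true there */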
304 /* put function return registers to stack value */
305 static void PUT_R_RET(SValue *sv, int t)
307 sv->r = R_RET(t), sv->r2 = R2_RET(t);
310 /* returns function return register class for type t */
311 static int RC_RET(int t)
313 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
316 /* returns generic register class for type t */
317 static int RC_TYPE(int t)
319 if (!is_float(t))
320 return RC_INT;
321 #ifdef TCC_TARGET_X86_64
322 if ((t & VT_BTYPE) == VT_LDOUBLE)
323 return RC_ST0;
324 if ((t & VT_BTYPE) == VT_QFLOAT)
325 return RC_FRET;
326 #elif defined TCC_TARGET_RISCV64
327 if ((t & VT_BTYPE) == VT_LDOUBLE)
328 return RC_INT;
329 #endif
330 return RC_FLOAT;
333 /* returns 2nd register class corresponding to t and rc */
334 static int RC2_TYPE(int t, int rc)
336 if (!USING_TWO_WORDS(t))
337 return 0;
338 #ifdef RC_IRE2
339 if (rc == RC_IRET)
340 return RC_IRE2;
341 #endif
342 #ifdef RC_FRE2
343 if (rc == RC_FRET)
344 return RC_FRE2;
345 #endif
346 if (rc & RC_FLOAT)
347 return RC_FLOAT;
348 return RC_INT;
/* we use our own 'finite' function to avoid potential problems with
   non-standard math libs */
/* XXX: endianness dependent */
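/* (on a little-endian host p[1] is the word holding sign and exponent;
   forcing all non-exponent bits to 1 and adding 1 makes bit 31 carry
   over to 0 exactly when the exponent field is all ones, i.e. for
   Inf/NaN) */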
ST_FUNC int ieee_finite(double d)
{
    int p[4];
    memcpy(p, &d, sizeof(double));
    return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
}
361 /* compiling intel long double natively */
362 #if (defined __i386__ || defined __x86_64__) \
363 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
364 # define TCC_IS_NATIVE_387
365 #endif
ST_FUNC void test_lvalue(void)
{
    if (!(vtop->r & VT_LVAL))
        expect("lvalue");
}
ST_FUNC void check_vstack(void)
{
    if (vtop != vstack - 1)
        tcc_error("internal compiler error: vstack leak (%d)",
                  (int)(vtop - vstack + 1));
}
380 /* ------------------------------------------------------------------------- */
381 /* vstack debugging aid */
383 #if 0
384 void pv (const char *lbl, int a, int b)
386 int i;
387 for (i = a; i < a + b; ++i) {
388 SValue *p = &vtop[-i];
389 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
390 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
393 #endif
395 /* ------------------------------------------------------------------------- */
396 /* start of translation unit info */
397 ST_FUNC void tcc_debug_start(TCCState *s1)
399 if (s1->do_debug) {
400 int i;
401 char buf[512];
403 /* file info: full path + filename */
404 section_sym = put_elf_sym(symtab_section, 0, 0,
405 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
406 text_section->sh_num, NULL);
407 getcwd(buf, sizeof(buf));
408 #ifdef _WIN32
409 normalize_slashes(buf);
410 #endif
411 pstrcat(buf, sizeof(buf), "/");
412 put_stabs_r(s1, buf, N_SO, 0, 0,
413 text_section->data_offset, text_section, section_sym);
414 put_stabs_r(s1, file->prev->filename, N_SO, 0, 0,
415 text_section->data_offset, text_section, section_sym);
416 for (i = 0; i < sizeof (default_debug) / sizeof (default_debug[0]); i++)
417 put_stabs(s1, default_debug[i].name, N_LSYM, 0, 0, 0);
419 new_file = last_line_num = 0;
420 func_ind = -1;
421 debug_next_type = sizeof(default_debug) / sizeof(default_debug[0]);
422 debug_hash = NULL;
423 n_debug_hash = 0;
425 /* we're currently 'including' the <command line> */
426 tcc_debug_bincl(s1);
429 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
430 symbols can be safely used */
431 put_elf_sym(symtab_section, 0, 0,
432 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
433 SHN_ABS, file->filename);
436 static void tcc_debug_stabs (TCCState *s1, const char *str, int type, unsigned long value,
437 Section *sec, int sym_index)
439 struct debug_sym *s;
441 if (debug_info) {
442 debug_info->sym =
443 (struct debug_sym *)tcc_realloc (debug_info->sym,
444 sizeof(struct debug_sym) *
445 (debug_info->n_sym + 1));
446 s = debug_info->sym + debug_info->n_sym++;
447 s->type = type;
448 s->value = value;
449 s->str = tcc_strdup(str);
450 s->sec = sec;
451 s->sym_index = sym_index;
453 else if (sec)
454 put_stabs_r (s1, str, type, 0, 0, value, sec, sym_index);
455 else
456 put_stabs (s1, str, type, 0, 0, value);
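/* open (N_LBRAC) or close (N_RBRAC) a lexical block; blocks are
   collected into the debug_info tree and emitted later by
   tcc_debug_finish() */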
459 static void tcc_debug_stabn(int type, int value)
461 if (type == N_LBRAC) {
462 struct debug_info *info =
463 (struct debug_info *) tcc_mallocz(sizeof (*info));
465 info->start = value;
466 info->parent = debug_info;
467 if (debug_info) {
468 if (debug_info->child) {
469 if (debug_info->child->last)
470 debug_info->child->last->next = info;
471 else
472 debug_info->child->next = info;
473 debug_info->child->last = info;
475 else
476 debug_info->child = info;
478 else
479 debug_info_root = info;
480 debug_info = info;
482 else {
483 debug_info->end = value;
484 debug_info = debug_info->parent;
488 static void tcc_get_debug_info(TCCState *s1, Sym *s, CString *result)
490 int type;
491 int n = 0;
492 int debug_type = -1;
493 Sym *t = s;
494 CString str;
496 for (;;) {
497 type = t->type.t & ~(VT_EXTERN | VT_STATIC | VT_CONSTANT | VT_VOLATILE);
498 if ((type & VT_BTYPE) != VT_BYTE)
499 type &= ~VT_DEFSIGN;
500 if (type == VT_PTR || type == (VT_PTR | VT_ARRAY))
501 n++, t = t->type.ref;
502 else
503 break;
505 if ((type & VT_BTYPE) == VT_STRUCT) {
506 int i;
508 t = t->type.ref;
509 for (i = 0; i < n_debug_hash; i++) {
510 if (t == debug_hash[i].type) {
511 debug_type = debug_hash[i].debug_type;
512 break;
515 if (debug_type == -1) {
516 debug_type = ++debug_next_type;
517 debug_hash = (struct debug_hash *)
518 tcc_realloc (debug_hash,
519 (n_debug_hash + 1) * sizeof(*debug_hash));
520 debug_hash[n_debug_hash].debug_type = debug_type;
521 debug_hash[n_debug_hash++].type = t;
522 cstr_new (&str);
523 cstr_printf (&str, "%s:T%d=%c%d",
524 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
525 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
526 debug_type,
527 IS_UNION (t->type.t) ? 'u' : 's',
528 t->c);
529 while (t->next) {
530 int pos, size, align;
532 t = t->next;
533 cstr_printf (&str, "%s:",
534 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
535 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
536 tcc_get_debug_info (s1, t, &str);
537 if (t->type.t & VT_BITFIELD) {
538 pos = t->c * 8 + BIT_POS(t->type.t);
539 size = BIT_SIZE(t->type.t);
541 else {
542 pos = t->c * 8;
543 size = type_size(&t->type, &align) * 8;
545 cstr_printf (&str, ",%d,%d;", pos, size);
547 cstr_printf (&str, ";");
548 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
549 cstr_free (&str);
552 else if (IS_ENUM(type)) {
553 Sym *e = t = t->type.ref;
555 debug_type = ++debug_next_type;
556 cstr_new (&str);
557 cstr_printf (&str, "%s:T%d=e",
558 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
559 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
560 debug_type);
561 while (t->next) {
562 t = t->next;
563 cstr_printf (&str, "%s:",
564 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
565 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
566 cstr_printf (&str, e->type.t & VT_UNSIGNED ? "%u," : "%d,",
567 (int)t->enum_val);
569 cstr_printf (&str, ";");
570 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
571 cstr_free (&str);
573 else if ((type & VT_BTYPE) != VT_FUNC) {
574 type &= ~VT_STRUCT_MASK;
575 for (debug_type = 1;
576 debug_type <= sizeof(default_debug) / sizeof(default_debug[0]);
577 debug_type++)
578 if (default_debug[debug_type - 1].type == type)
579 break;
580 if (debug_type > sizeof(default_debug) / sizeof(default_debug[0]))
581 return;
583 if (n > 0)
584 cstr_printf (result, "%d=", ++debug_next_type);
585 t = s;
586 for (;;) {
587 type = t->type.t & ~(VT_EXTERN | VT_STATIC | VT_CONSTANT | VT_VOLATILE);
588 if ((type & VT_BTYPE) != VT_BYTE)
589 type &= ~VT_DEFSIGN;
590 if (type == VT_PTR)
591 cstr_printf (result, "%d=*", ++debug_next_type);
592 else if (type == (VT_PTR | VT_ARRAY))
593 cstr_printf (result, "%d=ar1;0;%d;",
594 ++debug_next_type, t->type.ref->c - 1);
595 else if (type == VT_FUNC) {
596 cstr_printf (result, "%d=f", ++debug_next_type);
597 tcc_get_debug_info (s1, t->type.ref, result);
598 return;
600 else
601 break;
602 t = t->type.ref;
604 cstr_printf (result, "%d", debug_type);
607 static void tcc_debug_finish (TCCState *s1, struct debug_info *cur)
609 while (cur) {
610 int i;
611 struct debug_info *next = cur->next;
613 for (i = 0; i < cur->n_sym; i++) {
614 struct debug_sym *s = &cur->sym[i];
616 if (s->sec)
617 put_stabs_r(s1, s->str, s->type, 0, 0, s->value,
618 s->sec, s->sym_index);
619 else
620 put_stabs(s1, s->str, s->type, 0, 0, s->value);
621 tcc_free (s->str);
623 tcc_free (cur->sym);
624 put_stabn(s1, N_LBRAC, 0, 0, cur->start);
625 tcc_debug_finish (s1, cur->child);
626 put_stabn(s1, N_RBRAC, 0, 0, cur->end);
627 tcc_free (cur);
628 cur = next;
632 static void tcc_add_debug_info(TCCState *s1, int param, Sym *s, Sym *e)
634 CString debug_str;
635 cstr_new (&debug_str);
636 for (; s != e; s = s->prev) {
637 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
638 continue;
639 cstr_reset (&debug_str);
640 cstr_printf (&debug_str, "%s:%s", get_tok_str(s->v, NULL), param ? "p" : "");
641 tcc_get_debug_info(s1, s, &debug_str);
642 tcc_debug_stabs(s1, debug_str.data, param ? N_PSYM : N_LSYM, s->c, NULL, 0);
644 cstr_free (&debug_str);
647 static void tcc_debug_extern_sym(TCCState *s1, Sym *sym, int sh_num, int sym_bind)
649 Section *s = s1->sections[sh_num];
650 CString str;
652 cstr_new (&str);
653 cstr_printf (&str, "%s:%c",
654 get_tok_str(sym->v, NULL),
655 sym_bind == STB_GLOBAL ? 'G' : local_scope ? 'V' : 'S'
657 tcc_get_debug_info(s1, sym, &str);
658 if (sym_bind == STB_GLOBAL)
659 tcc_debug_stabs(s1, str.data, N_GSYM, 0, NULL, 0);
660 else
661 tcc_debug_stabs(s1, str.data,
662 (sym->type.t & VT_STATIC) && data_section == s
663 ? N_STSYM : N_LCSYM, 0, s, sym->c);
664 cstr_free (&str);
667 /* put end of translation unit info */
668 ST_FUNC void tcc_debug_end(TCCState *s1)
670 if (!s1->do_debug)
671 return;
672 put_stabs_r(s1, NULL, N_SO, 0, 0,
673 text_section->data_offset, text_section, section_sym);
674 tcc_free(debug_hash);
677 static BufferedFile* put_new_file(TCCState *s1)
679 BufferedFile *f = file;
680 /* use upper file if from inline ":asm:" */
681 if (f->filename[0] == ':')
682 f = f->prev;
683 if (f && new_file) {
684 put_stabs_r(s1, f->filename, N_SOL, 0, 0, ind, text_section, section_sym);
685 new_file = last_line_num = 0;
687 return f;
690 /* generate line number info */
691 ST_FUNC void tcc_debug_line(TCCState *s1)
693 BufferedFile *f;
694 if (!s1->do_debug
695 || cur_text_section != text_section
696 || !(f = put_new_file(s1))
697 || last_line_num == f->line_num)
698 return;
699 if (func_ind != -1) {
700 put_stabn(s1, N_SLINE, 0, f->line_num, ind - func_ind);
701 } else {
702 /* from tcc_assemble */
703 put_stabs_r(s1, NULL, N_SLINE, 0, f->line_num, ind, text_section, section_sym);
705 last_line_num = f->line_num;
708 /* put function symbol */
709 ST_FUNC void tcc_debug_funcstart(TCCState *s1, Sym *sym)
711 CString debug_str;
712 BufferedFile *f;
713 if (!s1->do_debug)
714 return;
715 debug_info_root = NULL;
716 debug_info = NULL;
717 tcc_debug_stabn(N_LBRAC, ind - func_ind);
718 if (!(f = put_new_file(s1)))
719 return;
720 cstr_new (&debug_str);
721 cstr_printf(&debug_str, "%s:%c", funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
722 tcc_get_debug_info(s1, sym->type.ref, &debug_str);
723 put_stabs_r(s1, debug_str.data, N_FUN, 0, f->line_num, 0, cur_text_section, sym->c);
724 cstr_free (&debug_str);
726 tcc_debug_line(s1);
729 /* put function size */
730 ST_FUNC void tcc_debug_funcend(TCCState *s1, int size)
732 if (!s1->do_debug)
733 return;
734 tcc_debug_stabn(N_RBRAC, size);
735 tcc_debug_finish (s1, debug_info_root);
738 /* put alternative filename */
739 ST_FUNC void tcc_debug_putfile(TCCState *s1, const char *filename)
741 if (0 == strcmp(file->filename, filename))
742 return;
743 pstrcpy(file->filename, sizeof(file->filename), filename);
744 new_file = 1;
747 /* begin of #include */
748 ST_FUNC void tcc_debug_bincl(TCCState *s1)
750 if (!s1->do_debug)
751 return;
752 put_stabs(s1, file->filename, N_BINCL, 0, 0, 0);
753 new_file = 1;
756 /* end of #include */
757 ST_FUNC void tcc_debug_eincl(TCCState *s1)
759 if (!s1->do_debug)
760 return;
761 put_stabn(s1, N_EINCL, 0, 0, 0);
762 new_file = 1;
765 /* ------------------------------------------------------------------------- */
/* initialize vstack and types. This must also be done for tcc -E */
767 ST_FUNC void tccgen_init(TCCState *s1)
769 vtop = vstack - 1;
770 memset(vtop, 0, sizeof *vtop);
772 /* define some often used types */
773 int_type.t = VT_INT;
774 char_pointer_type.t = VT_BYTE;
775 mk_pointer(&char_pointer_type);
776 func_old_type.t = VT_FUNC;
777 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
778 func_old_type.ref->f.func_call = FUNC_CDECL;
779 func_old_type.ref->f.func_type = FUNC_OLD;
780 #ifdef precedence_parser
781 init_prec();
782 #endif
783 cstr_new(&initstr);
786 ST_FUNC int tccgen_compile(TCCState *s1)
788 cur_text_section = NULL;
789 funcname = "";
790 anon_sym = SYM_FIRST_ANOM;
791 section_sym = 0;
792 const_wanted = 0;
793 nocode_wanted = 0x80000000;
794 local_scope = 0;
796 tcc_debug_start(s1);
797 #ifdef TCC_TARGET_ARM
798 arm_init(s1);
799 #endif
800 #ifdef INC_DEBUG
801 printf("%s: **** new file\n", file->filename);
802 #endif
803 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
804 next();
805 decl(VT_CONST);
806 gen_inline_functions(s1);
807 check_vstack();
808 /* end of translation unit info */
809 tcc_debug_end(s1);
810 return 0;
813 ST_FUNC void tccgen_finish(TCCState *s1)
815 cstr_free(&initstr);
816 free_inline_functions(s1);
817 sym_pop(&global_stack, NULL, 0);
818 sym_pop(&local_stack, NULL, 0);
819 /* free preprocessor macros */
820 free_defines(NULL);
821 /* free sym_pools */
822 dynarray_reset(&sym_pools, &nb_sym_pools);
823 sym_free_first = NULL;
826 /* ------------------------------------------------------------------------- */
827 ST_FUNC ElfSym *elfsym(Sym *s)
829 if (!s || !s->c)
830 return NULL;
831 return &((ElfSym *)symtab_section->data)[s->c];
834 /* apply storage attributes to Elf symbol */
835 ST_FUNC void update_storage(Sym *sym)
837 ElfSym *esym;
838 int sym_bind, old_sym_bind;
840 esym = elfsym(sym);
841 if (!esym)
842 return;
844 if (sym->a.visibility)
845 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
846 | sym->a.visibility;
848 if (sym->type.t & (VT_STATIC | VT_INLINE))
849 sym_bind = STB_LOCAL;
850 else if (sym->a.weak)
851 sym_bind = STB_WEAK;
852 else
853 sym_bind = STB_GLOBAL;
854 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
855 if (sym_bind != old_sym_bind) {
856 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
859 #ifdef TCC_TARGET_PE
860 if (sym->a.dllimport)
861 esym->st_other |= ST_PE_IMPORT;
862 if (sym->a.dllexport)
863 esym->st_other |= ST_PE_EXPORT;
864 #endif
866 #if 0
867 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
868 get_tok_str(sym->v, NULL),
869 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
870 sym->a.visibility,
871 sym->a.dllexport,
872 sym->a.dllimport
874 #endif
877 /* ------------------------------------------------------------------------- */
878 /* update sym->c so that it points to an external symbol in section
879 'section' with value 'value' */
881 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
882 addr_t value, unsigned long size,
883 int can_add_underscore)
885 int sym_type, sym_bind, info, other, t;
886 ElfSym *esym;
887 const char *name;
888 char buf1[256];
889 #ifdef CONFIG_TCC_BCHECK
890 char buf[32];
891 #endif
892 if (!sym->c) {
893 name = get_tok_str(sym->v, NULL);
894 #ifdef CONFIG_TCC_BCHECK
895 if (tcc_state->do_bounds_check) {
896 /* XXX: avoid doing that for statics ? */
897 /* if bound checking is activated, we change some function
898 names by adding the "__bound" prefix */
899 #if defined(TCC_TARGET_ARM) && defined(TCC_ARM_EABI)
900 if (strcmp (name, "memcpy") == 0 ||
901 strcmp (name, "memmove") == 0 ||
902 strcmp (name, "memset") == 0)
903 goto add_bound;
904 #endif
905 switch(sym->v) {
906 #ifdef TCC_TARGET_PE
907 /* XXX: we rely only on malloc hooks */
908 case TOK_malloc:
909 case TOK_free:
910 case TOK_realloc:
911 case TOK_memalign:
912 case TOK_calloc:
913 #endif
914 case TOK_memcpy:
915 case TOK_memmove:
916 #if defined(TCC_TARGET_ARM) && defined(TCC_ARM_EABI)
917 case TOK_memmove4:
918 case TOK_memmove8:
919 #endif
920 case TOK_memset:
921 case TOK_memcmp:
922 case TOK_strlen:
923 case TOK_strcpy:
924 case TOK_strncpy:
925 case TOK_strcmp:
926 case TOK_strncmp:
927 case TOK_strcat:
928 case TOK_strchr:
929 case TOK_strdup:
930 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
931 case TOK_alloca:
932 #endif
933 case TOK_mmap:
934 case TOK_munmap:
935 case TOK_longjmp:
936 #ifndef TCC_TARGET_PE
937 case TOK_siglongjmp:
938 #endif
939 #if defined(TCC_TARGET_ARM) && defined(TCC_ARM_EABI)
940 add_bound:
941 #endif
942 strcpy(buf, "__bound_");
943 strcat(buf, name);
944 name = buf;
945 break;
948 #endif
949 t = sym->type.t;
950 if ((t & VT_BTYPE) == VT_FUNC) {
951 sym_type = STT_FUNC;
952 } else if ((t & VT_BTYPE) == VT_VOID) {
953 sym_type = STT_NOTYPE;
954 } else {
955 sym_type = STT_OBJECT;
957 if (t & (VT_STATIC | VT_INLINE))
958 sym_bind = STB_LOCAL;
959 else
960 sym_bind = STB_GLOBAL;
961 other = 0;
962 #ifdef TCC_TARGET_PE
963 if (sym_type == STT_FUNC && sym->type.ref) {
964 Sym *ref = sym->type.ref;
965 if (ref->a.nodecorate) {
966 can_add_underscore = 0;
968 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
969 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
970 name = buf1;
971 other |= ST_PE_STDCALL;
972 can_add_underscore = 0;
975 #endif
976 if (tcc_state->leading_underscore && can_add_underscore) {
977 buf1[0] = '_';
978 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
979 name = buf1;
981 if (sym->asm_label)
982 name = get_tok_str(sym->asm_label, NULL);
983 info = ELFW(ST_INFO)(sym_bind, sym_type);
984 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
986 if (tcc_state->do_debug
987 && sym_type != STT_FUNC
988 && sym->v < SYM_FIRST_ANOM)
989 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind);
991 } else {
992 esym = elfsym(sym);
993 esym->st_value = value;
994 esym->st_size = size;
995 esym->st_shndx = sh_num;
997 update_storage(sym);
1000 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
1001 addr_t value, unsigned long size)
1003 int sh_num = section ? section->sh_num : SHN_UNDEF;
1004 put_extern_sym2(sym, sh_num, value, size, 1);
1007 /* add a new relocation entry to symbol 'sym' in section 's' */
1008 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
1009 addr_t addend)
1011 int c = 0;
1013 if (nocode_wanted && s == cur_text_section)
1014 return;
1016 if (sym) {
1017 if (0 == sym->c)
1018 put_extern_sym(sym, NULL, 0, 0);
1019 c = sym->c;
1022 /* now we can add ELF relocation info */
1023 put_elf_reloca(symtab_section, s, offset, type, c, addend);
1026 #if PTR_SIZE == 4
1027 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
1029 greloca(s, sym, offset, type, 0);
1031 #endif
1033 /* ------------------------------------------------------------------------- */
1034 /* symbol allocator */
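/* Syms are carved out of pools of SYM_POOL_NB entries; freed Syms are
   chained through ->next into sym_free_first and reused, so individual
   symbols are only returned to the heap when the pools are freed in
   tccgen_finish() */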
1035 static Sym *__sym_malloc(void)
1037 Sym *sym_pool, *sym, *last_sym;
1038 int i;
1040 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
1041 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
1043 last_sym = sym_free_first;
1044 sym = sym_pool;
1045 for(i = 0; i < SYM_POOL_NB; i++) {
1046 sym->next = last_sym;
1047 last_sym = sym;
1048 sym++;
1050 sym_free_first = last_sym;
1051 return last_sym;
1054 static inline Sym *sym_malloc(void)
1056 Sym *sym;
1057 #ifndef SYM_DEBUG
1058 sym = sym_free_first;
1059 if (!sym)
1060 sym = __sym_malloc();
1061 sym_free_first = sym->next;
1062 return sym;
1063 #else
1064 sym = tcc_malloc(sizeof(Sym));
1065 return sym;
1066 #endif
1069 ST_INLN void sym_free(Sym *sym)
1071 #ifndef SYM_DEBUG
1072 sym->next = sym_free_first;
1073 sym_free_first = sym;
1074 #else
1075 tcc_free(sym);
1076 #endif
1079 /* push, without hashing */
1080 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
1082 Sym *s;
1084 s = sym_malloc();
1085 memset(s, 0, sizeof *s);
1086 s->v = v;
1087 s->type.t = t;
1088 s->c = c;
1089 /* add in stack */
1090 s->prev = *ps;
1091 *ps = s;
1092 return s;
1095 /* find a symbol and return its associated structure. 's' is the top
1096 of the symbol stack */
1097 ST_FUNC Sym *sym_find2(Sym *s, int v)
1099 while (s) {
1100 if (s->v == v)
1101 return s;
1102 else if (s->v == -1)
1103 return NULL;
1104 s = s->prev;
1106 return NULL;
1109 /* structure lookup */
1110 ST_INLN Sym *struct_find(int v)
1112 v -= TOK_IDENT;
1113 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1114 return NULL;
1115 return table_ident[v]->sym_struct;
1118 /* find an identifier */
1119 ST_INLN Sym *sym_find(int v)
1121 v -= TOK_IDENT;
1122 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1123 return NULL;
1124 return table_ident[v]->sym_identifier;
1127 static int sym_scope(Sym *s)
1129 if (IS_ENUM_VAL (s->type.t))
1130 return s->type.ref->sym_scope;
1131 else
1132 return s->sym_scope;
1135 /* push a given symbol on the symbol stack */
1136 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
1138 Sym *s, **ps;
1139 TokenSym *ts;
1141 if (local_stack)
1142 ps = &local_stack;
1143 else
1144 ps = &global_stack;
1145 s = sym_push2(ps, v, type->t, c);
1146 s->type.ref = type->ref;
1147 s->r = r;
1148 /* don't record fields or anonymous symbols */
1149 /* XXX: simplify */
1150 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
1151 /* record symbol in token array */
1152 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
1153 if (v & SYM_STRUCT)
1154 ps = &ts->sym_struct;
1155 else
1156 ps = &ts->sym_identifier;
1157 s->prev_tok = *ps;
1158 *ps = s;
1159 s->sym_scope = local_scope;
1160 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
1161 tcc_error("redeclaration of '%s'",
1162 get_tok_str(v & ~SYM_STRUCT, NULL));
1164 return s;
1167 /* push a global identifier */
1168 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
1170 Sym *s, **ps;
1171 s = sym_push2(&global_stack, v, t, c);
1172 s->r = VT_CONST | VT_SYM;
1173 /* don't record anonymous symbol */
1174 if (v < SYM_FIRST_ANOM) {
1175 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
1176 /* modify the top most local identifier, so that sym_identifier will
1177 point to 's' when popped; happens when called from inline asm */
1178 while (*ps != NULL && (*ps)->sym_scope)
1179 ps = &(*ps)->prev_tok;
1180 s->prev_tok = *ps;
1181 *ps = s;
1183 return s;
1186 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1187 pop them yet from the list, but do remove them from the token array. */
1188 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
1190 Sym *s, *ss, **ps;
1191 TokenSym *ts;
1192 int v;
1194 s = *ptop;
1195 while(s != b) {
1196 ss = s->prev;
1197 v = s->v;
1198 /* remove symbol in token array */
1199 /* XXX: simplify */
1200 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
1201 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
1202 if (v & SYM_STRUCT)
1203 ps = &ts->sym_struct;
1204 else
1205 ps = &ts->sym_identifier;
1206 *ps = s->prev_tok;
1208 if (!keep)
1209 sym_free(s);
1210 s = ss;
1212 if (!keep)
1213 *ptop = b;
1216 /* ------------------------------------------------------------------------- */
static void vcheck_cmp(void)
{
    /* we cannot rely on the CPU flags once other instructions are
       generated.  Also avoid leaving VT_JMP anywhere except on the top
       of the stack because it would complicate the code generator.

       Don't do this when nocode_wanted.  vtop might come from
       !nocode_wanted regions (see 88_codeopt.c) and transforming it to
       a register without actually generating code is wrong, as its
       value might still be used for real.  All values we push under
       nocode_wanted will eventually be popped again, so the
       VT_CMP/VT_JMP value will be in vtop when code is unsuppressed
       again. */
    if (vtop->r == VT_CMP && !nocode_wanted)
        gv(RC_INT);
}
1235 static void vsetc(CType *type, int r, CValue *vc)
1237 if (vtop >= vstack + (VSTACK_SIZE - 1))
1238 tcc_error("memory full (vstack)");
1239 vcheck_cmp();
1240 vtop++;
1241 vtop->type = *type;
1242 vtop->r = r;
1243 vtop->r2 = VT_CONST;
1244 vtop->c = *vc;
1245 vtop->sym = NULL;
1248 ST_FUNC void vswap(void)
1250 SValue tmp;
1252 vcheck_cmp();
1253 tmp = vtop[0];
1254 vtop[0] = vtop[-1];
1255 vtop[-1] = tmp;
1258 /* pop stack value */
1259 ST_FUNC void vpop(void)
1261 int v;
1262 v = vtop->r & VT_VALMASK;
1263 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1264 /* for x86, we need to pop the FP stack */
1265 if (v == TREG_ST0) {
1266 o(0xd8dd); /* fstp %st(0) */
1267 } else
1268 #endif
1269 if (v == VT_CMP) {
/* need to resolve the pending jumps of && or || if no test was generated */
1271 gsym(vtop->jtrue);
1272 gsym(vtop->jfalse);
1274 vtop--;
1277 /* push constant of type "type" with useless value */
1278 static void vpush(CType *type)
1280 vset(type, VT_CONST, 0);
1283 /* push arbitrary 64bit constant */
1284 static void vpush64(int ty, unsigned long long v)
1286 CValue cval;
1287 CType ctype;
1288 ctype.t = ty;
1289 ctype.ref = NULL;
1290 cval.i = v;
1291 vsetc(&ctype, VT_CONST, &cval);
1294 /* push integer constant */
1295 ST_FUNC void vpushi(int v)
1297 vpush64(VT_INT, v);
1300 /* push a pointer sized constant */
1301 static void vpushs(addr_t v)
1303 vpush64(VT_SIZE_T, v);
1306 /* push long long constant */
1307 static inline void vpushll(long long v)
1309 vpush64(VT_LLONG, v);
1312 ST_FUNC void vset(CType *type, int r, int v)
1314 CValue cval;
1315 cval.i = v;
1316 vsetc(type, r, &cval);
1319 static void vseti(int r, int v)
1321 CType type;
1322 type.t = VT_INT;
1323 type.ref = NULL;
1324 vset(&type, r, v);
1327 ST_FUNC void vpushv(SValue *v)
1329 if (vtop >= vstack + (VSTACK_SIZE - 1))
1330 tcc_error("memory full (vstack)");
1331 vtop++;
1332 *vtop = *v;
1335 static void vdup(void)
1337 vpushv(vtop);
/* rotate the n first stack elements to the bottom
   I1 ... In -> I2 ... In I1 [top is right],
   e.g. vrotb(3) turns "a b c" (c on top) into "b c a" (a on top) */
1343 ST_FUNC void vrotb(int n)
1345 int i;
1346 SValue tmp;
1348 vcheck_cmp();
1349 tmp = vtop[-n + 1];
1350 for(i=-n+1;i!=0;i++)
1351 vtop[i] = vtop[i+1];
1352 vtop[0] = tmp;
/* rotate the n elements before entry e towards the top
   I1 ... In ... -> In I1 ... I(n-1) ... [top is right] */
1358 ST_FUNC void vrote(SValue *e, int n)
1360 int i;
1361 SValue tmp;
1363 vcheck_cmp();
1364 tmp = *e;
1365 for(i = 0;i < n - 1; i++)
1366 e[-i] = e[-i - 1];
1367 e[-n + 1] = tmp;
/* rotate the n first stack elements to the top
   I1 ... In -> In I1 ... I(n-1) [top is right] */
1373 ST_FUNC void vrott(int n)
1375 vrote(vtop, n);
1378 /* ------------------------------------------------------------------------- */
1379 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1381 /* called from generators to set the result from relational ops */
1382 ST_FUNC void vset_VT_CMP(int op)
1384 vtop->r = VT_CMP;
1385 vtop->cmp_op = op;
1386 vtop->jfalse = 0;
1387 vtop->jtrue = 0;
1390 /* called once before asking generators to load VT_CMP to a register */
1391 static void vset_VT_JMP(void)
1393 int op = vtop->cmp_op;
1395 if (vtop->jtrue || vtop->jfalse) {
1396 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1397 int inv = op & (op < 2); /* small optimization */
1398 vseti(VT_JMP+inv, gvtst(inv, 0));
1399 } else {
/* otherwise convert flags (resp. 0/1) to register */
1401 vtop->c.i = op;
1402 if (op < 2) /* doesn't seem to happen */
1403 vtop->r = VT_CONST;
1407 /* Set CPU Flags, doesn't yet jump */
1408 static void gvtst_set(int inv, int t)
1410 int *p;
1412 if (vtop->r != VT_CMP) {
1413 vpushi(0);
1414 gen_op(TOK_NE);
1415 if (vtop->r != VT_CMP) /* must be VT_CONST then */
1416 vset_VT_CMP(vtop->c.i != 0);
1419 p = inv ? &vtop->jfalse : &vtop->jtrue;
1420 *p = gjmp_append(*p, t);
/* Generate value test
 *
 * Generate a test for any value (jump, comparison and integers) */
1426 static int gvtst(int inv, int t)
1428 int op, x, u;
1430 gvtst_set(inv, t);
1431 t = vtop->jtrue, u = vtop->jfalse;
1432 if (inv)
1433 x = u, u = t, t = x;
1434 op = vtop->cmp_op;
1436 /* jump to the wanted target */
1437 if (op > 1)
1438 t = gjmp_cond(op ^ inv, t);
1439 else if (op != inv)
1440 t = gjmp(t);
1441 /* resolve complementary jumps to here */
1442 gsym(u);
1444 vtop--;
1445 return t;
/* generate a zero or nonzero test */
1449 static void gen_test_zero(int op)
1451 if (vtop->r == VT_CMP) {
1452 int j;
1453 if (op == TOK_EQ) {
1454 j = vtop->jfalse;
1455 vtop->jfalse = vtop->jtrue;
1456 vtop->jtrue = j;
1457 vtop->cmp_op ^= 1;
1459 } else {
1460 vpushi(0);
1461 gen_op(op);
1465 /* ------------------------------------------------------------------------- */
1466 /* push a symbol value of TYPE */
1467 static inline void vpushsym(CType *type, Sym *sym)
1469 CValue cval;
1470 cval.i = 0;
1471 vsetc(type, VT_CONST | VT_SYM, &cval);
1472 vtop->sym = sym;
1475 /* Return a static symbol pointing to a section */
1476 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1478 int v;
1479 Sym *sym;
1481 v = anon_sym++;
1482 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1483 sym->type.t |= VT_STATIC;
1484 put_extern_sym(sym, sec, offset, size);
1485 return sym;
1488 /* push a reference to a section offset by adding a dummy symbol */
1489 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1491 vpushsym(type, get_sym_ref(type, sec, offset, size));
/* define a new external reference to a symbol 'v' of type 'type' */
1495 ST_FUNC Sym *external_global_sym(int v, CType *type)
1497 Sym *s;
1499 s = sym_find(v);
1500 if (!s) {
1501 /* push forward reference */
1502 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1503 s->type.ref = type->ref;
1504 } else if (IS_ASM_SYM(s)) {
1505 s->type.t = type->t | (s->type.t & VT_EXTERN);
1506 s->type.ref = type->ref;
1507 update_storage(s);
1509 return s;
1512 /* Merge symbol attributes. */
1513 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1515 if (sa1->aligned && !sa->aligned)
1516 sa->aligned = sa1->aligned;
1517 sa->packed |= sa1->packed;
1518 sa->weak |= sa1->weak;
1519 if (sa1->visibility != STV_DEFAULT) {
1520 int vis = sa->visibility;
1521 if (vis == STV_DEFAULT
1522 || vis > sa1->visibility)
1523 vis = sa1->visibility;
1524 sa->visibility = vis;
1526 sa->dllexport |= sa1->dllexport;
1527 sa->nodecorate |= sa1->nodecorate;
1528 sa->dllimport |= sa1->dllimport;
1531 /* Merge function attributes. */
1532 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1534 if (fa1->func_call && !fa->func_call)
1535 fa->func_call = fa1->func_call;
1536 if (fa1->func_type && !fa->func_type)
1537 fa->func_type = fa1->func_type;
1538 if (fa1->func_args && !fa->func_args)
1539 fa->func_args = fa1->func_args;
1540 if (fa1->func_noreturn)
1541 fa->func_noreturn = 1;
1542 if (fa1->func_ctor)
1543 fa->func_ctor = 1;
1544 if (fa1->func_dtor)
1545 fa->func_dtor = 1;
1548 /* Merge attributes. */
1549 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1551 merge_symattr(&ad->a, &ad1->a);
1552 merge_funcattr(&ad->f, &ad1->f);
1554 if (ad1->section)
1555 ad->section = ad1->section;
1556 if (ad1->alias_target)
1557 ad->alias_target = ad1->alias_target;
1558 if (ad1->asm_label)
1559 ad->asm_label = ad1->asm_label;
1560 if (ad1->attr_mode)
1561 ad->attr_mode = ad1->attr_mode;
1564 /* Merge some type attributes. */
1565 static void patch_type(Sym *sym, CType *type)
1567 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1568 if (!(sym->type.t & VT_EXTERN))
1569 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1570 sym->type.t &= ~VT_EXTERN;
1573 if (IS_ASM_SYM(sym)) {
1574 /* stay static if both are static */
1575 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1576 sym->type.ref = type->ref;
1579 if (!is_compatible_types(&sym->type, type)) {
1580 tcc_error("incompatible types for redefinition of '%s'",
1581 get_tok_str(sym->v, NULL));
1583 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1584 int static_proto = sym->type.t & VT_STATIC;
1585 /* warn if static follows non-static function declaration */
1586 if ((type->t & VT_STATIC) && !static_proto
1587 /* XXX this test for inline shouldn't be here. Until we
1588 implement gnu-inline mode again it silences a warning for
1589 mingw caused by our workarounds. */
1590 && !((type->t | sym->type.t) & VT_INLINE))
1591 tcc_warning("static storage ignored for redefinition of '%s'",
1592 get_tok_str(sym->v, NULL));
1594 /* set 'inline' if both agree or if one has static */
1595 if ((type->t | sym->type.t) & VT_INLINE) {
1596 if (!((type->t ^ sym->type.t) & VT_INLINE)
1597 || ((type->t | sym->type.t) & VT_STATIC))
1598 static_proto |= VT_INLINE;
1601 if (0 == (type->t & VT_EXTERN)) {
1602 struct FuncAttr f = sym->type.ref->f;
1603 /* put complete type, use static from prototype */
1604 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1605 sym->type.ref = type->ref;
1606 merge_funcattr(&sym->type.ref->f, &f);
1607 } else {
1608 sym->type.t &= ~VT_INLINE | static_proto;
1611 if (sym->type.ref->f.func_type == FUNC_OLD
1612 && type->ref->f.func_type != FUNC_OLD) {
1613 sym->type.ref = type->ref;
1616 } else {
1617 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1618 /* set array size if it was omitted in extern declaration */
1619 sym->type.ref->c = type->ref->c;
1621 if ((type->t ^ sym->type.t) & VT_STATIC)
1622 tcc_warning("storage mismatch for redefinition of '%s'",
1623 get_tok_str(sym->v, NULL));
1627 /* Merge some storage attributes. */
1628 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1630 if (type)
1631 patch_type(sym, type);
1633 #ifdef TCC_TARGET_PE
1634 if (sym->a.dllimport != ad->a.dllimport)
1635 tcc_error("incompatible dll linkage for redefinition of '%s'",
1636 get_tok_str(sym->v, NULL));
1637 #endif
1638 merge_symattr(&sym->a, &ad->a);
1639 if (ad->asm_label)
1640 sym->asm_label = ad->asm_label;
1641 update_storage(sym);
1644 /* copy sym to other stack */
1645 static Sym *sym_copy(Sym *s0, Sym **ps)
1647 Sym *s;
1648 s = sym_malloc(), *s = *s0;
1649 s->prev = *ps, *ps = s;
1650 if (s->v < SYM_FIRST_ANOM) {
1651 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1652 s->prev_tok = *ps, *ps = s;
1654 return s;
1657 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1658 static void sym_copy_ref(Sym *s, Sym **ps)
1660 int bt = s->type.t & VT_BTYPE;
1661 if (bt == VT_FUNC || bt == VT_PTR) {
1662 Sym **sp = &s->type.ref;
1663 for (s = *sp, *sp = NULL; s; s = s->next) {
1664 Sym *s2 = sym_copy(s, ps);
1665 sp = &(*sp = s2)->next;
1666 sym_copy_ref(s2, ps);
1671 /* define a new external reference to a symbol 'v' */
1672 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1674 Sym *s;
1676 /* look for global symbol */
1677 s = sym_find(v);
1678 while (s && s->sym_scope)
1679 s = s->prev_tok;
1681 if (!s) {
1682 /* push forward reference */
1683 s = global_identifier_push(v, type->t, 0);
1684 s->r |= r;
1685 s->a = ad->a;
1686 s->asm_label = ad->asm_label;
1687 s->type.ref = type->ref;
1688 /* copy type to the global stack */
1689 if (local_stack)
1690 sym_copy_ref(s, &global_stack);
1691 } else {
1692 patch_storage(s, ad, type);
1694 /* push variables on local_stack if any */
1695 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1696 s = sym_copy(s, &local_stack);
1697 return s;
1700 /* push a reference to global symbol v */
1701 ST_FUNC void vpush_global_sym(CType *type, int v)
1703 vpushsym(type, external_global_sym(v, type));
1706 /* save registers up to (vtop - n) stack entry */
1707 ST_FUNC void save_regs(int n)
1709 SValue *p, *p1;
1710 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1711 save_reg(p->r);
1714 /* save r to the memory stack, and mark it as being free */
1715 ST_FUNC void save_reg(int r)
1717 save_reg_upstack(r, 0);
1720 /* save r to the memory stack, and mark it as being free,
1721 if seen up to (vtop - n) stack entry */
1722 ST_FUNC void save_reg_upstack(int r, int n)
1724 int l, size, align, bt;
1725 SValue *p, *p1, sv;
1727 if ((r &= VT_VALMASK) >= VT_CONST)
1728 return;
1729 if (nocode_wanted)
1730 return;
1731 l = 0;
1732 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1733 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1734 /* must save value on stack if not already done */
1735 if (!l) {
1736 bt = p->type.t & VT_BTYPE;
1737 if (bt == VT_VOID)
1738 continue;
1739 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1740 bt = VT_PTR;
1741 sv.type.t = bt;
1742 size = type_size(&sv.type, &align);
1743 l = get_temp_local_var(size,align);
1744 sv.r = VT_LOCAL | VT_LVAL;
1745 sv.c.i = l;
1746 store(p->r & VT_VALMASK, &sv);
1747 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1748 /* x86 specific: need to pop fp register ST0 if saved */
1749 if (r == TREG_ST0) {
1750 o(0xd8dd); /* fstp %st(0) */
1752 #endif
1753 /* special long long case */
1754 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1755 sv.c.i += PTR_SIZE;
1756 store(p->r2, &sv);
1759 /* mark that stack entry as being saved on the stack */
1760 if (p->r & VT_LVAL) {
1761 /* also clear the bounded flag because the
1762 relocation address of the function was stored in
1763 p->c.i */
1764 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1765 } else {
1766 p->r = VT_LVAL | VT_LOCAL;
1768 p->r2 = VT_CONST;
1769 p->c.i = l;
1774 #ifdef TCC_TARGET_ARM
1775 /* find a register of class 'rc2' with at most one reference on stack.
1776 * If none, call get_reg(rc) */
1777 ST_FUNC int get_reg_ex(int rc, int rc2)
1779 int r;
1780 SValue *p;
1782 for(r=0;r<NB_REGS;r++) {
1783 if (reg_classes[r] & rc2) {
1784 int n;
1785 n=0;
1786 for(p = vstack; p <= vtop; p++) {
1787 if ((p->r & VT_VALMASK) == r ||
1788 p->r2 == r)
1789 n++;
1791 if (n <= 1)
1792 return r;
1795 return get_reg(rc);
1797 #endif
1799 /* find a free register of class 'rc'. If none, save one register */
1800 ST_FUNC int get_reg(int rc)
1802 int r;
1803 SValue *p;
1805 /* find a free register */
1806 for(r=0;r<NB_REGS;r++) {
1807 if (reg_classes[r] & rc) {
1808 if (nocode_wanted)
1809 return r;
1810 for(p=vstack;p<=vtop;p++) {
1811 if ((p->r & VT_VALMASK) == r ||
1812 p->r2 == r)
1813 goto notfound;
1815 return r;
1817 notfound: ;
1820 /* no register left : free the first one on the stack (VERY
1821 IMPORTANT to start from the bottom to ensure that we don't
1822 spill registers used in gen_opi()) */
1823 for(p=vstack;p<=vtop;p++) {
1824 /* look at second register (if long long) */
1825 r = p->r2;
1826 if (r < VT_CONST && (reg_classes[r] & rc))
1827 goto save_found;
1828 r = p->r & VT_VALMASK;
1829 if (r < VT_CONST && (reg_classes[r] & rc)) {
1830 save_found:
1831 save_reg(r);
1832 return r;
    /* should never come here */
    return -1;
}
/* find a free temporary local variable matching size and align and
   return its offset on the stack.  If none fits, allocate a new
   temporary stack variable */
static int get_temp_local_var(int size,int align){
	int i;
	struct temp_local_variable *temp_var;
	int found_var;
	SValue *p;
	int r;
	char free;
	char found;
	found=0;
	for(i=0;i<nb_temp_local_vars;i++){
		temp_var=&arr_temp_local_vars[i];
		if(temp_var->size<size||align!=temp_var->align){
			continue;
		}
		/* check if temp_var is free */
		free=1;
		for(p=vstack;p<=vtop;p++) {
			r=p->r&VT_VALMASK;
			if(r==VT_LOCAL||r==VT_LLOCAL){
				if(p->c.i==temp_var->location){
					free=0;
					break;
				}
			}
		}
		if(free){
			found_var=temp_var->location;
			found=1;
			break;
		}
	}
	if(!found){
		loc = (loc - size) & -align;
		if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
			temp_var=&arr_temp_local_vars[i];
			temp_var->location=loc;
			temp_var->size=size;
			temp_var->align=align;
			nb_temp_local_vars++;
		}
		found_var=loc;
	}
	return found_var;
}
static void clear_temp_local_var_list(){
	nb_temp_local_vars=0;
}
1889 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
1890 if needed */
1891 static void move_reg(int r, int s, int t)
1893 SValue sv;
1895 if (r != s) {
1896 save_reg(r);
1897 sv.type.t = t;
1898 sv.type.ref = NULL;
1899 sv.r = s;
1900 sv.c.i = 0;
1901 load(r, &sv);
1905 /* get address of vtop (vtop MUST BE an lvalue) */
1906 ST_FUNC void gaddrof(void)
1908 vtop->r &= ~VT_LVAL;
1909 /* tricky: if saved lvalue, then we can go back to lvalue */
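    /* (VT_LLOCAL marks a spilled lvalue: the stack slot holds the
       object's address rather than its value, see save_reg_upstack) */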
1910 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
1911 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
1914 #ifdef CONFIG_TCC_BCHECK
1915 /* generate lvalue bound code */
1916 static void gbound(void)
1918 CType type1;
1920 vtop->r &= ~VT_MUSTBOUND;
1921 /* if lvalue, then use checking code before dereferencing */
1922 if (vtop->r & VT_LVAL) {
1923 /* if not VT_BOUNDED value, then make one */
1924 if (!(vtop->r & VT_BOUNDED)) {
1925 /* must save type because we must set it to int to get pointer */
1926 type1 = vtop->type;
1927 vtop->type.t = VT_PTR;
1928 gaddrof();
1929 vpushi(0);
1930 gen_bounded_ptr_add();
1931 vtop->r |= VT_LVAL;
1932 vtop->type = type1;
1934 /* then check for dereferencing */
1935 gen_bounded_ptr_deref();
1939 /* we need to call __bound_ptr_add before we start to load function
1940 args into registers */
1941 ST_FUNC void gbound_args(int nb_args)
1943 int i;
1944 for (i = 1; i <= nb_args; ++i)
1945 if (vtop[1 - i].r & VT_MUSTBOUND) {
1946 vrotb(i);
1947 gbound();
1948 vrott(i);
1952 /* Add bounds for local symbols from S to E (via ->prev) */
1953 static void add_local_bounds(Sym *s, Sym *e)
1955 for (; s != e; s = s->prev) {
1956 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
1957 continue;
1958 /* Add arrays/structs/unions because we always take address */
1959 if ((s->type.t & VT_ARRAY)
1960 || (s->type.t & VT_BTYPE) == VT_STRUCT
1961 || s->a.addrtaken) {
1962 /* add local bound info */
1963 int align, size = type_size(&s->type, &align);
1964 addr_t *bounds_ptr = section_ptr_add(lbounds_section,
1965 2 * sizeof(addr_t));
1966 bounds_ptr[0] = s->c;
1967 bounds_ptr[1] = size;
1971 #endif
1973 /* Wrapper around sym_pop, that potentially also registers local bounds. */
1974 static void pop_local_syms(Sym **ptop, Sym *b, int keep, int ellipsis)
1976 #ifdef CONFIG_TCC_BCHECK
1977 if (tcc_state->do_bounds_check && !ellipsis && !keep)
1978 add_local_bounds(*ptop, b);
1979 #endif
1980 if (tcc_state->do_debug)
1981 tcc_add_debug_info (tcc_state, !local_scope, *ptop, b);
1982 sym_pop(ptop, b, keep);
1985 static void incr_bf_adr(int o)
1987 vtop->type = char_pointer_type;
1988 gaddrof();
1989 vpushs(o);
1990 gen_op('+');
1991 vtop->type.t = VT_BYTE | VT_UNSIGNED;
1992 vtop->r |= VT_LVAL;
1995 /* single-byte load mode for packed or otherwise unaligned bitfields */
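/* e.g. for "struct __attribute__((packed)) { char c; int f:20; } s;"
   the field f starts at bit 8 and spans three bytes, so reading s.f is
   assembled below from 8-bit loads that are masked, shifted and or'ed
   together */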
1996 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
1998 int n, o, bits;
1999 save_reg_upstack(vtop->r, 1);
2000 vpush64(type->t & VT_BTYPE, 0); // B X
2001 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
2002 do {
2003 vswap(); // X B
2004 incr_bf_adr(o);
2005 vdup(); // X B B
2006 n = 8 - bit_pos;
2007 if (n > bit_size)
2008 n = bit_size;
2009 if (bit_pos)
2010 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
2011 if (n < 8)
2012 vpushi((1 << n) - 1), gen_op('&');
2013 gen_cast(type);
2014 if (bits)
2015 vpushi(bits), gen_op(TOK_SHL);
2016 vrotb(3); // B Y X
2017 gen_op('|'); // B X
2018 bits += n, bit_size -= n, o = 1;
2019 } while (bit_size);
2020 vswap(), vpop();
2021 if (!(type->t & VT_UNSIGNED)) {
2022 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
2023 vpushi(n), gen_op(TOK_SHL);
2024 vpushi(n), gen_op(TOK_SAR);
2028 /* single-byte store mode for packed or otherwise unaligned bitfields */
2029 static void store_packed_bf(int bit_pos, int bit_size)
2031 int bits, n, o, m, c;
2033 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2034 vswap(); // X B
2035 save_reg_upstack(vtop->r, 1);
2036 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
2037 do {
2038 incr_bf_adr(o); // X B
2039 vswap(); //B X
2040 c ? vdup() : gv_dup(); // B V X
2041 vrott(3); // X B V
2042 if (bits)
2043 vpushi(bits), gen_op(TOK_SHR);
2044 if (bit_pos)
2045 vpushi(bit_pos), gen_op(TOK_SHL);
2046 n = 8 - bit_pos;
2047 if (n > bit_size)
2048 n = bit_size;
2049 if (n < 8) {
2050 m = ((1 << n) - 1) << bit_pos;
2051 vpushi(m), gen_op('&'); // X B V1
2052 vpushv(vtop-1); // X B V1 B
2053 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
2054 gen_op('&'); // X B V1 B1
2055 gen_op('|'); // X B V2
2057 vdup(), vtop[-1] = vtop[-2]; // X B B V2
2058 vstore(), vpop(); // X B
2059 bits += n, bit_size -= n, bit_pos = 0, o = 1;
2060 } while (bit_size);
2061 vpop(), vpop();
2064 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
2066 int t;
2067 if (0 == sv->type.ref)
2068 return 0;
2069 t = sv->type.ref->auxtype;
2070 if (t != -1 && t != VT_STRUCT) {
2071 sv->type.t = (sv->type.t & ~VT_BTYPE) | t;
2072 sv->r |= VT_LVAL;
2074 return t;
/* store vtop in a register belonging to class 'rc'.  lvalues are
   converted to values.  Cannot be used for values that cannot be
   converted to a register value (such as structures). */
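/* e.g. gv(RC_INT) makes sure the value on top of the value stack lives
   in some integer register and returns that register's number */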
2080 ST_FUNC int gv(int rc)
2082 int r, r2, r_ok, r2_ok, rc2, bt;
2083 int bit_pos, bit_size, size, align;
2085 /* NOTE: get_reg can modify vstack[] */
2086 if (vtop->type.t & VT_BITFIELD) {
2087 CType type;
2089 bit_pos = BIT_POS(vtop->type.t);
2090 bit_size = BIT_SIZE(vtop->type.t);
2091 /* remove bit field info to avoid loops */
2092 vtop->type.t &= ~VT_STRUCT_MASK;
2094 type.ref = NULL;
2095 type.t = vtop->type.t & VT_UNSIGNED;
2096 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
2097 type.t |= VT_UNSIGNED;
2099 r = adjust_bf(vtop, bit_pos, bit_size);
2101 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2102 type.t |= VT_LLONG;
2103 else
2104 type.t |= VT_INT;
2106 if (r == VT_STRUCT) {
2107 load_packed_bf(&type, bit_pos, bit_size);
2108 } else {
2109 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
2110 /* cast to int to propagate signedness in following ops */
2111 gen_cast(&type);
2112 /* generate shifts */
2113 vpushi(bits - (bit_pos + bit_size));
2114 gen_op(TOK_SHL);
2115 vpushi(bits - bit_size);
2116 /* NOTE: transformed to SHR if unsigned */
2117 gen_op(TOK_SAR);
2119 r = gv(rc);
2120 } else {
2121 if (is_float(vtop->type.t) &&
2122 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
2123 unsigned long offset;
2124 /* CPUs usually cannot use float constants, so we store them
2125 generically in data segment */
2126 size = type_size(&vtop->type, &align);
2127 if (NODATA_WANTED)
2128 size = 0, align = 1;
2129 offset = section_add(data_section, size, align);
2130 vpush_ref(&vtop->type, data_section, offset, size);
2131 vswap();
2132 init_putv(&vtop->type, data_section, offset);
2133 vtop->r |= VT_LVAL;
2135 #ifdef CONFIG_TCC_BCHECK
2136 if (vtop->r & VT_MUSTBOUND)
2137 gbound();
2138 #endif
2140 bt = vtop->type.t & VT_BTYPE;
2142 #ifdef TCC_TARGET_RISCV64
2143 /* XXX mega hack */
2144 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
2145 rc = RC_INT;
2146 #endif
2147 rc2 = RC2_TYPE(bt, rc);
2149 /* need to reload if:
2150 - constant
2151 - lvalue (need to dereference pointer)
2152 - already a register, but not in the right class */
2153 r = vtop->r & VT_VALMASK;
2154 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
2155 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
2157 if (!r_ok || !r2_ok) {
2158 if (!r_ok)
2159 r = get_reg(rc);
2160 if (rc2) {
2161 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
2162 int original_type = vtop->type.t;
2164 /* two register type load :
2165 expand to two words temporarily */
2166 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
2167 /* load constant */
2168 unsigned long long ll = vtop->c.i;
2169 vtop->c.i = ll; /* first word */
2170 load(r, vtop);
2171 vtop->r = r; /* save register value */
2172 vpushi(ll >> 32); /* second word */
2173 } else if (vtop->r & VT_LVAL) {
2174 /* We do not want to modify the long long pointer here,
2175 so we save any other instances down the stack */
2176 save_reg_upstack(vtop->r, 1);
2177 /* load from memory */
2178 vtop->type.t = load_type;
2179 load(r, vtop);
2180 vdup();
2181 vtop[-1].r = r; /* save register value */
2182 /* increment pointer to get second word */
2183 vtop->type.t = VT_PTRDIFF_T;
2184 gaddrof();
2185 vpushs(PTR_SIZE);
2186 gen_op('+');
2187 vtop->r |= VT_LVAL;
2188 vtop->type.t = load_type;
2189 } else {
2190 /* move registers */
2191 if (!r_ok)
2192 load(r, vtop);
2193 if (r2_ok && vtop->r2 < VT_CONST)
2194 goto done;
2195 vdup();
2196 vtop[-1].r = r; /* save register value */
2197 vtop->r = vtop[-1].r2;
2199 /* Allocate second register. Here we rely on the fact that
2200 get_reg() tries first to free r2 of an SValue. */
2201 r2 = get_reg(rc2);
2202 load(r2, vtop);
2203 vpop();
2204 /* write second register */
2205 vtop->r2 = r2;
2206 done:
2207 vtop->type.t = original_type;
2208 } else {
2209 if (vtop->r == VT_CMP)
2210 vset_VT_JMP();
2211 /* one register type load */
2212 load(r, vtop);
2215 vtop->r = r;
2216 #ifdef TCC_TARGET_C67
2217 /* uses register pairs for doubles */
2218 if (bt == VT_DOUBLE)
2219 vtop->r2 = r+1;
2220 #endif
2222 return r;
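/* Typical use, for illustration: after vpushi(42), gv(RC_INT) puts the
   constant into some integer register and returns it; for an lvalue the
   pointer is dereferenced first.  On PTR_SIZE == 4 targets a long long
   ends up split across vtop->r and vtop->r2 via the two-register path
   above. */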
2225 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2226 ST_FUNC void gv2(int rc1, int rc2)
2228 /* generate the more generic register first. But VT_JMP or VT_CMP
2229 values must be generated first in all cases to avoid possible
2230 reload errors */
2231 if (vtop->r != VT_CMP && rc1 <= rc2) {
2232 vswap();
2233 gv(rc1);
2234 vswap();
2235 gv(rc2);
2236 /* test if reload is needed for first register */
2237 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
2238 vswap();
2239 gv(rc1);
2240 vswap();
2242 } else {
2243 gv(rc2);
2244 vswap();
2245 gv(rc1);
2246 vswap();
2247 /* test if reload is needed for first register */
2248 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
2249 gv(rc2);
2254 #if PTR_SIZE == 4
2255 /* expand a 64-bit value on the stack into two ints */
2256 ST_FUNC void lexpand(void)
2258 int u, v;
2259 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
2260 v = vtop->r & (VT_VALMASK | VT_LVAL);
2261 if (v == VT_CONST) {
2262 vdup();
2263 vtop[0].c.i >>= 32;
2264 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
2265 vdup();
2266 vtop[0].c.i += 4;
2267 } else {
2268 gv(RC_INT);
2269 vdup();
2270 vtop[0].r = vtop[-1].r2;
2271 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
2273 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
2275 #endif
2277 #if PTR_SIZE == 4
2278 /* build a long long from two ints */
2279 static void lbuild(int t)
2281 gv2(RC_INT, RC_INT);
2282 vtop[-1].r2 = vtop[0].r;
2283 vtop[-1].type.t = t;
2284 vpop();
2286 #endif
2288 /* convert stack entry to register and duplicate its value in another
2289 register */
2290 static void gv_dup(void)
2292 int t, rc, r;
2294 t = vtop->type.t;
2295 #if PTR_SIZE == 4
2296 if ((t & VT_BTYPE) == VT_LLONG) {
2297 if (t & VT_BITFIELD) {
2298 gv(RC_INT);
2299 t = vtop->type.t;
2301 lexpand();
2302 gv_dup();
2303 vswap();
2304 vrotb(3);
2305 gv_dup();
2306 vrotb(4);
2307 /* stack: H L L1 H1 */
2308 lbuild(t);
2309 vrotb(3);
2310 vrotb(3);
2311 vswap();
2312 lbuild(t);
2313 vswap();
2314 return;
2316 #endif
2317 /* duplicate value */
2318 rc = RC_TYPE(t);
2319 gv(rc);
2320 r = get_reg(rc);
2321 vdup();
2322 load(r, vtop);
2323 vtop->r = r;
2326 #if PTR_SIZE == 4
2327 /* generate CPU independent (unsigned) long long operations */
2328 static void gen_opl(int op)
2330 int t, a, b, op1, c, i;
2331 int func;
2332 unsigned short reg_iret = REG_IRET;
2333 unsigned short reg_lret = REG_IRE2;
2334 SValue tmp;
2336 switch(op) {
2337 case '/':
2338 case TOK_PDIV:
2339 func = TOK___divdi3;
2340 goto gen_func;
2341 case TOK_UDIV:
2342 func = TOK___udivdi3;
2343 goto gen_func;
2344 case '%':
2345 func = TOK___moddi3;
2346 goto gen_mod_func;
2347 case TOK_UMOD:
2348 func = TOK___umoddi3;
2349 gen_mod_func:
2350 #ifdef TCC_ARM_EABI
2351 reg_iret = TREG_R2;
2352 reg_lret = TREG_R3;
2353 #endif
2354 gen_func:
2355 /* call generic long long function */
2356 vpush_global_sym(&func_old_type, func);
2357 vrott(3);
2358 gfunc_call(2);
2359 vpushi(0);
2360 vtop->r = reg_iret;
2361 vtop->r2 = reg_lret;
2362 break;
2363 case '^':
2364 case '&':
2365 case '|':
2366 case '*':
2367 case '+':
2368 case '-':
2369 //pv("gen_opl A",0,2);
2370 t = vtop->type.t;
2371 vswap();
2372 lexpand();
2373 vrotb(3);
2374 lexpand();
2375 /* stack: L1 H1 L2 H2 */
2376 tmp = vtop[0];
2377 vtop[0] = vtop[-3];
2378 vtop[-3] = tmp;
2379 tmp = vtop[-2];
2380 vtop[-2] = vtop[-3];
2381 vtop[-3] = tmp;
2382 vswap();
2383 /* stack: H1 H2 L1 L2 */
2384 //pv("gen_opl B",0,4);
2385 if (op == '*') {
2386 vpushv(vtop - 1);
2387 vpushv(vtop - 1);
2388 gen_op(TOK_UMULL);
2389 lexpand();
2390 /* stack: H1 H2 L1 L2 ML MH */
2391 for(i=0;i<4;i++)
2392 vrotb(6);
2393 /* stack: ML MH H1 H2 L1 L2 */
2394 tmp = vtop[0];
2395 vtop[0] = vtop[-2];
2396 vtop[-2] = tmp;
2397 /* stack: ML MH H1 L2 H2 L1 */
2398 gen_op('*');
2399 vrotb(3);
2400 vrotb(3);
2401 gen_op('*');
2402 /* stack: ML MH M1 M2 */
2403 gen_op('+');
2404 gen_op('+');
2405 } else if (op == '+' || op == '-') {
2406 /* XXX: add non carry method too (for MIPS or alpha) */
2407 if (op == '+')
2408 op1 = TOK_ADDC1;
2409 else
2410 op1 = TOK_SUBC1;
2411 gen_op(op1);
2412 /* stack: H1 H2 (L1 op L2) */
2413 vrotb(3);
2414 vrotb(3);
2415 gen_op(op1 + 1); /* TOK_xxxC2 */
2416 } else {
2417 gen_op(op);
2418 /* stack: H1 H2 (L1 op L2) */
2419 vrotb(3);
2420 vrotb(3);
2421 /* stack: (L1 op L2) H1 H2 */
2422 gen_op(op);
2423 /* stack: (L1 op L2) (H1 op H2) */
2425 /* stack: L H */
2426 lbuild(t);
2427 break;
2428 case TOK_SAR:
2429 case TOK_SHR:
2430 case TOK_SHL:
2431 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
2432 t = vtop[-1].type.t;
2433 vswap();
2434 lexpand();
2435 vrotb(3);
2436 /* stack: L H shift */
2437 c = (int)vtop->c.i;
2438 /* constant: simpler */
2439 /* NOTE: all comments are for SHL. the other cases are
2440 done by swapping words */
2441 vpop();
2442 if (op != TOK_SHL)
2443 vswap();
2444 if (c >= 32) {
2445 /* stack: L H */
2446 vpop();
2447 if (c > 32) {
2448 vpushi(c - 32);
2449 gen_op(op);
2451 if (op != TOK_SAR) {
2452 vpushi(0);
2453 } else {
2454 gv_dup();
2455 vpushi(31);
2456 gen_op(TOK_SAR);
2458 vswap();
2459 } else {
2460 vswap();
2461 gv_dup();
2462 /* stack: H L L */
2463 vpushi(c);
2464 gen_op(op);
2465 vswap();
2466 vpushi(32 - c);
2467 if (op == TOK_SHL)
2468 gen_op(TOK_SHR);
2469 else
2470 gen_op(TOK_SHL);
2471 vrotb(3);
2472 /* stack: L L H */
2473 vpushi(c);
2474 if (op == TOK_SHL)
2475 gen_op(TOK_SHL);
2476 else
2477 gen_op(TOK_SHR);
2478 gen_op('|');
2480 if (op != TOK_SHL)
2481 vswap();
2482 lbuild(t);
2483 } else {
2484 /* XXX: should provide a faster fallback on x86 ? */
2485 switch(op) {
2486 case TOK_SAR:
2487 func = TOK___ashrdi3;
2488 goto gen_func;
2489 case TOK_SHR:
2490 func = TOK___lshrdi3;
2491 goto gen_func;
2492 case TOK_SHL:
2493 func = TOK___ashldi3;
2494 goto gen_func;
2497 break;
2498 default:
2499 /* compare operations */
2500 t = vtop->type.t;
2501 vswap();
2502 lexpand();
2503 vrotb(3);
2504 lexpand();
2505 /* stack: L1 H1 L2 H2 */
2506 tmp = vtop[-1];
2507 vtop[-1] = vtop[-2];
2508 vtop[-2] = tmp;
2509 /* stack: L1 L2 H1 H2 */
2510 save_regs(4);
2511 /* compare high */
2512 op1 = op;
2513 /* when values are equal, we need to compare low words. since
2514 the jump is inverted, we invert the test too. */
2515 if (op1 == TOK_LT)
2516 op1 = TOK_LE;
2517 else if (op1 == TOK_GT)
2518 op1 = TOK_GE;
2519 else if (op1 == TOK_ULT)
2520 op1 = TOK_ULE;
2521 else if (op1 == TOK_UGT)
2522 op1 = TOK_UGE;
2523 a = 0;
2524 b = 0;
2525 gen_op(op1);
2526 if (op == TOK_NE) {
2527 b = gvtst(0, 0);
2528 } else {
2529 a = gvtst(1, 0);
2530 if (op != TOK_EQ) {
2531 /* generate non equal test */
2532 vpushi(0);
2533 vset_VT_CMP(TOK_NE);
2534 b = gvtst(0, 0);
2537 /* compare low. Always unsigned */
2538 op1 = op;
2539 if (op1 == TOK_LT)
2540 op1 = TOK_ULT;
2541 else if (op1 == TOK_LE)
2542 op1 = TOK_ULE;
2543 else if (op1 == TOK_GT)
2544 op1 = TOK_UGT;
2545 else if (op1 == TOK_GE)
2546 op1 = TOK_UGE;
2547 gen_op(op1);
2548 #if 0//def TCC_TARGET_I386
2549 if (op == TOK_NE) { gsym(b); break; }
2550 if (op == TOK_EQ) { gsym(a); break; }
2551 #endif
2552 gvtst_set(1, a);
2553 gvtst_set(0, b);
2554 break;
2557 #endif
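/* Constant 64-bit shifts above are built from 32-bit word operations.
   For example, with x = H:L (high/low words) on a 32-bit target:
       x << 10  ->  low' = L << 10;  high' = (H << 10) | (L >> 22)
       x << 40  ->  low' = 0;        high' = L << 8
   SHR and SAR follow the same scheme with the words swapped. */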
2559 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2561 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2562 return (a ^ b) >> 63 ? -x : x;
2565 static int gen_opic_lt(uint64_t a, uint64_t b)
2567 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
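/* Both helpers emulate signed 64-bit semantics on unsigned values, e.g.
   gen_opic_lt(-1, 1) compares 0x7fffffffffffffff < 0x8000000000000001
   and yields 1 (xoring the sign bit maps the signed order onto the
   unsigned one), and gen_opic_sdiv(-7, 2) divides the magnitudes
   (7 / 2 = 3) and restores the sign, giving -3 as C's truncation
   toward zero requires. */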
2570 /* handle integer constant folding and various
2571 machine-independent optimizations */
2572 static void gen_opic(int op)
2574 SValue *v1 = vtop - 1;
2575 SValue *v2 = vtop;
2576 int t1 = v1->type.t & VT_BTYPE;
2577 int t2 = v2->type.t & VT_BTYPE;
2578 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2579 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2580 uint64_t l1 = c1 ? v1->c.i : 0;
2581 uint64_t l2 = c2 ? v2->c.i : 0;
2582 int shm = (t1 == VT_LLONG) ? 63 : 31;
2584 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2585 l1 = ((uint32_t)l1 |
2586 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2587 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2588 l2 = ((uint32_t)l2 |
2589 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2591 if (c1 && c2) {
2592 switch(op) {
2593 case '+': l1 += l2; break;
2594 case '-': l1 -= l2; break;
2595 case '&': l1 &= l2; break;
2596 case '^': l1 ^= l2; break;
2597 case '|': l1 |= l2; break;
2598 case '*': l1 *= l2; break;
2600 case TOK_PDIV:
2601 case '/':
2602 case '%':
2603 case TOK_UDIV:
2604 case TOK_UMOD:
2605 /* if division by zero, generate explicit division */
2606 if (l2 == 0) {
2607 if (const_wanted && !(nocode_wanted & unevalmask))
2608 tcc_error("division by zero in constant");
2609 goto general_case;
2611 switch(op) {
2612 default: l1 = gen_opic_sdiv(l1, l2); break;
2613 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2614 case TOK_UDIV: l1 = l1 / l2; break;
2615 case TOK_UMOD: l1 = l1 % l2; break;
2617 break;
2618 case TOK_SHL: l1 <<= (l2 & shm); break;
2619 case TOK_SHR: l1 >>= (l2 & shm); break;
2620 case TOK_SAR:
2621 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2622 break;
2623 /* tests */
2624 case TOK_ULT: l1 = l1 < l2; break;
2625 case TOK_UGE: l1 = l1 >= l2; break;
2626 case TOK_EQ: l1 = l1 == l2; break;
2627 case TOK_NE: l1 = l1 != l2; break;
2628 case TOK_ULE: l1 = l1 <= l2; break;
2629 case TOK_UGT: l1 = l1 > l2; break;
2630 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2631 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2632 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2633 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2634 /* logical */
2635 case TOK_LAND: l1 = l1 && l2; break;
2636 case TOK_LOR: l1 = l1 || l2; break;
2637 default:
2638 goto general_case;
2640 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2641 l1 = ((uint32_t)l1 |
2642 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2643 v1->c.i = l1;
2644 vtop--;
2645 } else {
2646 /* if commutative ops, put c2 as constant */
2647 if (c1 && (op == '+' || op == '&' || op == '^' ||
2648 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2649 vswap();
2650 c2 = c1; //c = c1, c1 = c2, c2 = c;
2651 l2 = l1; //l = l1, l1 = l2, l2 = l;
2653 if (!const_wanted &&
2654 c1 && ((l1 == 0 &&
2655 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2656 (l1 == -1 && op == TOK_SAR))) {
2657 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2658 vtop--;
2659 } else if (!const_wanted &&
2660 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2661 (op == '|' &&
2662 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2663 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2664 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2665 if (l2 == 1)
2666 vtop->c.i = 0;
2667 vswap();
2668 vtop--;
2669 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2670 op == TOK_PDIV) &&
2671 l2 == 1) ||
2672 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2673 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2674 l2 == 0) ||
2675 (op == '&' &&
2676 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2677 /* filter out NOP operations like x*1, x-0, x&-1... */
2678 vtop--;
2679 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2680 /* try to use shifts instead of muls or divs */
2681 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2682 int n = -1;
2683 while (l2) {
2684 l2 >>= 1;
2685 n++;
2687 vtop->c.i = n;
2688 if (op == '*')
2689 op = TOK_SHL;
2690 else if (op == TOK_PDIV)
2691 op = TOK_SAR;
2692 else
2693 op = TOK_SHR;
2695 goto general_case;
2696 } else if (c2 && (op == '+' || op == '-') &&
2697 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2698 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2699 /* symbol + constant case */
2700 if (op == '-')
2701 l2 = -l2;
2702 l2 += vtop[-1].c.i;
2703 /* The backends can't always deal with addends to symbols
2704 larger than +-1<<31. Don't construct such. */
2705 if ((int)l2 != l2)
2706 goto general_case;
2707 vtop--;
2708 vtop->c.i = l2;
2709 } else {
2710 general_case:
2711 /* call low level op generator */
2712 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2713 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2714 gen_opl(op);
2715 else
2716 gen_opi(op);
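/* Examples of what gen_opic() folds, for illustration: '6 * 7' becomes
   the constant 42; 'x + 0', 'x * 1' and 'x & -1' are dropped as no-ops;
   'x * 8' is rewritten as 'x << 3' and an unsigned 'x / 8' as 'x >> 3';
   adding a constant to a symbolic address merely adjusts the addend. */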
2721 /* generate a floating point operation with constant propagation */
2722 static void gen_opif(int op)
2724 int c1, c2;
2725 SValue *v1, *v2;
2726 #if defined _MSC_VER && defined __x86_64__
2727 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2728 volatile
2729 #endif
2730 long double f1, f2;
2732 v1 = vtop - 1;
2733 v2 = vtop;
2734 /* currently, we cannot do computations with forward symbols */
2735 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2736 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2737 if (c1 && c2) {
2738 if (v1->type.t == VT_FLOAT) {
2739 f1 = v1->c.f;
2740 f2 = v2->c.f;
2741 } else if (v1->type.t == VT_DOUBLE) {
2742 f1 = v1->c.d;
2743 f2 = v2->c.d;
2744 } else {
2745 f1 = v1->c.ld;
2746 f2 = v2->c.ld;
2749 /* NOTE: we only do constant propagation for finite numbers
2750 (not NaN or infinity) (ANSI spec) */
2751 if (!ieee_finite(f1) || !ieee_finite(f2))
2752 goto general_case;
2754 switch(op) {
2755 case '+': f1 += f2; break;
2756 case '-': f1 -= f2; break;
2757 case '*': f1 *= f2; break;
2758 case '/':
2759 if (f2 == 0.0) {
2760 /* If not in initializer we need to potentially generate
2761 FP exceptions at runtime, otherwise we want to fold. */
2762 if (!const_wanted)
2763 goto general_case;
2765 f1 /= f2;
2766 break;
2767 /* XXX: also handles tests ? */
2768 default:
2769 goto general_case;
2771 /* XXX: overflow test ? */
2772 if (v1->type.t == VT_FLOAT) {
2773 v1->c.f = f1;
2774 } else if (v1->type.t == VT_DOUBLE) {
2775 v1->c.d = f1;
2776 } else {
2777 v1->c.ld = f1;
2779 vtop--;
2780 } else {
2781 general_case:
2782 gen_opf(op);
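/* For example '2.5 * 4.0' with both operands constant folds to 10.0
   here, while NaN/infinity operands and 'x / 0.0' outside a required
   constant expression fall through to gen_opf() so run-time FP
   behaviour is preserved. */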
2786 /* print a type. If 'varstr' is not NULL, then the variable is also
2787 printed in the type */
2788 /* XXX: union */
2789 /* XXX: add array and function pointers */
2790 static void type_to_str(char *buf, int buf_size,
2791 CType *type, const char *varstr)
2793 int bt, v, t;
2794 Sym *s, *sa;
2795 char buf1[256];
2796 const char *tstr;
2798 t = type->t;
2799 bt = t & VT_BTYPE;
2800 buf[0] = '\0';
2802 if (t & VT_EXTERN)
2803 pstrcat(buf, buf_size, "extern ");
2804 if (t & VT_STATIC)
2805 pstrcat(buf, buf_size, "static ");
2806 if (t & VT_TYPEDEF)
2807 pstrcat(buf, buf_size, "typedef ");
2808 if (t & VT_INLINE)
2809 pstrcat(buf, buf_size, "inline ");
2810 if (t & VT_VOLATILE)
2811 pstrcat(buf, buf_size, "volatile ");
2812 if (t & VT_CONSTANT)
2813 pstrcat(buf, buf_size, "const ");
2815 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
2816 || ((t & VT_UNSIGNED)
2817 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
2818 && !IS_ENUM(t)
2820 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
2822 buf_size -= strlen(buf);
2823 buf += strlen(buf);
2825 switch(bt) {
2826 case VT_VOID:
2827 tstr = "void";
2828 goto add_tstr;
2829 case VT_BOOL:
2830 tstr = "_Bool";
2831 goto add_tstr;
2832 case VT_BYTE:
2833 tstr = "char";
2834 goto add_tstr;
2835 case VT_SHORT:
2836 tstr = "short";
2837 goto add_tstr;
2838 case VT_INT:
2839 tstr = "int";
2840 goto maybe_long;
2841 case VT_LLONG:
2842 tstr = "long long";
2843 maybe_long:
2844 if (t & VT_LONG)
2845 tstr = "long";
2846 if (!IS_ENUM(t))
2847 goto add_tstr;
2848 tstr = "enum ";
2849 goto tstruct;
2850 case VT_FLOAT:
2851 tstr = "float";
2852 goto add_tstr;
2853 case VT_DOUBLE:
2854 tstr = "double";
2855 if (!(t & VT_LONG))
2856 goto add_tstr;
2857 case VT_LDOUBLE:
2858 tstr = "long double";
2859 add_tstr:
2860 pstrcat(buf, buf_size, tstr);
2861 break;
2862 case VT_STRUCT:
2863 tstr = "struct ";
2864 if (IS_UNION(t))
2865 tstr = "union ";
2866 tstruct:
2867 pstrcat(buf, buf_size, tstr);
2868 v = type->ref->v & ~SYM_STRUCT;
2869 if (v >= SYM_FIRST_ANOM)
2870 pstrcat(buf, buf_size, "<anonymous>");
2871 else
2872 pstrcat(buf, buf_size, get_tok_str(v, NULL));
2873 break;
2874 case VT_FUNC:
2875 s = type->ref;
2876 buf1[0]=0;
2877 if (varstr && '*' == *varstr) {
2878 pstrcat(buf1, sizeof(buf1), "(");
2879 pstrcat(buf1, sizeof(buf1), varstr);
2880 pstrcat(buf1, sizeof(buf1), ")");
2882 pstrcat(buf1, buf_size, "(");
2883 sa = s->next;
2884 while (sa != NULL) {
2885 char buf2[256];
2886 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
2887 pstrcat(buf1, sizeof(buf1), buf2);
2888 sa = sa->next;
2889 if (sa)
2890 pstrcat(buf1, sizeof(buf1), ", ");
2892 if (s->f.func_type == FUNC_ELLIPSIS)
2893 pstrcat(buf1, sizeof(buf1), ", ...");
2894 pstrcat(buf1, sizeof(buf1), ")");
2895 type_to_str(buf, buf_size, &s->type, buf1);
2896 goto no_var;
2897 case VT_PTR:
2898 s = type->ref;
2899 if (t & VT_ARRAY) {
2900 if (varstr && '*' == *varstr)
2901 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
2902 else
2903 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
2904 type_to_str(buf, buf_size, &s->type, buf1);
2905 goto no_var;
2907 pstrcpy(buf1, sizeof(buf1), "*");
2908 if (t & VT_CONSTANT)
2909 pstrcat(buf1, buf_size, "const ");
2910 if (t & VT_VOLATILE)
2911 pstrcat(buf1, buf_size, "volatile ");
2912 if (varstr)
2913 pstrcat(buf1, sizeof(buf1), varstr);
2914 type_to_str(buf, buf_size, &s->type, buf1);
2915 goto no_var;
2917 if (varstr) {
2918 pstrcat(buf, buf_size, " ");
2919 pstrcat(buf, buf_size, varstr);
2921 no_var: ;
2924 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
2926 char buf1[256], buf2[256];
2927 type_to_str(buf1, sizeof(buf1), st, NULL);
2928 type_to_str(buf2, sizeof(buf2), dt, NULL);
2929 tcc_error(fmt, buf1, buf2);
2932 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
2934 char buf1[256], buf2[256];
2935 type_to_str(buf1, sizeof(buf1), st, NULL);
2936 type_to_str(buf2, sizeof(buf2), dt, NULL);
2937 tcc_warning(fmt, buf1, buf2);
2940 static int pointed_size(CType *type)
2942 int align;
2943 return type_size(pointed_type(type), &align);
2946 static void vla_runtime_pointed_size(CType *type)
2948 int align;
2949 vla_runtime_type_size(pointed_type(type), &align);
2952 static inline int is_null_pointer(SValue *p)
2954 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
2955 return 0;
2956 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
2957 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
2958 ((p->type.t & VT_BTYPE) == VT_PTR &&
2959 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
2960 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
2961 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
2965 /* compare function types. OLD functions match any new functions */
2966 static int is_compatible_func(CType *type1, CType *type2)
2968 Sym *s1, *s2;
2970 s1 = type1->ref;
2971 s2 = type2->ref;
2972 if (s1->f.func_call != s2->f.func_call)
2973 return 0;
2974 if (s1->f.func_type != s2->f.func_type
2975 && s1->f.func_type != FUNC_OLD
2976 && s2->f.func_type != FUNC_OLD)
2977 return 0;
2978 /* we should check the function return type for FUNC_OLD too
2979 but that causes problems with the internally used support
2980 functions such as TOK_memmove */
2981 if (s1->f.func_type == FUNC_OLD && !s1->next)
2982 return 1;
2983 if (s2->f.func_type == FUNC_OLD && !s2->next)
2984 return 1;
2985 for (;;) {
2986 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
2987 return 0;
2988 s1 = s1->next;
2989 s2 = s2->next;
2990 if (!s1)
2991 return !s2;
2992 if (!s2)
2993 return 0;
2997 /* return true if type1 and type2 are the same. If unqualified is
2998 true, qualifiers on the types are ignored.
3000 static int compare_types(CType *type1, CType *type2, int unqualified)
3002 int bt1, t1, t2;
3004 t1 = type1->t & VT_TYPE;
3005 t2 = type2->t & VT_TYPE;
3006 if (unqualified) {
3007 /* strip qualifiers before comparing */
3008 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3009 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3012 /* Default vs. explicit signedness only matters for char */
3013 if ((t1 & VT_BTYPE) != VT_BYTE) {
3014 t1 &= ~VT_DEFSIGN;
3015 t2 &= ~VT_DEFSIGN;
3017 /* XXX: bitfields ? */
3018 if (t1 != t2)
3019 return 0;
3021 if ((t1 & VT_ARRAY)
3022 && !(type1->ref->c < 0
3023 || type2->ref->c < 0
3024 || type1->ref->c == type2->ref->c))
3025 return 0;
3027 /* test more complicated cases */
3028 bt1 = t1 & VT_BTYPE;
3029 if (bt1 == VT_PTR) {
3030 type1 = pointed_type(type1);
3031 type2 = pointed_type(type2);
3032 return is_compatible_types(type1, type2);
3033 } else if (bt1 == VT_STRUCT) {
3034 return (type1->ref == type2->ref);
3035 } else if (bt1 == VT_FUNC) {
3036 return is_compatible_func(type1, type2);
3037 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
3038 /* If both are enums then they must be the same; if only one is, then
3039 t1 and t2 must be equal, which was checked above already. */
3040 return type1->ref == type2->ref;
3041 } else {
3042 return 1;
3046 /* Check if OP1 and OP2 can be "combined" with operation OP. The combined
3047 type is stored in DEST if non-null (except for pointer plus/minus). */
3048 static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
3050 CType *type1 = &op1->type, *type2 = &op2->type, type;
3051 int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
3052 int ret = 1;
3054 type.t = VT_VOID;
3055 type.ref = NULL;
3057 if (bt1 == VT_VOID || bt2 == VT_VOID) {
3058 ret = op == '?' ? 1 : 0;
3059 /* NOTE: as an extension, we accept void on only one side */
3060 type.t = VT_VOID;
3061 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3062 if (op == '+') ; /* Handled in caller */
3063 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3064 /* If one is a null ptr constant the result type is the other. */
3065 else if (is_null_pointer (op2)) type = *type1;
3066 else if (is_null_pointer (op1)) type = *type2;
3067 else if (bt1 != bt2) {
3068 /* accept comparison or cond-expr between pointer and integer
3069 with a warning */
3070 if ((op == '?' || TOK_ISCOND(op))
3071 && (is_integer_btype(bt1) || is_integer_btype(bt2)))
3072 tcc_warning("pointer/integer mismatch in %s",
3073 op == '?' ? "conditional expression" : "comparison");
3074 else if (op != '-' || !is_integer_btype(bt2))
3075 ret = 0;
3076 type = *(bt1 == VT_PTR ? type1 : type2);
3077 } else {
3078 CType *pt1 = pointed_type(type1);
3079 CType *pt2 = pointed_type(type2);
3080 int pbt1 = pt1->t & VT_BTYPE;
3081 int pbt2 = pt2->t & VT_BTYPE;
3082 int newquals, copied = 0;
3083 if (pbt1 != VT_VOID && pbt2 != VT_VOID
3084 && !compare_types(pt1, pt2, 1/*unqualif*/)) {
3085 if (op != '?' && !TOK_ISCOND(op))
3086 ret = 0;
3087 else
3088 type_incompatibility_warning(type1, type2,
3089 op == '?'
3090 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3091 : "pointer type mismatch in comparison('%s' and '%s')");
3093 if (op == '?') {
3094 /* pointers to void get preferred, otherwise the
3095 pointed to types minus qualifs should be compatible */
3096 type = *((pbt1 == VT_VOID) ? type1 : type2);
3097 /* combine qualifs */
3098 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
3099 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
3100 & newquals)
3102 /* copy the pointer target symbol */
3103 type.ref = sym_push(SYM_FIELD, &type.ref->type,
3104 0, type.ref->c);
3105 copied = 1;
3106 pointed_type(&type)->t |= newquals;
3108 /* pointers to incomplete arrays get converted to
3109 pointers to completed ones if possible */
3110 if (pt1->t & VT_ARRAY
3111 && pt2->t & VT_ARRAY
3112 && pointed_type(&type)->ref->c < 0
3113 && (pt1->ref->c > 0 || pt2->ref->c > 0))
3115 if (!copied)
3116 type.ref = sym_push(SYM_FIELD, &type.ref->type,
3117 0, type.ref->c);
3118 pointed_type(&type)->ref =
3119 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
3120 0, pointed_type(&type)->ref->c);
3121 pointed_type(&type)->ref->c =
3122 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
3126 if (TOK_ISCOND(op))
3127 type.t = VT_SIZE_T;
3128 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
3129 if (op != '?' || !compare_types(type1, type2, 1))
3130 ret = 0;
3131 type = *type1;
3132 } else if (is_float(bt1) || is_float(bt2)) {
3133 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
3134 type.t = VT_LDOUBLE;
3135 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
3136 type.t = VT_DOUBLE;
3137 } else {
3138 type.t = VT_FLOAT;
3140 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
3141 /* cast to biggest op */
3142 type.t = VT_LLONG | VT_LONG;
3143 if (bt1 == VT_LLONG)
3144 type.t &= t1;
3145 if (bt2 == VT_LLONG)
3146 type.t &= t2;
3147 /* convert to unsigned if it does not fit in a long long */
3148 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
3149 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
3150 type.t |= VT_UNSIGNED;
3151 } else {
3152 /* integer operations */
3153 type.t = VT_INT | (VT_LONG & (t1 | t2));
3154 /* convert to unsigned if it does not fit in an integer */
3155 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
3156 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
3157 type.t |= VT_UNSIGNED;
3159 if (dest)
3160 *dest = type;
3161 return ret;
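/* Some combined types produced above, for illustration:
       int + unsigned int              -> unsigned int
       int + long long                 -> long long
       long long + unsigned long long  -> unsigned long long
       float + double                  -> double
       anything + long double          -> long double
   and for '?' a 'T *' combined with a null pointer constant gives T *. */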
3164 /* generic gen_op: handles type problems */
3165 ST_FUNC void gen_op(int op)
3167 int u, t1, t2, bt1, bt2, t;
3168 CType type1, combtype;
3170 redo:
3171 t1 = vtop[-1].type.t;
3172 t2 = vtop[0].type.t;
3173 bt1 = t1 & VT_BTYPE;
3174 bt2 = t2 & VT_BTYPE;
3176 if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
3177 if (bt2 == VT_FUNC) {
3178 mk_pointer(&vtop->type);
3179 gaddrof();
3181 if (bt1 == VT_FUNC) {
3182 vswap();
3183 mk_pointer(&vtop->type);
3184 gaddrof();
3185 vswap();
3187 goto redo;
3188 } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
3189 tcc_error_noabort("invalid operand types for binary operation");
3190 vpop();
3191 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3192 /* at least one operand is a pointer */
3193 /* relational op: must be both pointers */
3194 if (TOK_ISCOND(op))
3195 goto std_op;
3196 /* if both pointers, then it must be the '-' op */
3197 if (bt1 == VT_PTR && bt2 == VT_PTR) {
3198 if (op != '-')
3199 tcc_error("cannot use pointers here");
3200 if (vtop[-1].type.t & VT_VLA) {
3201 vla_runtime_pointed_size(&vtop[-1].type);
3202 } else {
3203 vpushi(pointed_size(&vtop[-1].type));
3205 vrott(3);
3206 gen_opic(op);
3207 vtop->type.t = VT_PTRDIFF_T;
3208 vswap();
3209 gen_op(TOK_PDIV);
3210 } else {
3211 /* exactly one pointer : must be '+' or '-'. */
3212 if (op != '-' && op != '+')
3213 tcc_error("cannot use pointers here");
3214 /* Put pointer as first operand */
3215 if (bt2 == VT_PTR) {
3216 vswap();
3217 t = t1, t1 = t2, t2 = t;
3219 #if PTR_SIZE == 4
3220 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
3221 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3222 gen_cast_s(VT_INT);
3223 #endif
3224 type1 = vtop[-1].type;
3225 if (vtop[-1].type.t & VT_VLA)
3226 vla_runtime_pointed_size(&vtop[-1].type);
3227 else {
3228 u = pointed_size(&vtop[-1].type);
3229 if (u < 0)
3230 tcc_error("unknown array element size");
3231 #if PTR_SIZE == 8
3232 vpushll(u);
3233 #else
3234 /* XXX: cast to int ? (long long case) */
3235 vpushi(u);
3236 #endif
3238 gen_op('*');
3239 #ifdef CONFIG_TCC_BCHECK
3240 if (tcc_state->do_bounds_check && !const_wanted) {
3241 /* if bounded pointers, we generate a special code to
3242 test bounds */
3243 if (op == '-') {
3244 vpushi(0);
3245 vswap();
3246 gen_op('-');
3248 gen_bounded_ptr_add();
3249 } else
3250 #endif
3252 gen_opic(op);
3254 type1.t &= ~VT_ARRAY;
3255 /* restore the type in case gen_opic() swapped the operands */
3256 vtop->type = type1;
3258 } else {
3259 /* floats can only be used for a few operations */
3260 if (is_float(combtype.t)
3261 && op != '+' && op != '-' && op != '*' && op != '/'
3262 && !TOK_ISCOND(op))
3263 tcc_error("invalid operands for binary operation");
3264 else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
3265 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
3266 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
3267 t |= VT_UNSIGNED;
3268 t |= (VT_LONG & t1);
3269 combtype.t = t;
3271 std_op:
3272 t = t2 = combtype.t;
3273 /* XXX: currently, some unsigned operations are explicit, so
3274 we modify them here */
3275 if (t & VT_UNSIGNED) {
3276 if (op == TOK_SAR)
3277 op = TOK_SHR;
3278 else if (op == '/')
3279 op = TOK_UDIV;
3280 else if (op == '%')
3281 op = TOK_UMOD;
3282 else if (op == TOK_LT)
3283 op = TOK_ULT;
3284 else if (op == TOK_GT)
3285 op = TOK_UGT;
3286 else if (op == TOK_LE)
3287 op = TOK_ULE;
3288 else if (op == TOK_GE)
3289 op = TOK_UGE;
3291 vswap();
3292 gen_cast_s(t);
3293 vswap();
3294 /* special case for shifts and long long: we keep the shift as
3295 an integer */
3296 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
3297 t2 = VT_INT;
3298 gen_cast_s(t2);
3299 if (is_float(t))
3300 gen_opif(op);
3301 else
3302 gen_opic(op);
3303 if (TOK_ISCOND(op)) {
3304 /* relational op: the result is an int */
3305 vtop->type.t = VT_INT;
3306 } else {
3307 vtop->type.t = t;
3310 // Make sure that we have converted to an rvalue:
3311 if (vtop->r & VT_LVAL)
3312 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
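/* Pointer arithmetic illustration for the code above: with 'int *p',
   'p + 3' pushes pointed_size() == 4 and multiplies, so the generated
   add advances by 12 bytes; 'p - q' for two int pointers subtracts the
   addresses and divides by 4 via TOK_PDIV, yielding a ptrdiff_t. */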
3315 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3316 #define gen_cvt_itof1 gen_cvt_itof
3317 #else
3318 /* generic itof for unsigned long long case */
3319 static void gen_cvt_itof1(int t)
3321 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
3322 (VT_LLONG | VT_UNSIGNED)) {
3324 if (t == VT_FLOAT)
3325 vpush_global_sym(&func_old_type, TOK___floatundisf);
3326 #if LDOUBLE_SIZE != 8
3327 else if (t == VT_LDOUBLE)
3328 vpush_global_sym(&func_old_type, TOK___floatundixf);
3329 #endif
3330 else
3331 vpush_global_sym(&func_old_type, TOK___floatundidf);
3332 vrott(2);
3333 gfunc_call(1);
3334 vpushi(0);
3335 PUT_R_RET(vtop, t);
3336 } else {
3337 gen_cvt_itof(t);
3340 #endif
3342 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3343 #define gen_cvt_ftoi1 gen_cvt_ftoi
3344 #else
3345 /* generic ftoi for unsigned long long case */
3346 static void gen_cvt_ftoi1(int t)
3348 int st;
3349 if (t == (VT_LLONG | VT_UNSIGNED)) {
3350 /* not handled natively */
3351 st = vtop->type.t & VT_BTYPE;
3352 if (st == VT_FLOAT)
3353 vpush_global_sym(&func_old_type, TOK___fixunssfdi);
3354 #if LDOUBLE_SIZE != 8
3355 else if (st == VT_LDOUBLE)
3356 vpush_global_sym(&func_old_type, TOK___fixunsxfdi);
3357 #endif
3358 else
3359 vpush_global_sym(&func_old_type, TOK___fixunsdfdi);
3360 vrott(2);
3361 gfunc_call(1);
3362 vpushi(0);
3363 PUT_R_RET(vtop, t);
3364 } else {
3365 gen_cvt_ftoi(t);
3368 #endif
3370 /* special delayed cast for char/short */
3371 static void force_charshort_cast(void)
3373 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3374 int dbt = vtop->type.t;
3375 vtop->r &= ~VT_MUSTCAST;
3376 vtop->type.t = sbt;
3377 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3378 vtop->type.t = dbt;
3381 static void gen_cast_s(int t)
3383 CType type;
3384 type.t = t;
3385 type.ref = NULL;
3386 gen_cast(&type);
3389 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3390 static void gen_cast(CType *type)
3392 int sbt, dbt, sf, df, c;
3393 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
3395 /* special delayed cast for char/short */
3396 if (vtop->r & VT_MUSTCAST)
3397 force_charshort_cast();
3399 /* bitfields first get cast to ints */
3400 if (vtop->type.t & VT_BITFIELD)
3401 gv(RC_INT);
3403 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
3404 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
3405 if (sbt == VT_FUNC)
3406 sbt = VT_PTR;
3408 again:
3409 if (sbt != dbt) {
3410 sf = is_float(sbt);
3411 df = is_float(dbt);
3412 dbt_bt = dbt & VT_BTYPE;
3413 sbt_bt = sbt & VT_BTYPE;
3415 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3416 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3417 c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
3418 #endif
3419 if (c) {
3420 /* constant case: we can do it now */
3421 /* XXX: in ISOC, cannot do it if error in convert */
3422 if (sbt == VT_FLOAT)
3423 vtop->c.ld = vtop->c.f;
3424 else if (sbt == VT_DOUBLE)
3425 vtop->c.ld = vtop->c.d;
3427 if (df) {
3428 if (sbt_bt == VT_LLONG) {
3429 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
3430 vtop->c.ld = vtop->c.i;
3431 else
3432 vtop->c.ld = -(long double)-vtop->c.i;
3433 } else if(!sf) {
3434 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
3435 vtop->c.ld = (uint32_t)vtop->c.i;
3436 else
3437 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
3440 if (dbt == VT_FLOAT)
3441 vtop->c.f = (float)vtop->c.ld;
3442 else if (dbt == VT_DOUBLE)
3443 vtop->c.d = (double)vtop->c.ld;
3444 } else if (sf && dbt == VT_BOOL) {
3445 vtop->c.i = (vtop->c.ld != 0);
3446 } else {
3447 if(sf)
3448 vtop->c.i = vtop->c.ld;
3449 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
3451 else if (sbt & VT_UNSIGNED)
3452 vtop->c.i = (uint32_t)vtop->c.i;
3453 else
3454 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
3456 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
3458 else if (dbt == VT_BOOL)
3459 vtop->c.i = (vtop->c.i != 0);
3460 else {
3461 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
3462 dbt_bt == VT_SHORT ? 0xffff :
3463 0xffffffff;
3464 vtop->c.i &= m;
3465 if (!(dbt & VT_UNSIGNED))
3466 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
3469 goto done;
3471 } else if (dbt == VT_BOOL
3472 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
3473 == (VT_CONST | VT_SYM)) {
3474 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3475 vtop->r = VT_CONST;
3476 vtop->c.i = 1;
3477 goto done;
3480 /* cannot generate code for global or static initializers */
3481 if (STATIC_DATA_WANTED)
3482 goto done;
3484 /* non constant case: generate code */
3485 if (dbt == VT_BOOL) {
3486 gen_test_zero(TOK_NE);
3487 goto done;
3490 if (sf || df) {
3491 if (sf && df) {
3492 /* convert from fp to fp */
3493 gen_cvt_ftof(dbt);
3494 } else if (df) {
3495 /* convert int to fp */
3496 gen_cvt_itof1(dbt);
3497 } else {
3498 /* convert fp to int */
3499 sbt = dbt;
3500 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
3501 sbt = VT_INT;
3502 gen_cvt_ftoi1(sbt);
3503 goto again; /* may need char/short cast */
3505 goto done;
3508 ds = btype_size(dbt_bt);
3509 ss = btype_size(sbt_bt);
3510 if (ds == 0 || ss == 0) {
3511 if (dbt_bt == VT_VOID)
3512 goto done;
3513 cast_error(&vtop->type, type);
3515 if (IS_ENUM(type->t) && type->ref->c < 0)
3516 tcc_error("cast to incomplete type");
3518 /* same size and no sign conversion needed */
3519 if (ds == ss && ds >= 4)
3520 goto done;
3521 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
3522 tcc_warning("cast between pointer and integer of different size");
3523 if (sbt_bt == VT_PTR) {
3524 /* put integer type to allow logical operations below */
3525 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
3529 /* the processor allows { int a = 0, b = *(char*)&a; }
3530 That means that if we cast to a smaller width, we can just
3531 change the type and still read it later. */
3532 #define ALLOW_SUBTYPE_ACCESS 1
3534 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
3535 /* value still in memory */
3536 if (ds <= ss)
3537 goto done;
3538 /* ss <= 4 here */
3539 if (ds <= 4) {
3540 gv(RC_INT);
3541 goto done; /* no 64bit involved */
3544 gv(RC_INT);
3546 trunc = 0;
3547 #if PTR_SIZE == 4
3548 if (ds == 8) {
3549 /* generate high word */
3550 if (sbt & VT_UNSIGNED) {
3551 vpushi(0);
3552 gv(RC_INT);
3553 } else {
3554 gv_dup();
3555 vpushi(31);
3556 gen_op(TOK_SAR);
3558 lbuild(dbt);
3559 } else if (ss == 8) {
3560 /* from long long: just take low order word */
3561 lexpand();
3562 vpop();
3564 ss = 4;
3566 #elif PTR_SIZE == 8
3567 if (ds == 8) {
3568 /* need to convert from 32bit to 64bit */
3569 if (sbt & VT_UNSIGNED) {
3570 #if defined(TCC_TARGET_RISCV64)
3571 /* RISC-V keeps 32bit vals in registers sign-extended.
3572 So here we need a zero-extension. */
3573 trunc = 32;
3574 #else
3575 goto done;
3576 #endif
3577 } else {
3578 gen_cvt_sxtw();
3579 goto done;
3581 ss = ds, ds = 4, dbt = sbt;
3582 } else if (ss == 8) {
3583 /* XXX some architectures (e.g. risc-v) would prefer
3584 this to merely be a 32-to-64 sign or zero-
3585 extension. */
3586 trunc = 32; /* zero upper 32 bits */
3587 } else {
3588 ss = 4;
3590 #endif
3592 if (ds >= ss)
3593 goto done;
3594 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3595 if (ss == 4) {
3596 gen_cvt_csti(dbt);
3597 goto done;
3599 #endif
3600 bits = (ss - ds) * 8;
3601 /* for unsigned, gen_op will convert SAR to SHR */
3602 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
3603 vpushi(bits);
3604 gen_op(TOK_SHL);
3605 vpushi(bits - trunc);
3606 gen_op(TOK_SAR);
3607 vpushi(trunc);
3608 gen_op(TOK_SHR);
3610 done:
3611 vtop->type = *type;
3612 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
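/* Constant casts are folded right here, e.g. (char)0x1234 becomes 0x34,
   (signed char)255 becomes -1, (unsigned short)-1 becomes 0xffff, and
   casting the address of a symbol to _Bool yields the constant 1. */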
3615 /* return type size as known at compile time. Put alignment at 'a' */
3616 ST_FUNC int type_size(CType *type, int *a)
3618 Sym *s;
3619 int bt;
3621 bt = type->t & VT_BTYPE;
3622 if (bt == VT_STRUCT) {
3623 /* struct/union */
3624 s = type->ref;
3625 *a = s->r;
3626 return s->c;
3627 } else if (bt == VT_PTR) {
3628 if (type->t & VT_ARRAY) {
3629 int ts;
3631 s = type->ref;
3632 ts = type_size(&s->type, a);
3634 if (ts < 0 && s->c < 0)
3635 ts = -ts;
3637 return ts * s->c;
3638 } else {
3639 *a = PTR_SIZE;
3640 return PTR_SIZE;
3642 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3643 return -1; /* incomplete enum */
3644 } else if (bt == VT_LDOUBLE) {
3645 *a = LDOUBLE_ALIGN;
3646 return LDOUBLE_SIZE;
3647 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
3648 #ifdef TCC_TARGET_I386
3649 #ifdef TCC_TARGET_PE
3650 *a = 8;
3651 #else
3652 *a = 4;
3653 #endif
3654 #elif defined(TCC_TARGET_ARM)
3655 #ifdef TCC_ARM_EABI
3656 *a = 8;
3657 #else
3658 *a = 4;
3659 #endif
3660 #else
3661 *a = 8;
3662 #endif
3663 return 8;
3664 } else if (bt == VT_INT || bt == VT_FLOAT) {
3665 *a = 4;
3666 return 4;
3667 } else if (bt == VT_SHORT) {
3668 *a = 2;
3669 return 2;
3670 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3671 *a = 8;
3672 return 16;
3673 } else {
3674 /* char, void, function, _Bool */
3675 *a = 1;
3676 return 1;
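/* Examples: 'int' gives size 4, align 4; 'short' gives 2, 2; 'int[10]'
   gives 40 with the element's alignment; a struct returns the values
   computed by struct_layout() (ref->c / ref->r).  Note that long long
   and double are only 4-aligned on i386 (non-PE) and ARM without EABI. */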
3680 /* push type size as known at run time on top of the value stack. Put
3681 alignment at 'a' */
3682 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
3684 if (type->t & VT_VLA) {
3685 type_size(&type->ref->type, a);
3686 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3687 } else {
3688 vpushi(type_size(type, a));
3692 /* return the pointed type of t */
3693 static inline CType *pointed_type(CType *type)
3695 return &type->ref->type;
3698 /* modify type so that it is a pointer to type. */
3699 ST_FUNC void mk_pointer(CType *type)
3701 Sym *s;
3702 s = sym_push(SYM_FIELD, type, 0, -1);
3703 type->t = VT_PTR | (type->t & VT_STORAGE);
3704 type->ref = s;
3707 /* return true if type1 and type2 are exactly the same (including
3708 qualifiers).
3710 static int is_compatible_types(CType *type1, CType *type2)
3712 return compare_types(type1,type2,0);
3715 /* return true if type1 and type2 are the same (ignoring qualifiers).
3717 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3719 return compare_types(type1,type2,1);
3722 static void cast_error(CType *st, CType *dt)
3724 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
3727 /* verify type compatibility to store vtop in 'dt' type */
3728 static void verify_assign_cast(CType *dt)
3730 CType *st, *type1, *type2;
3731 int dbt, sbt, qualwarn, lvl;
3733 st = &vtop->type; /* source type */
3734 dbt = dt->t & VT_BTYPE;
3735 sbt = st->t & VT_BTYPE;
3736 if (dt->t & VT_CONSTANT)
3737 tcc_warning("assignment of read-only location");
3738 switch(dbt) {
3739 case VT_VOID:
3740 if (sbt != dbt)
3741 tcc_error("assignment to void expression");
3742 break;
3743 case VT_PTR:
3744 /* special cases for pointers */
3745 /* '0' can also be a pointer */
3746 if (is_null_pointer(vtop))
3747 break;
3748 /* accept implicit integer to pointer cast with a warning */
3749 if (is_integer_btype(sbt)) {
3750 tcc_warning("assignment makes pointer from integer without a cast");
3751 break;
3753 type1 = pointed_type(dt);
3754 if (sbt == VT_PTR)
3755 type2 = pointed_type(st);
3756 else if (sbt == VT_FUNC)
3757 type2 = st; /* a function is implicitly a function pointer */
3758 else
3759 goto error;
3760 if (is_compatible_types(type1, type2))
3761 break;
3762 for (qualwarn = lvl = 0;; ++lvl) {
3763 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
3764 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
3765 qualwarn = 1;
3766 dbt = type1->t & (VT_BTYPE|VT_LONG);
3767 sbt = type2->t & (VT_BTYPE|VT_LONG);
3768 if (dbt != VT_PTR || sbt != VT_PTR)
3769 break;
3770 type1 = pointed_type(type1);
3771 type2 = pointed_type(type2);
3773 if (!is_compatible_unqualified_types(type1, type2)) {
3774 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
3775 /* void * can match anything */
3776 } else if (dbt == sbt
3777 && is_integer_btype(sbt & VT_BTYPE)
3778 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
3779 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
3780 /* Like GCC, don't warn by default for mere changes
3781 in pointer target signedness. Do warn for different
3782 base types, though, in particular for unsigned enums
3783 and signed int targets. */
3784 } else {
3785 tcc_warning("assignment from incompatible pointer type");
3786 break;
3789 if (qualwarn)
3790 tcc_warning("assignment discards qualifiers from pointer target type");
3791 break;
3792 case VT_BYTE:
3793 case VT_SHORT:
3794 case VT_INT:
3795 case VT_LLONG:
3796 if (sbt == VT_PTR || sbt == VT_FUNC) {
3797 tcc_warning("assignment makes integer from pointer without a cast");
3798 } else if (sbt == VT_STRUCT) {
3799 goto case_VT_STRUCT;
3801 /* XXX: more tests */
3802 break;
3803 case VT_STRUCT:
3804 case_VT_STRUCT:
3805 if (!is_compatible_unqualified_types(dt, st)) {
3806 error:
3807 cast_error(st, dt);
3809 break;
3813 static void gen_assign_cast(CType *dt)
3815 verify_assign_cast(dt);
3816 gen_cast(dt);
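/* For illustration: 'int *p = 42;' triggers "assignment makes pointer
   from integer without a cast", assigning a 'const char *' to a
   'char *' triggers "assignment discards qualifiers from pointer target
   type", while 'char *s = 0;' passes silently as a null pointer
   constant. */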
3819 /* store vtop in lvalue pushed on stack */
3820 ST_FUNC void vstore(void)
3822 int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;
3824 ft = vtop[-1].type.t;
3825 sbt = vtop->type.t & VT_BTYPE;
3826 dbt = ft & VT_BTYPE;
3828 verify_assign_cast(&vtop[-1].type);
3830 if (sbt == VT_STRUCT) {
3831 /* if structure, only generate pointer */
3832 /* structure assignment : generate memcpy */
3833 /* XXX: optimize if small size */
3834 size = type_size(&vtop->type, &align);
3836 /* destination */
3837 vswap();
3838 #ifdef CONFIG_TCC_BCHECK
3839 if (vtop->r & VT_MUSTBOUND)
3840 gbound(); /* check would be wrong after gaddrof() */
3841 #endif
3842 vtop->type.t = VT_PTR;
3843 gaddrof();
3845 /* address of memcpy() */
3846 #ifdef TCC_ARM_EABI
3847 if(!(align & 7))
3848 vpush_global_sym(&func_old_type, TOK_memmove8);
3849 else if(!(align & 3))
3850 vpush_global_sym(&func_old_type, TOK_memmove4);
3851 else
3852 #endif
3853 /* Use memmove, rather than memcpy, as dest and src may be the same: */
3854 vpush_global_sym(&func_old_type, TOK_memmove);
3856 vswap();
3857 /* source */
3858 vpushv(vtop - 2);
3859 #ifdef CONFIG_TCC_BCHECK
3860 if (vtop->r & VT_MUSTBOUND)
3861 gbound();
3862 #endif
3863 vtop->type.t = VT_PTR;
3864 gaddrof();
3865 /* type size */
3866 vpushi(size);
3867 gfunc_call(3);
3868 /* leave source on stack */
3870 } else if (ft & VT_BITFIELD) {
3871 /* bitfield store handling */
3873 /* save lvalue as expression result (example: s.b = s.a = n;) */
3874 vdup(), vtop[-1] = vtop[-2];
3876 bit_pos = BIT_POS(ft);
3877 bit_size = BIT_SIZE(ft);
3878 /* remove bit field info to avoid loops */
3879 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
3881 if (dbt == VT_BOOL) {
3882 gen_cast(&vtop[-1].type);
3883 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
3885 r = adjust_bf(vtop - 1, bit_pos, bit_size);
3886 if (dbt != VT_BOOL) {
3887 gen_cast(&vtop[-1].type);
3888 dbt = vtop[-1].type.t & VT_BTYPE;
3890 if (r == VT_STRUCT) {
3891 store_packed_bf(bit_pos, bit_size);
3892 } else {
3893 unsigned long long mask = (1ULL << bit_size) - 1;
3894 if (dbt != VT_BOOL) {
3895 /* mask source */
3896 if (dbt == VT_LLONG)
3897 vpushll(mask);
3898 else
3899 vpushi((unsigned)mask);
3900 gen_op('&');
3902 /* shift source */
3903 vpushi(bit_pos);
3904 gen_op(TOK_SHL);
3905 vswap();
3906 /* duplicate destination */
3907 vdup();
3908 vrott(3);
3909 /* load destination, mask and or with source */
3910 if (dbt == VT_LLONG)
3911 vpushll(~(mask << bit_pos));
3912 else
3913 vpushi(~((unsigned)mask << bit_pos));
3914 gen_op('&');
3915 gen_op('|');
3916 /* store result */
3917 vstore();
3918 /* ... and discard */
3919 vpop();
3921 } else if (dbt == VT_VOID) {
3922 --vtop;
3923 } else {
3924 /* optimize char/short casts */
3925 delayed_cast = 0;
3926 if ((dbt == VT_BYTE || dbt == VT_SHORT)
3927 && is_integer_btype(sbt)
3929 if ((vtop->r & VT_MUSTCAST)
3930 && btype_size(dbt) > btype_size(sbt)
3932 force_charshort_cast();
3933 delayed_cast = 1;
3934 } else {
3935 gen_cast(&vtop[-1].type);
3938 #ifdef CONFIG_TCC_BCHECK
3939 /* bound check case */
3940 if (vtop[-1].r & VT_MUSTBOUND) {
3941 vswap();
3942 gbound();
3943 vswap();
3945 #endif
3946 gv(RC_TYPE(dbt)); /* generate value */
3948 if (delayed_cast) {
3949 vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
3950 //tcc_warning("delay cast %x -> %x", sbt, dbt);
3951 vtop->type.t = ft & VT_TYPE;
3954 /* if lvalue was saved on stack, must read it */
3955 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
3956 SValue sv;
3957 r = get_reg(RC_INT);
3958 sv.type.t = VT_PTRDIFF_T;
3959 sv.r = VT_LOCAL | VT_LVAL;
3960 sv.c.i = vtop[-1].c.i;
3961 load(r, &sv);
3962 vtop[-1].r = r | VT_LVAL;
3965 r = vtop->r & VT_VALMASK;
3966 /* two-word case handling:
3967 store the second register at word + 4 (or +8 for x86-64) */
3968 if (USING_TWO_WORDS(dbt)) {
3969 int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
3970 vtop[-1].type.t = load_type;
3971 store(r, vtop - 1);
3972 vswap();
3973 /* convert to int to increment easily */
3974 vtop->type.t = VT_PTRDIFF_T;
3975 gaddrof();
3976 vpushs(PTR_SIZE);
3977 gen_op('+');
3978 vtop->r |= VT_LVAL;
3979 vswap();
3980 vtop[-1].type.t = load_type;
3981 /* XXX: it works because r2 is spilled last ! */
3982 store(vtop->r2, vtop - 1);
3983 } else {
3984 /* single word */
3985 store(r, vtop - 1);
3987 vswap();
3988 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
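/* Bitfield store illustration for the masking sequence above: writing
   v into a 5-bit field at bit position 3 generates roughly
       dest = (dest & ~(0x1f << 3)) | ((v & 0x1f) << 3);
   i.e. mask the source, shift it into place, clear the old bits and OR
   the two together. */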
3992 /* 'post' selects post- vs. pre-increment/decrement. 'c' is the token ++ or -- */
3993 ST_FUNC void inc(int post, int c)
3995 test_lvalue();
3996 vdup(); /* save lvalue */
3997 if (post) {
3998 gv_dup(); /* duplicate value */
3999 vrotb(3);
4000 vrotb(3);
4002 /* add constant */
4003 vpushi(c - TOK_MID);
4004 gen_op('+');
4005 vstore(); /* store value */
4006 if (post)
4007 vpop(); /* if post op, return saved value */
4010 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
4012 /* read the string */
4013 if (tok != TOK_STR)
4014 expect(msg);
4015 cstr_new(astr);
4016 while (tok == TOK_STR) {
4017 /* XXX: add \0 handling too ? */
4018 cstr_cat(astr, tokc.str.data, -1);
4019 next();
4021 cstr_ccat(astr, '\0');
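/* Adjacent string literals are concatenated here, so e.g.
   __attribute__((section(".rodata" ".str"))) yields the single section
   name ".rodata.str". */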
4024 /* If I is >= 1 and a power of two, returns log2(i)+1.
4025 If I is 0 returns 0. */
4026 static int exact_log2p1(int i)
4028 int ret;
4029 if (!i)
4030 return 0;
4031 for (ret = 1; i >= 1 << 8; ret += 8)
4032 i >>= 8;
4033 if (i >= 1 << 4)
4034 ret += 4, i >>= 4;
4035 if (i >= 1 << 2)
4036 ret += 2, i >>= 2;
4037 if (i >= 1 << 1)
4038 ret++;
4039 return ret;
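/* E.g. exact_log2p1(0) == 0, exact_log2p1(1) == 1, exact_log2p1(8) == 4,
   exact_log2p1(4096) == 13.  The attribute code below stores this value
   in a.aligned and recovers the alignment as 1 << (aligned - 1). */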
4042 /* Parse __attribute__((...)) GNUC extension. */
4043 static void parse_attribute(AttributeDef *ad)
4045 int t, n;
4046 CString astr;
4048 redo:
4049 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
4050 return;
4051 next();
4052 skip('(');
4053 skip('(');
4054 while (tok != ')') {
4055 if (tok < TOK_IDENT)
4056 expect("attribute name");
4057 t = tok;
4058 next();
4059 switch(t) {
4060 case TOK_CLEANUP1:
4061 case TOK_CLEANUP2:
4063 Sym *s;
4065 skip('(');
4066 s = sym_find(tok);
4067 if (!s) {
4068 tcc_warning("implicit declaration of function '%s'",
4069 get_tok_str(tok, &tokc));
4070 s = external_global_sym(tok, &func_old_type);
4071 } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
4072 tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
4073 ad->cleanup_func = s;
4074 next();
4075 skip(')');
4076 break;
4078 case TOK_CONSTRUCTOR1:
4079 case TOK_CONSTRUCTOR2:
4080 ad->f.func_ctor = 1;
4081 break;
4082 case TOK_DESTRUCTOR1:
4083 case TOK_DESTRUCTOR2:
4084 ad->f.func_dtor = 1;
4085 break;
4086 case TOK_SECTION1:
4087 case TOK_SECTION2:
4088 skip('(');
4089 parse_mult_str(&astr, "section name");
4090 ad->section = find_section(tcc_state, (char *)astr.data);
4091 skip(')');
4092 cstr_free(&astr);
4093 break;
4094 case TOK_ALIAS1:
4095 case TOK_ALIAS2:
4096 skip('(');
4097 parse_mult_str(&astr, "alias(\"target\")");
4098 ad->alias_target = /* save string as token, for later */
4099 tok_alloc((char*)astr.data, astr.size-1)->tok;
4100 skip(')');
4101 cstr_free(&astr);
4102 break;
4103 case TOK_VISIBILITY1:
4104 case TOK_VISIBILITY2:
4105 skip('(');
4106 parse_mult_str(&astr,
4107 "visibility(\"default|hidden|internal|protected\")");
4108 if (!strcmp (astr.data, "default"))
4109 ad->a.visibility = STV_DEFAULT;
4110 else if (!strcmp (astr.data, "hidden"))
4111 ad->a.visibility = STV_HIDDEN;
4112 else if (!strcmp (astr.data, "internal"))
4113 ad->a.visibility = STV_INTERNAL;
4114 else if (!strcmp (astr.data, "protected"))
4115 ad->a.visibility = STV_PROTECTED;
4116 else
4117 expect("visibility(\"default|hidden|internal|protected\")");
4118 skip(')');
4119 cstr_free(&astr);
4120 break;
4121 case TOK_ALIGNED1:
4122 case TOK_ALIGNED2:
4123 if (tok == '(') {
4124 next();
4125 n = expr_const();
4126 if (n <= 0 || (n & (n - 1)) != 0)
4127 tcc_error("alignment must be a positive power of two");
4128 skip(')');
4129 } else {
4130 n = MAX_ALIGN;
4132 ad->a.aligned = exact_log2p1(n);
4133 if (n != 1 << (ad->a.aligned - 1))
4134 tcc_error("alignment of %d is larger than implemented", n);
4135 break;
4136 case TOK_PACKED1:
4137 case TOK_PACKED2:
4138 ad->a.packed = 1;
4139 break;
4140 case TOK_WEAK1:
4141 case TOK_WEAK2:
4142 ad->a.weak = 1;
4143 break;
4144 case TOK_UNUSED1:
4145 case TOK_UNUSED2:
4146 /* currently, no need to handle it because tcc does not
4147 track unused objects */
4148 break;
4149 case TOK_NORETURN1:
4150 case TOK_NORETURN2:
4151 ad->f.func_noreturn = 1;
4152 break;
4153 case TOK_CDECL1:
4154 case TOK_CDECL2:
4155 case TOK_CDECL3:
4156 ad->f.func_call = FUNC_CDECL;
4157 break;
4158 case TOK_STDCALL1:
4159 case TOK_STDCALL2:
4160 case TOK_STDCALL3:
4161 ad->f.func_call = FUNC_STDCALL;
4162 break;
4163 #ifdef TCC_TARGET_I386
4164 case TOK_REGPARM1:
4165 case TOK_REGPARM2:
4166 skip('(');
4167 n = expr_const();
4168 if (n > 3)
4169 n = 3;
4170 else if (n < 0)
4171 n = 0;
4172 if (n > 0)
4173 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
4174 skip(')');
4175 break;
4176 case TOK_FASTCALL1:
4177 case TOK_FASTCALL2:
4178 case TOK_FASTCALL3:
4179 ad->f.func_call = FUNC_FASTCALLW;
4180 break;
4181 #endif
4182 case TOK_MODE:
4183 skip('(');
4184 switch(tok) {
4185 case TOK_MODE_DI:
4186 ad->attr_mode = VT_LLONG + 1;
4187 break;
4188 case TOK_MODE_QI:
4189 ad->attr_mode = VT_BYTE + 1;
4190 break;
4191 case TOK_MODE_HI:
4192 ad->attr_mode = VT_SHORT + 1;
4193 break;
4194 case TOK_MODE_SI:
4195 case TOK_MODE_word:
4196 ad->attr_mode = VT_INT + 1;
4197 break;
4198 default:
4199 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
4200 break;
4202 next();
4203 skip(')');
4204 break;
4205 case TOK_DLLEXPORT:
4206 ad->a.dllexport = 1;
4207 break;
4208 case TOK_NODECORATE:
4209 ad->a.nodecorate = 1;
4210 break;
4211 case TOK_DLLIMPORT:
4212 ad->a.dllimport = 1;
4213 break;
4214 default:
4215 if (tcc_state->warn_unsupported)
4216 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
4217 /* skip parameters */
4218 if (tok == '(') {
4219 int parenthesis = 0;
4220 do {
4221 if (tok == '(')
4222 parenthesis++;
4223 else if (tok == ')')
4224 parenthesis--;
4225 next();
4226 } while (parenthesis && tok != -1);
4228 break;
4230 if (tok != ',')
4231 break;
4232 next();
4234 skip(')');
4235 skip(')');
4236 goto redo;
4239 static Sym * find_field (CType *type, int v, int *cumofs)
4241 Sym *s = type->ref;
4242 v |= SYM_FIELD;
4243 while ((s = s->next) != NULL) {
4244 if ((s->v & SYM_FIELD) &&
4245 (s->type.t & VT_BTYPE) == VT_STRUCT &&
4246 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
4247 Sym *ret = find_field (&s->type, v, cumofs);
4248 if (ret) {
4249 *cumofs += s->c;
4250 return ret;
4253 if (s->v == v)
4254 break;
4256 return s;
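/* For example, with 'struct S { struct { int x; }; int y; };' a lookup
   of 'x' descends into the anonymous member, adds that member's offset
   to *cumofs and returns the Sym for 'x', so the caller gets the offset
   of 'x' relative to S. */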
4259 static void struct_layout(CType *type, AttributeDef *ad)
4261 int size, align, maxalign, offset, c, bit_pos, bit_size;
4262 int packed, a, bt, prevbt, prev_bit_size;
4263 int pcc = !tcc_state->ms_bitfields;
4264 int pragma_pack = *tcc_state->pack_stack_ptr;
4265 Sym *f;
4267 maxalign = 1;
4268 offset = 0;
4269 c = 0;
4270 bit_pos = 0;
4271 prevbt = VT_STRUCT; /* make it never match */
4272 prev_bit_size = 0;
4274 //#define BF_DEBUG
4276 for (f = type->ref->next; f; f = f->next) {
4277 if (f->type.t & VT_BITFIELD)
4278 bit_size = BIT_SIZE(f->type.t);
4279 else
4280 bit_size = -1;
4281 size = type_size(&f->type, &align);
4282 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
4283 packed = 0;
4285 if (pcc && bit_size == 0) {
4286 /* in pcc mode, packing does not affect zero-width bitfields */
4288 } else {
4289 /* in pcc mode, attribute packed overrides if set. */
4290 if (pcc && (f->a.packed || ad->a.packed))
4291 align = packed = 1;
4293 /* pragma pack overrides align if smaller, and always packs bitfields */
4294 if (pragma_pack) {
4295 packed = 1;
4296 if (pragma_pack < align)
4297 align = pragma_pack;
4298 /* in pcc mode pragma pack also overrides individual align */
4299 if (pcc && pragma_pack < a)
4300 a = 0;
4303 /* some individual align was specified */
4304 if (a)
4305 align = a;
4307 if (type->ref->type.t == VT_UNION) {
4308 if (pcc && bit_size >= 0)
4309 size = (bit_size + 7) >> 3;
4310 offset = 0;
4311 if (size > c)
4312 c = size;
4314 } else if (bit_size < 0) {
4315 if (pcc)
4316 c += (bit_pos + 7) >> 3;
4317 c = (c + align - 1) & -align;
4318 offset = c;
4319 if (size > 0)
4320 c += size;
4321 bit_pos = 0;
4322 prevbt = VT_STRUCT;
4323 prev_bit_size = 0;
4325 } else {
4326 /* A bit-field. Layout is more complicated. There are two
4327 options: PCC (GCC) compatible and MS compatible */
4328 if (pcc) {
4329 /* In PCC layout a bit-field is placed adjacent to the
4330 preceding bit-fields, except if:
4331 - it has zero-width
4332 - an individual alignment was given
4333 - it would overflow its base type container and
4334 there is no packing */
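/* Hedged example of these rules (assuming a target with 4-byte, 4-aligned int):

       struct S { int a : 3; int : 0; int b : 3; };

   'a' starts at bit 0 of the first int container; the zero-width field takes
   the new_field path below and closes that container, so 'b' starts at byte
   offset 4 and sizeof(struct S) would typically be 8. */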
4335 if (bit_size == 0) {
4336 new_field:
4337 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
4338 bit_pos = 0;
4339 } else if (f->a.aligned) {
4340 goto new_field;
4341 } else if (!packed) {
4342 int a8 = align * 8;
4343 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
4344 if (ofs > size / align)
4345 goto new_field;
4348 /* in pcc mode, long long bitfields have type int if they fit */
4349 if (size == 8 && bit_size <= 32)
4350 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
4352 while (bit_pos >= align * 8)
4353 c += align, bit_pos -= align * 8;
4354 offset = c;
4356 /* In PCC layout named bit-fields influence the alignment
4357 of the containing struct using the base type's alignment,
4358 except for packed fields (which here have correct align). */
4359 if (f->v & SYM_FIRST_ANOM
4360 // && bit_size // ??? gcc on ARM/rpi does that
4362 align = 1;
4364 } else {
4365 bt = f->type.t & VT_BTYPE;
4366 if ((bit_pos + bit_size > size * 8)
4367 || (bit_size > 0) == (bt != prevbt)
4369 c = (c + align - 1) & -align;
4370 offset = c;
4371 bit_pos = 0;
4372 /* In MS bitfield mode a bit-field run always uses
4373 at least as many bits as the underlying type.
4374 To start a new run it's also required that this
4375 or the last bit-field had non-zero width. */
4376 if (bit_size || prev_bit_size)
4377 c += size;
4379 /* In MS layout the record's alignment is normally
4380 influenced by the field, except for a zero-width
4381 field at the start of a run (further zero-width
4382 fields do influence it again). */
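/* Hedged comparison of the two modes (assuming 1-byte char, 4-byte int):

       struct T { char c : 1; int i : 1; };

   PCC/GCC layout can place 'i' in the same int container as 'c', so
   sizeof(struct T) is typically 4.  MS layout starts a new run because the
   base type changes (char -> int), so 'i' gets its own int-sized unit and
   sizeof(struct T) is typically 8. */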
4383 if (bit_size == 0 && prevbt != bt)
4384 align = 1;
4385 prevbt = bt;
4386 prev_bit_size = bit_size;
4389 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4390 | (bit_pos << VT_STRUCT_SHIFT);
4391 bit_pos += bit_size;
4393 if (align > maxalign)
4394 maxalign = align;
4396 #ifdef BF_DEBUG
4397 printf("set field %s offset %-2d size %-2d align %-2d",
4398 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
4399 if (f->type.t & VT_BITFIELD) {
4400 printf(" pos %-2d bits %-2d",
4401 BIT_POS(f->type.t),
4402 BIT_SIZE(f->type.t)
4405 printf("\n");
4406 #endif
4408 f->c = offset;
4409 f->r = 0;
4412 if (pcc)
4413 c += (bit_pos + 7) >> 3;
4415 /* store size and alignment */
4416 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4417 if (a < maxalign)
4418 a = maxalign;
4419 type->ref->r = a;
4420 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4421 /* can happen if individual align for some member was given. In
4422 this case MSVC ignores maxalign when aligning the size */
4423 a = pragma_pack;
4424 if (a < bt)
4425 a = bt;
4427 c = (c + a - 1) & -a;
4428 type->ref->c = c;
4430 #ifdef BF_DEBUG
4431 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4432 #endif
4434 /* check whether we can access bitfields by their type */
4435 for (f = type->ref->next; f; f = f->next) {
4436 int s, px, cx, c0;
4437 CType t;
4439 if (0 == (f->type.t & VT_BITFIELD))
4440 continue;
4441 f->type.ref = f;
4442 f->auxtype = -1;
4443 bit_size = BIT_SIZE(f->type.t);
4444 if (bit_size == 0)
4445 continue;
4446 bit_pos = BIT_POS(f->type.t);
4447 size = type_size(&f->type, &align);
4448 if (bit_pos + bit_size <= size * 8 && f->c + size <= c)
4449 continue;
4451 /* try to access the field using a different type */
4452 c0 = -1, s = align = 1;
4453 t.t = VT_BYTE;
4454 for (;;) {
4455 px = f->c * 8 + bit_pos;
4456 cx = (px >> 3) & -align;
4457 px = px - (cx << 3);
4458 if (c0 == cx)
4459 break;
4460 s = (px + bit_size + 7) >> 3;
4461 if (s > 4) {
4462 t.t = VT_LLONG;
4463 } else if (s > 2) {
4464 t.t = VT_INT;
4465 } else if (s > 1) {
4466 t.t = VT_SHORT;
4467 } else {
4468 t.t = VT_BYTE;
4470 s = type_size(&t, &align);
4471 c0 = cx;
4474 if (px + bit_size <= s * 8 && cx + s <= c) {
4475 /* update offset and bit position */
4476 f->c = cx;
4477 bit_pos = px;
4478 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4479 | (bit_pos << VT_STRUCT_SHIFT);
4480 if (s != size)
4481 f->auxtype = t.t;
4482 #ifdef BF_DEBUG
4483 printf("FIX field %s offset %-2d size %-2d align %-2d "
4484 "pos %-2d bits %-2d\n",
4485 get_tok_str(f->v & ~SYM_FIELD, NULL),
4486 cx, s, align, px, bit_size);
4487 #endif
4488 } else {
4489 /* fall back to load/store single-byte wise */
4490 f->auxtype = VT_STRUCT;
4491 #ifdef BF_DEBUG
4492 printf("FIX field %s : load byte-wise\n",
4493 get_tok_str(f->v & ~SYM_FIELD, NULL));
4494 #endif
4499 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4500 static void struct_decl(CType *type, int u)
4502 int v, c, size, align, flexible;
4503 int bit_size, bsize, bt;
4504 Sym *s, *ss, **ps;
4505 AttributeDef ad, ad1;
4506 CType type1, btype;
4508 memset(&ad, 0, sizeof ad);
4509 next();
4510 parse_attribute(&ad);
4511 if (tok != '{') {
4512 v = tok;
4513 next();
4514 /* struct already defined ? return it */
4515 if (v < TOK_IDENT)
4516 expect("struct/union/enum name");
4517 s = struct_find(v);
4518 if (s && (s->sym_scope == local_scope || tok != '{')) {
4519 if (u == s->type.t)
4520 goto do_decl;
4521 if (u == VT_ENUM && IS_ENUM(s->type.t))
4522 goto do_decl;
4523 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4525 } else {
4526 v = anon_sym++;
4528 /* Record the original enum/struct/union token. */
4529 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4530 type1.ref = NULL;
4531 /* we put an undefined size for struct/union */
4532 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4533 s->r = 0; /* default alignment is zero as gcc */
4534 do_decl:
4535 type->t = s->type.t;
4536 type->ref = s;
4538 if (tok == '{') {
4539 next();
4540 if (s->c != -1)
4541 tcc_error("struct/union/enum already defined");
4542 s->c = -2;
4543 /* the body cannot be empty */
4544 /* (empty enums are not allowed) */
4545 ps = &s->next;
4546 if (u == VT_ENUM) {
4547 long long ll = 0, pl = 0, nl = 0;
4548 CType t;
4549 t.ref = s;
4550 /* enum symbols have static storage */
4551 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4552 for(;;) {
4553 v = tok;
4554 if (v < TOK_UIDENT)
4555 expect("identifier");
4556 ss = sym_find(v);
4557 if (ss && !local_stack)
4558 tcc_error("redefinition of enumerator '%s'",
4559 get_tok_str(v, NULL));
4560 next();
4561 if (tok == '=') {
4562 next();
4563 ll = expr_const64();
4565 ss = sym_push(v, &t, VT_CONST, 0);
4566 ss->enum_val = ll;
4567 *ps = ss, ps = &ss->next;
4568 if (ll < nl)
4569 nl = ll;
4570 if (ll > pl)
4571 pl = ll;
4572 if (tok != ',')
4573 break;
4574 next();
4575 ll++;
4576 /* NOTE: we accept a trailing comma */
4577 if (tok == '}')
4578 break;
4580 skip('}');
4581 /* set integral type of the enum */
4582 t.t = VT_INT;
4583 if (nl >= 0) {
4584 if (pl != (unsigned)pl)
4585 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4586 t.t |= VT_UNSIGNED;
4587 } else if (pl != (int)pl || nl != (int)nl)
4588 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4589 s->type.t = type->t = t.t | VT_ENUM;
4590 s->c = 0;
4591 /* set type for enum members */
4592 for (ss = s->next; ss; ss = ss->next) {
4593 ll = ss->enum_val;
4594 if (ll == (int)ll) /* default is int if it fits */
4595 continue;
4596 if (t.t & VT_UNSIGNED) {
4597 ss->type.t |= VT_UNSIGNED;
4598 if (ll == (unsigned)ll)
4599 continue;
4601 ss->type.t = (ss->type.t & ~VT_BTYPE)
4602 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4604 } else {
4605 c = 0;
4606 flexible = 0;
4607 while (tok != '}') {
4608 if (!parse_btype(&btype, &ad1)) {
4609 skip(';');
4610 continue;
4612 while (1) {
4613 if (flexible)
4614 tcc_error("flexible array member '%s' not at the end of struct",
4615 get_tok_str(v, NULL));
4616 bit_size = -1;
4617 v = 0;
4618 type1 = btype;
4619 if (tok != ':') {
4620 if (tok != ';')
4621 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4622 if (v == 0) {
4623 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4624 expect("identifier");
4625 else {
4626 int v = btype.ref->v;
4627 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4628 if (tcc_state->ms_extensions == 0)
4629 expect("identifier");
4633 if (type_size(&type1, &align) < 0) {
4634 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4635 flexible = 1;
4636 else
4637 tcc_error("field '%s' has incomplete type",
4638 get_tok_str(v, NULL));
4640 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4641 (type1.t & VT_BTYPE) == VT_VOID ||
4642 (type1.t & VT_STORAGE))
4643 tcc_error("invalid type for '%s'",
4644 get_tok_str(v, NULL));
4646 if (tok == ':') {
4647 next();
4648 bit_size = expr_const();
4649 /* XXX: handle v = 0 case for messages */
4650 if (bit_size < 0)
4651 tcc_error("negative width in bit-field '%s'",
4652 get_tok_str(v, NULL));
4653 if (v && bit_size == 0)
4654 tcc_error("zero width for bit-field '%s'",
4655 get_tok_str(v, NULL));
4656 parse_attribute(&ad1);
4658 size = type_size(&type1, &align);
4659 if (bit_size >= 0) {
4660 bt = type1.t & VT_BTYPE;
4661 if (bt != VT_INT &&
4662 bt != VT_BYTE &&
4663 bt != VT_SHORT &&
4664 bt != VT_BOOL &&
4665 bt != VT_LLONG)
4666 tcc_error("bitfields must have scalar type");
4667 bsize = size * 8;
4668 if (bit_size > bsize) {
4669 tcc_error("width of '%s' exceeds its type",
4670 get_tok_str(v, NULL));
4671 } else if (bit_size == bsize
4672 && !ad.a.packed && !ad1.a.packed) {
4673 /* no need for bit fields */
4675 } else if (bit_size == 64) {
4676 tcc_error("field width 64 not implemented");
4677 } else {
4678 type1.t = (type1.t & ~VT_STRUCT_MASK)
4679 | VT_BITFIELD
4680 | (bit_size << (VT_STRUCT_SHIFT + 6));
4683 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4684 /* Remember we've seen a real field to check
4685 for placement of flexible array member. */
4686 c = 1;
4688 /* If member is a struct or bit-field, enforce
4689 placing into the struct (as anonymous). */
4690 if (v == 0 &&
4691 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4692 bit_size >= 0)) {
4693 v = anon_sym++;
4695 if (v) {
4696 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
4697 ss->a = ad1.a;
4698 *ps = ss;
4699 ps = &ss->next;
4701 if (tok == ';' || tok == TOK_EOF)
4702 break;
4703 skip(',');
4705 skip(';');
4707 skip('}');
4708 parse_attribute(&ad);
4709 if (ad.cleanup_func) {
4710 tcc_warning("attribute '__cleanup__' ignored on type");
4712 struct_layout(type, &ad);
4717 static void sym_to_attr(AttributeDef *ad, Sym *s)
4719 merge_symattr(&ad->a, &s->a);
4720 merge_funcattr(&ad->f, &s->f);
4723 /* Add type qualifiers to a type. If the type is an array then the qualifiers
4724 are added to the element type, copied because it could be a typedef. */
4725 static void parse_btype_qualify(CType *type, int qualifiers)
4727 while (type->t & VT_ARRAY) {
4728 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
4729 type = &type->ref->type;
4731 type->t |= qualifiers;
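/* Hedged illustration (assumed example):

       typedef int A[3];
       const A a;

   The qualifier is pushed down to the element type, so 'a' behaves like
   'const int a[3]'; because the element Sym is copied with sym_push() first,
   the typedef 'A' itself stays unqualified for later uses. */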
4734 /* return 0 if no type declaration. otherwise, return the basic type
4735 and skip it. */
4737 static int parse_btype(CType *type, AttributeDef *ad)
4739 int t, u, bt, st, type_found, typespec_found, g, n;
4740 Sym *s;
4741 CType type1;
4743 memset(ad, 0, sizeof(AttributeDef));
4744 type_found = 0;
4745 typespec_found = 0;
4746 t = VT_INT;
4747 bt = st = -1;
4748 type->ref = NULL;
4750 while(1) {
4751 switch(tok) {
4752 case TOK_EXTENSION:
4753 /* currently, we really ignore extension */
4754 next();
4755 continue;
4757 /* basic types */
4758 case TOK_CHAR:
4759 u = VT_BYTE;
4760 basic_type:
4761 next();
4762 basic_type1:
4763 if (u == VT_SHORT || u == VT_LONG) {
4764 if (st != -1 || (bt != -1 && bt != VT_INT))
4765 tmbt: tcc_error("too many basic types");
4766 st = u;
4767 } else {
4768 if (bt != -1 || (st != -1 && u != VT_INT))
4769 goto tmbt;
4770 bt = u;
4772 if (u != VT_INT)
4773 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4774 typespec_found = 1;
4775 break;
4776 case TOK_VOID:
4777 u = VT_VOID;
4778 goto basic_type;
4779 case TOK_SHORT:
4780 u = VT_SHORT;
4781 goto basic_type;
4782 case TOK_INT:
4783 u = VT_INT;
4784 goto basic_type;
4785 case TOK_ALIGNAS:
4786 { int n;
4787 AttributeDef ad1;
4788 next();
4789 skip('(');
4790 memset(&ad1, 0, sizeof(AttributeDef));
4791 if (parse_btype(&type1, &ad1)) {
4792 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
4793 if (ad1.a.aligned)
4794 n = 1 << (ad1.a.aligned - 1);
4795 else
4796 type_size(&type1, &n);
4797 } else {
4798 n = expr_const();
4799 if (n <= 0 || (n & (n - 1)) != 0)
4800 tcc_error("alignment must be a positive power of two");
4802 skip(')');
4803 ad->a.aligned = exact_log2p1(n);
4805 continue;
4806 case TOK_LONG:
4807 if ((t & VT_BTYPE) == VT_DOUBLE) {
4808 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4809 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4810 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
4811 } else {
4812 u = VT_LONG;
4813 goto basic_type;
4815 next();
4816 break;
4817 #ifdef TCC_TARGET_ARM64
4818 case TOK_UINT128:
4819 /* GCC's __uint128_t appears in some Linux header files. Make it a
4820 synonym for long double to get the size and alignment right. */
4821 u = VT_LDOUBLE;
4822 goto basic_type;
4823 #endif
4824 case TOK_BOOL:
4825 u = VT_BOOL;
4826 goto basic_type;
4827 case TOK_FLOAT:
4828 u = VT_FLOAT;
4829 goto basic_type;
4830 case TOK_DOUBLE:
4831 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
4832 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
4833 } else {
4834 u = VT_DOUBLE;
4835 goto basic_type;
4837 next();
4838 break;
4839 case TOK_ENUM:
4840 struct_decl(&type1, VT_ENUM);
4841 basic_type2:
4842 u = type1.t;
4843 type->ref = type1.ref;
4844 goto basic_type1;
4845 case TOK_STRUCT:
4846 struct_decl(&type1, VT_STRUCT);
4847 goto basic_type2;
4848 case TOK_UNION:
4849 struct_decl(&type1, VT_UNION);
4850 goto basic_type2;
4852 /* type modifiers */
4853 case TOK_CONST1:
4854 case TOK_CONST2:
4855 case TOK_CONST3:
4856 type->t = t;
4857 parse_btype_qualify(type, VT_CONSTANT);
4858 t = type->t;
4859 next();
4860 break;
4861 case TOK_VOLATILE1:
4862 case TOK_VOLATILE2:
4863 case TOK_VOLATILE3:
4864 type->t = t;
4865 parse_btype_qualify(type, VT_VOLATILE);
4866 t = type->t;
4867 next();
4868 break;
4869 case TOK_SIGNED1:
4870 case TOK_SIGNED2:
4871 case TOK_SIGNED3:
4872 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
4873 tcc_error("signed and unsigned modifier");
4874 t |= VT_DEFSIGN;
4875 next();
4876 typespec_found = 1;
4877 break;
4878 case TOK_REGISTER:
4879 case TOK_AUTO:
4880 case TOK_RESTRICT1:
4881 case TOK_RESTRICT2:
4882 case TOK_RESTRICT3:
4883 next();
4884 break;
4885 case TOK_UNSIGNED:
4886 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
4887 tcc_error("signed and unsigned modifier");
4888 t |= VT_DEFSIGN | VT_UNSIGNED;
4889 next();
4890 typespec_found = 1;
4891 break;
4893 /* storage */
4894 case TOK_EXTERN:
4895 g = VT_EXTERN;
4896 goto storage;
4897 case TOK_STATIC:
4898 g = VT_STATIC;
4899 goto storage;
4900 case TOK_TYPEDEF:
4901 g = VT_TYPEDEF;
4902 goto storage;
4903 storage:
4904 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
4905 tcc_error("multiple storage classes");
4906 t |= g;
4907 next();
4908 break;
4909 case TOK_INLINE1:
4910 case TOK_INLINE2:
4911 case TOK_INLINE3:
4912 t |= VT_INLINE;
4913 next();
4914 break;
4915 case TOK_NORETURN3:
4916 next();
4917 ad->f.func_noreturn = 1;
4918 break;
4919 /* GNUC attribute */
4920 case TOK_ATTRIBUTE1:
4921 case TOK_ATTRIBUTE2:
4922 parse_attribute(ad);
4923 if (ad->attr_mode) {
4924 u = ad->attr_mode -1;
4925 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
4927 continue;
4928 /* GNUC typeof */
4929 case TOK_TYPEOF1:
4930 case TOK_TYPEOF2:
4931 case TOK_TYPEOF3:
4932 next();
4933 parse_expr_type(&type1);
4934 /* remove all storage modifiers except typedef */
4935 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
4936 if (type1.ref)
4937 sym_to_attr(ad, type1.ref);
4938 goto basic_type2;
4939 default:
4940 if (typespec_found)
4941 goto the_end;
4942 s = sym_find(tok);
4943 if (!s || !(s->type.t & VT_TYPEDEF))
4944 goto the_end;
4946 n = tok, next();
4947 if (tok == ':' && !in_generic) {
4948 /* ignore if it's a label */
4949 unget_tok(n);
4950 goto the_end;
4953 t &= ~(VT_BTYPE|VT_LONG);
4954 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
4955 type->t = (s->type.t & ~VT_TYPEDEF) | u;
4956 type->ref = s->type.ref;
4957 if (t)
4958 parse_btype_qualify(type, t);
4959 t = type->t;
4960 /* get attributes from typedef */
4961 sym_to_attr(ad, s);
4962 typespec_found = 1;
4963 st = bt = -2;
4964 break;
4966 type_found = 1;
4968 the_end:
4969 if (tcc_state->char_is_unsigned) {
4970 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
4971 t |= VT_UNSIGNED;
4973 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
4974 bt = t & (VT_BTYPE|VT_LONG);
4975 if (bt == VT_LONG)
4976 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
4977 #ifdef TCC_TARGET_PE
4978 if (bt == VT_LDOUBLE)
4979 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
4980 #endif
4981 type->t = t;
4982 return type_found;
4985 /* convert a function parameter type (array to pointer and function to
4986 function pointer) */
4987 static inline void convert_parameter_type(CType *pt)
4989 /* remove const and volatile qualifiers (XXX: const could be used
4990 to indicate a const function parameter) */
4991 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
4992 /* array must be transformed to pointer according to ANSI C */
4993 pt->t &= ~VT_ARRAY;
4994 if ((pt->t & VT_BTYPE) == VT_FUNC) {
4995 mk_pointer(pt);
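/* Hedged illustration (assumed example): a prototype such as

       void f(int a[10], int g(void));

   ends up with parameter types 'int *a' and 'int (*g)(void)': the array
   derivation decays to a pointer, a function type becomes a pointer to
   function, and qualifiers on the parameter itself are dropped. */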
4999 ST_FUNC void parse_asm_str(CString *astr)
5001 skip('(');
5002 parse_mult_str(astr, "string constant");
5005 /* Parse an asm label and return the token */
5006 static int asm_label_instr(void)
5008 int v;
5009 CString astr;
5011 next();
5012 parse_asm_str(&astr);
5013 skip(')');
5014 #ifdef ASM_DEBUG
5015 printf("asm_alias: \"%s\"\n", (char *)astr.data);
5016 #endif
5017 v = tok_alloc(astr.data, astr.size - 1)->tok;
5018 cstr_free(&astr);
5019 return v;
5022 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
5024 int n, l, t1, arg_size, align, unused_align;
5025 Sym **plast, *s, *first;
5026 AttributeDef ad1;
5027 CType pt;
5029 if (tok == '(') {
5030 /* function type, or recursive declarator (return if so) */
5031 next();
5032 if (td && !(td & TYPE_ABSTRACT))
5033 return 0;
5034 if (tok == ')')
5035 l = 0;
5036 else if (parse_btype(&pt, &ad1))
5037 l = FUNC_NEW;
5038 else if (td) {
5039 merge_attr (ad, &ad1);
5040 return 0;
5041 } else
5042 l = FUNC_OLD;
5043 first = NULL;
5044 plast = &first;
5045 arg_size = 0;
5046 if (l) {
5047 for(;;) {
5048 /* read param name and compute offset */
5049 if (l != FUNC_OLD) {
5050 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
5051 break;
5052 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
5053 if ((pt.t & VT_BTYPE) == VT_VOID)
5054 tcc_error("parameter declared as void");
5055 } else {
5056 n = tok;
5057 if (n < TOK_UIDENT)
5058 expect("identifier");
5059 pt.t = VT_VOID; /* invalid type */
5060 pt.ref = NULL;
5061 next();
5063 convert_parameter_type(&pt);
5064 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
5065 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
5066 *plast = s;
5067 plast = &s->next;
5068 if (tok == ')')
5069 break;
5070 skip(',');
5071 if (l == FUNC_NEW && tok == TOK_DOTS) {
5072 l = FUNC_ELLIPSIS;
5073 next();
5074 break;
5076 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
5077 tcc_error("invalid type");
5079 } else
5080 /* if no parameters, then old type prototype */
5081 l = FUNC_OLD;
5082 skip(')');
5083 /* NOTE: const is ignored in returned type as it has a special
5084 meaning in gcc / C++ */
5085 type->t &= ~VT_CONSTANT;
5086 /* some ancient pre-K&R C allows a function to return an array
5087 and the array brackets to be put after the arguments, such
5088 that "int c()[]" means something like "int[] c()" */
5089 if (tok == '[') {
5090 next();
5091 skip(']'); /* only handle simple "[]" */
5092 mk_pointer(type);
5094 /* we push an anonymous symbol which will contain the function prototype */
5095 ad->f.func_args = arg_size;
5096 ad->f.func_type = l;
5097 s = sym_push(SYM_FIELD, type, 0, 0);
5098 s->a = ad->a;
5099 s->f = ad->f;
5100 s->next = first;
5101 type->t = VT_FUNC;
5102 type->ref = s;
5103 } else if (tok == '[') {
5104 int saved_nocode_wanted = nocode_wanted;
5105 /* array definition */
5106 next();
5107 while (1) {
5108 /* XXX The optional type-quals and static should only be accepted
5109 in parameter decls. The '*' as well, and then even only
5110 in prototypes (not function defs). */
5111 switch (tok) {
5112 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
5113 case TOK_CONST1:
5114 case TOK_VOLATILE1:
5115 case TOK_STATIC:
5116 case '*':
5117 next();
5118 continue;
5119 default:
5120 break;
5122 break;
5124 n = -1;
5125 t1 = 0;
5126 if (tok != ']') {
5127 if (!local_stack || (storage & VT_STATIC))
5128 vpushi(expr_const());
5129 else {
5130 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5131 length must always be evaluated, even under nocode_wanted,
5132 so that its size slot is initialized (e.g. under sizeof
5133 or typeof). */
5134 nocode_wanted = 0;
5135 gexpr();
5137 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5138 n = vtop->c.i;
5139 if (n < 0)
5140 tcc_error("invalid array size");
5141 } else {
5142 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5143 tcc_error("size of variable length array should be an integer");
5144 n = 0;
5145 t1 = VT_VLA;
5148 skip(']');
5149 /* parse next post type */
5150 post_type(type, ad, storage, 0);
5152 if ((type->t & VT_BTYPE) == VT_FUNC)
5153 tcc_error("declaration of an array of functions");
5154 if ((type->t & VT_BTYPE) == VT_VOID
5155 || type_size(type, &unused_align) < 0)
5156 tcc_error("declaration of an array of incomplete type elements");
5158 t1 |= type->t & VT_VLA;
5160 if (t1 & VT_VLA) {
5161 if (n < 0)
5162 tcc_error("need explicit inner array size in VLAs");
5163 loc -= type_size(&int_type, &align);
5164 loc &= -align;
5165 n = loc;
5167 vla_runtime_type_size(type, &align);
5168 gen_op('*');
5169 vset(&int_type, VT_LOCAL|VT_LVAL, n);
5170 vswap();
5171 vstore();
5173 if (n != -1)
5174 vpop();
5175 nocode_wanted = saved_nocode_wanted;
5177 /* we push an anonymous symbol which will contain the array
5178 element type */
5179 s = sym_push(SYM_FIELD, type, 0, n);
5180 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
5181 type->ref = s;
5183 return 1;
5186 /* Parse a type declarator (except basic type), and return the type
5187 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5188 expected. 'type' should contain the basic type. 'ad' is the
5189 attribute definition of the basic type. It can be modified by
5190 type_decl(). If this (possibly abstract) declarator is a pointer chain
5191 it returns the innermost pointed to type (equals *type, but is a different
5192 pointer), otherwise returns type itself, that's used for recursive calls. */
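/* Hedged illustration (assumed examples):

       int *a[4];    // '[4]' is a post operation on 'a': array of 4 int *
       int (*b)[4];  // the parenthesised declarator is parsed by a recursive
                     // type_decl() call, so '[4]' applies to the pointed-to
                     // type: pointer to an array of 4 int
*/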
5193 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
5195 CType *post, *ret;
5196 int qualifiers, storage;
5198 /* recursive type, remove storage bits first, apply them later again */
5199 storage = type->t & VT_STORAGE;
5200 type->t &= ~VT_STORAGE;
5201 post = ret = type;
5203 while (tok == '*') {
5204 qualifiers = 0;
5205 redo:
5206 next();
5207 switch(tok) {
5208 case TOK_CONST1:
5209 case TOK_CONST2:
5210 case TOK_CONST3:
5211 qualifiers |= VT_CONSTANT;
5212 goto redo;
5213 case TOK_VOLATILE1:
5214 case TOK_VOLATILE2:
5215 case TOK_VOLATILE3:
5216 qualifiers |= VT_VOLATILE;
5217 goto redo;
5218 case TOK_RESTRICT1:
5219 case TOK_RESTRICT2:
5220 case TOK_RESTRICT3:
5221 goto redo;
5222 /* XXX: clarify attribute handling */
5223 case TOK_ATTRIBUTE1:
5224 case TOK_ATTRIBUTE2:
5225 parse_attribute(ad);
5226 break;
5228 mk_pointer(type);
5229 type->t |= qualifiers;
5230 if (ret == type)
5231 /* innermost pointed to type is the one for the first derivation */
5232 ret = pointed_type(type);
5235 if (tok == '(') {
5236 /* This is possibly a parameter type list for abstract declarators
5237 ('int ()'), use post_type for testing this. */
5238 if (!post_type(type, ad, 0, td)) {
5239 /* It's not, so it's a nested declarator, and the post operations
5240 apply to the innermost pointed to type (if any). */
5241 /* XXX: this is not correct to modify 'ad' at this point, but
5242 the syntax is not clear */
5243 parse_attribute(ad);
5244 post = type_decl(type, ad, v, td);
5245 skip(')');
5246 } else
5247 goto abstract;
5248 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5249 /* type identifier */
5250 *v = tok;
5251 next();
5252 } else {
5253 abstract:
5254 if (!(td & TYPE_ABSTRACT))
5255 expect("identifier");
5256 *v = 0;
5258 post_type(post, ad, storage, 0);
5259 parse_attribute(ad);
5260 type->t |= storage;
5261 return ret;
5264 /* indirection with full error checking and bound check */
5265 ST_FUNC void indir(void)
5267 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5268 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5269 return;
5270 expect("pointer");
5272 if (vtop->r & VT_LVAL)
5273 gv(RC_INT);
5274 vtop->type = *pointed_type(&vtop->type);
5275 /* Arrays and functions are never lvalues */
5276 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5277 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5278 vtop->r |= VT_LVAL;
5279 /* if bound checking, the referenced pointer must be checked */
5280 #ifdef CONFIG_TCC_BCHECK
5281 if (tcc_state->do_bounds_check)
5282 vtop->r |= VT_MUSTBOUND;
5283 #endif
5287 /* pass a parameter to a function and do type checking and casting */
5288 static void gfunc_param_typed(Sym *func, Sym *arg)
5290 int func_type;
5291 CType type;
5293 func_type = func->f.func_type;
5294 if (func_type == FUNC_OLD ||
5295 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5296 /* default casting : only need to convert float to double */
5297 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5298 gen_cast_s(VT_DOUBLE);
5299 } else if (vtop->type.t & VT_BITFIELD) {
5300 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5301 type.ref = vtop->type.ref;
5302 gen_cast(&type);
5303 } else if (vtop->r & VT_MUSTCAST) {
5304 force_charshort_cast();
5306 } else if (arg == NULL) {
5307 tcc_error("too many arguments to function");
5308 } else {
5309 type = arg->type;
5310 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5311 gen_assign_cast(&type);
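/* Hedged illustration (assumed example): for the variadic part of a call
   such as

       printf("%f", 1.0f);

   only the default promotions above apply, so the float is passed as a
   double; an argument bound to a prototyped parameter is instead converted
   with gen_assign_cast() to the declared parameter type. */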
5315 /* parse an expression and return its type without any side effect. */
5316 static void expr_type(CType *type, void (*expr_fn)(void))
5318 nocode_wanted++;
5319 expr_fn();
5320 *type = vtop->type;
5321 vpop();
5322 nocode_wanted--;
5325 /* parse an expression of the form '(type)' or '(expr)' and return its
5326 type */
5327 static void parse_expr_type(CType *type)
5329 int n;
5330 AttributeDef ad;
5332 skip('(');
5333 if (parse_btype(type, &ad)) {
5334 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5335 } else {
5336 expr_type(type, gexpr);
5338 skip(')');
5341 static void parse_type(CType *type)
5343 AttributeDef ad;
5344 int n;
5346 if (!parse_btype(type, &ad)) {
5347 expect("type");
5349 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5352 static void parse_builtin_params(int nc, const char *args)
5354 char c, sep = '(';
5355 CType t;
5356 if (nc)
5357 nocode_wanted++;
5358 next();
5359 while ((c = *args++)) {
5360 skip(sep);
5361 sep = ',';
5362 switch (c) {
5363 case 'e': expr_eq(); continue;
5364 case 't': parse_type(&t); vpush(&t); continue;
5365 default: tcc_error("internal error"); break;
5368 skip(')');
5369 if (nc)
5370 nocode_wanted--;
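/* In the format string above, 'e' stands for an assignment expression and
   't' for a type name; e.g. __builtin_expect below uses "ee" and
   __builtin_types_compatible_p uses "tt".  A non-zero 'nc' evaluates the
   arguments with code generation suppressed via nocode_wanted. */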
5373 ST_FUNC void unary(void)
5375 int n, t, align, size, r, sizeof_caller;
5376 CType type;
5377 Sym *s;
5378 AttributeDef ad;
5380 /* generate line number info */
5381 if (tcc_state->do_debug)
5382 tcc_debug_line(tcc_state);
5384 sizeof_caller = in_sizeof;
5385 in_sizeof = 0;
5386 type.ref = NULL;
5387 /* XXX: GCC 2.95.3 does not generate a table although it should be
5388 better here */
5389 tok_next:
5390 switch(tok) {
5391 case TOK_EXTENSION:
5392 next();
5393 goto tok_next;
5394 case TOK_LCHAR:
5395 #ifdef TCC_TARGET_PE
5396 t = VT_SHORT|VT_UNSIGNED;
5397 goto push_tokc;
5398 #endif
5399 case TOK_CINT:
5400 case TOK_CCHAR:
5401 t = VT_INT;
5402 push_tokc:
5403 type.t = t;
5404 vsetc(&type, VT_CONST, &tokc);
5405 next();
5406 break;
5407 case TOK_CUINT:
5408 t = VT_INT | VT_UNSIGNED;
5409 goto push_tokc;
5410 case TOK_CLLONG:
5411 t = VT_LLONG;
5412 goto push_tokc;
5413 case TOK_CULLONG:
5414 t = VT_LLONG | VT_UNSIGNED;
5415 goto push_tokc;
5416 case TOK_CFLOAT:
5417 t = VT_FLOAT;
5418 goto push_tokc;
5419 case TOK_CDOUBLE:
5420 t = VT_DOUBLE;
5421 goto push_tokc;
5422 case TOK_CLDOUBLE:
5423 t = VT_LDOUBLE;
5424 goto push_tokc;
5425 case TOK_CLONG:
5426 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5427 goto push_tokc;
5428 case TOK_CULONG:
5429 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5430 goto push_tokc;
5431 case TOK___FUNCTION__:
5432 if (!gnu_ext)
5433 goto tok_identifier;
5434 /* fall thru */
5435 case TOK___FUNC__:
5437 void *ptr;
5438 int len;
5439 /* special function name identifier */
5440 len = strlen(funcname) + 1;
5441 /* generate char[len] type */
5442 type.t = VT_BYTE;
5443 mk_pointer(&type);
5444 type.t |= VT_ARRAY;
5445 type.ref->c = len;
5446 vpush_ref(&type, data_section, data_section->data_offset, len);
5447 if (!NODATA_WANTED) {
5448 ptr = section_ptr_add(data_section, len);
5449 memcpy(ptr, funcname, len);
5451 next();
5453 break;
5454 case TOK_LSTR:
5455 #ifdef TCC_TARGET_PE
5456 t = VT_SHORT | VT_UNSIGNED;
5457 #else
5458 t = VT_INT;
5459 #endif
5460 goto str_init;
5461 case TOK_STR:
5462 /* string parsing */
5463 t = VT_BYTE;
5464 if (tcc_state->char_is_unsigned)
5465 t = VT_BYTE | VT_UNSIGNED;
5466 str_init:
5467 if (tcc_state->warn_write_strings)
5468 t |= VT_CONSTANT;
5469 type.t = t;
5470 mk_pointer(&type);
5471 type.t |= VT_ARRAY;
5472 memset(&ad, 0, sizeof(AttributeDef));
5473 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5474 break;
5475 case '(':
5476 next();
5477 /* cast ? */
5478 if (parse_btype(&type, &ad)) {
5479 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5480 skip(')');
5481 /* check ISOC99 compound literal */
5482 if (tok == '{') {
5483 /* data is allocated locally by default */
5484 if (global_expr)
5485 r = VT_CONST;
5486 else
5487 r = VT_LOCAL;
5488 /* all except arrays are lvalues */
5489 if (!(type.t & VT_ARRAY))
5490 r |= VT_LVAL;
5491 memset(&ad, 0, sizeof(AttributeDef));
5492 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5493 } else {
5494 if (sizeof_caller) {
5495 vpush(&type);
5496 return;
5498 unary();
5499 gen_cast(&type);
5501 } else if (tok == '{') {
5502 int saved_nocode_wanted = nocode_wanted;
5503 if (const_wanted && !(nocode_wanted & unevalmask))
5504 tcc_error("expected constant");
5505 /* save all registers */
5506 save_regs(0);
5507 /* statement expression : we do not accept break/continue
5508 inside as GCC does. We do retain the nocode_wanted state,
5509 as statement expressions can't ever be entered from the
5510 outside, so any reactivation of code emission (from labels
5511 or loop heads) can be disabled again after the end of it. */
5512 block(1);
5513 nocode_wanted = saved_nocode_wanted;
5514 skip(')');
5515 } else {
5516 gexpr();
5517 skip(')');
5519 break;
5520 case '*':
5521 next();
5522 unary();
5523 indir();
5524 break;
5525 case '&':
5526 next();
5527 unary();
5528 /* function names must be treated as function pointers,
5529 except for unary '&' and sizeof. Since we consider that
5530 functions are not lvalues, we only have to handle it
5531 there and in function calls. */
5532 /* arrays can also be used although they are not lvalues */
5533 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
5534 !(vtop->type.t & VT_ARRAY))
5535 test_lvalue();
5536 if (vtop->sym)
5537 vtop->sym->a.addrtaken = 1;
5538 mk_pointer(&vtop->type);
5539 gaddrof();
5540 break;
5541 case '!':
5542 next();
5543 unary();
5544 gen_test_zero(TOK_EQ);
5545 break;
5546 case '~':
5547 next();
5548 unary();
5549 vpushi(-1);
5550 gen_op('^');
5551 break;
5552 case '+':
5553 next();
5554 unary();
5555 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
5556 tcc_error("pointer not accepted for unary plus");
5557 /* In order to force a cast, we add zero, except for floating point
5558 where we really need a noop (otherwise -0.0 would be transformed
5559 into +0.0). */
5560 if (!is_float(vtop->type.t)) {
5561 vpushi(0);
5562 gen_op('+');
5564 break;
5565 case TOK_SIZEOF:
5566 case TOK_ALIGNOF1:
5567 case TOK_ALIGNOF2:
5568 case TOK_ALIGNOF3:
5569 t = tok;
5570 next();
5571 in_sizeof++;
5572 expr_type(&type, unary); /* runs unary(), which resets in_sizeof to 0 */
5573 s = NULL;
5574 if (vtop[1].r & VT_SYM)
5575 s = vtop[1].sym; /* hack: accessing previous vtop */
5576 size = type_size(&type, &align);
5577 if (s && s->a.aligned)
5578 align = 1 << (s->a.aligned - 1);
5579 if (t == TOK_SIZEOF) {
5580 if (!(type.t & VT_VLA)) {
5581 if (size < 0)
5582 tcc_error("sizeof applied to an incomplete type");
5583 vpushs(size);
5584 } else {
5585 vla_runtime_type_size(&type, &align);
5587 } else {
5588 vpushs(align);
5590 vtop->type.t |= VT_UNSIGNED;
5591 break;
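/* Note: the result is pushed with vpushs() and marked unsigned above, i.e.
   it effectively has size_t type.  For a VLA (hedged example):

       int n = 10; int v[n]; size_t s = sizeof v;

   the size is not a constant, so vla_runtime_type_size() emits code that
   computes it at run time. */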
5593 case TOK_builtin_expect:
5594 /* __builtin_expect is a no-op for now */
5595 parse_builtin_params(0, "ee");
5596 vpop();
5597 break;
5598 case TOK_builtin_types_compatible_p:
5599 parse_builtin_params(0, "tt");
5600 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5601 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
5602 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
5603 vtop -= 2;
5604 vpushi(n);
5605 break;
5606 case TOK_builtin_choose_expr:
5608 int64_t c;
5609 next();
5610 skip('(');
5611 c = expr_const64();
5612 skip(',');
5613 if (!c) {
5614 nocode_wanted++;
5616 expr_eq();
5617 if (!c) {
5618 vpop();
5619 nocode_wanted--;
5621 skip(',');
5622 if (c) {
5623 nocode_wanted++;
5625 expr_eq();
5626 if (c) {
5627 vpop();
5628 nocode_wanted--;
5630 skip(')');
5632 break;
5633 case TOK_builtin_constant_p:
5634 parse_builtin_params(1, "e");
5635 n = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
5636 vtop--;
5637 vpushi(n);
5638 break;
5639 case TOK_builtin_frame_address:
5640 case TOK_builtin_return_address:
5642 int tok1 = tok;
5643 int level;
5644 next();
5645 skip('(');
5646 if (tok != TOK_CINT) {
5647 tcc_error("%s only takes positive integers",
5648 tok1 == TOK_builtin_return_address ?
5649 "__builtin_return_address" :
5650 "__builtin_frame_address");
5652 level = (uint32_t)tokc.i;
5653 next();
5654 skip(')');
5655 type.t = VT_VOID;
5656 mk_pointer(&type);
5657 vset(&type, VT_LOCAL, 0); /* local frame */
5658 while (level--) {
5659 #ifdef TCC_TARGET_RISCV64
5660 vpushi(2*PTR_SIZE);
5661 gen_op('-');
5662 #endif
5663 mk_pointer(&vtop->type);
5664 indir(); /* -> parent frame */
5666 if (tok1 == TOK_builtin_return_address) {
5667 // assume return address is just above frame pointer on stack
5668 #ifdef TCC_TARGET_ARM
5669 vpushi(2*PTR_SIZE);
5670 gen_op('+');
5671 #elif defined TCC_TARGET_RISCV64
5672 vpushi(PTR_SIZE);
5673 gen_op('-');
5674 #else
5675 vpushi(PTR_SIZE);
5676 gen_op('+');
5677 #endif
5678 mk_pointer(&vtop->type);
5679 indir();
5682 break;
5683 #ifdef TCC_TARGET_RISCV64
5684 case TOK_builtin_va_start:
5685 parse_builtin_params(0, "ee");
5686 r = vtop->r & VT_VALMASK;
5687 if (r == VT_LLOCAL)
5688 r = VT_LOCAL;
5689 if (r != VT_LOCAL)
5690 tcc_error("__builtin_va_start expects a local variable");
5691 gen_va_start();
5692 vstore();
5693 break;
5694 #endif
5695 #ifdef TCC_TARGET_X86_64
5696 #ifdef TCC_TARGET_PE
5697 case TOK_builtin_va_start:
5698 parse_builtin_params(0, "ee");
5699 r = vtop->r & VT_VALMASK;
5700 if (r == VT_LLOCAL)
5701 r = VT_LOCAL;
5702 if (r != VT_LOCAL)
5703 tcc_error("__builtin_va_start expects a local variable");
5704 vtop->r = r;
5705 vtop->type = char_pointer_type;
5706 vtop->c.i += 8;
5707 vstore();
5708 break;
5709 #else
5710 case TOK_builtin_va_arg_types:
5711 parse_builtin_params(0, "t");
5712 vpushi(classify_x86_64_va_arg(&vtop->type));
5713 vswap();
5714 vpop();
5715 break;
5716 #endif
5717 #endif
5719 #ifdef TCC_TARGET_ARM64
5720 case TOK_builtin_va_start: {
5721 parse_builtin_params(0, "ee");
5722 //xx check types
5723 gen_va_start();
5724 vpushi(0);
5725 vtop->type.t = VT_VOID;
5726 break;
5728 case TOK_builtin_va_arg: {
5729 parse_builtin_params(0, "et");
5730 type = vtop->type;
5731 vpop();
5732 //xx check types
5733 gen_va_arg(&type);
5734 vtop->type = type;
5735 break;
5737 case TOK___arm64_clear_cache: {
5738 parse_builtin_params(0, "ee");
5739 gen_clear_cache();
5740 vpushi(0);
5741 vtop->type.t = VT_VOID;
5742 break;
5744 #endif
5745 /* pre operations */
5746 case TOK_INC:
5747 case TOK_DEC:
5748 t = tok;
5749 next();
5750 unary();
5751 inc(0, t);
5752 break;
5753 case '-':
5754 next();
5755 unary();
5756 t = vtop->type.t & VT_BTYPE;
5757 if (is_float(t)) {
5758 /* In IEEE negate(x) isn't subtract(0,x), but rather
5759 subtract(-0, x). */
5760 vpush(&vtop->type);
5761 if (t == VT_FLOAT)
5762 vtop->c.f = -1.0 * 0.0;
5763 else if (t == VT_DOUBLE)
5764 vtop->c.d = -1.0 * 0.0;
5765 else
5766 vtop->c.ld = -1.0 * 0.0;
5767 } else
5768 vpushi(0);
5769 vswap();
5770 gen_op('-');
5771 break;
5772 case TOK_LAND:
5773 if (!gnu_ext)
5774 goto tok_identifier;
5775 next();
5776 /* allow taking the address of a label */
5777 if (tok < TOK_UIDENT)
5778 expect("label identifier");
5779 s = label_find(tok);
5780 if (!s) {
5781 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
5782 } else {
5783 if (s->r == LABEL_DECLARED)
5784 s->r = LABEL_FORWARD;
5786 if (!s->type.t) {
5787 s->type.t = VT_VOID;
5788 mk_pointer(&s->type);
5789 s->type.t |= VT_STATIC;
5791 vpushsym(&s->type, s);
5792 next();
5793 break;
5795 case TOK_GENERIC:
5797 CType controlling_type;
5798 int has_default = 0;
5799 int has_match = 0;
5800 int learn = 0;
5801 TokenString *str = NULL;
5802 int saved_const_wanted = const_wanted;
5804 next();
5805 skip('(');
5806 const_wanted = 0;
5807 expr_type(&controlling_type, expr_eq);
5808 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
5809 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
5810 mk_pointer(&controlling_type);
5811 const_wanted = saved_const_wanted;
5812 for (;;) {
5813 learn = 0;
5814 skip(',');
5815 if (tok == TOK_DEFAULT) {
5816 if (has_default)
5817 tcc_error("too many 'default'");
5818 has_default = 1;
5819 if (!has_match)
5820 learn = 1;
5821 next();
5822 } else {
5823 AttributeDef ad_tmp;
5824 int itmp;
5825 CType cur_type;
5827 in_generic++;
5828 parse_btype(&cur_type, &ad_tmp);
5829 in_generic--;
5831 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
5832 if (compare_types(&controlling_type, &cur_type, 0)) {
5833 if (has_match) {
5834 tcc_error("type match twice");
5836 has_match = 1;
5837 learn = 1;
5840 skip(':');
5841 if (learn) {
5842 if (str)
5843 tok_str_free(str);
5844 skip_or_save_block(&str);
5845 } else {
5846 skip_or_save_block(NULL);
5848 if (tok == ')')
5849 break;
5851 if (!str) {
5852 char buf[60];
5853 type_to_str(buf, sizeof buf, &controlling_type, NULL);
5854 tcc_error("type '%s' does not match any association", buf);
5856 begin_macro(str, 1);
5857 next();
5858 expr_eq();
5859 if (tok != TOK_EOF)
5860 expect(",");
5861 end_macro();
5862 next();
5863 break;
5865 // special qnan, snan and infinity values
5866 case TOK___NAN__:
5867 n = 0x7fc00000;
5868 special_math_val:
5869 vpushi(n);
5870 vtop->type.t = VT_FLOAT;
5871 next();
5872 break;
5873 case TOK___SNAN__:
5874 n = 0x7f800001;
5875 goto special_math_val;
5876 case TOK___INF__:
5877 n = 0x7f800000;
5878 goto special_math_val;
5880 default:
5881 tok_identifier:
5882 t = tok;
5883 next();
5884 if (t < TOK_UIDENT)
5885 expect("identifier");
5886 s = sym_find(t);
5887 if (!s || IS_ASM_SYM(s)) {
5888 const char *name = get_tok_str(t, NULL);
5889 if (tok != '(')
5890 tcc_error("'%s' undeclared", name);
5891 /* for simple function calls, we tolerate an undeclared
5892 external reference to an int() function */
5893 if (tcc_state->warn_implicit_function_declaration
5894 #ifdef TCC_TARGET_PE
5895 /* people must be warned about using undeclared WINAPI functions
5896 (which usually start with an uppercase letter) */
5897 || (name[0] >= 'A' && name[0] <= 'Z')
5898 #endif
5900 tcc_warning("implicit declaration of function '%s'", name);
5901 s = external_global_sym(t, &func_old_type);
5904 r = s->r;
5905 /* A symbol that has a register is a local register variable,
5906 which starts out as VT_LOCAL value. */
5907 if ((r & VT_VALMASK) < VT_CONST)
5908 r = (r & ~VT_VALMASK) | VT_LOCAL;
5910 vset(&s->type, r, s->c);
5911 /* Point to s as backpointer (even without r&VT_SYM).
5912 Will be used by at least the x86 inline asm parser for
5913 regvars. */
5914 vtop->sym = s;
5916 if (r & VT_SYM) {
5917 vtop->c.i = 0;
5918 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
5919 vtop->c.i = s->enum_val;
5921 break;
5924 /* post operations */
5925 while (1) {
5926 if (tok == TOK_INC || tok == TOK_DEC) {
5927 inc(1, tok);
5928 next();
5929 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
5930 int qualifiers, cumofs = 0;
5931 /* field */
5932 if (tok == TOK_ARROW)
5933 indir();
5934 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
5935 test_lvalue();
5936 gaddrof();
5937 /* expect pointer on structure */
5938 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
5939 expect("struct or union");
5940 if (tok == TOK_CDOUBLE)
5941 expect("field name");
5942 next();
5943 if (tok == TOK_CINT || tok == TOK_CUINT)
5944 expect("field name");
5945 s = find_field(&vtop->type, tok, &cumofs);
5946 if (!s)
5947 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
5948 /* add field offset to pointer */
5949 vtop->type = char_pointer_type; /* change type to 'char *' */
5950 vpushi(cumofs + s->c);
5951 gen_op('+');
5952 /* change type to field type, and set to lvalue */
5953 vtop->type = s->type;
5954 vtop->type.t |= qualifiers;
5955 /* an array is never an lvalue */
5956 if (!(vtop->type.t & VT_ARRAY)) {
5957 vtop->r |= VT_LVAL;
5958 #ifdef CONFIG_TCC_BCHECK
5959 /* if bound checking, the referenced pointer must be checked */
5960 if (tcc_state->do_bounds_check)
5961 vtop->r |= VT_MUSTBOUND;
5962 #endif
5964 next();
5965 } else if (tok == '[') {
5966 next();
5967 gexpr();
5968 gen_op('+');
5969 indir();
5970 skip(']');
5971 } else if (tok == '(') {
5972 SValue ret;
5973 Sym *sa;
5974 int nb_args, ret_nregs, ret_align, regsize, variadic;
5976 /* function call */
5977 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
5978 /* pointer test (no array accepted) */
5979 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
5980 vtop->type = *pointed_type(&vtop->type);
5981 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
5982 goto error_func;
5983 } else {
5984 error_func:
5985 expect("function pointer");
5987 } else {
5988 vtop->r &= ~VT_LVAL; /* no lvalue */
5990 /* get return type */
5991 s = vtop->type.ref;
5992 next();
5993 sa = s->next; /* first parameter */
5994 nb_args = regsize = 0;
5995 ret.r2 = VT_CONST;
5996 /* compute first implicit argument if a structure is returned */
5997 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
5998 variadic = (s->f.func_type == FUNC_ELLIPSIS);
5999 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
6000 &ret_align, &regsize);
6001 if (ret_nregs <= 0) {
6002 /* get some space for the returned structure */
6003 size = type_size(&s->type, &align);
6004 #ifdef TCC_TARGET_ARM64
6005 /* On arm64, a small struct is returned in registers.
6006 It is much easier to write it to memory if we know
6007 that we are allowed to write some extra bytes, so
6008 round the allocated space up to a power of 2: */
6009 if (size < 16)
6010 while (size & (size - 1))
6011 size = (size | (size - 1)) + 1;
6012 #endif
6013 loc = (loc - size) & -align;
6014 ret.type = s->type;
6015 ret.r = VT_LOCAL | VT_LVAL;
6016 /* pass it as 'int' to avoid structure arg passing
6017 problems */
6018 vseti(VT_LOCAL, loc);
6019 ret.c = vtop->c;
6020 if (ret_nregs < 0)
6021 vtop--;
6022 else
6023 nb_args++;
6025 } else {
6026 ret_nregs = 1;
6027 ret.type = s->type;
6030 if (ret_nregs > 0) {
6031 /* return in register */
6032 ret.c.i = 0;
6033 PUT_R_RET(&ret, ret.type.t);
6035 if (tok != ')') {
6036 for(;;) {
6037 expr_eq();
6038 gfunc_param_typed(s, sa);
6039 nb_args++;
6040 if (sa)
6041 sa = sa->next;
6042 if (tok == ')')
6043 break;
6044 skip(',');
6047 if (sa)
6048 tcc_error("too few arguments to function");
6049 skip(')');
6050 #ifdef CONFIG_TCC_BCHECK
6051 if (tcc_state->do_bounds_check &&
6052 (nb_args == 1 || nb_args == 2) &&
6053 (vtop[-nb_args].r & VT_SYM) &&
6054 (vtop[-nb_args].sym->v == TOK_setjmp ||
6055 vtop[-nb_args].sym->v == TOK__setjmp
6056 #ifndef TCC_TARGET_PE
6057 || vtop[-nb_args].sym->v == TOK_sigsetjmp
6058 || vtop[-nb_args].sym->v == TOK___sigsetjmp
6059 #endif
6060 )) {
6061 vpush_global_sym(&func_old_type, TOK___bound_setjmp);
6062 vpushv(vtop - nb_args);
6063 if (nb_args == 2)
6064 vpushv(vtop - nb_args);
6065 gfunc_call(nb_args);
6067 #endif
6068 gfunc_call(nb_args);
6070 if (ret_nregs < 0) {
6071 vsetc(&ret.type, ret.r, &ret.c);
6072 #ifdef TCC_TARGET_RISCV64
6073 arch_transfer_ret_regs(1);
6074 #endif
6075 } else {
6076 /* return value */
6077 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
6078 vsetc(&ret.type, r, &ret.c);
6079 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
6082 /* handle packed struct return */
6083 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6084 int addr, offset;
6086 size = type_size(&s->type, &align);
6087 /* We're writing whole regs often, make sure there's enough
6088 space. Assume register size is power of 2. */
6089 if (regsize > align)
6090 align = regsize;
6091 loc = (loc - size) & -align;
6092 addr = loc;
6093 offset = 0;
6094 for (;;) {
6095 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6096 vswap();
6097 vstore();
6098 vtop--;
6099 if (--ret_nregs == 0)
6100 break;
6101 offset += regsize;
6103 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
6106 /* Promote char/short return values. This matters only
6107 when calling functions that were not compiled by TCC and
6108 only on some architectures. For those where it doesn't
6109 matter we expect things to be already promoted to int,
6110 but not larger. */
6111 t = s->type.t & VT_BTYPE;
6112 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6113 #ifdef PROMOTE_RET
6114 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6115 #else
6116 vtop->type.t = VT_INT;
6117 #endif
6120 if (s->f.func_noreturn)
6121 CODE_OFF();
6122 } else {
6123 break;
6128 #ifndef precedence_parser /* original top-down parser */
6130 static void expr_prod(void)
6132 int t;
6134 unary();
6135 while ((t = tok) == '*' || t == '/' || t == '%') {
6136 next();
6137 unary();
6138 gen_op(t);
6142 static void expr_sum(void)
6144 int t;
6146 expr_prod();
6147 while ((t = tok) == '+' || t == '-') {
6148 next();
6149 expr_prod();
6150 gen_op(t);
6154 static void expr_shift(void)
6156 int t;
6158 expr_sum();
6159 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6160 next();
6161 expr_sum();
6162 gen_op(t);
6166 static void expr_cmp(void)
6168 int t;
6170 expr_shift();
6171 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6172 t == TOK_ULT || t == TOK_UGE) {
6173 next();
6174 expr_shift();
6175 gen_op(t);
6179 static void expr_cmpeq(void)
6181 int t;
6183 expr_cmp();
6184 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6185 next();
6186 expr_cmp();
6187 gen_op(t);
6191 static void expr_and(void)
6193 expr_cmpeq();
6194 while (tok == '&') {
6195 next();
6196 expr_cmpeq();
6197 gen_op('&');
6201 static void expr_xor(void)
6203 expr_and();
6204 while (tok == '^') {
6205 next();
6206 expr_and();
6207 gen_op('^');
6211 static void expr_or(void)
6213 expr_xor();
6214 while (tok == '|') {
6215 next();
6216 expr_xor();
6217 gen_op('|');
6221 static void expr_landor(int op);
6223 static void expr_land(void)
6225 expr_or();
6226 if (tok == TOK_LAND)
6227 expr_landor(tok);
6230 static void expr_lor(void)
6232 expr_land();
6233 if (tok == TOK_LOR)
6234 expr_landor(tok);
6237 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6238 #else /* defined precedence_parser */
6239 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6240 # define expr_lor() unary(), expr_infix(1)
6242 static int precedence(int tok)
6244 switch (tok) {
6245 case TOK_LOR: return 1;
6246 case TOK_LAND: return 2;
6247 case '|': return 3;
6248 case '^': return 4;
6249 case '&': return 5;
6250 case TOK_EQ: case TOK_NE: return 6;
6251 relat: case TOK_ULT: case TOK_UGE: return 7;
6252 case TOK_SHL: case TOK_SAR: return 8;
6253 case '+': case '-': return 9;
6254 case '*': case '/': case '%': return 10;
6255 default:
6256 if (tok >= TOK_ULE && tok <= TOK_GT)
6257 goto relat;
6258 return 0;
6261 static unsigned char prec[256];
6262 static void init_prec(void)
6264 int i;
6265 for (i = 0; i < 256; i++)
6266 prec[i] = precedence(i);
6268 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
6270 static void expr_landor(int op);
6272 static void expr_infix(int p)
6274 int t = tok, p2;
6275 while ((p2 = precedence(t)) >= p) {
6276 if (t == TOK_LOR || t == TOK_LAND) {
6277 expr_landor(t);
6278 } else {
6279 next();
6280 unary();
6281 if (precedence(tok) > p2)
6282 expr_infix(p2 + 1);
6283 gen_op(t);
6285 t = tok;
6288 #endif
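/* Both variants implement the usual C precedences; as a sketch, in
   'a + b * c' precedence('+') is 9 and precedence('*') is 10, so
   expr_infix() (or the expr_prod()/expr_sum() chain above) reduces 'b * c'
   first and only then the addition, i.e. a + (b * c). */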
6290 /* Assuming vtop is a value used in a conditional context
6291 (i.e. compared with zero) return 0 if it's false, 1 if
6292 true and -1 if it can't be statically determined. */
6293 static int condition_3way(void)
6295 int c = -1;
6296 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6297 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6298 vdup();
6299 gen_cast_s(VT_BOOL);
6300 c = vtop->c.i;
6301 vpop();
6303 return c;
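/* Hedged example: for a constant controlling value such as
   'sizeof(long) == 8' the result sits on vtop as VT_CONST, so
   condition_3way() returns 1 or 0 and expr_landor()/expr_cond() below can
   drop the dead branch via nocode_wanted instead of emitting a test. */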
6306 static void expr_landor(int op)
6308 int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
6309 for(;;) {
6310 c = f ? i : condition_3way();
6311 if (c < 0)
6312 save_regs(1), cc = 0;
6313 else if (c != i)
6314 nocode_wanted++, f = 1;
6315 if (tok != op)
6316 break;
6317 if (c < 0)
6318 t = gvtst(i, t);
6319 else
6320 vpop();
6321 next();
6322 expr_landor_next(op);
6324 if (cc || f) {
6325 vpop();
6326 vpushi(i ^ f);
6327 gsym(t);
6328 nocode_wanted -= f;
6329 } else {
6330 gvtst_set(i, t);
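/* Hedged example: in '0 && f()' the left operand is the constant 0 (c != i),
   so 'f()' is still parsed but with nocode_wanted incremented; the whole
   expression folds to the constant 0 and no call is emitted. */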
6334 static int is_cond_bool(SValue *sv)
6336 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6337 && (sv->type.t & VT_BTYPE) == VT_INT)
6338 return (unsigned)sv->c.i < 2;
6339 if (sv->r == VT_CMP)
6340 return 1;
6341 return 0;
6344 static void expr_cond(void)
6346 int tt, u, r1, r2, rc, t1, t2, islv, c, g;
6347 SValue sv;
6348 CType type;
6349 int ncw_prev;
6351 expr_lor();
6352 if (tok == '?') {
6353 next();
6354 c = condition_3way();
6355 g = (tok == ':' && gnu_ext);
6356 tt = 0;
6357 if (!g) {
6358 if (c < 0) {
6359 save_regs(1);
6360 tt = gvtst(1, 0);
6361 } else {
6362 vpop();
6364 } else if (c < 0) {
6365 /* needed to avoid having different registers saved in
6366 each branch */
6367 save_regs(1);
6368 gv_dup();
6369 tt = gvtst(0, 0);
6372 ncw_prev = nocode_wanted;
6373 if (c == 0)
6374 nocode_wanted++;
6375 if (!g)
6376 gexpr();
6378 if (c < 0 && vtop->r == VT_CMP) {
6379 t1 = gvtst(0, 0);
6380 vpushi(0);
6381 gvtst_set(0, t1);
6382 gv(RC_INT);
6385 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6386 mk_pointer(&vtop->type);
6387 sv = *vtop; /* save value to handle it later */
6388 vtop--; /* no vpop so that FP stack is not flushed */
6390 if (g) {
6391 u = tt;
6392 } else if (c < 0) {
6393 u = gjmp(0);
6394 gsym(tt);
6395 } else
6396 u = 0;
6398 nocode_wanted = ncw_prev;
6399 if (c == 1)
6400 nocode_wanted++;
6401 skip(':');
6402 expr_cond();
6404 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
6405 if (sv.r == VT_CMP) {
6406 t1 = sv.jtrue;
6407 t2 = u;
6408 } else {
6409 t1 = gvtst(0, 0);
6410 t2 = gjmp(0);
6411 gsym(u);
6412 vpushv(&sv);
6414 gvtst_set(0, t1);
6415 gvtst_set(1, t2);
6416 nocode_wanted = ncw_prev;
6417 // tcc_warning("two conditions expr_cond");
6418 return;
6421 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6422 mk_pointer(&vtop->type);
6424 /* cast operands to correct type according to ISOC rules */
6425 if (!combine_types(&type, &sv, vtop, '?'))
6426 type_incompatibility_error(&sv.type, &vtop->type,
6427 "type mismatch in conditional expression (have '%s' and '%s')");
6428 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6429 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6430 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
6432 /* now we convert second operand */
6433 if (c != 1) {
6434 gen_cast(&type);
6435 if (islv) {
6436 mk_pointer(&vtop->type);
6437 gaddrof();
6438 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6439 gaddrof();
6442 rc = RC_TYPE(type.t);
6443 /* for long longs, we use fixed registers to avoid having
6444 to handle a complicated move */
6445 if (USING_TWO_WORDS(type.t))
6446 rc = RC_RET(type.t);
6448 tt = r2 = 0;
6449 if (c < 0) {
6450 r2 = gv(rc);
6451 tt = gjmp(0);
6453 gsym(u);
6454 nocode_wanted = ncw_prev;
6456 /* this is horrible, but we must also convert first
6457 operand */
6458 if (c != 0) {
6459 *vtop = sv;
6460 gen_cast(&type);
6461 if (islv) {
6462 mk_pointer(&vtop->type);
6463 gaddrof();
6464 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6465 gaddrof();
6468 if (c < 0) {
6469 r1 = gv(rc);
6470 move_reg(r2, r1, islv ? VT_PTR : type.t);
6471 vtop->r = r2;
6472 gsym(tt);
6475 if (islv)
6476 indir();
6480 static void expr_eq(void)
6482 int t;
6484 expr_cond();
6485 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6486 test_lvalue();
6487 next();
6488 if (t == '=') {
6489 expr_eq();
6490 } else {
6491 vdup();
6492 expr_eq();
6493 gen_op(TOK_ASSIGN_OP(t));
6495 vstore();
6499 ST_FUNC void gexpr(void)
6501 while (1) {
6502 expr_eq();
6503 if (tok != ',')
6504 break;
6505 vpop();
6506 next();
6510 /* parse a constant expression and return value in vtop. */
6511 static void expr_const1(void)
6513 const_wanted++;
6514 nocode_wanted += unevalmask + 1;
6515 expr_cond();
6516 nocode_wanted -= unevalmask + 1;
6517 const_wanted--;
6520 /* parse an integer constant and return its value. */
6521 static inline int64_t expr_const64(void)
6523 int64_t c;
6524 expr_const1();
6525 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6526 expect("constant expression");
6527 c = vtop->c.i;
6528 vpop();
6529 return c;
6532 /* parse an integer constant and return its value.
6533 Complain if it doesn't fit 32bit (signed or unsigned). */
6534 ST_FUNC int expr_const(void)
6536 int c;
6537 int64_t wc = expr_const64();
6538 c = wc;
6539 if (c != wc && (unsigned)c != wc)
6540 tcc_error("constant exceeds 32 bit");
6541 return c;
6544 /* ------------------------------------------------------------------------- */
6545 /* return from function */
6547 #ifndef TCC_TARGET_ARM64
6548 static void gfunc_return(CType *func_type)
6550 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
6551 CType type, ret_type;
6552 int ret_align, ret_nregs, regsize;
6553 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
6554 &ret_align, &regsize);
6555 if (ret_nregs < 0) {
6556 #ifdef TCC_TARGET_RISCV64
6557 arch_transfer_ret_regs(0);
6558 #endif
6559 } else if (0 == ret_nregs) {
6560 /* if returning structure, must copy it to implicit
6561 first pointer arg location */
6562 type = *func_type;
6563 mk_pointer(&type);
6564 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
6565 indir();
6566 vswap();
6567 /* copy structure value to pointer */
6568 vstore();
6569 } else {
6570 /* returning structure packed into registers */
6571 int size, addr, align, rc;
6572 size = type_size(func_type,&align);
6573 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
6574 (vtop->c.i & (ret_align-1)))
6575 && (align & (ret_align-1))) {
6576 loc = (loc - size) & -ret_align;
6577 addr = loc;
6578 type = *func_type;
6579 vset(&type, VT_LOCAL | VT_LVAL, addr);
6580 vswap();
6581 vstore();
6582 vpop();
6583 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
6585 vtop->type = ret_type;
6586 rc = RC_RET(ret_type.t);
6587 if (ret_nregs == 1)
6588 gv(rc);
6589 else {
6590 for (;;) {
6591 vdup();
6592 gv(rc);
6593 vpop();
6594 if (--ret_nregs == 0)
6595 break;
6596 /* We assume that when a structure is returned in multiple
6597 registers, their classes are consecutive values of the
6598 sequence s(n) = 2^n */
6599 rc <<= 1;
6600 vtop->c.i += regsize;
6604 } else {
6605 gv(RC_RET(func_type->t));
6607 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
6609 #endif
6611 static void check_func_return(void)
6613 if ((func_vt.t & VT_BTYPE) == VT_VOID)
6614 return;
6615 if (!strcmp (funcname, "main")
6616 && (func_vt.t & VT_BTYPE) == VT_INT) {
6617 /* main returns 0 by default */
6618 vpushi(0);
6619 gen_assign_cast(&func_vt);
6620 gfunc_return(&func_vt);
6621 } else {
6622 tcc_warning("function might return no value: '%s'", funcname);
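/* Hedged illustration (assumed examples), for a function whose body can be
   left without an explicit return:

       int main(void) { }   // gets an implicit 'return 0;'
       int f(void) { }      // only triggers the warning above
*/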
6626 /* ------------------------------------------------------------------------- */
6627 /* switch/case */
6629 static int case_cmp(const void *pa, const void *pb)
6631 int64_t a = (*(struct case_t**) pa)->v1;
6632 int64_t b = (*(struct case_t**) pb)->v1;
6633 return a < b ? -1 : a > b;
6636 static void gtst_addr(int t, int a)
6638 gsym_addr(gvtst(0, t), a);
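/* Rough sketch of what gcase() emits for a pivot entry p during the
   binary search:
       if (x <= p->v2 && x >= p->v1) goto p->sym;   // v1 <= x <= v2
       // x < v1: handled by the recursive call on the lower half
       // x > v2: the loop continues with the entries above p
   A GNU case range "case 1 ... 5:" simply gives v1 = 1, v2 = 5. */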
6641 static void gcase(struct case_t **base, int len, int *bsym)
6643 struct case_t *p;
6644 int e;
6645 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
6646 while (len > 8) {
6647 /* binary search */
6648 p = base[len/2];
6649 vdup();
6650 if (ll)
6651 vpushll(p->v2);
6652 else
6653 vpushi(p->v2);
6654 gen_op(TOK_LE);
6655 e = gvtst(1, 0);
6656 vdup();
6657 if (ll)
6658 vpushll(p->v1);
6659 else
6660 vpushi(p->v1);
6661 gen_op(TOK_GE);
6662 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
6663 /* x < v1 */
6664 gcase(base, len/2, bsym);
6665 /* x > v2 */
6666 gsym(e);
6667 e = len/2 + 1;
6668 base += e; len -= e;
6670 /* linear scan */
6671 while (len--) {
6672 p = *base++;
6673 vdup();
6674 if (ll)
6675 vpushll(p->v2);
6676 else
6677 vpushi(p->v2);
6678 if (p->v1 == p->v2) {
6679 gen_op(TOK_EQ);
6680 gtst_addr(0, p->sym);
6681 } else {
6682 gen_op(TOK_LE);
6683 e = gvtst(1, 0);
6684 vdup();
6685 if (ll)
6686 vpushll(p->v1);
6687 else
6688 vpushi(p->v1);
6689 gen_op(TOK_GE);
6690 gtst_addr(0, p->sym);
6691 gsym(e);
6694 *bsym = gjmp(*bsym);
6697 /* ------------------------------------------------------------------------- */
6698 /* __attribute__((cleanup(fn))) */
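/* Example of the construct handled below (names are only illustrative):
       void unlock(int *p);
       { int guard __attribute__((cleanup(unlock))); ... }
   When the scope is left, unlock(&guard) must be called; the loop below
   pushes the cleanup function and the address of the variable, then emits
   the call with one argument. */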
6700 static void try_call_scope_cleanup(Sym *stop)
6702 Sym *cls = cur_scope->cl.s;
6704 for (; cls != stop; cls = cls->ncl) {
6705 Sym *fs = cls->next;
6706 Sym *vs = cls->prev_tok;
6708 vpushsym(&fs->type, fs);
6709 vset(&vs->type, vs->r, vs->c);
6710 vtop->sym = vs;
6711 mk_pointer(&vtop->type);
6712 gaddrof();
6713 gfunc_call(1);
6717 static void try_call_cleanup_goto(Sym *cleanupstate)
6719 Sym *oc, *cc;
6720 int ocd, ccd;
6722 if (!cur_scope->cl.s)
6723 return;
6725 /* search the nearest common ancestor (NCA) of both cleanup chains, given their parents and initial depths */
6726 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
6727 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
6729 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
6731 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
6734 try_call_scope_cleanup(cc);
6737 /* call 'func' for each __attribute__((cleanup(func))) */
6738 static void block_cleanup(struct scope *o)
6740 int jmp = 0;
6741 Sym *g, **pg;
6742 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
6743 if (g->prev_tok->r & LABEL_FORWARD) {
6744 Sym *pcl = g->next;
6745 if (!jmp)
6746 jmp = gjmp(0);
6747 gsym(pcl->jnext);
6748 try_call_scope_cleanup(o->cl.s);
6749 pcl->jnext = gjmp(0);
6750 if (!o->cl.n)
6751 goto remove_pending;
6752 g->c = o->cl.n;
6753 pg = &g->prev;
6754 } else {
6755 remove_pending:
6756 *pg = g->prev;
6757 sym_free(g);
6760 gsym(jmp);
6761 try_call_scope_cleanup(o->cl.s);
6764 /* ------------------------------------------------------------------------- */
6765 /* VLA */
6767 static void vla_restore(int loc)
6769 if (loc)
6770 gen_vla_sp_restore(loc);
6773 static void vla_leave(struct scope *o)
6775 if (o->vla.num < cur_scope->vla.num)
6776 vla_restore(o->vla.loc);
6779 /* ------------------------------------------------------------------------- */
6780 /* local scopes */
6782 void new_scope(struct scope *o)
6784 /* copy and link previous scope */
6785 *o = *cur_scope;
6786 o->prev = cur_scope;
6787 cur_scope = o;
6789 /* record local declaration stack position */
6790 o->lstk = local_stack;
6791 o->llstk = local_label_stack;
6793 ++local_scope;
6795 if (tcc_state->do_debug)
6796 tcc_debug_stabn(N_LBRAC, ind - func_ind);
6799 void prev_scope(struct scope *o, int is_expr)
6801 vla_leave(o->prev);
6803 if (o->cl.s != o->prev->cl.s)
6804 block_cleanup(o->prev);
6806 /* pop locally defined labels */
6807 label_pop(&local_label_stack, o->llstk, is_expr);
6809 /* In the is_expr case (a statement expression is finished here),
6810 vtop might refer to symbols on the local_stack. Either via the
6811 type or via vtop->sym. We can't pop those nor any that in turn
6812 might be referred to. To make it easier we don't roll back
6813 any symbols in that case; some upper level call to block() will
6814 do that. We do have to remove such symbols from the lookup
6815 tables, though. sym_pop will do that. */
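/* For instance (GNU statement expression, illustrative):
       int y = ({ struct S s = make(); s.field; });
   the value left on the stack may still refer to 's', hence the careful
   handling described above. */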
6817 /* pop locally defined symbols */
6818 pop_local_syms(&local_stack, o->lstk, is_expr, 0);
6819 cur_scope = o->prev;
6820 --local_scope;
6822 if (tcc_state->do_debug)
6823 tcc_debug_stabn(N_RBRAC, ind - func_ind);
6826 /* leave a scope via break/continue(/goto) */
6827 void leave_scope(struct scope *o)
6829 if (!o)
6830 return;
6831 try_call_scope_cleanup(o->cl.s);
6832 vla_leave(o);
6835 /* ------------------------------------------------------------------------- */
6836 /* call block from 'for do while' loops */
6838 static void lblock(int *bsym, int *csym)
6840 struct scope *lo = loop_scope, *co = cur_scope;
6841 int *b = co->bsym, *c = co->csym;
6842 if (csym) {
6843 co->csym = csym;
6844 loop_scope = co;
6846 co->bsym = bsym;
6847 block(0);
6848 co->bsym = b;
6849 if (csym) {
6850 co->csym = c;
6851 loop_scope = lo;
6855 static void block(int is_expr)
6857 int a, b, c, d, e, t;
6858 struct scope o;
6859 Sym *s;
6861 if (is_expr) {
6862 /* default return value is (void) */
6863 vpushi(0);
6864 vtop->type.t = VT_VOID;
6867 again:
6868 t = tok, next();
6870 if (t == TOK_IF) {
6871 skip('(');
6872 gexpr();
6873 skip(')');
6874 a = gvtst(1, 0);
6875 block(0);
6876 if (tok == TOK_ELSE) {
6877 d = gjmp(0);
6878 gsym(a);
6879 next();
6880 block(0);
6881 gsym(d); /* patch else jmp */
6882 } else {
6883 gsym(a);
6886 } else if (t == TOK_WHILE) {
6887 d = gind();
6888 skip('(');
6889 gexpr();
6890 skip(')');
6891 a = gvtst(1, 0);
6892 b = 0;
6893 lblock(&a, &b);
6894 gjmp_addr(d);
6895 gsym_addr(b, d);
6896 gsym(a);
6898 } else if (t == '{') {
6899 new_scope(&o);
6901 /* handle local labels declarations */
6902 while (tok == TOK_LABEL) {
6903 do {
6904 next();
6905 if (tok < TOK_UIDENT)
6906 expect("label identifier");
6907 label_push(&local_label_stack, tok, LABEL_DECLARED);
6908 next();
6909 } while (tok == ',');
6910 skip(';');
6913 while (tok != '}') {
6914 decl(VT_LOCAL);
6915 if (tok != '}') {
6916 if (is_expr)
6917 vpop();
6918 block(is_expr);
6922 prev_scope(&o, is_expr);
6923 if (local_scope)
6924 next();
6925 else if (!nocode_wanted)
6926 check_func_return();
6928 } else if (t == TOK_RETURN) {
6929 b = (func_vt.t & VT_BTYPE) != VT_VOID;
6930 if (tok != ';') {
6931 gexpr();
6932 if (b) {
6933 gen_assign_cast(&func_vt);
6934 } else {
6935 if (vtop->type.t != VT_VOID)
6936 tcc_warning("void function returns a value");
6937 vtop--;
6939 } else if (b) {
6940 tcc_warning("'return' with no value");
6941 b = 0;
6943 leave_scope(root_scope);
6944 if (b)
6945 gfunc_return(&func_vt);
6946 skip(';');
6947 /* jump unless last stmt in top-level block */
6948 if (tok != '}' || local_scope != 1)
6949 rsym = gjmp(rsym);
6950 CODE_OFF();
6952 } else if (t == TOK_BREAK) {
6953 /* compute jump */
6954 if (!cur_scope->bsym)
6955 tcc_error("cannot break");
6956 if (cur_switch && cur_scope->bsym == cur_switch->bsym)
6957 leave_scope(cur_switch->scope);
6958 else
6959 leave_scope(loop_scope);
6960 *cur_scope->bsym = gjmp(*cur_scope->bsym);
6961 skip(';');
6963 } else if (t == TOK_CONTINUE) {
6964 /* compute jump */
6965 if (!cur_scope->csym)
6966 tcc_error("cannot continue");
6967 leave_scope(loop_scope);
6968 *cur_scope->csym = gjmp(*cur_scope->csym);
6969 skip(';');
6971 } else if (t == TOK_FOR) {
6972 new_scope(&o);
6974 skip('(');
6975 if (tok != ';') {
6976 /* c99 for-loop init decl? */
6977 if (!decl0(VT_LOCAL, 1, NULL)) {
6978 /* no, regular for-loop init expr */
6979 gexpr();
6980 vpop();
6983 skip(';');
6984 a = b = 0;
6985 c = d = gind();
6986 if (tok != ';') {
6987 gexpr();
6988 a = gvtst(1, 0);
6990 skip(';');
6991 if (tok != ')') {
6992 e = gjmp(0);
6993 d = gind();
6994 gexpr();
6995 vpop();
6996 gjmp_addr(c);
6997 gsym(e);
6999 skip(')');
7000 lblock(&a, &b);
7001 gjmp_addr(d);
7002 gsym_addr(b, d);
7003 gsym(a);
7004 prev_scope(&o, 0);
7006 } else if (t == TOK_DO) {
7007 a = b = 0;
7008 d = gind();
7009 lblock(&a, &b);
7010 gsym(b);
7011 skip(TOK_WHILE);
7012 skip('(');
7013 gexpr();
7014 skip(')');
7015 skip(';');
7016 c = gvtst(0, 0);
7017 gsym_addr(c, d);
7018 gsym(a);
7020 } else if (t == TOK_SWITCH) {
7021 struct switch_t *sw;
7023 sw = tcc_mallocz(sizeof *sw);
7024 sw->bsym = &a;
7025 sw->scope = cur_scope;
7026 sw->prev = cur_switch;
7027 cur_switch = sw;
7029 skip('(');
7030 gexpr();
7031 skip(')');
7032 sw->sv = *vtop--; /* save switch value */
7034 a = 0;
7035 b = gjmp(0); /* jump to first case */
7036 lblock(&a, NULL);
7037 a = gjmp(a); /* add implicit break */
7038 /* case lookup */
7039 gsym(b);
7041 qsort(sw->p, sw->n, sizeof(void*), case_cmp);
7042 for (b = 1; b < sw->n; b++)
7043 if (sw->p[b - 1]->v2 >= sw->p[b]->v1)
7044 tcc_error("duplicate case value");
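/* e.g. "case 1 ... 5:" followed by "case 3:" is rejected here, because
   after sorting by v1 the two ranges overlap. */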
7046 /* Our switch table sorting is signed, so the compared
7047 value needs to be as well when it's 64bit. */
7048 vpushv(&sw->sv);
7049 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
7050 vtop->type.t &= ~VT_UNSIGNED;
7051 gv(RC_INT);
7052 d = 0, gcase(sw->p, sw->n, &d);
7053 vpop();
7054 if (sw->def_sym)
7055 gsym_addr(d, sw->def_sym);
7056 else
7057 gsym(d);
7058 /* break label */
7059 gsym(a);
7061 dynarray_reset(&sw->p, &sw->n);
7062 cur_switch = sw->prev;
7063 tcc_free(sw);
7065 } else if (t == TOK_CASE) {
7066 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
7067 if (!cur_switch)
7068 expect("switch");
7069 cr->v1 = cr->v2 = expr_const64();
7070 if (gnu_ext && tok == TOK_DOTS) {
7071 next();
7072 cr->v2 = expr_const64();
7073 if (cr->v2 < cr->v1)
7074 tcc_warning("empty case range");
7076 cr->sym = gind();
7077 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
7078 skip(':');
7079 is_expr = 0;
7080 goto block_after_label;
7082 } else if (t == TOK_DEFAULT) {
7083 if (!cur_switch)
7084 expect("switch");
7085 if (cur_switch->def_sym)
7086 tcc_error("too many 'default'");
7087 cur_switch->def_sym = gind();
7088 skip(':');
7089 is_expr = 0;
7090 goto block_after_label;
7092 } else if (t == TOK_GOTO) {
7093 vla_restore(root_scope->vla.loc);
7094 if (tok == '*' && gnu_ext) {
7095 /* computed goto */
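/* e.g. (illustrative): static void *tbl[] = { &&l1, &&l2 }; goto *tbl[i]; */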
7096 next();
7097 gexpr();
7098 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
7099 expect("pointer");
7100 ggoto();
7102 } else if (tok >= TOK_UIDENT) {
7103 s = label_find(tok);
7104 /* put forward definition if needed */
7105 if (!s)
7106 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
7107 else if (s->r == LABEL_DECLARED)
7108 s->r = LABEL_FORWARD;
7110 if (s->r & LABEL_FORWARD) {
7111 /* start new goto chain for cleanups, linked via label->next */
7112 if (cur_scope->cl.s && !nocode_wanted) {
7113 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
7114 pending_gotos->prev_tok = s;
7115 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
7116 pending_gotos->next = s;
7118 s->jnext = gjmp(s->jnext);
7119 } else {
7120 try_call_cleanup_goto(s->cleanupstate);
7121 gjmp_addr(s->jnext);
7123 next();
7125 } else {
7126 expect("label identifier");
7128 skip(';');
7130 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
7131 asm_instr();
7133 } else {
7134 if (tok == ':' && t >= TOK_UIDENT) {
7135 /* label case */
7136 next();
7137 s = label_find(t);
7138 if (s) {
7139 if (s->r == LABEL_DEFINED)
7140 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
7141 s->r = LABEL_DEFINED;
7142 if (s->next) {
7143 Sym *pcl; /* pending cleanup goto */
7144 for (pcl = s->next; pcl; pcl = pcl->prev)
7145 gsym(pcl->jnext);
7146 sym_pop(&s->next, NULL, 0);
7147 } else
7148 gsym(s->jnext);
7149 } else {
7150 s = label_push(&global_label_stack, t, LABEL_DEFINED);
7152 s->jnext = gind();
7153 s->cleanupstate = cur_scope->cl.s;
7155 block_after_label:
7156 vla_restore(cur_scope->vla.loc);
7157 /* we accept this, but it is a mistake */
7158 if (tok == '}') {
7159 tcc_warning("deprecated use of label at end of compound statement");
7160 } else {
7161 goto again;
7164 } else {
7165 /* expression case */
7166 if (t != ';') {
7167 unget_tok(t);
7168 if (is_expr) {
7169 vpop();
7170 gexpr();
7171 } else {
7172 gexpr();
7173 vpop();
7175 skip(';');
7181 /* This skips over a stream of tokens containing balanced {} and ()
7182 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7183 with a '{'). If STR is non-NULL, allocate and store the skipped tokens
7184 in *STR. This doesn't check whether () and {} are nested correctly,
7185 i.e. "({)}" is accepted. */
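/* For example, starting after the '=' in "int x = f(1, (2,3)), y;" this
   consumes "f(1, (2,3))" and stops at the outer ','. */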
7186 static void skip_or_save_block(TokenString **str)
7188 int braces = tok == '{';
7189 int level = 0;
7190 if (str)
7191 *str = tok_str_alloc();
7193 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
7194 int t;
7195 if (tok == TOK_EOF) {
7196 if (str || level > 0)
7197 tcc_error("unexpected end of file");
7198 else
7199 break;
7201 if (str)
7202 tok_str_add_tok(*str);
7203 t = tok;
7204 next();
7205 if (t == '{' || t == '(') {
7206 level++;
7207 } else if (t == '}' || t == ')') {
7208 level--;
7209 if (level == 0 && braces && t == '}')
7210 break;
7213 if (str) {
7214 tok_str_add(*str, -1);
7215 tok_str_add(*str, 0);
7219 #define EXPR_CONST 1
7220 #define EXPR_ANY 2
7222 static void parse_init_elem(int expr_type)
7224 int saved_global_expr;
7225 switch(expr_type) {
7226 case EXPR_CONST:
7227 /* compound literals must be allocated globally in this case */
7228 saved_global_expr = global_expr;
7229 global_expr = 1;
7230 expr_const1();
7231 global_expr = saved_global_expr;
7232 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7233 (compound literals). */
7234 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
7235 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
7236 || vtop->sym->v < SYM_FIRST_ANOM))
7237 #ifdef TCC_TARGET_PE
7238 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
7239 #endif
7241 tcc_error("initializer element is not constant");
7242 break;
7243 case EXPR_ANY:
7244 expr_eq();
7245 break;
7249 /* put zeros for variable based init */
7250 static void init_putz(Section *sec, unsigned long c, int size)
7252 if (sec) {
7253 /* nothing to do because globals are already set to zero */
7254 } else {
7255 vpush_global_sym(&func_old_type, TOK_memset);
7256 vseti(VT_LOCAL, c);
7257 #ifdef TCC_TARGET_ARM
7258 vpushs(size);
7259 vpushi(0);
7260 #else
7261 vpushi(0);
7262 vpushs(size);
7263 #endif
7264 gfunc_call(3);
7268 #define DIF_FIRST 1
7269 #define DIF_SIZE_ONLY 2
7270 #define DIF_HAVE_ELEM 4
7272 /* t is the array or struct type. c is the array or struct
7273 address. cur_field is the pointer to the current
7274 field, for arrays the 'c' member contains the current start
7275 index. 'flags' is as in decl_initializer.
7276 'al' contains the already initialized length of the
7277 current container (starting at c). The new length is returned. */
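/* Examples of designators handled here (illustrative):
       struct P p  = { .y = 2, .x = 1 };
       int    a[6] = { [2] = 1, [3 ... 5] = 7 };   // GNU range: index 3..5
   For a range, the value is stored once and then replicated nb_elems times
   by the copy loop at the end of this function. */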
7278 static int decl_designator(CType *type, Section *sec, unsigned long c,
7279 Sym **cur_field, int flags, int al)
7281 Sym *s, *f;
7282 int index, index_last, align, l, nb_elems, elem_size;
7283 unsigned long corig = c;
7285 elem_size = 0;
7286 nb_elems = 1;
7288 if (flags & DIF_HAVE_ELEM)
7289 goto no_designator;
7291 if (gnu_ext && tok >= TOK_UIDENT) {
7292 l = tok, next();
7293 if (tok == ':')
7294 goto struct_field;
7295 unget_tok(l);
7298 /* NOTE: we only support ranges for last designator */
7299 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
7300 if (tok == '[') {
7301 if (!(type->t & VT_ARRAY))
7302 expect("array type");
7303 next();
7304 index = index_last = expr_const();
7305 if (tok == TOK_DOTS && gnu_ext) {
7306 next();
7307 index_last = expr_const();
7309 skip(']');
7310 s = type->ref;
7311 if (index < 0 || (s->c >= 0 && index_last >= s->c) ||
7312 index_last < index)
7313 tcc_error("invalid index");
7314 if (cur_field)
7315 (*cur_field)->c = index_last;
7316 type = pointed_type(type);
7317 elem_size = type_size(type, &align);
7318 c += index * elem_size;
7319 nb_elems = index_last - index + 1;
7320 } else {
7321 int cumofs;
7322 next();
7323 l = tok;
7324 struct_field:
7325 next();
7326 if ((type->t & VT_BTYPE) != VT_STRUCT)
7327 expect("struct/union type");
7328 cumofs = 0;
7329 f = find_field(type, l, &cumofs);
7330 if (!f)
7331 expect("field");
7332 if (cur_field)
7333 *cur_field = f;
7334 type = &f->type;
7335 c += cumofs + f->c;
7337 cur_field = NULL;
7339 if (!cur_field) {
7340 if (tok == '=') {
7341 next();
7342 } else if (!gnu_ext) {
7343 expect("=");
7345 } else {
7346 no_designator:
7347 if (type->t & VT_ARRAY) {
7348 index = (*cur_field)->c;
7349 if (type->ref->c >= 0 && index >= type->ref->c)
7350 tcc_error("index too large");
7351 type = pointed_type(type);
7352 c += index * type_size(type, &align);
7353 } else {
7354 f = *cur_field;
7355 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
7356 *cur_field = f = f->next;
7357 if (!f)
7358 tcc_error("too many field init");
7359 type = &f->type;
7360 c += f->c;
7363 /* must put zero in holes (note that doing it that way
7364 ensures that it even works with designators) */
7365 if (!(flags & DIF_SIZE_ONLY) && c - corig > al)
7366 init_putz(sec, corig + al, c - corig - al);
7367 decl_initializer(type, sec, c, flags & ~DIF_FIRST);
7369 /* XXX: make it more general */
7370 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
7371 unsigned long c_end;
7372 uint8_t *src, *dst;
7373 int i;
7375 if (!sec) {
7376 vset(type, VT_LOCAL|VT_LVAL, c);
7377 for (i = 1; i < nb_elems; i++) {
7378 vset(type, VT_LOCAL|VT_LVAL, c + elem_size * i);
7379 vswap();
7380 vstore();
7382 vpop();
7383 } else if (!NODATA_WANTED) {
7384 c_end = c + nb_elems * elem_size;
7385 if (c_end > sec->data_allocated)
7386 section_realloc(sec, c_end);
7387 src = sec->data + c;
7388 dst = src;
7389 for(i = 1; i < nb_elems; i++) {
7390 dst += elem_size;
7391 memcpy(dst, src, elem_size);
7395 c += nb_elems * type_size(type, &align);
7396 if (c - corig > al)
7397 al = c - corig;
7398 return al;
7401 /* store a value or an expression directly in global data or in local array */
7402 static void init_putv(CType *type, Section *sec, unsigned long c)
7404 int bt;
7405 void *ptr;
7406 CType dtype;
7408 dtype = *type;
7409 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7411 if (sec) {
7412 int size, align;
7413 /* XXX: not portable */
7414 /* XXX: generate error if incorrect relocation */
7415 gen_assign_cast(&dtype);
7416 bt = type->t & VT_BTYPE;
7418 if ((vtop->r & VT_SYM)
7419 && bt != VT_PTR
7420 && bt != VT_FUNC
7421 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7422 || (type->t & VT_BITFIELD))
7423 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7425 tcc_error("initializer element is not computable at load time");
7427 if (NODATA_WANTED) {
7428 vtop--;
7429 return;
7432 size = type_size(type, &align);
7433 section_reserve(sec, c + size);
7434 ptr = sec->data + c;
7436 /* XXX: make code faster ? */
7437 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7438 vtop->sym->v >= SYM_FIRST_ANOM &&
7439 /* XXX This rejects compound literals like
7440 '(void *){ptr}'. The problem is that '&sym' is
7441 represented the same way, which would be ruled out
7442 by the SYM_FIRST_ANOM check above, but also '"string"'
7443 in 'char *p = "string"' is represented the same
7444 with the type being VT_PTR and the symbol being an
7445 anonymous one. That is, there's no difference in vtop
7446 between '(void *){x}' and '&(void *){x}'. Ignore
7447 pointer typed entities here. Hopefully no real code
7448 will ever use compound literals with scalar type. */
7449 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7450 /* These come from compound literals, memcpy stuff over. */
7451 Section *ssec;
7452 ElfSym *esym;
7453 ElfW_Rel *rel;
7454 esym = elfsym(vtop->sym);
7455 ssec = tcc_state->sections[esym->st_shndx];
7456 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
7457 if (ssec->reloc) {
7458 /* We need to copy over all memory contents, and that
7459 includes relocations. Use the fact that relocs are
7460 created in order, so look from the end of relocs
7461 until we hit one before the copied region. */
7462 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
7463 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
7464 while (num_relocs--) {
7465 rel--;
7466 if (rel->r_offset >= esym->st_value + size)
7467 continue;
7468 if (rel->r_offset < esym->st_value)
7469 break;
7470 /* Note: if the same fields are initialized multiple
7471 times (possible with designators) then we possibly
7472 add multiple relocations for the same offset here.
7473 That would lead to wrong code, the last reloc needs
7474 to win. We clean this up later after the whole
7475 initializer is parsed. */
7476 put_elf_reloca(symtab_section, sec,
7477 c + rel->r_offset - esym->st_value,
7478 ELFW(R_TYPE)(rel->r_info),
7479 ELFW(R_SYM)(rel->r_info),
7480 #if PTR_SIZE == 8
7481 rel->r_addend
7482 #else
7484 #endif
7488 } else {
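/* The bitfield branch below packs the constant byte by byte.  For instance
   with bit_pos = 6 and bit_size = 4, the two low bits of the value land in
   bits 6..7 of the first byte and the remaining two bits in bits 0..1 of
   the following byte. */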
7489 if (type->t & VT_BITFIELD) {
7490 int bit_pos, bit_size, bits, n;
7491 unsigned char *p, v, m;
7492 bit_pos = BIT_POS(vtop->type.t);
7493 bit_size = BIT_SIZE(vtop->type.t);
7494 p = (unsigned char*)ptr + (bit_pos >> 3);
7495 bit_pos &= 7, bits = 0;
7496 while (bit_size) {
7497 n = 8 - bit_pos;
7498 if (n > bit_size)
7499 n = bit_size;
7500 v = vtop->c.i >> bits << bit_pos;
7501 m = ((1 << n) - 1) << bit_pos;
7502 *p = (*p & ~m) | (v & m);
7503 bits += n, bit_size -= n, bit_pos = 0, ++p;
7505 } else
7506 switch(bt) {
7507 /* XXX: when cross-compiling we assume that each type has the
7508 same representation on host and target, which is likely to
7509 be wrong in the case of long double */
7510 case VT_BOOL:
7511 vtop->c.i = vtop->c.i != 0;
7512 case VT_BYTE:
7513 *(char *)ptr |= vtop->c.i;
7514 break;
7515 case VT_SHORT:
7516 *(short *)ptr |= vtop->c.i;
7517 break;
7518 case VT_FLOAT:
7519 *(float*)ptr = vtop->c.f;
7520 break;
7521 case VT_DOUBLE:
7522 *(double *)ptr = vtop->c.d;
7523 break;
7524 case VT_LDOUBLE:
7525 #if defined TCC_IS_NATIVE_387
7526 if (sizeof (long double) >= 10) /* zero pad ten-byte LD */
7527 memcpy(ptr, &vtop->c.ld, 10);
7528 #ifdef __TINYC__
7529 else if (sizeof (long double) == sizeof (double))
7530 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
7531 #endif
7532 else if (vtop->c.ld == 0.0)
7534 else
7535 #endif
7536 if (sizeof(long double) == LDOUBLE_SIZE)
7537 *(long double*)ptr = vtop->c.ld;
7538 else if (sizeof(double) == LDOUBLE_SIZE)
7539 *(double *)ptr = (double)vtop->c.ld;
7540 else
7541 tcc_error("can't cross compile long double constants");
7542 break;
7543 #if PTR_SIZE != 8
7544 case VT_LLONG:
7545 *(long long *)ptr |= vtop->c.i;
7546 break;
7547 #else
7548 case VT_LLONG:
7549 #endif
7550 case VT_PTR:
7552 addr_t val = vtop->c.i;
7553 #if PTR_SIZE == 8
7554 if (vtop->r & VT_SYM)
7555 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7556 else
7557 *(addr_t *)ptr |= val;
7558 #else
7559 if (vtop->r & VT_SYM)
7560 greloc(sec, vtop->sym, c, R_DATA_PTR);
7561 *(addr_t *)ptr |= val;
7562 #endif
7563 break;
7565 default:
7567 int val = vtop->c.i;
7568 #if PTR_SIZE == 8
7569 if (vtop->r & VT_SYM)
7570 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
7571 else
7572 *(int *)ptr |= val;
7573 #else
7574 if (vtop->r & VT_SYM)
7575 greloc(sec, vtop->sym, c, R_DATA_PTR);
7576 *(int *)ptr |= val;
7577 #endif
7578 break;
7582 vtop--;
7583 } else {
7584 vset(&dtype, VT_LOCAL|VT_LVAL, c);
7585 vswap();
7586 vstore();
7587 vpop();
7591 /* 't' contains the type and storage info. 'c' is the offset of the
7592 object in section 'sec'. If 'sec' is NULL, it means stack based
7593 allocation. 'flags & DIF_FIRST' is true if the array's '{' must be read
7594 (multi-dimensional implicit array init handling). 'flags & DIF_SIZE_ONLY'
7595 is true if size-only evaluation is wanted (only for arrays). */
7596 static void decl_initializer(CType *type, Section *sec, unsigned long c,
7597 int flags)
7599 int len, n, no_oblock, i;
7600 int size1, align1;
7601 Sym *s, *f;
7602 Sym indexsym;
7603 CType *t1;
7605 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
7606 /* In case of strings we have special handling for arrays, so
7607 don't consume them as initializer value (which would commit them
7608 to some anonymous symbol). */
7609 tok != TOK_LSTR && tok != TOK_STR &&
7610 !(flags & DIF_SIZE_ONLY)) {
7611 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7612 flags |= DIF_HAVE_ELEM;
7615 if ((flags & DIF_HAVE_ELEM) &&
7616 !(type->t & VT_ARRAY) &&
7617 /* Use i_c_parameter_t, to strip toplevel qualifiers.
7618 The source type might have VT_CONSTANT set, which is
7619 of course assignable to non-const elements. */
7620 is_compatible_unqualified_types(type, &vtop->type)) {
7621 init_putv(type, sec, c);
7622 } else if (type->t & VT_ARRAY) {
7623 s = type->ref;
7624 n = s->c;
7625 t1 = pointed_type(type);
7626 size1 = type_size(t1, &align1);
7628 no_oblock = 1;
7629 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
7630 tok == '{') {
7631 if (tok != '{')
7632 tcc_error("character array initializer must be a literal,"
7633 " optionally enclosed in braces");
7634 skip('{');
7635 no_oblock = 0;
7638 /* only parse strings here if the type is correct (otherwise: handle
7639 them as ((w)char *) expressions) */
7640 if ((tok == TOK_LSTR &&
7641 #ifdef TCC_TARGET_PE
7642 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
7643 #else
7644 (t1->t & VT_BTYPE) == VT_INT
7645 #endif
7646 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
7647 int nb;
7648 len = 0;
7649 cstr_reset(&initstr);
7650 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
7651 tcc_error("unhandled string literal merging");
7652 while (tok == TOK_STR || tok == TOK_LSTR) {
7653 if (initstr.size)
7654 initstr.size -= size1;
7655 if (tok == TOK_STR)
7656 len += tokc.str.size;
7657 else
7658 len += tokc.str.size / sizeof(nwchar_t);
7659 len--;
7660 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
7661 next();
7663 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
7664 && tok != TOK_EOF) {
7665 /* Not a lone literal but part of a bigger expression. */
7666 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
7667 tokc.str.size = initstr.size;
7668 tokc.str.data = initstr.data;
7669 indexsym.c = 0;
7670 f = &indexsym;
7671 goto do_init_list;
7673 nb = len;
7674 if (n >= 0 && len > n)
7675 nb = n;
7676 if (!(flags & DIF_SIZE_ONLY)) {
7677 if (len > nb)
7678 tcc_warning("initializer-string for array is too long");
7679 /* in order to go faster for the common case (char
7680 string in a global variable), we handle it
7681 specially */
7682 if (sec && size1 == 1) {
7683 if (!NODATA_WANTED)
7684 memcpy(sec->data + c, initstr.data, nb);
7685 } else {
7686 for(i=0;i<nb;i++) {
7687 if (size1 == 1)
7688 ch = ((unsigned char *)initstr.data)[i];
7689 else
7690 ch = ((nwchar_t *)initstr.data)[i];
7691 vpushi(ch);
7692 init_putv(t1, sec, c + i * size1);
7696 /* only add trailing zero if enough storage (no
7697 warning in this case since it is standard) */
7698 if (n < 0 || len < n) {
7699 if (!(flags & DIF_SIZE_ONLY)) {
7700 vpushi(0);
7701 init_putv(t1, sec, c + (len * size1));
7703 len++;
7705 len *= size1;
7706 } else {
7707 indexsym.c = 0;
7708 f = &indexsym;
7710 do_init_list:
7711 len = 0;
7712 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
7713 len = decl_designator(type, sec, c, &f, flags, len);
7714 flags &= ~DIF_HAVE_ELEM;
7715 if (type->t & VT_ARRAY) {
7716 ++indexsym.c;
7717 /* special test for multi dimensional arrays (may not
7718 be strictly correct if designators are used at the
7719 same time) */
7720 if (no_oblock && len >= n*size1)
7721 break;
7722 } else {
7723 if (s->type.t == VT_UNION)
7724 f = NULL;
7725 else
7726 f = f->next;
7727 if (no_oblock && f == NULL)
7728 break;
7731 if (tok == '}')
7732 break;
7733 skip(',');
7736 /* put zeros at the end */
7737 if (!(flags & DIF_SIZE_ONLY) && len < n*size1)
7738 init_putz(sec, c + len, n*size1 - len);
7739 if (!no_oblock)
7740 skip('}');
7741 /* patch type size if needed, which happens only for array types */
7742 if (n < 0)
7743 s->c = size1 == 1 ? len : ((len + size1 - 1)/size1);
7744 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
7745 size1 = 1;
7746 no_oblock = 1;
7747 if ((flags & DIF_FIRST) || tok == '{') {
7748 skip('{');
7749 no_oblock = 0;
7751 s = type->ref;
7752 f = s->next;
7753 n = s->c;
7754 goto do_init_list;
7755 } else if (tok == '{') {
7756 if (flags & DIF_HAVE_ELEM)
7757 skip(';');
7758 next();
7759 decl_initializer(type, sec, c, flags & ~DIF_HAVE_ELEM);
7760 skip('}');
7761 } else if ((flags & DIF_SIZE_ONLY)) {
7762 /* If we supported only ISO C we wouldn't have to accept calling
7763 this on anything other than an array if DIF_SIZE_ONLY (and even then
7764 only on the outermost level, so no recursion would be needed),
7765 because initializing a flex array member isn't supported.
7766 But GNU C supports it, so we need to recurse even into
7767 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
7768 /* just skip expression */
7769 skip_or_save_block(NULL);
7770 } else {
7771 if (!(flags & DIF_HAVE_ELEM)) {
7772 /* This should happen only when we haven't parsed
7773 the init element above for fear of committing a
7774 string constant to memory too early. */
7775 if (tok != TOK_STR && tok != TOK_LSTR)
7776 expect("string constant");
7777 parse_init_elem(!sec ? EXPR_ANY : EXPR_CONST);
7779 init_putv(type, sec, c);
7783 /* parse an initializer for type 't' if 'has_init' is non zero, and
7784 allocate space in local or global data space ('r' is either
7785 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
7786 variable 'v' of scope 'scope' is declared before initializers
7787 are parsed. If 'v' is zero, then a reference to the new object
7788 is put in the value stack. If 'has_init' is 2, a special parsing
7789 is done to handle string constants. */
7790 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
7791 int has_init, int v, int scope)
7793 int size, align, addr;
7794 TokenString *init_str = NULL;
7796 Section *sec;
7797 Sym *flexible_array;
7798 Sym *sym = NULL;
7799 int saved_nocode_wanted = nocode_wanted;
7800 #ifdef CONFIG_TCC_BCHECK
7801 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
7802 #endif
7804 /* Always allocate static or global variables */
7805 if (v && (r & VT_VALMASK) == VT_CONST)
7806 nocode_wanted |= 0x80000000;
7808 flexible_array = NULL;
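/* e.g. (illustrative): struct S { int n; int data[]; };  'data' is the
   flexible array member detected below; if the initializer supplies
   elements for it, the allocated size is enlarged accordingly further
   down. */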
7809 if ((type->t & VT_BTYPE) == VT_STRUCT) {
7810 Sym *field = type->ref->next;
7811 if (field) {
7812 while (field->next)
7813 field = field->next;
7814 if (field->type.t & VT_ARRAY && field->type.ref->c < 0)
7815 flexible_array = field;
7819 size = type_size(type, &align);
7820 /* If unknown size, we must evaluate it before
7821 evaluating initializers because
7822 initializers can generate global data too
7823 (e.g. string pointers or ISO C99 compound
7824 literals). It also simplifies local
7825 initializer handling */
7826 if (size < 0 || (flexible_array && has_init)) {
7827 if (!has_init)
7828 tcc_error("unknown type size");
7829 /* get all init string */
7830 if (has_init == 2) {
7831 init_str = tok_str_alloc();
7832 /* only get strings */
7833 while (tok == TOK_STR || tok == TOK_LSTR) {
7834 tok_str_add_tok(init_str);
7835 next();
7837 tok_str_add(init_str, -1);
7838 tok_str_add(init_str, 0);
7839 } else {
7840 skip_or_save_block(&init_str);
7842 unget_tok(0);
7844 /* compute size */
7845 begin_macro(init_str, 1);
7846 next();
7847 decl_initializer(type, NULL, 0, DIF_FIRST | DIF_SIZE_ONLY);
7848 /* prepare second initializer parsing */
7849 macro_ptr = init_str->str;
7850 next();
7852 /* if still unknown size, error */
7853 size = type_size(type, &align);
7854 if (size < 0)
7855 tcc_error("unknown type size");
7857 /* If there's a flex member and it was used in the initializer,
7858 adjust the size. */
7859 if (flexible_array &&
7860 flexible_array->type.ref->c > 0)
7861 size += flexible_array->type.ref->c
7862 * pointed_size(&flexible_array->type);
7863 /* take into account specified alignment if bigger */
7864 if (ad->a.aligned) {
7865 int speca = 1 << (ad->a.aligned - 1);
7866 if (speca > align)
7867 align = speca;
7868 } else if (ad->a.packed) {
7869 align = 1;
7872 if (!v && NODATA_WANTED)
7873 size = 0, align = 1;
7875 if ((r & VT_VALMASK) == VT_LOCAL) {
7876 sec = NULL;
7877 #ifdef CONFIG_TCC_BCHECK
7878 if (bcheck && v) {
7879 /* add padding between stack variables for bound checking */
7880 loc--;
7882 #endif
7883 loc = (loc - size) & -align;
7884 addr = loc;
7885 #ifdef CONFIG_TCC_BCHECK
7886 if (bcheck && v) {
7887 /* add padding between stack variables for bound checking */
7888 loc--;
7890 #endif
7891 if (v) {
7892 /* local variable */
7893 #ifdef CONFIG_TCC_ASM
7894 if (ad->asm_label) {
7895 int reg = asm_parse_regvar(ad->asm_label);
7896 if (reg >= 0)
7897 r = (r & ~VT_VALMASK) | reg;
7899 #endif
7900 sym = sym_push(v, type, r, addr);
7901 if (ad->cleanup_func) {
7902 Sym *cls = sym_push2(&all_cleanups,
7903 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
7904 cls->prev_tok = sym;
7905 cls->next = ad->cleanup_func;
7906 cls->ncl = cur_scope->cl.s;
7907 cur_scope->cl.s = cls;
7910 sym->a = ad->a;
7911 } else {
7912 /* push local reference */
7913 vset(type, r, addr);
7915 } else {
7916 if (v && scope == VT_CONST) {
7917 /* see if the symbol was already defined */
7918 sym = sym_find(v);
7919 if (sym) {
7920 patch_storage(sym, ad, type);
7921 /* we accept several definitions of the same global variable. */
7922 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
7923 goto no_alloc;
7927 /* allocate symbol in corresponding section */
7928 sec = ad->section;
7929 if (!sec) {
7930 if (has_init)
7931 sec = data_section;
7932 else if (tcc_state->nocommon)
7933 sec = bss_section;
7936 if (sec) {
7937 addr = section_add(sec, size, align);
7938 #ifdef CONFIG_TCC_BCHECK
7939 /* add padding if bound check */
7940 if (bcheck)
7941 section_add(sec, 1, 1);
7942 #endif
7943 } else {
7944 addr = align; /* SHN_COMMON is special, symbol value is align */
7945 sec = common_section;
7948 if (v) {
7949 if (!sym) {
7950 sym = sym_push(v, type, r | VT_SYM, 0);
7951 patch_storage(sym, ad, NULL);
7953 /* update symbol definition */
7954 put_extern_sym(sym, sec, addr, size);
7955 } else {
7956 /* push global reference */
7957 vpush_ref(type, sec, addr, size);
7958 sym = vtop->sym;
7959 vtop->r |= r;
7962 #ifdef CONFIG_TCC_BCHECK
7963 /* handle bounds now because the symbol must be defined
7964 before the relocation */
7965 if (bcheck) {
7966 addr_t *bounds_ptr;
7968 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
7969 /* then add global bound info */
7970 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
7971 bounds_ptr[0] = 0; /* relocated */
7972 bounds_ptr[1] = size;
7974 #endif
7977 if (type->t & VT_VLA) {
7978 int a;
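/* e.g. (illustrative): int n = f(); int buf[n];  The stack pointer is
   saved before the first VLA of the function (root scope) and again per
   block, so that vla_restore()/vla_leave() can bring it back when a block
   is left via break/continue/goto. */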
7980 if (NODATA_WANTED)
7981 goto no_alloc;
7983 /* save current stack pointer */
7984 if (root_scope->vla.loc == 0) {
7985 struct scope *v = cur_scope;
7986 gen_vla_sp_save(loc -= PTR_SIZE);
7987 do v->vla.loc = loc; while ((v = v->prev));
7990 vla_runtime_type_size(type, &a);
7991 gen_vla_alloc(type, a);
7992 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
7993 /* on _WIN64, because of the function args scratch area, the
7994 result of alloca differs from RSP and is returned in RAX. */
7995 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
7996 #endif
7997 gen_vla_sp_save(addr);
7998 cur_scope->vla.loc = addr;
7999 cur_scope->vla.num++;
8000 } else if (has_init) {
8001 size_t oldreloc_offset = 0;
8002 if (sec && sec->reloc)
8003 oldreloc_offset = sec->reloc->data_offset;
8004 decl_initializer(type, sec, addr, DIF_FIRST);
8005 if (sec && sec->reloc)
8006 squeeze_multi_relocs(sec, oldreloc_offset);
8007 /* patch flexible array member size back to -1, */
8008 /* for possible subsequent similar declarations */
8009 if (flexible_array)
8010 flexible_array->type.ref->c = -1;
8013 no_alloc:
8014 /* restore parse state if needed */
8015 if (init_str) {
8016 end_macro();
8017 next();
8020 nocode_wanted = saved_nocode_wanted;
8023 /* parse a function defined by symbol 'sym' and generate its code in
8024 'cur_text_section' */
8025 static void gen_function(Sym *sym)
8027 /* Initialize VLA state */
8028 struct scope f = { 0 };
8029 cur_scope = root_scope = &f;
8031 nocode_wanted = 0;
8032 ind = cur_text_section->data_offset;
8033 if (sym->a.aligned) {
8034 size_t newoff = section_add(cur_text_section, 0,
8035 1 << (sym->a.aligned - 1));
8036 gen_fill_nops(newoff - ind);
8038 /* NOTE: we patch the symbol size later */
8039 put_extern_sym(sym, cur_text_section, ind, 0);
8040 if (sym->type.ref->f.func_ctor)
8041 add_array (tcc_state, ".init_array", sym->c);
8042 if (sym->type.ref->f.func_dtor)
8043 add_array (tcc_state, ".fini_array", sym->c);
8045 funcname = get_tok_str(sym->v, NULL);
8046 func_ind = ind;
8047 func_vt = sym->type.ref->type;
8048 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8050 /* put debug symbol */
8051 tcc_debug_funcstart(tcc_state, sym);
8052 /* push a dummy symbol to enable local sym storage */
8053 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8054 local_scope = 1; /* for function parameters */
8055 gfunc_prolog(sym);
8056 local_scope = 0;
8057 rsym = 0;
8058 clear_temp_local_var_list();
8059 block(0);
8060 gsym(rsym);
8061 nocode_wanted = 0;
8062 /* reset local stack */
8063 pop_local_syms(&local_stack, NULL, 0, func_var);
8064 gfunc_epilog();
8065 cur_text_section->data_offset = ind;
8066 local_scope = 0;
8067 label_pop(&global_label_stack, NULL, 0);
8068 sym_pop(&all_cleanups, NULL, 0);
8069 /* patch symbol size */
8070 elfsym(sym)->st_size = ind - func_ind;
8071 /* end of function */
8072 tcc_debug_funcend(tcc_state, ind - func_ind);
8073 /* It's better to crash than to generate wrong code */
8074 cur_text_section = NULL;
8075 funcname = ""; /* for safety */
8076 func_vt.t = VT_VOID; /* for safety */
8077 func_var = 0; /* for safety */
8078 ind = 0; /* for safety */
8079 nocode_wanted = 0x80000000;
8080 check_vstack();
8081 /* do this after funcend debug info */
8082 next();
8085 static void gen_inline_functions(TCCState *s)
8087 Sym *sym;
8088 int inline_generated, i;
8089 struct InlineFunc *fn;
8091 tcc_open_bf(s, ":inline:", 0);
8092 /* iterate while inline functions are referenced */
8093 do {
8094 inline_generated = 0;
8095 for (i = 0; i < s->nb_inline_fns; ++i) {
8096 fn = s->inline_fns[i];
8097 sym = fn->sym;
8098 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8099 /* the function was used or forced (and then not internal):
8100 generate its code and convert it to a normal function */
8101 fn->sym = NULL;
8102 tcc_debug_putfile(s, fn->filename);
8103 begin_macro(fn->func_str, 1);
8104 next();
8105 cur_text_section = text_section;
8106 gen_function(sym);
8107 end_macro();
8109 inline_generated = 1;
8112 } while (inline_generated);
8113 tcc_close();
8116 static void free_inline_functions(TCCState *s)
8118 int i;
8119 /* free tokens of unused inline functions */
8120 for (i = 0; i < s->nb_inline_fns; ++i) {
8121 struct InlineFunc *fn = s->inline_fns[i];
8122 if (fn->sym)
8123 tok_str_free(fn->func_str);
8125 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8128 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8129 if parsing an old-style parameter decl list (in which case FUNC_SYM is set) */
8130 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
8132 int v, has_init, r;
8133 CType type, btype;
8134 Sym *sym;
8135 AttributeDef ad, adbase;
8137 while (1) {
8138 if (tok == TOK_STATIC_ASSERT) {
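/* Both forms are accepted here (illustrative):
       _Static_assert(sizeof(int) == 4, "int must be 32 bit");
       _Static_assert(sizeof(int) == 4);    // message-less form */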
8139 CString error_str;
8140 int c;
8142 next();
8143 skip('(');
8144 c = expr_const();
8146 if (tok == ')') {
8147 if (!c)
8148 tcc_error("_Static_assert fail");
8149 next();
8150 goto static_assert_out;
8153 skip(',');
8154 parse_mult_str(&error_str, "string constant");
8155 if (c == 0)
8156 tcc_error("%s", (char *)error_str.data);
8157 cstr_free(&error_str);
8158 skip(')');
8159 static_assert_out:
8160 skip(';');
8161 continue;
8163 if (!parse_btype(&btype, &adbase)) {
8164 if (is_for_loop_init)
8165 return 0;
8166 /* skip redundant ';' if not in old parameter decl scope */
8167 if (tok == ';' && l != VT_CMP) {
8168 next();
8169 continue;
8171 if (l != VT_CONST)
8172 break;
8173 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8174 /* global asm block */
8175 asm_global_instr();
8176 continue;
8178 if (tok >= TOK_UIDENT) {
8179 /* special test for old K&R protos without explicit int
8180 type. Only accepted when defining global data */
8181 btype.t = VT_INT;
8182 } else {
8183 if (tok != TOK_EOF)
8184 expect("declaration");
8185 break;
8188 if (tok == ';') {
8189 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8190 int v = btype.ref->v;
8191 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8192 tcc_warning("unnamed struct/union that defines no instances");
8193 next();
8194 continue;
8196 if (IS_ENUM(btype.t)) {
8197 next();
8198 continue;
8201 while (1) { /* iterate thru each declaration */
8202 type = btype;
8203 /* If the base type itself was an array type of unspecified
8204 size (like in 'typedef int arr[]; arr x = {1};') then
8205 we will overwrite the unknown size by the real one for
8206 this decl. We need to unshare the ref symbol holding
8207 that size. */
8208 if ((type.t & VT_ARRAY) && type.ref->c < 0) {
8209 type.ref = sym_push(SYM_FIELD, &type.ref->type, 0, type.ref->c);
8211 ad = adbase;
8212 type_decl(&type, &ad, &v, TYPE_DIRECT);
8213 #if 0
8215 char buf[500];
8216 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8217 printf("type = '%s'\n", buf);
8219 #endif
8220 if ((type.t & VT_BTYPE) == VT_FUNC) {
8221 if ((type.t & VT_STATIC) && (l == VT_LOCAL))
8222 tcc_error("function without file scope cannot be static");
8223 /* if old style function prototype, we accept a
8224 declaration list */
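/* e.g. (old-style definition accepted here):
       int f(a, b) int a; char *b; { ... }
   the declarations between ')' and '{' are parsed by the
   decl0(VT_CMP, 0, sym) call just below. */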
8225 sym = type.ref;
8226 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
8227 decl0(VT_CMP, 0, sym);
8228 /* always compile 'extern inline' */
8229 if (type.t & VT_EXTERN)
8230 type.t &= ~VT_INLINE;
8233 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8234 ad.asm_label = asm_label_instr();
8235 /* parse one last attribute list, after asm label */
8236 parse_attribute(&ad);
8237 #if 0
8238 /* gcc does not allow __asm__("label") with function definition,
8239 but why not ... */
8240 if (tok == '{')
8241 expect(";");
8242 #endif
8245 #ifdef TCC_TARGET_PE
8246 if (ad.a.dllimport || ad.a.dllexport) {
8247 if (type.t & VT_STATIC)
8248 tcc_error("cannot have dll linkage with static");
8249 if (type.t & VT_TYPEDEF) {
8250 tcc_warning("'%s' attribute ignored for typedef",
8251 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8252 (ad.a.dllexport = 0, "dllexport"));
8253 } else if (ad.a.dllimport) {
8254 if ((type.t & VT_BTYPE) == VT_FUNC)
8255 ad.a.dllimport = 0;
8256 else
8257 type.t |= VT_EXTERN;
8260 #endif
8261 if (tok == '{') {
8262 if (l != VT_CONST)
8263 tcc_error("cannot use local functions");
8264 if ((type.t & VT_BTYPE) != VT_FUNC)
8265 expect("function definition");
8267 /* reject abstract declarators in function definition
8268 make old style params without decl have int type */
8269 sym = type.ref;
8270 while ((sym = sym->next) != NULL) {
8271 if (!(sym->v & ~SYM_FIELD))
8272 expect("identifier");
8273 if (sym->type.t == VT_VOID)
8274 sym->type = int_type;
8277 /* apply post-declaration attributes */
8278 merge_funcattr(&type.ref->f, &ad.f);
8280 /* put function symbol */
8281 type.t &= ~VT_EXTERN;
8282 sym = external_sym(v, &type, 0, &ad);
8284 /* static inline functions are just recorded as a kind
8285 of macro. Their code will be emitted at the end of
8286 the compilation unit only if they are used */
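/* e.g. (illustrative): static inline int sq(int x) { return x * x; }
   is only tokenized here (skip_or_save_block below); gen_inline_functions()
   emits its code later, and only if 'sq' is actually used or forced
   non-inline. */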
8287 if (sym->type.t & VT_INLINE) {
8288 struct InlineFunc *fn;
8289 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8290 strcpy(fn->filename, file->filename);
8291 fn->sym = sym;
8292 skip_or_save_block(&fn->func_str);
8293 dynarray_add(&tcc_state->inline_fns,
8294 &tcc_state->nb_inline_fns, fn);
8295 } else {
8296 /* compute text section */
8297 cur_text_section = ad.section;
8298 if (!cur_text_section)
8299 cur_text_section = text_section;
8300 gen_function(sym);
8302 break;
8303 } else {
8304 if (l == VT_CMP) {
8305 /* find parameter in function parameter list */
8306 for (sym = func_sym->next; sym; sym = sym->next)
8307 if ((sym->v & ~SYM_FIELD) == v)
8308 goto found;
8309 tcc_error("declaration for parameter '%s' but no such parameter",
8310 get_tok_str(v, NULL));
8311 found:
8312 if (type.t & VT_STORAGE) /* 'register' is okay */
8313 tcc_error("storage class specified for '%s'",
8314 get_tok_str(v, NULL));
8315 if (sym->type.t != VT_VOID)
8316 tcc_error("redefinition of parameter '%s'",
8317 get_tok_str(v, NULL));
8318 convert_parameter_type(&type);
8319 sym->type = type;
8320 } else if (type.t & VT_TYPEDEF) {
8321 /* save typedefed type */
8322 /* XXX: test storage specifiers ? */
8323 sym = sym_find(v);
8324 if (sym && sym->sym_scope == local_scope) {
8325 if (!is_compatible_types(&sym->type, &type)
8326 || !(sym->type.t & VT_TYPEDEF))
8327 tcc_error("incompatible redefinition of '%s'",
8328 get_tok_str(v, NULL));
8329 sym->type = type;
8330 } else {
8331 sym = sym_push(v, &type, 0, 0);
8333 sym->a = ad.a;
8334 sym->f = ad.f;
8335 } else if ((type.t & VT_BTYPE) == VT_VOID
8336 && !(type.t & VT_EXTERN)) {
8337 tcc_error("declaration of void object");
8338 } else {
8339 r = 0;
8340 if ((type.t & VT_BTYPE) == VT_FUNC) {
8341 /* external function definition */
8342 /* specific case for func_call attribute */
8343 type.ref->f = ad.f;
8344 } else if (!(type.t & VT_ARRAY)) {
8345 /* not lvalue if array */
8346 r |= VT_LVAL;
8348 has_init = (tok == '=');
8349 if (has_init && (type.t & VT_VLA))
8350 tcc_error("variable length array cannot be initialized");
8351 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8352 || (type.t & VT_BTYPE) == VT_FUNC
8353 /* as with GCC, uninitialized global arrays with no size
8354 are considered extern: */
8355 || ((type.t & VT_ARRAY) && !has_init
8356 && l == VT_CONST && type.ref->c < 0)
8358 /* external variable or function */
8359 type.t |= VT_EXTERN;
8360 sym = external_sym(v, &type, r, &ad);
8361 if (ad.alias_target) {
8362 ElfSym *esym;
8363 Sym *alias_target;
8364 alias_target = sym_find(ad.alias_target);
8365 esym = elfsym(alias_target);
8366 if (!esym)
8367 tcc_error("unsupported forward __alias__ attribute");
8368 put_extern_sym2(sym, esym->st_shndx, esym->st_value, esym->st_size, 0);
8370 } else {
8371 if (type.t & VT_STATIC)
8372 r |= VT_CONST;
8373 else
8374 r |= l;
8375 if (has_init)
8376 next();
8377 else if (l == VT_CONST)
8378 /* uninitialized global variables may be overridden */
8379 type.t |= VT_EXTERN;
8380 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
8383 if (tok != ',') {
8384 if (is_for_loop_init)
8385 return 1;
8386 skip(';');
8387 break;
8389 next();
8393 return 0;
8396 static void decl(int l)
8398 decl0(l, 0, NULL);
8401 /* ------------------------------------------------------------------------- */
8402 #undef gjmp_addr
8403 #undef gjmp
8404 /* ------------------------------------------------------------------------- */