tccgen: accept array-size expressions in function parameters
[tinycc.git] / tccgen.c
blob f4dc0fc29170dc440061911b74d17916679f6de8
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int in_generic;
48 static int section_sym;
49 ST_DATA char debug_modes;
51 ST_DATA SValue *vtop;
52 static SValue _vstack[1 + VSTACK_SIZE];
53 #define vstack (_vstack + 1)
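/* Added note (not in the original source): _vstack reserves one extra slot
   below 'vstack' so that 'vtop' can legally start at vstack - 1, which is
   how tccgen_init() initializes it and what check_vstack() verifies at the
   end of compilation.  Rough illustration of the push/pop invariant:

       vpushi(42);   // vtop moves up one slot, holding a VT_INT constant 42
       vpop();       // vtop is back where it was
*/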
55 ST_DATA int const_wanted; /* true if constant wanted */
56 ST_DATA int nocode_wanted; /* no code generation wanted */
57 #define unevalmask 0xffff /* unevaluated subexpression */
58 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
59 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
61 /* Automagical code suppression ----> */
62 #define CODE_OFF() (nocode_wanted |= 0x20000000)
63 #define CODE_ON() (nocode_wanted &= ~0x20000000)
65 static void tcc_tcov_block_begin(void);
67 /* Clear 'nocode_wanted' at label if it was used */
68 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
69 static int gind(void) { int t = ind; CODE_ON(); if (debug_modes) tcc_tcov_block_begin(); return t; }
71 /* Set 'nocode_wanted' after unconditional jumps */
72 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
73 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
75 /* These are #undef'd at the end of this file */
76 #define gjmp_addr gjmp_addr_acs
77 #define gjmp gjmp_acs
78 /* <---- */
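/* Added sketch of the suppression scheme above (illustrative only): an
   unconditional jump switches code generation off, and a label that is
   actually referenced switches it back on:

       t = gjmp(0);   // gjmp_acs: emit the jump, then CODE_OFF()
       ...            // anything here is unreachable; no code is emitted
       gsym(t);       // label is used: gsym_addr() + CODE_ON()
*/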
80 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing) */
81 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
82 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
83 ST_DATA int func_vc;
84 static int last_line_num, new_file, func_ind; /* debug info control */
85 ST_DATA const char *funcname;
86 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
87 static CString initstr;
89 #if PTR_SIZE == 4
90 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_INT
92 #elif LONG_SIZE == 4
93 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T VT_LLONG
95 #else
96 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
97 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
98 #endif
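/* Added example of how the block above selects size_t/ptrdiff_t, assuming
   the usual data models: 32-bit targets (PTR_SIZE == 4) get 'unsigned int',
   LLP64-style targets (64-bit pointers with LONG_SIZE == 4, e.g. Win64) get
   'unsigned long long', and LP64 targets get 'unsigned long' carried as
   VT_LONG | VT_LLONG. */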
100 static struct switch_t {
101 struct case_t {
102 int64_t v1, v2;
103 int sym;
104 } **p; int n; /* list of case ranges */
105 int def_sym; /* default symbol */
106 int *bsym;
107 struct scope *scope;
108 struct switch_t *prev;
109 SValue sv;
110 } *cur_switch; /* current switch */
112 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
113 /* list of temporary local variables on the stack in the current function. */
114 static struct temp_local_variable {
115 int location; // offset on stack (SValue.c.i)
116 short size;
117 short align;
118 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
119 static int nb_temp_local_vars;
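/* Added note: these slots are recycled by get_temp_local_var() further
   below - a slot is reused only when its size and alignment match and no
   value still on the vstack refers to its stack offset;
   clear_temp_local_var_list() simply resets the count. */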
121 static struct scope {
122 struct scope *prev;
123 struct { int loc, locorig, num; } vla;
124 struct { Sym *s; int n; } cl;
125 int *bsym, *csym;
126 Sym *lstk, *llstk;
127 } *cur_scope, *loop_scope, *root_scope;
129 typedef struct {
130 Section *sec;
131 int local_offset;
132 Sym *flex_array_ref;
133 } init_params;
135 #if 1
136 #define precedence_parser
137 static void init_prec(void);
138 #endif
140 /********************************************************/
141 /* stab debug support */
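/* Added gloss on default_debug[] below (stabs convention, not original
   source text): each string is a type definition "name:tN=rM;low;high;"
   registering type number N as a range of type M with the given bounds;
   for the floating-point entries 'low' is the byte size and 'high' is 0. */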
143 static const struct {
144 int type;
145 const char *name;
146 } default_debug[] = {
147 { VT_INT, "int:t1=r1;-2147483648;2147483647;" },
148 { VT_BYTE, "char:t2=r2;0;127;" },
149 #if LONG_SIZE == 4
150 { VT_LONG | VT_INT, "long int:t3=r3;-2147483648;2147483647;" },
151 #else
152 { VT_LLONG | VT_LONG, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
153 #endif
154 { VT_INT | VT_UNSIGNED, "unsigned int:t4=r4;0;037777777777;" },
155 #if LONG_SIZE == 4
156 { VT_LONG | VT_INT | VT_UNSIGNED, "long unsigned int:t5=r5;0;037777777777;" },
157 #else
158 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
159 { VT_LLONG | VT_LONG | VT_UNSIGNED, "long unsigned int:t5=r5;0;01777777777777777777777;" },
160 #endif
161 { VT_QLONG, "__int128:t6=r6;0;-1;" },
162 { VT_QLONG | VT_UNSIGNED, "__int128 unsigned:t7=r7;0;-1;" },
163 { VT_LLONG, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
164 { VT_LLONG | VT_UNSIGNED, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
165 { VT_SHORT, "short int:t10=r10;-32768;32767;" },
166 { VT_SHORT | VT_UNSIGNED, "short unsigned int:t11=r11;0;65535;" },
167 { VT_BYTE | VT_DEFSIGN, "signed char:t12=r12;-128;127;" },
168 { VT_BYTE | VT_DEFSIGN | VT_UNSIGNED, "unsigned char:t13=r13;0;255;" },
169 { VT_FLOAT, "float:t14=r1;4;0;" },
170 { VT_DOUBLE, "double:t15=r1;8;0;" },
171 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
172 { VT_DOUBLE | VT_LONG, "long double:t16=r1;8;0;" },
173 #else
174 { VT_LDOUBLE, "long double:t16=r1;16;0;" },
175 #endif
176 { -1, "_Float32:t17=r1;4;0;" },
177 { -1, "_Float64:t18=r1;8;0;" },
178 { -1, "_Float128:t19=r1;16;0;" },
179 { -1, "_Float32x:t20=r1;8;0;" },
180 { -1, "_Float64x:t21=r1;16;0;" },
181 { -1, "_Decimal32:t22=r1;4;0;" },
182 { -1, "_Decimal64:t23=r1;8;0;" },
183 { -1, "_Decimal128:t24=r1;16;0;" },
184 /* if default char is unsigned */
185 { VT_BYTE | VT_UNSIGNED, "unsigned char:t25=r25;0;255;" },
186 /* boolean type */
187 { VT_BOOL, "bool:t26=r26;0;255;" },
188 { VT_VOID, "void:t27=27" },
191 static int debug_next_type;
193 static struct debug_hash {
194 int debug_type;
195 Sym *type;
196 } *debug_hash;
198 static int n_debug_hash;
200 static struct debug_info {
201 int start;
202 int end;
203 int n_sym;
204 struct debug_sym {
205 int type;
206 unsigned long value;
207 char *str;
208 Section *sec;
209 int sym_index;
210 } *sym;
211 struct debug_info *child, *next, *last, *parent;
212 } *debug_info, *debug_info_root;
214 static struct {
215 unsigned long offset;
216 unsigned long last_file_name;
217 unsigned long last_func_name;
218 int ind;
219 int line;
220 } tcov_data;
222 /********************************************************/
223 static void gen_cast(CType *type);
224 static void gen_cast_s(int t);
225 static inline CType *pointed_type(CType *type);
226 static int is_compatible_types(CType *type1, CType *type2);
227 static int parse_btype(CType *type, AttributeDef *ad);
228 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
229 static void parse_expr_type(CType *type);
230 static void init_putv(init_params *p, CType *type, unsigned long c);
231 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
232 static void block(int is_expr);
233 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
234 static void decl(int l);
235 static int decl0(int l, int is_for_loop_init, Sym *);
236 static void expr_eq(void);
237 static void vla_runtime_type_size(CType *type, int *a);
238 static int is_compatible_unqualified_types(CType *type1, CType *type2);
239 static inline int64_t expr_const64(void);
240 static void vpush64(int ty, unsigned long long v);
241 static void vpush(CType *type);
242 static int gvtst(int inv, int t);
243 static void gen_inline_functions(TCCState *s);
244 static void free_inline_functions(TCCState *s);
245 static void skip_or_save_block(TokenString **str);
246 static void gv_dup(void);
247 static int get_temp_local_var(int size,int align);
248 static void clear_temp_local_var_list();
249 static void cast_error(CType *st, CType *dt);
251 ST_INLN int is_float(int t)
253 int bt = t & VT_BTYPE;
254 return bt == VT_LDOUBLE
255 || bt == VT_DOUBLE
256 || bt == VT_FLOAT
257 || bt == VT_QFLOAT;
260 static inline int is_integer_btype(int bt)
262 return bt == VT_BYTE
263 || bt == VT_BOOL
264 || bt == VT_SHORT
265 || bt == VT_INT
266 || bt == VT_LLONG;
269 static int btype_size(int bt)
271 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
272 bt == VT_SHORT ? 2 :
273 bt == VT_INT ? 4 :
274 bt == VT_LLONG ? 8 :
275 bt == VT_PTR ? PTR_SIZE : 0;
278 /* returns function return register from type */
279 static int R_RET(int t)
281 if (!is_float(t))
282 return REG_IRET;
283 #ifdef TCC_TARGET_X86_64
284 if ((t & VT_BTYPE) == VT_LDOUBLE)
285 return TREG_ST0;
286 #elif defined TCC_TARGET_RISCV64
287 if ((t & VT_BTYPE) == VT_LDOUBLE)
288 return REG_IRET;
289 #endif
290 return REG_FRET;
293 /* returns 2nd function return register, if any */
294 static int R2_RET(int t)
296 t &= VT_BTYPE;
297 #if PTR_SIZE == 4
298 if (t == VT_LLONG)
299 return REG_IRE2;
300 #elif defined TCC_TARGET_X86_64
301 if (t == VT_QLONG)
302 return REG_IRE2;
303 if (t == VT_QFLOAT)
304 return REG_FRE2;
305 #elif defined TCC_TARGET_RISCV64
306 if (t == VT_LDOUBLE)
307 return REG_IRE2;
308 #endif
309 return VT_CONST;
312 /* returns true for two-word types */
313 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
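/* Added example: on a 32-bit target (PTR_SIZE == 4) a 'long long' result
   is returned in the pair REG_IRET/REG_IRE2, so USING_TWO_WORDS(VT_LLONG)
   is true there; on x86-64 the same applies to VT_QLONG/VT_QFLOAT. */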
315 /* put function return registers to stack value */
316 static void PUT_R_RET(SValue *sv, int t)
318 sv->r = R_RET(t), sv->r2 = R2_RET(t);
321 /* returns function return register class for type t */
322 static int RC_RET(int t)
324 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
327 /* returns generic register class for type t */
328 static int RC_TYPE(int t)
330 if (!is_float(t))
331 return RC_INT;
332 #ifdef TCC_TARGET_X86_64
333 if ((t & VT_BTYPE) == VT_LDOUBLE)
334 return RC_ST0;
335 if ((t & VT_BTYPE) == VT_QFLOAT)
336 return RC_FRET;
337 #elif defined TCC_TARGET_RISCV64
338 if ((t & VT_BTYPE) == VT_LDOUBLE)
339 return RC_INT;
340 #endif
341 return RC_FLOAT;
344 /* returns 2nd register class corresponding to t and rc */
345 static int RC2_TYPE(int t, int rc)
347 if (!USING_TWO_WORDS(t))
348 return 0;
349 #ifdef RC_IRE2
350 if (rc == RC_IRET)
351 return RC_IRE2;
352 #endif
353 #ifdef RC_FRE2
354 if (rc == RC_FRET)
355 return RC_FRE2;
356 #endif
357 if (rc & RC_FLOAT)
358 return RC_FLOAT;
359 return RC_INT;
362 /* we use our own 'finite' function to avoid potential problems with
363 non standard math libs */
364 /* XXX: endianness dependent */
365 ST_FUNC int ieee_finite(double d)
367 int p[4];
368 memcpy(p, &d, sizeof(double));
369 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
372 /* compiling intel long double natively */
373 #if (defined __i386__ || defined __x86_64__) \
374 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
375 # define TCC_IS_NATIVE_387
376 #endif
378 ST_FUNC void test_lvalue(void)
380 if (!(vtop->r & VT_LVAL))
381 expect("lvalue");
384 ST_FUNC void check_vstack(void)
386 if (vtop != vstack - 1)
387 tcc_error("internal compiler error: vstack leak (%d)",
388 (int)(vtop - vstack + 1));
391 /* ------------------------------------------------------------------------- */
392 /* vstack debugging aid */
394 #if 0
395 void pv (const char *lbl, int a, int b)
397 int i;
398 for (i = a; i < a + b; ++i) {
399 SValue *p = &vtop[-i];
400 printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
401 lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
404 #endif
406 /* ------------------------------------------------------------------------- */
407 /* start of translation unit info */
408 ST_FUNC void tcc_debug_start(TCCState *s1)
410 if (s1->do_debug) {
411 int i;
412 char buf[512];
414 /* file info: full path + filename */
415 section_sym = put_elf_sym(symtab_section, 0, 0,
416 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
417 text_section->sh_num, NULL);
418 getcwd(buf, sizeof(buf));
419 #ifdef _WIN32
420 normalize_slashes(buf);
421 #endif
422 pstrcat(buf, sizeof(buf), "/");
423 put_stabs_r(s1, buf, N_SO, 0, 0,
424 text_section->data_offset, text_section, section_sym);
425 put_stabs_r(s1, file->prev ? file->prev->filename : file->filename,
426 N_SO, 0, 0,
427 text_section->data_offset, text_section, section_sym);
428 for (i = 0; i < sizeof (default_debug) / sizeof (default_debug[0]); i++)
429 put_stabs(s1, default_debug[i].name, N_LSYM, 0, 0, 0);
431 new_file = last_line_num = 0;
432 func_ind = -1;
433 debug_next_type = sizeof(default_debug) / sizeof(default_debug[0]);
434 debug_hash = NULL;
435 n_debug_hash = 0;
437 /* we're currently 'including' the <command line> */
438 tcc_debug_bincl(s1);
441 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
442 symbols can be safely used */
443 put_elf_sym(symtab_section, 0, 0,
444 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
445 SHN_ABS, file->filename);
448 /* put end of translation unit info */
449 ST_FUNC void tcc_debug_end(TCCState *s1)
451 if (!s1->do_debug)
452 return;
453 put_stabs_r(s1, NULL, N_SO, 0, 0,
454 text_section->data_offset, text_section, section_sym);
455 tcc_free(debug_hash);
458 static BufferedFile* put_new_file(TCCState *s1)
460 BufferedFile *f = file;
461 /* use upper file if from inline ":asm:" */
462 if (f->filename[0] == ':')
463 f = f->prev;
464 if (f && new_file) {
465 put_stabs_r(s1, f->filename, N_SOL, 0, 0, ind, text_section, section_sym);
466 new_file = last_line_num = 0;
468 return f;
471 /* put alternative filename */
472 ST_FUNC void tcc_debug_putfile(TCCState *s1, const char *filename)
474 if (0 == strcmp(file->filename, filename))
475 return;
476 pstrcpy(file->filename, sizeof(file->filename), filename);
477 new_file = 1;
480 /* start of #include */
481 ST_FUNC void tcc_debug_bincl(TCCState *s1)
483 if (!s1->do_debug)
484 return;
485 put_stabs(s1, file->filename, N_BINCL, 0, 0, 0);
486 new_file = 1;
489 /* end of #include */
490 ST_FUNC void tcc_debug_eincl(TCCState *s1)
492 if (!s1->do_debug)
493 return;
494 put_stabn(s1, N_EINCL, 0, 0, 0);
495 new_file = 1;
498 /* generate line number info */
499 static void tcc_debug_line(TCCState *s1)
501 BufferedFile *f;
502 if (!s1->do_debug
503 || cur_text_section != text_section
504 || !(f = put_new_file(s1))
505 || last_line_num == f->line_num)
506 return;
507 if (func_ind != -1) {
508 put_stabn(s1, N_SLINE, 0, f->line_num, ind - func_ind);
509 } else {
510 /* from tcc_assemble */
511 put_stabs_r(s1, NULL, N_SLINE, 0, f->line_num, ind, text_section, section_sym);
513 last_line_num = f->line_num;
516 static void tcc_debug_stabs (TCCState *s1, const char *str, int type, unsigned long value,
517 Section *sec, int sym_index)
519 struct debug_sym *s;
521 if (debug_info) {
522 debug_info->sym =
523 (struct debug_sym *)tcc_realloc (debug_info->sym,
524 sizeof(struct debug_sym) *
525 (debug_info->n_sym + 1));
526 s = debug_info->sym + debug_info->n_sym++;
527 s->type = type;
528 s->value = value;
529 s->str = tcc_strdup(str);
530 s->sec = sec;
531 s->sym_index = sym_index;
533 else if (sec)
534 put_stabs_r (s1, str, type, 0, 0, value, sec, sym_index);
535 else
536 put_stabs (s1, str, type, 0, 0, value);
539 static void tcc_debug_stabn(TCCState *s1, int type, int value)
541 if (!s1->do_debug)
542 return;
543 if (type == N_LBRAC) {
544 struct debug_info *info =
545 (struct debug_info *) tcc_mallocz(sizeof (*info));
547 info->start = value;
548 info->parent = debug_info;
549 if (debug_info) {
550 if (debug_info->child) {
551 if (debug_info->child->last)
552 debug_info->child->last->next = info;
553 else
554 debug_info->child->next = info;
555 debug_info->child->last = info;
557 else
558 debug_info->child = info;
560 else
561 debug_info_root = info;
562 debug_info = info;
564 else {
565 debug_info->end = value;
566 debug_info = debug_info->parent;
570 static void tcc_get_debug_info(TCCState *s1, Sym *s, CString *result)
572 int type;
573 int n = 0;
574 int debug_type = -1;
575 Sym *t = s;
576 CString str;
578 for (;;) {
579 type = t->type.t & ~(VT_STORAGE | VT_CONSTANT | VT_VOLATILE);
580 if ((type & VT_BTYPE) != VT_BYTE)
581 type &= ~VT_DEFSIGN;
582 if (type == VT_PTR || type == (VT_PTR | VT_ARRAY))
583 n++, t = t->type.ref;
584 else
585 break;
587 if ((type & VT_BTYPE) == VT_STRUCT) {
588 int i;
590 t = t->type.ref;
591 for (i = 0; i < n_debug_hash; i++) {
592 if (t == debug_hash[i].type) {
593 debug_type = debug_hash[i].debug_type;
594 break;
597 if (debug_type == -1) {
598 debug_type = ++debug_next_type;
599 debug_hash = (struct debug_hash *)
600 tcc_realloc (debug_hash,
601 (n_debug_hash + 1) * sizeof(*debug_hash));
602 debug_hash[n_debug_hash].debug_type = debug_type;
603 debug_hash[n_debug_hash++].type = t;
604 cstr_new (&str);
605 cstr_printf (&str, "%s:T%d=%c%d",
606 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
607 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
608 debug_type,
609 IS_UNION (t->type.t) ? 'u' : 's',
610 t->c);
611 while (t->next) {
612 int pos, size, align;
614 t = t->next;
615 cstr_printf (&str, "%s:",
616 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
617 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
618 tcc_get_debug_info (s1, t, &str);
619 if (t->type.t & VT_BITFIELD) {
620 pos = t->c * 8 + BIT_POS(t->type.t);
621 size = BIT_SIZE(t->type.t);
623 else {
624 pos = t->c * 8;
625 size = type_size(&t->type, &align) * 8;
627 cstr_printf (&str, ",%d,%d;", pos, size);
629 cstr_printf (&str, ";");
630 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
631 cstr_free (&str);
634 else if (IS_ENUM(type)) {
635 Sym *e = t = t->type.ref;
637 debug_type = ++debug_next_type;
638 cstr_new (&str);
639 cstr_printf (&str, "%s:T%d=e",
640 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
641 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
642 debug_type);
643 while (t->next) {
644 t = t->next;
645 cstr_printf (&str, "%s:",
646 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
647 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
648 cstr_printf (&str, e->type.t & VT_UNSIGNED ? "%u," : "%d,",
649 (int)t->enum_val);
651 cstr_printf (&str, ";");
652 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
653 cstr_free (&str);
655 else if ((type & VT_BTYPE) != VT_FUNC) {
656 type &= ~VT_STRUCT_MASK;
657 for (debug_type = 1;
658 debug_type <= sizeof(default_debug) / sizeof(default_debug[0]);
659 debug_type++)
660 if (default_debug[debug_type - 1].type == type)
661 break;
662 if (debug_type > sizeof(default_debug) / sizeof(default_debug[0]))
663 return;
665 if (n > 0)
666 cstr_printf (result, "%d=", ++debug_next_type);
667 t = s;
668 for (;;) {
669 type = t->type.t & ~(VT_STORAGE | VT_CONSTANT | VT_VOLATILE);
670 if ((type & VT_BTYPE) != VT_BYTE)
671 type &= ~VT_DEFSIGN;
672 if (type == VT_PTR)
673 cstr_printf (result, "%d=*", ++debug_next_type);
674 else if (type == (VT_PTR | VT_ARRAY))
675 cstr_printf (result, "%d=ar1;0;%d;",
676 ++debug_next_type, t->type.ref->c - 1);
677 else if (type == VT_FUNC) {
678 cstr_printf (result, "%d=f", ++debug_next_type);
679 tcc_get_debug_info (s1, t->type.ref, result);
680 return;
682 else
683 break;
684 t = t->type.ref;
686 cstr_printf (result, "%d", debug_type);
689 static void tcc_debug_finish (TCCState *s1, struct debug_info *cur)
691 while (cur) {
692 int i;
693 struct debug_info *next = cur->next;
695 for (i = 0; i < cur->n_sym; i++) {
696 struct debug_sym *s = &cur->sym[i];
698 if (s->sec)
699 put_stabs_r(s1, s->str, s->type, 0, 0, s->value,
700 s->sec, s->sym_index);
701 else
702 put_stabs(s1, s->str, s->type, 0, 0, s->value);
703 tcc_free (s->str);
705 tcc_free (cur->sym);
706 put_stabn(s1, N_LBRAC, 0, 0, cur->start);
707 tcc_debug_finish (s1, cur->child);
708 put_stabn(s1, N_RBRAC, 0, 0, cur->end);
709 tcc_free (cur);
710 cur = next;
714 static void tcc_add_debug_info(TCCState *s1, int param, Sym *s, Sym *e)
716 CString debug_str;
717 if (!s1->do_debug)
718 return;
719 cstr_new (&debug_str);
720 for (; s != e; s = s->prev) {
721 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
722 continue;
723 cstr_reset (&debug_str);
724 cstr_printf (&debug_str, "%s:%s", get_tok_str(s->v, NULL), param ? "p" : "");
725 tcc_get_debug_info(s1, s, &debug_str);
726 tcc_debug_stabs(s1, debug_str.data, param ? N_PSYM : N_LSYM, s->c, NULL, 0);
728 cstr_free (&debug_str);
731 /* put function symbol */
732 static void tcc_debug_funcstart(TCCState *s1, Sym *sym)
734 CString debug_str;
735 BufferedFile *f;
736 if (!s1->do_debug)
737 return;
738 debug_info_root = NULL;
739 debug_info = NULL;
740 tcc_debug_stabn(s1, N_LBRAC, ind - func_ind);
741 if (!(f = put_new_file(s1)))
742 return;
743 cstr_new (&debug_str);
744 cstr_printf(&debug_str, "%s:%c", funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
745 tcc_get_debug_info(s1, sym->type.ref, &debug_str);
746 put_stabs_r(s1, debug_str.data, N_FUN, 0, f->line_num, 0, cur_text_section, sym->c);
747 cstr_free (&debug_str);
749 tcc_debug_line(s1);
752 /* put function size */
753 static void tcc_debug_funcend(TCCState *s1, int size)
755 if (!s1->do_debug)
756 return;
757 tcc_debug_stabn(s1, N_RBRAC, size);
758 tcc_debug_finish (s1, debug_info_root);
762 static void tcc_debug_extern_sym(TCCState *s1, Sym *sym, int sh_num, int sym_bind, int sym_type)
764 Section *s;
765 CString str;
767 if (!s1->do_debug)
768 return;
769 if (sym_type == STT_FUNC || sym->v >= SYM_FIRST_ANOM)
770 return;
771 s = s1->sections[sh_num];
773 cstr_new (&str);
774 cstr_printf (&str, "%s:%c",
775 get_tok_str(sym->v, NULL),
776 sym_bind == STB_GLOBAL ? 'G' : local_scope ? 'V' : 'S'
778 tcc_get_debug_info(s1, sym, &str);
779 if (sym_bind == STB_GLOBAL)
780 tcc_debug_stabs(s1, str.data, N_GSYM, 0, NULL, 0);
781 else
782 tcc_debug_stabs(s1, str.data,
783 (sym->type.t & VT_STATIC) && data_section == s
784 ? N_STSYM : N_LCSYM, 0, s, sym->c);
785 cstr_free (&str);
788 static void tcc_debug_typedef(TCCState *s1, Sym *sym)
790 CString str;
792 if (!s1->do_debug)
793 return;
794 cstr_new (&str);
795 cstr_printf (&str, "%s:t",
796 (sym->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
797 ? "" : get_tok_str(sym->v & ~SYM_FIELD, NULL));
798 tcc_get_debug_info(s1, sym, &str);
799 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
800 cstr_free (&str);
803 /* ------------------------------------------------------------------------- */
804 /* for section layout see lib/tcov.c */
806 static void tcc_tcov_block_end(int line);
808 static void tcc_tcov_block_begin(void)
810 SValue sv;
811 void *ptr;
812 unsigned long last_offset = tcov_data.offset;
814 tcc_tcov_block_end (0);
815 if (tcc_state->test_coverage == 0 || nocode_wanted)
816 return;
818 if (tcov_data.last_file_name == 0 ||
819 strcmp ((const char *)(tcov_section->data + tcov_data.last_file_name),
820 file->true_filename) != 0) {
821 char wd[1024];
822 CString cstr;
824 if (tcov_data.last_func_name)
825 section_ptr_add(tcov_section, 1);
826 if (tcov_data.last_file_name)
827 section_ptr_add(tcov_section, 1);
828 tcov_data.last_func_name = 0;
829 cstr_new (&cstr);
830 if (file->true_filename[0] == '/') {
831 tcov_data.last_file_name = tcov_section->data_offset;
832 cstr_printf (&cstr, "%s", file->true_filename);
834 else {
835 getcwd (wd, sizeof(wd));
836 tcov_data.last_file_name = tcov_section->data_offset + strlen(wd) + 1;
837 cstr_printf (&cstr, "%s/%s", wd, file->true_filename);
839 ptr = section_ptr_add(tcov_section, cstr.size + 1);
840 strcpy((char *)ptr, cstr.data);
841 #ifdef _WIN32
842 normalize_slashes((char *)ptr);
843 #endif
844 cstr_free (&cstr);
846 if (tcov_data.last_func_name == 0 ||
847 strcmp ((const char *)(tcov_section->data + tcov_data.last_func_name),
848 funcname) != 0) {
849 size_t len;
851 if (tcov_data.last_func_name)
852 section_ptr_add(tcov_section, 1);
853 tcov_data.last_func_name = tcov_section->data_offset;
854 len = strlen (funcname);
855 ptr = section_ptr_add(tcov_section, len + 1);
856 strcpy((char *)ptr, funcname);
857 section_ptr_add(tcov_section, -tcov_section->data_offset & 7);
858 ptr = section_ptr_add(tcov_section, 8);
859 write64le (ptr, file->line_num);
861 if (ind == tcov_data.ind && tcov_data.line == file->line_num)
862 tcov_data.offset = last_offset;
863 else {
864 Sym label = {0};
865 label.type.t = VT_LLONG | VT_STATIC;
867 ptr = section_ptr_add(tcov_section, 16);
868 tcov_data.line = file->line_num;
869 write64le (ptr, (tcov_data.line << 8) | 0xff);
870 put_extern_sym(&label, tcov_section,
871 ((unsigned char *)ptr - tcov_section->data) + 8, 0);
872 sv.type = label.type;
873 sv.r = VT_SYM | VT_LVAL | VT_CONST;
874 sv.r2 = VT_CONST;
875 sv.c.i = 0;
876 sv.sym = &label;
877 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || \
878 defined TCC_TARGET_ARM || defined TCC_TARGET_ARM64 || \
879 defined TCC_TARGET_RISCV64
880 gen_increment_tcov (&sv);
881 #else
882 vpushv(&sv);
883 inc(0, TOK_INC);
884 vpop();
885 #endif
886 tcov_data.offset = (unsigned char *)ptr - tcov_section->data;
887 tcov_data.ind = ind;
891 static void tcc_tcov_block_end(int line)
893 if (tcc_state->test_coverage == 0)
894 return;
895 if (tcov_data.offset) {
896 void *ptr = tcov_section->data + tcov_data.offset;
897 unsigned long long nline = line ? line : file->line_num;
899 write64le (ptr, (read64le (ptr) & 0xfffffffffull) | (nline << 36));
900 tcov_data.offset = 0;
904 static void tcc_tcov_check_line(int start)
906 if (tcc_state->test_coverage == 0)
907 return;
908 if (tcov_data.line != file->line_num) {
909 if ((tcov_data.line + 1) != file->line_num) {
910 tcc_tcov_block_end (tcov_data.line);
911 if (start)
912 tcc_tcov_block_begin ();
914 else
915 tcov_data.line = file->line_num;
919 static void tcc_tcov_start(void)
921 if (tcc_state->test_coverage == 0)
922 return;
923 memset (&tcov_data, 0, sizeof (tcov_data));
924 if (tcov_section == NULL) {
925 tcov_section = new_section(tcc_state, ".tcov", SHT_PROGBITS,
926 SHF_ALLOC | SHF_WRITE);
927 section_ptr_add(tcov_section, 4); // pointer to executable name
931 static void tcc_tcov_end(void)
933 if (tcc_state->test_coverage == 0)
934 return;
935 if (tcov_data.last_func_name)
936 section_ptr_add(tcov_section, 1);
937 if (tcov_data.last_file_name)
938 section_ptr_add(tcov_section, 1);
941 /* ------------------------------------------------------------------------- */
942 /* initialize vstack and types. This must be done also for tcc -E */
943 ST_FUNC void tccgen_init(TCCState *s1)
945 vtop = vstack - 1;
946 memset(vtop, 0, sizeof *vtop);
948 /* define some often used types */
949 int_type.t = VT_INT;
951 char_type.t = VT_BYTE;
952 if (s1->char_is_unsigned)
953 char_type.t |= VT_UNSIGNED;
954 char_pointer_type = char_type;
955 mk_pointer(&char_pointer_type);
957 func_old_type.t = VT_FUNC;
958 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
959 func_old_type.ref->f.func_call = FUNC_CDECL;
960 func_old_type.ref->f.func_type = FUNC_OLD;
961 #ifdef precedence_parser
962 init_prec();
963 #endif
964 cstr_new(&initstr);
967 ST_FUNC int tccgen_compile(TCCState *s1)
969 cur_text_section = NULL;
970 funcname = "";
971 anon_sym = SYM_FIRST_ANOM;
972 section_sym = 0;
973 const_wanted = 0;
974 nocode_wanted = 0x80000000;
975 local_scope = 0;
976 debug_modes = s1->do_debug | s1->test_coverage << 1;
978 tcc_debug_start(s1);
979 tcc_tcov_start ();
980 #ifdef TCC_TARGET_ARM
981 arm_init(s1);
982 #endif
983 #ifdef INC_DEBUG
984 printf("%s: **** new file\n", file->filename);
985 #endif
986 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
987 next();
988 decl(VT_CONST);
989 gen_inline_functions(s1);
990 check_vstack();
991 /* end of translation unit info */
992 tcc_debug_end(s1);
993 tcc_tcov_end ();
994 return 0;
997 ST_FUNC void tccgen_finish(TCCState *s1)
999 cstr_free(&initstr);
1000 free_inline_functions(s1);
1001 sym_pop(&global_stack, NULL, 0);
1002 sym_pop(&local_stack, NULL, 0);
1003 /* free preprocessor macros */
1004 free_defines(NULL);
1005 /* free sym_pools */
1006 dynarray_reset(&sym_pools, &nb_sym_pools);
1007 sym_free_first = NULL;
1010 /* ------------------------------------------------------------------------- */
1011 ST_FUNC ElfSym *elfsym(Sym *s)
1013 if (!s || !s->c)
1014 return NULL;
1015 return &((ElfSym *)symtab_section->data)[s->c];
1018 /* apply storage attributes to Elf symbol */
1019 ST_FUNC void update_storage(Sym *sym)
1021 ElfSym *esym;
1022 int sym_bind, old_sym_bind;
1024 esym = elfsym(sym);
1025 if (!esym)
1026 return;
1028 if (sym->a.visibility)
1029 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
1030 | sym->a.visibility;
1032 if (sym->type.t & (VT_STATIC | VT_INLINE))
1033 sym_bind = STB_LOCAL;
1034 else if (sym->a.weak)
1035 sym_bind = STB_WEAK;
1036 else
1037 sym_bind = STB_GLOBAL;
1038 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
1039 if (sym_bind != old_sym_bind) {
1040 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
1043 #ifdef TCC_TARGET_PE
1044 if (sym->a.dllimport)
1045 esym->st_other |= ST_PE_IMPORT;
1046 if (sym->a.dllexport)
1047 esym->st_other |= ST_PE_EXPORT;
1048 #endif
1050 #if 0
1051 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
1052 get_tok_str(sym->v, NULL),
1053 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
1054 sym->a.visibility,
1055 sym->a.dllexport,
1056 sym->a.dllimport
1058 #endif
1061 /* ------------------------------------------------------------------------- */
1062 /* update sym->c so that it points to an external symbol in section
1063 'section' with value 'value' */
1065 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
1066 addr_t value, unsigned long size,
1067 int can_add_underscore)
1069 int sym_type, sym_bind, info, other, t;
1070 ElfSym *esym;
1071 const char *name;
1072 char buf1[256];
1074 if (!sym->c) {
1075 name = get_tok_str(sym->v, NULL);
1076 t = sym->type.t;
1077 if ((t & VT_BTYPE) == VT_FUNC) {
1078 sym_type = STT_FUNC;
1079 } else if ((t & VT_BTYPE) == VT_VOID) {
1080 sym_type = STT_NOTYPE;
1081 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
1082 sym_type = STT_FUNC;
1083 } else {
1084 sym_type = STT_OBJECT;
1086 if (t & (VT_STATIC | VT_INLINE))
1087 sym_bind = STB_LOCAL;
1088 else
1089 sym_bind = STB_GLOBAL;
1090 other = 0;
1092 #ifdef TCC_TARGET_PE
1093 if (sym_type == STT_FUNC && sym->type.ref) {
1094 Sym *ref = sym->type.ref;
1095 if (ref->a.nodecorate) {
1096 can_add_underscore = 0;
1098 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
1099 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
1100 name = buf1;
1101 other |= ST_PE_STDCALL;
1102 can_add_underscore = 0;
1105 #endif
1107 if (sym->asm_label) {
1108 name = get_tok_str(sym->asm_label, NULL);
1109 can_add_underscore = 0;
1112 if (tcc_state->leading_underscore && can_add_underscore) {
1113 buf1[0] = '_';
1114 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
1115 name = buf1;
1118 info = ELFW(ST_INFO)(sym_bind, sym_type);
1119 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
1121 if (debug_modes)
1122 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
1124 } else {
1125 esym = elfsym(sym);
1126 esym->st_value = value;
1127 esym->st_size = size;
1128 esym->st_shndx = sh_num;
1130 update_storage(sym);
1133 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
1134 addr_t value, unsigned long size)
1136 int sh_num = section ? section->sh_num : SHN_UNDEF;
1137 put_extern_sym2(sym, sh_num, value, size, 1);
1140 /* add a new relocation entry to symbol 'sym' in section 's' */
1141 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
1142 addr_t addend)
1144 int c = 0;
1146 if (nocode_wanted && s == cur_text_section)
1147 return;
1149 if (sym) {
1150 if (0 == sym->c)
1151 put_extern_sym(sym, NULL, 0, 0);
1152 c = sym->c;
1155 /* now we can add ELF relocation info */
1156 put_elf_reloca(symtab_section, s, offset, type, c, addend);
1159 #if PTR_SIZE == 4
1160 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
1162 greloca(s, sym, offset, type, 0);
1164 #endif
1166 /* ------------------------------------------------------------------------- */
1167 /* symbol allocator */
1168 static Sym *__sym_malloc(void)
1170 Sym *sym_pool, *sym, *last_sym;
1171 int i;
1173 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
1174 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
1176 last_sym = sym_free_first;
1177 sym = sym_pool;
1178 for(i = 0; i < SYM_POOL_NB; i++) {
1179 sym->next = last_sym;
1180 last_sym = sym;
1181 sym++;
1183 sym_free_first = last_sym;
1184 return last_sym;
1187 static inline Sym *sym_malloc(void)
1189 Sym *sym;
1190 #ifndef SYM_DEBUG
1191 sym = sym_free_first;
1192 if (!sym)
1193 sym = __sym_malloc();
1194 sym_free_first = sym->next;
1195 return sym;
1196 #else
1197 sym = tcc_malloc(sizeof(Sym));
1198 return sym;
1199 #endif
1202 ST_INLN void sym_free(Sym *sym)
1204 #ifndef SYM_DEBUG
1205 sym->next = sym_free_first;
1206 sym_free_first = sym;
1207 #else
1208 tcc_free(sym);
1209 #endif
1212 /* push, without hashing */
1213 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
1215 Sym *s;
1217 s = sym_malloc();
1218 memset(s, 0, sizeof *s);
1219 s->v = v;
1220 s->type.t = t;
1221 s->c = c;
1222 /* add in stack */
1223 s->prev = *ps;
1224 *ps = s;
1225 return s;
1228 /* find a symbol and return its associated structure. 's' is the top
1229 of the symbol stack */
1230 ST_FUNC Sym *sym_find2(Sym *s, int v)
1232 while (s) {
1233 if (s->v == v)
1234 return s;
1235 else if (s->v == -1)
1236 return NULL;
1237 s = s->prev;
1239 return NULL;
1242 /* structure lookup */
1243 ST_INLN Sym *struct_find(int v)
1245 v -= TOK_IDENT;
1246 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1247 return NULL;
1248 return table_ident[v]->sym_struct;
1251 /* find an identifier */
1252 ST_INLN Sym *sym_find(int v)
1254 v -= TOK_IDENT;
1255 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1256 return NULL;
1257 return table_ident[v]->sym_identifier;
1260 static int sym_scope(Sym *s)
1262 if (IS_ENUM_VAL (s->type.t))
1263 return s->type.ref->sym_scope;
1264 else
1265 return s->sym_scope;
1268 /* push a given symbol on the symbol stack */
1269 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
1271 Sym *s, **ps;
1272 TokenSym *ts;
1274 if (local_stack)
1275 ps = &local_stack;
1276 else
1277 ps = &global_stack;
1278 s = sym_push2(ps, v, type->t, c);
1279 s->type.ref = type->ref;
1280 s->r = r;
1281 /* don't record fields or anonymous symbols */
1282 /* XXX: simplify */
1283 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
1284 /* record symbol in token array */
1285 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
1286 if (v & SYM_STRUCT)
1287 ps = &ts->sym_struct;
1288 else
1289 ps = &ts->sym_identifier;
1290 s->prev_tok = *ps;
1291 *ps = s;
1292 s->sym_scope = local_scope;
1293 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
1294 tcc_error("redeclaration of '%s'",
1295 get_tok_str(v & ~SYM_STRUCT, NULL));
1297 return s;
1300 /* push a global identifier */
1301 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
1303 Sym *s, **ps;
1304 s = sym_push2(&global_stack, v, t, c);
1305 s->r = VT_CONST | VT_SYM;
1306 /* don't record anonymous symbol */
1307 if (v < SYM_FIRST_ANOM) {
1308 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
1309 /* modify the top most local identifier, so that sym_identifier will
1310 point to 's' when popped; happens when called from inline asm */
1311 while (*ps != NULL && (*ps)->sym_scope)
1312 ps = &(*ps)->prev_tok;
1313 s->prev_tok = *ps;
1314 *ps = s;
1316 return s;
1319 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1320 pop them yet from the list, but do remove them from the token array. */
1321 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
1323 Sym *s, *ss, **ps;
1324 TokenSym *ts;
1325 int v;
1327 s = *ptop;
1328 while(s != b) {
1329 ss = s->prev;
1330 v = s->v;
1331 /* remove symbol in token array */
1332 /* XXX: simplify */
1333 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
1334 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
1335 if (v & SYM_STRUCT)
1336 ps = &ts->sym_struct;
1337 else
1338 ps = &ts->sym_identifier;
1339 *ps = s->prev_tok;
1341 if (!keep)
1342 sym_free(s);
1343 s = ss;
1345 if (!keep)
1346 *ptop = b;
1349 /* ------------------------------------------------------------------------- */
1350 static void vcheck_cmp(void)
1352 /* cannot keep cpu flags live if other instructions are generated. Also
1353 avoid leaving VT_JMP anywhere except on the top of the stack
1354 because it would complicate the code generator.
1356 Don't do this when nocode_wanted. vtop might come from
1357 !nocode_wanted regions (see 88_codeopt.c) and transforming
1358 it to a register without actually generating code is wrong
1359 as their value might still be used for real. All values
1360 we push under nocode_wanted will eventually be popped
1361 again, so that the VT_CMP/VT_JMP value will be in vtop
1362 when code is unsuppressed again. */
1364 if (vtop->r == VT_CMP && !nocode_wanted)
1365 gv(RC_INT);
1368 static void vsetc(CType *type, int r, CValue *vc)
1370 if (vtop >= vstack + (VSTACK_SIZE - 1))
1371 tcc_error("memory full (vstack)");
1372 vcheck_cmp();
1373 vtop++;
1374 vtop->type = *type;
1375 vtop->r = r;
1376 vtop->r2 = VT_CONST;
1377 vtop->c = *vc;
1378 vtop->sym = NULL;
1381 ST_FUNC void vswap(void)
1383 SValue tmp;
1385 vcheck_cmp();
1386 tmp = vtop[0];
1387 vtop[0] = vtop[-1];
1388 vtop[-1] = tmp;
1391 /* pop stack value */
1392 ST_FUNC void vpop(void)
1394 int v;
1395 v = vtop->r & VT_VALMASK;
1396 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1397 /* for x86, we need to pop the FP stack */
1398 if (v == TREG_ST0) {
1399 o(0xd8dd); /* fstp %st(0) */
1400 } else
1401 #endif
1402 if (v == VT_CMP) {
1403 /* need to put correct jump if && or || without test */
1404 gsym(vtop->jtrue);
1405 gsym(vtop->jfalse);
1407 vtop--;
1410 /* push constant of type "type" with useless value */
1411 static void vpush(CType *type)
1413 vset(type, VT_CONST, 0);
1416 /* push arbitrary 64bit constant */
1417 static void vpush64(int ty, unsigned long long v)
1419 CValue cval;
1420 CType ctype;
1421 ctype.t = ty;
1422 ctype.ref = NULL;
1423 cval.i = v;
1424 vsetc(&ctype, VT_CONST, &cval);
1427 /* push integer constant */
1428 ST_FUNC void vpushi(int v)
1430 vpush64(VT_INT, v);
1433 /* push a pointer sized constant */
1434 static void vpushs(addr_t v)
1436 vpush64(VT_SIZE_T, v);
1439 /* push long long constant */
1440 static inline void vpushll(long long v)
1442 vpush64(VT_LLONG, v);
1445 ST_FUNC void vset(CType *type, int r, int v)
1447 CValue cval;
1448 cval.i = v;
1449 vsetc(type, r, &cval);
1452 static void vseti(int r, int v)
1454 CType type;
1455 type.t = VT_INT;
1456 type.ref = NULL;
1457 vset(&type, r, v);
1460 ST_FUNC void vpushv(SValue *v)
1462 if (vtop >= vstack + (VSTACK_SIZE - 1))
1463 tcc_error("memory full (vstack)");
1464 vtop++;
1465 *vtop = *v;
1468 static void vdup(void)
1470 vpushv(vtop);
1473 /* rotate n first stack elements to the bottom
1474 I1 ... In -> I2 ... In I1 [top is right]
1476 ST_FUNC void vrotb(int n)
1478 int i;
1479 SValue tmp;
1481 vcheck_cmp();
1482 tmp = vtop[-n + 1];
1483 for(i=-n+1;i!=0;i++)
1484 vtop[i] = vtop[i+1];
1485 vtop[0] = tmp;
1488 /* rotate the n elements before entry e towards the top
1489 I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
1491 ST_FUNC void vrote(SValue *e, int n)
1493 int i;
1494 SValue tmp;
1496 vcheck_cmp();
1497 tmp = *e;
1498 for(i = 0;i < n - 1; i++)
1499 e[-i] = e[-i - 1];
1500 e[-n + 1] = tmp;
1503 /* rotate n first stack elements to the top
1504 I1 ... In -> In I1 ... I(n-1) [top is right]
1506 ST_FUNC void vrott(int n)
1508 vrote(vtop, n);
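/* Added illustration of the two rotations above: with three values on the
   vstack, written deepest-to-top as A B C,

       vrotb(3);   // A B C -> B C A   (deepest value A wraps to the top)
       vrott(3);   // B C A -> A B C   (top value wraps back to the bottom)

   so vrott(n) undoes vrotb(n). */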
1511 /* ------------------------------------------------------------------------- */
1512 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1514 /* called from generators to set the result from relational ops */
1515 ST_FUNC void vset_VT_CMP(int op)
1517 vtop->r = VT_CMP;
1518 vtop->cmp_op = op;
1519 vtop->jfalse = 0;
1520 vtop->jtrue = 0;
1523 /* called once before asking generators to load VT_CMP to a register */
1524 static void vset_VT_JMP(void)
1526 int op = vtop->cmp_op;
1528 if (vtop->jtrue || vtop->jfalse) {
1529 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1530 int inv = op & (op < 2); /* small optimization */
1531 vseti(VT_JMP+inv, gvtst(inv, 0));
1532 } else {
1533 /* otherwise convert flags (rsp. 0/1) to register */
1534 vtop->c.i = op;
1535 if (op < 2) /* doesn't seem to happen */
1536 vtop->r = VT_CONST;
1540 /* Set CPU Flags, doesn't yet jump */
1541 static void gvtst_set(int inv, int t)
1543 int *p;
1545 if (vtop->r != VT_CMP) {
1546 vpushi(0);
1547 gen_op(TOK_NE);
1548 if (vtop->r != VT_CMP) /* must be VT_CONST then */
1549 vset_VT_CMP(vtop->c.i != 0);
1552 p = inv ? &vtop->jfalse : &vtop->jtrue;
1553 *p = gjmp_append(*p, t);
1556 /* Generate value test
1558 * Generate a test for any value (jump, comparison and integers) */
1559 static int gvtst(int inv, int t)
1561 int op, x, u;
1563 gvtst_set(inv, t);
1564 t = vtop->jtrue, u = vtop->jfalse;
1565 if (inv)
1566 x = u, u = t, t = x;
1567 op = vtop->cmp_op;
1569 /* jump to the wanted target */
1570 if (op > 1)
1571 t = gjmp_cond(op ^ inv, t);
1572 else if (op != inv)
1573 t = gjmp(t);
1574 /* resolve complementary jumps to here */
1575 gsym(u);
1577 vtop--;
1578 return t;
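/* Added usage sketch (roughly how the statement parser uses gvtst): for
   'if (expr)' the condition is evaluated onto the vstack and then

       t = gvtst(1, 0);   // emit a jump that is taken when expr is zero

   skips the 'then' block; the returned jump is resolved later with gsym(t). */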
1581 /* generate a zero or nonzero test */
1582 static void gen_test_zero(int op)
1584 if (vtop->r == VT_CMP) {
1585 int j;
1586 if (op == TOK_EQ) {
1587 j = vtop->jfalse;
1588 vtop->jfalse = vtop->jtrue;
1589 vtop->jtrue = j;
1590 vtop->cmp_op ^= 1;
1592 } else {
1593 vpushi(0);
1594 gen_op(op);
1598 /* ------------------------------------------------------------------------- */
1599 /* push a symbol value of TYPE */
1600 ST_FUNC void vpushsym(CType *type, Sym *sym)
1602 CValue cval;
1603 cval.i = 0;
1604 vsetc(type, VT_CONST | VT_SYM, &cval);
1605 vtop->sym = sym;
1608 /* Return a static symbol pointing to a section */
1609 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1611 int v;
1612 Sym *sym;
1614 v = anon_sym++;
1615 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1616 sym->type.t |= VT_STATIC;
1617 put_extern_sym(sym, sec, offset, size);
1618 return sym;
1621 /* push a reference to a section offset by adding a dummy symbol */
1622 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1624 vpushsym(type, get_sym_ref(type, sec, offset, size));
1627 /* define a new external reference to a symbol 'v' of type 'u' */
1628 ST_FUNC Sym *external_global_sym(int v, CType *type)
1630 Sym *s;
1632 s = sym_find(v);
1633 if (!s) {
1634 /* push forward reference */
1635 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1636 s->type.ref = type->ref;
1637 } else if (IS_ASM_SYM(s)) {
1638 s->type.t = type->t | (s->type.t & VT_EXTERN);
1639 s->type.ref = type->ref;
1640 update_storage(s);
1642 return s;
1645 /* create an external reference with no specific type similar to asm labels.
1646 This avoids type conflicts if the symbol is used from C too */
1647 ST_FUNC Sym *external_helper_sym(int v)
1649 CType ct = { VT_ASM_FUNC, NULL };
1650 return external_global_sym(v, &ct);
1653 /* push a reference to a helper function (such as memmove) */
1654 ST_FUNC void vpush_helper_func(int v)
1656 vpushsym(&func_old_type, external_helper_sym(v));
1659 /* Merge symbol attributes. */
1660 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1662 if (sa1->aligned && !sa->aligned)
1663 sa->aligned = sa1->aligned;
1664 sa->packed |= sa1->packed;
1665 sa->weak |= sa1->weak;
1666 if (sa1->visibility != STV_DEFAULT) {
1667 int vis = sa->visibility;
1668 if (vis == STV_DEFAULT
1669 || vis > sa1->visibility)
1670 vis = sa1->visibility;
1671 sa->visibility = vis;
1673 sa->dllexport |= sa1->dllexport;
1674 sa->nodecorate |= sa1->nodecorate;
1675 sa->dllimport |= sa1->dllimport;
1678 /* Merge function attributes. */
1679 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1681 if (fa1->func_call && !fa->func_call)
1682 fa->func_call = fa1->func_call;
1683 if (fa1->func_type && !fa->func_type)
1684 fa->func_type = fa1->func_type;
1685 if (fa1->func_args && !fa->func_args)
1686 fa->func_args = fa1->func_args;
1687 if (fa1->func_noreturn)
1688 fa->func_noreturn = 1;
1689 if (fa1->func_ctor)
1690 fa->func_ctor = 1;
1691 if (fa1->func_dtor)
1692 fa->func_dtor = 1;
1695 /* Merge attributes. */
1696 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1698 merge_symattr(&ad->a, &ad1->a);
1699 merge_funcattr(&ad->f, &ad1->f);
1701 if (ad1->section)
1702 ad->section = ad1->section;
1703 if (ad1->alias_target)
1704 ad->alias_target = ad1->alias_target;
1705 if (ad1->asm_label)
1706 ad->asm_label = ad1->asm_label;
1707 if (ad1->attr_mode)
1708 ad->attr_mode = ad1->attr_mode;
1711 /* Merge some type attributes. */
1712 static void patch_type(Sym *sym, CType *type)
1714 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1715 if (!(sym->type.t & VT_EXTERN))
1716 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1717 sym->type.t &= ~VT_EXTERN;
1720 if (IS_ASM_SYM(sym)) {
1721 /* stay static if both are static */
1722 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1723 sym->type.ref = type->ref;
1726 if (!is_compatible_types(&sym->type, type)) {
1727 tcc_error("incompatible types for redefinition of '%s'",
1728 get_tok_str(sym->v, NULL));
1730 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1731 int static_proto = sym->type.t & VT_STATIC;
1732 /* warn if static follows non-static function declaration */
1733 if ((type->t & VT_STATIC) && !static_proto
1734 /* XXX this test for inline shouldn't be here. Until we
1735 implement gnu-inline mode again it silences a warning for
1736 mingw caused by our workarounds. */
1737 && !((type->t | sym->type.t) & VT_INLINE))
1738 tcc_warning("static storage ignored for redefinition of '%s'",
1739 get_tok_str(sym->v, NULL));
1741 /* set 'inline' if both agree or if one has static */
1742 if ((type->t | sym->type.t) & VT_INLINE) {
1743 if (!((type->t ^ sym->type.t) & VT_INLINE)
1744 || ((type->t | sym->type.t) & VT_STATIC))
1745 static_proto |= VT_INLINE;
1748 if (0 == (type->t & VT_EXTERN)) {
1749 struct FuncAttr f = sym->type.ref->f;
1750 /* put complete type, use static from prototype */
1751 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1752 sym->type.ref = type->ref;
1753 merge_funcattr(&sym->type.ref->f, &f);
1754 } else {
1755 sym->type.t &= ~VT_INLINE | static_proto;
1758 if (sym->type.ref->f.func_type == FUNC_OLD
1759 && type->ref->f.func_type != FUNC_OLD) {
1760 sym->type.ref = type->ref;
1763 } else {
1764 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1765 /* set array size if it was omitted in extern declaration */
1766 sym->type.ref->c = type->ref->c;
1768 if ((type->t ^ sym->type.t) & VT_STATIC)
1769 tcc_warning("storage mismatch for redefinition of '%s'",
1770 get_tok_str(sym->v, NULL));
1774 /* Merge some storage attributes. */
1775 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1777 if (type)
1778 patch_type(sym, type);
1780 #ifdef TCC_TARGET_PE
1781 if (sym->a.dllimport != ad->a.dllimport)
1782 tcc_error("incompatible dll linkage for redefinition of '%s'",
1783 get_tok_str(sym->v, NULL));
1784 #endif
1785 merge_symattr(&sym->a, &ad->a);
1786 if (ad->asm_label)
1787 sym->asm_label = ad->asm_label;
1788 update_storage(sym);
1791 /* copy sym to other stack */
1792 static Sym *sym_copy(Sym *s0, Sym **ps)
1794 Sym *s;
1795 s = sym_malloc(), *s = *s0;
1796 s->prev = *ps, *ps = s;
1797 if (s->v < SYM_FIRST_ANOM) {
1798 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1799 s->prev_tok = *ps, *ps = s;
1801 return s;
1804 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1805 static void sym_copy_ref(Sym *s, Sym **ps)
1807 int bt = s->type.t & VT_BTYPE;
1808 if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
1809 Sym **sp = &s->type.ref;
1810 for (s = *sp, *sp = NULL; s; s = s->next) {
1811 Sym *s2 = sym_copy(s, ps);
1812 sp = &(*sp = s2)->next;
1813 sym_copy_ref(s2, ps);
1818 /* define a new external reference to a symbol 'v' */
1819 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1821 Sym *s;
1823 /* look for global symbol */
1824 s = sym_find(v);
1825 while (s && s->sym_scope)
1826 s = s->prev_tok;
1828 if (!s) {
1829 /* push forward reference */
1830 s = global_identifier_push(v, type->t, 0);
1831 s->r |= r;
1832 s->a = ad->a;
1833 s->asm_label = ad->asm_label;
1834 s->type.ref = type->ref;
1835 /* copy type to the global stack */
1836 if (local_stack)
1837 sym_copy_ref(s, &global_stack);
1838 } else {
1839 patch_storage(s, ad, type);
1841 /* push variables on local_stack if any */
1842 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1843 s = sym_copy(s, &local_stack);
1844 return s;
1847 /* save registers up to (vtop - n) stack entry */
1848 ST_FUNC void save_regs(int n)
1850 SValue *p, *p1;
1851 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1852 save_reg(p->r);
1855 /* save r to the memory stack, and mark it as being free */
1856 ST_FUNC void save_reg(int r)
1858 save_reg_upstack(r, 0);
1861 /* save r to the memory stack, and mark it as being free,
1862 if seen up to (vtop - n) stack entry */
1863 ST_FUNC void save_reg_upstack(int r, int n)
1865 int l, size, align, bt;
1866 SValue *p, *p1, sv;
1868 if ((r &= VT_VALMASK) >= VT_CONST)
1869 return;
1870 if (nocode_wanted)
1871 return;
1872 l = 0;
1873 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1874 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1875 /* must save value on stack if not already done */
1876 if (!l) {
1877 bt = p->type.t & VT_BTYPE;
1878 if (bt == VT_VOID)
1879 continue;
1880 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1881 bt = VT_PTR;
1882 sv.type.t = bt;
1883 size = type_size(&sv.type, &align);
1884 l = get_temp_local_var(size,align);
1885 sv.r = VT_LOCAL | VT_LVAL;
1886 sv.c.i = l;
1887 store(p->r & VT_VALMASK, &sv);
1888 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1889 /* x86 specific: need to pop fp register ST0 if saved */
1890 if (r == TREG_ST0) {
1891 o(0xd8dd); /* fstp %st(0) */
1893 #endif
1894 /* special long long case */
1895 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1896 sv.c.i += PTR_SIZE;
1897 store(p->r2, &sv);
1900 /* mark that stack entry as being saved on the stack */
1901 if (p->r & VT_LVAL) {
1902 /* also clear the bounded flag because the
1903 relocation address of the function was stored in
1904 p->c.i */
1905 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1906 } else {
1907 p->r = VT_LVAL | VT_LOCAL;
1909 p->sym = NULL;
1910 p->r2 = VT_CONST;
1911 p->c.i = l;
1916 #ifdef TCC_TARGET_ARM
1917 /* find a register of class 'rc2' with at most one reference on stack.
1918 * If none, call get_reg(rc) */
1919 ST_FUNC int get_reg_ex(int rc, int rc2)
1921 int r;
1922 SValue *p;
1924 for(r=0;r<NB_REGS;r++) {
1925 if (reg_classes[r] & rc2) {
1926 int n;
1927 n=0;
1928 for(p = vstack; p <= vtop; p++) {
1929 if ((p->r & VT_VALMASK) == r ||
1930 p->r2 == r)
1931 n++;
1933 if (n <= 1)
1934 return r;
1937 return get_reg(rc);
1939 #endif
1941 /* find a free register of class 'rc'. If none, save one register */
1942 ST_FUNC int get_reg(int rc)
1944 int r;
1945 SValue *p;
1947 /* find a free register */
1948 for(r=0;r<NB_REGS;r++) {
1949 if (reg_classes[r] & rc) {
1950 if (nocode_wanted)
1951 return r;
1952 for(p=vstack;p<=vtop;p++) {
1953 if ((p->r & VT_VALMASK) == r ||
1954 p->r2 == r)
1955 goto notfound;
1957 return r;
1959 notfound: ;
1962 /* no register left : free the first one on the stack (VERY
1963 IMPORTANT to start from the bottom to ensure that we don't
1964 spill registers used in gen_opi()) */
1965 for(p=vstack;p<=vtop;p++) {
1966 /* look at second register (if long long) */
1967 r = p->r2;
1968 if (r < VT_CONST && (reg_classes[r] & rc))
1969 goto save_found;
1970 r = p->r & VT_VALMASK;
1971 if (r < VT_CONST && (reg_classes[r] & rc)) {
1972 save_found:
1973 save_reg(r);
1974 return r;
1977 /* Should never come here */
1978 return -1;
1981 /* find a free temporary local variable (return its offset on the stack) matching the size and alignment. If none, add a new temporary stack variable */
1982 static int get_temp_local_var(int size,int align){
1983 int i;
1984 struct temp_local_variable *temp_var;
1985 int found_var;
1986 SValue *p;
1987 int r;
1988 char free;
1989 char found;
1990 found=0;
1991 for(i=0;i<nb_temp_local_vars;i++){
1992 temp_var=&arr_temp_local_vars[i];
1993 if(temp_var->size<size||align!=temp_var->align){
1994 continue;
1996 /*check if temp_var is free*/
1997 free=1;
1998 for(p=vstack;p<=vtop;p++) {
1999 r=p->r&VT_VALMASK;
2000 if(r==VT_LOCAL||r==VT_LLOCAL){
2001 if(p->c.i==temp_var->location){
2002 free=0;
2003 break;
2007 if(free){
2008 found_var=temp_var->location;
2009 found=1;
2010 break;
2013 if(!found){
2014 loc = (loc - size) & -align;
2015 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
2016 temp_var=&arr_temp_local_vars[i];
2017 temp_var->location=loc;
2018 temp_var->size=size;
2019 temp_var->align=align;
2020 nb_temp_local_vars++;
2022 found_var=loc;
2024 return found_var;
2027 static void clear_temp_local_var_list(){
2028 nb_temp_local_vars=0;
2031 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
2032 if needed */
2033 static void move_reg(int r, int s, int t)
2035 SValue sv;
2037 if (r != s) {
2038 save_reg(r);
2039 sv.type.t = t;
2040 sv.type.ref = NULL;
2041 sv.r = s;
2042 sv.c.i = 0;
2043 load(r, &sv);
2047 /* get address of vtop (vtop MUST BE an lvalue) */
2048 ST_FUNC void gaddrof(void)
2050 vtop->r &= ~VT_LVAL;
2051 /* tricky: if saved lvalue, then we can go back to lvalue */
2052 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
2053 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
2056 #ifdef CONFIG_TCC_BCHECK
2057 /* generate a bounded pointer addition */
2058 static void gen_bounded_ptr_add(void)
2060 int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
2061 if (save) {
2062 vpushv(&vtop[-1]);
2063 vrott(3);
2065 vpush_helper_func(TOK___bound_ptr_add);
2066 vrott(3);
2067 gfunc_call(2);
2068 vtop -= save;
2069 vpushi(0);
2070 /* returned pointer is in REG_IRET */
2071 vtop->r = REG_IRET | VT_BOUNDED;
2072 if (nocode_wanted)
2073 return;
2074 /* relocation offset of the bounding function call point */
2075 vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
2078 /* patch pointer addition in vtop so that pointer dereferencing is
2079 also tested */
2080 static void gen_bounded_ptr_deref(void)
2082 addr_t func;
2083 int size, align;
2084 ElfW_Rel *rel;
2085 Sym *sym;
2087 if (nocode_wanted)
2088 return;
2090 size = type_size(&vtop->type, &align);
2091 switch(size) {
2092 case 1: func = TOK___bound_ptr_indir1; break;
2093 case 2: func = TOK___bound_ptr_indir2; break;
2094 case 4: func = TOK___bound_ptr_indir4; break;
2095 case 8: func = TOK___bound_ptr_indir8; break;
2096 case 12: func = TOK___bound_ptr_indir12; break;
2097 case 16: func = TOK___bound_ptr_indir16; break;
2098 default:
2099 /* may happen with struct member access */
2100 return;
2102 sym = external_helper_sym(func);
2103 if (!sym->c)
2104 put_extern_sym(sym, NULL, 0, 0);
2105 /* patch relocation */
2106 /* XXX: find a better solution ? */
2107 rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
2108 rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
2111 /* generate lvalue bound code */
2112 static void gbound(void)
2114 CType type1;
2116 vtop->r &= ~VT_MUSTBOUND;
2117 /* if lvalue, then use checking code before dereferencing */
2118 if (vtop->r & VT_LVAL) {
2119 /* if not VT_BOUNDED value, then make one */
2120 if (!(vtop->r & VT_BOUNDED)) {
2121 /* must save type because we must set it to int to get pointer */
2122 type1 = vtop->type;
2123 vtop->type.t = VT_PTR;
2124 gaddrof();
2125 vpushi(0);
2126 gen_bounded_ptr_add();
2127 vtop->r |= VT_LVAL;
2128 vtop->type = type1;
2130 /* then check for dereferencing */
2131 gen_bounded_ptr_deref();
2135 /* we need to call __bound_ptr_add before we start to load function
2136 args into registers */
2137 ST_FUNC void gbound_args(int nb_args)
2139 int i, v;
2140 SValue *sv;
2142 for (i = 1; i <= nb_args; ++i)
2143 if (vtop[1 - i].r & VT_MUSTBOUND) {
2144 vrotb(i);
2145 gbound();
2146 vrott(i);
2149 sv = vtop - nb_args;
2150 if (sv->r & VT_SYM) {
2151 v = sv->sym->v;
2152 if (v == TOK_setjmp
2153 || v == TOK__setjmp
2154 #ifndef TCC_TARGET_PE
2155 || v == TOK_sigsetjmp
2156 || v == TOK___sigsetjmp
2157 #endif
2159 vpush_helper_func(TOK___bound_setjmp);
2160 vpushv(sv + 1);
2161 gfunc_call(1);
2162 func_bound_add_epilog = 1;
2164 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
2165 if (v == TOK_alloca)
2166 func_bound_add_epilog = 1;
2167 #endif
2168 #if TARGETOS_NetBSD
2169 if (v == TOK_longjmp) /* undo rename to __longjmp14 */
2170 sv->sym->asm_label = TOK___bound_longjmp;
2171 #endif
2175 /* Add bounds for local symbols from S to E (via ->prev) */
2176 static void add_local_bounds(Sym *s, Sym *e)
2178 for (; s != e; s = s->prev) {
2179 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
2180 continue;
2181 /* Add arrays/structs/unions because we always take address */
2182 if ((s->type.t & VT_ARRAY)
2183 || (s->type.t & VT_BTYPE) == VT_STRUCT
2184 || s->a.addrtaken) {
2185 /* add local bound info */
2186 int align, size = type_size(&s->type, &align);
2187 addr_t *bounds_ptr = section_ptr_add(lbounds_section,
2188 2 * sizeof(addr_t));
2189 bounds_ptr[0] = s->c;
2190 bounds_ptr[1] = size;
2194 #endif
2196 /* Wrapper around sym_pop, that potentially also registers local bounds. */
2197 static void pop_local_syms(Sym *b, int keep)
2199 #ifdef CONFIG_TCC_BCHECK
2200 if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
2201 add_local_bounds(local_stack, b);
2202 #endif
2203 if (debug_modes)
2204 tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
2205 sym_pop(&local_stack, b, keep);
2208 static void incr_bf_adr(int o)
2210 vtop->type = char_pointer_type;
2211 gaddrof();
2212 vpushs(o);
2213 gen_op('+');
2214 vtop->type.t = VT_BYTE | VT_UNSIGNED;
2215 vtop->r |= VT_LVAL;
2218 /* single-byte load mode for packed or otherwise unaligned bitfields */
2219 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
2221 int n, o, bits;
2222 save_reg_upstack(vtop->r, 1);
2223 vpush64(type->t & VT_BTYPE, 0); // B X
2224 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
2225 do {
2226 vswap(); // X B
2227 incr_bf_adr(o);
2228 vdup(); // X B B
2229 n = 8 - bit_pos;
2230 if (n > bit_size)
2231 n = bit_size;
2232 if (bit_pos)
2233 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
2234 if (n < 8)
2235 vpushi((1 << n) - 1), gen_op('&');
2236 gen_cast(type);
2237 if (bits)
2238 vpushi(bits), gen_op(TOK_SHL);
2239 vrotb(3); // B Y X
2240 gen_op('|'); // B X
2241 bits += n, bit_size -= n, o = 1;
2242 } while (bit_size);
2243 vswap(), vpop();
2244 if (!(type->t & VT_UNSIGNED)) {
2245 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
2246 vpushi(n), gen_op(TOK_SHL);
2247 vpushi(n), gen_op(TOK_SAR);
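/* Illustration (hypothetical layout, for clarity only): a field that
   reaches this path with bit_pos = 6 and bit_size = 10 is read in two
   steps: byte 0 is shifted right by 6 and 2 bits are kept, then all 8
   bits of byte 1 are ORed in at position 2.  For a signed field the
   final SHL/SAR pair by 32 - 10 = 22 sign-extends the assembled
   value. */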
2251 /* single-byte store mode for packed or otherwise unaligned bitfields */
2252 static void store_packed_bf(int bit_pos, int bit_size)
2254 int bits, n, o, m, c;
2255 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2256 vswap(); // X B
2257 save_reg_upstack(vtop->r, 1);
2258 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
2259 do {
2260 incr_bf_adr(o); // X B
2261 vswap(); //B X
2262 c ? vdup() : gv_dup(); // B V X
2263 vrott(3); // X B V
2264 if (bits)
2265 vpushi(bits), gen_op(TOK_SHR);
2266 if (bit_pos)
2267 vpushi(bit_pos), gen_op(TOK_SHL);
2268 n = 8 - bit_pos;
2269 if (n > bit_size)
2270 n = bit_size;
2271 if (n < 8) {
2272 m = ((1 << n) - 1) << bit_pos;
2273 vpushi(m), gen_op('&'); // X B V1
2274 vpushv(vtop-1); // X B V1 B
2275 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
2276 gen_op('&'); // X B V1 B1
2277 gen_op('|'); // X B V2
2279 vdup(), vtop[-1] = vtop[-2]; // X B B V2
2280 vstore(), vpop(); // X B
2281 bits += n, bit_size -= n, bit_pos = 0, o = 1;
2282 } while (bit_size);
2283 vpop(), vpop();
2286 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
2288 int t;
2289 if (0 == sv->type.ref)
2290 return 0;
2291 t = sv->type.ref->auxtype;
2292 if (t != -1 && t != VT_STRUCT) {
2293 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
2294 sv->r |= VT_LVAL;
2296 return t;
2299 /* store vtop in a register belonging to class 'rc'. lvalues are
2300 converted to values. Cannot be used if the value cannot be
2301 converted to a register value (such as structures). */
2302 ST_FUNC int gv(int rc)
2304 int r, r2, r_ok, r2_ok, rc2, bt;
2305 int bit_pos, bit_size, size, align;
2307 /* NOTE: get_reg can modify vstack[] */
2308 if (vtop->type.t & VT_BITFIELD) {
2309 CType type;
2311 bit_pos = BIT_POS(vtop->type.t);
2312 bit_size = BIT_SIZE(vtop->type.t);
2313 /* remove bit field info to avoid loops */
2314 vtop->type.t &= ~VT_STRUCT_MASK;
2316 type.ref = NULL;
2317 type.t = vtop->type.t & VT_UNSIGNED;
2318 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
2319 type.t |= VT_UNSIGNED;
2321 r = adjust_bf(vtop, bit_pos, bit_size);
2323 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2324 type.t |= VT_LLONG;
2325 else
2326 type.t |= VT_INT;
2328 if (r == VT_STRUCT) {
2329 load_packed_bf(&type, bit_pos, bit_size);
2330 } else {
2331 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
2332 /* cast to int to propagate signedness in following ops */
2333 gen_cast(&type);
2334 /* generate shifts */
2335 vpushi(bits - (bit_pos + bit_size));
2336 gen_op(TOK_SHL);
2337 vpushi(bits - bit_size);
2338 /* NOTE: transformed to SHR if unsigned */
2339 gen_op(TOK_SAR);
2341 r = gv(rc);
2342 } else {
2343 if (is_float(vtop->type.t) &&
2344 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
2345 /* CPUs usually cannot use float constants, so we store them
2346 generically in data segment */
2347 init_params p = { rodata_section };
2348 unsigned long offset;
2349 size = type_size(&vtop->type, &align);
2350 if (NODATA_WANTED)
2351 size = 0, align = 1;
2352 offset = section_add(p.sec, size, align);
2353 vpush_ref(&vtop->type, p.sec, offset, size);
2354 vswap();
2355 init_putv(&p, &vtop->type, offset);
2356 vtop->r |= VT_LVAL;
2358 #ifdef CONFIG_TCC_BCHECK
2359 if (vtop->r & VT_MUSTBOUND)
2360 gbound();
2361 #endif
2363 bt = vtop->type.t & VT_BTYPE;
2365 #ifdef TCC_TARGET_RISCV64
2366 /* XXX mega hack */
2367 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
2368 rc = RC_INT;
2369 #endif
2370 rc2 = RC2_TYPE(bt, rc);
2372 /* need to reload if:
2373 - constant
2374 - lvalue (need to dereference pointer)
2375 - already a register, but not in the right class */
2376 r = vtop->r & VT_VALMASK;
2377 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
2378 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
2380 if (!r_ok || !r2_ok) {
2381 if (!r_ok)
2382 r = get_reg(rc);
2383 if (rc2) {
2384 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
2385 int original_type = vtop->type.t;
2387 /* two register type load :
2388 expand to two words temporarily */
2389 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
2390 /* load constant */
2391 unsigned long long ll = vtop->c.i;
2392 vtop->c.i = ll; /* first word */
2393 load(r, vtop);
2394 vtop->r = r; /* save register value */
2395 vpushi(ll >> 32); /* second word */
2396 } else if (vtop->r & VT_LVAL) {
2397 /* We do not want to modify the long long pointer here.
2398 So we save any other instances further down the stack */
2399 save_reg_upstack(vtop->r, 1);
2400 /* load from memory */
2401 vtop->type.t = load_type;
2402 load(r, vtop);
2403 vdup();
2404 vtop[-1].r = r; /* save register value */
2405 /* increment pointer to get second word */
2406 vtop->type.t = VT_PTRDIFF_T;
2407 gaddrof();
2408 vpushs(PTR_SIZE);
2409 gen_op('+');
2410 vtop->r |= VT_LVAL;
2411 vtop->type.t = load_type;
2412 } else {
2413 /* move registers */
2414 if (!r_ok)
2415 load(r, vtop);
2416 if (r2_ok && vtop->r2 < VT_CONST)
2417 goto done;
2418 vdup();
2419 vtop[-1].r = r; /* save register value */
2420 vtop->r = vtop[-1].r2;
2422 /* Allocate second register. Here we rely on the fact that
2423 get_reg() tries first to free r2 of an SValue. */
2424 r2 = get_reg(rc2);
2425 load(r2, vtop);
2426 vpop();
2427 /* write second register */
2428 vtop->r2 = r2;
2429 done:
2430 vtop->type.t = original_type;
2431 } else {
2432 if (vtop->r == VT_CMP)
2433 vset_VT_JMP();
2434 /* one register type load */
2435 load(r, vtop);
2438 vtop->r = r;
2439 #ifdef TCC_TARGET_C67
2440 /* uses register pairs for doubles */
2441 if (bt == VT_DOUBLE)
2442 vtop->r2 = r+1;
2443 #endif
2445 return r;
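/* Bitfield example for the non-packed path above (hypothetical
   struct): loading 'b' from 'struct { int a : 3, b : 5; } s;' gives
   bit_pos = 3, bit_size = 5 in a 32-bit word, so the field is
   extracted with SHL by 32 - (3 + 5) = 24 followed by SAR by
   32 - 5 = 27 (SHR instead when the field is unsigned). */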
2448 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2449 ST_FUNC void gv2(int rc1, int rc2)
2451 /* generate more generic register first. But VT_JMP or VT_CMP
2452 values must be generated first in all cases to avoid possible
2453 reload errors */
2454 if (vtop->r != VT_CMP && rc1 <= rc2) {
2455 vswap();
2456 gv(rc1);
2457 vswap();
2458 gv(rc2);
2459 /* test if reload is needed for first register */
2460 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
2461 vswap();
2462 gv(rc1);
2463 vswap();
2465 } else {
2466 gv(rc2);
2467 vswap();
2468 gv(rc1);
2469 vswap();
2470 /* test if reload is needed for first register */
2471 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
2472 gv(rc2);
2477 #if PTR_SIZE == 4
2478 /* expand 64bit on stack in two ints */
2479 ST_FUNC void lexpand(void)
2481 int u, v;
2482 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
2483 v = vtop->r & (VT_VALMASK | VT_LVAL);
2484 if (v == VT_CONST) {
2485 vdup();
2486 vtop[0].c.i >>= 32;
2487 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
2488 vdup();
2489 vtop[0].c.i += 4;
2490 } else {
2491 gv(RC_INT);
2492 vdup();
2493 vtop[0].r = vtop[-1].r2;
2494 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
2496 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
2498 #endif
2500 #if PTR_SIZE == 4
2501 /* build a long long from two ints */
2502 static void lbuild(int t)
2504 gv2(RC_INT, RC_INT);
2505 vtop[-1].r2 = vtop[0].r;
2506 vtop[-1].type.t = t;
2507 vpop();
2509 #endif
2511 /* convert stack entry to register and duplicate its value in another
2512 register */
2513 static void gv_dup(void)
2515 int t, rc, r;
2517 t = vtop->type.t;
2518 #if PTR_SIZE == 4
2519 if ((t & VT_BTYPE) == VT_LLONG) {
2520 if (t & VT_BITFIELD) {
2521 gv(RC_INT);
2522 t = vtop->type.t;
2524 lexpand();
2525 gv_dup();
2526 vswap();
2527 vrotb(3);
2528 gv_dup();
2529 vrotb(4);
2530 /* stack: H L L1 H1 */
2531 lbuild(t);
2532 vrotb(3);
2533 vrotb(3);
2534 vswap();
2535 lbuild(t);
2536 vswap();
2537 return;
2539 #endif
2540 /* duplicate value */
2541 rc = RC_TYPE(t);
2542 gv(rc);
2543 r = get_reg(rc);
2544 vdup();
2545 load(r, vtop);
2546 vtop->r = r;
2549 #if PTR_SIZE == 4
2550 /* generate CPU independent (unsigned) long long operations */
2551 static void gen_opl(int op)
2553 int t, a, b, op1, c, i;
2554 int func;
2555 unsigned short reg_iret = REG_IRET;
2556 unsigned short reg_lret = REG_IRE2;
2557 SValue tmp;
2559 switch(op) {
2560 case '/':
2561 case TOK_PDIV:
2562 func = TOK___divdi3;
2563 goto gen_func;
2564 case TOK_UDIV:
2565 func = TOK___udivdi3;
2566 goto gen_func;
2567 case '%':
2568 func = TOK___moddi3;
2569 goto gen_mod_func;
2570 case TOK_UMOD:
2571 func = TOK___umoddi3;
2572 gen_mod_func:
2573 #ifdef TCC_ARM_EABI
2574 reg_iret = TREG_R2;
2575 reg_lret = TREG_R3;
2576 #endif
2577 gen_func:
2578 /* call generic long long function */
2579 vpush_helper_func(func);
2580 vrott(3);
2581 gfunc_call(2);
2582 vpushi(0);
2583 vtop->r = reg_iret;
2584 vtop->r2 = reg_lret;
2585 break;
2586 case '^':
2587 case '&':
2588 case '|':
2589 case '*':
2590 case '+':
2591 case '-':
2592 //pv("gen_opl A",0,2);
2593 t = vtop->type.t;
2594 vswap();
2595 lexpand();
2596 vrotb(3);
2597 lexpand();
2598 /* stack: L1 H1 L2 H2 */
2599 tmp = vtop[0];
2600 vtop[0] = vtop[-3];
2601 vtop[-3] = tmp;
2602 tmp = vtop[-2];
2603 vtop[-2] = vtop[-3];
2604 vtop[-3] = tmp;
2605 vswap();
2606 /* stack: H1 H2 L1 L2 */
2607 //pv("gen_opl B",0,4);
2608 if (op == '*') {
2609 vpushv(vtop - 1);
2610 vpushv(vtop - 1);
2611 gen_op(TOK_UMULL);
2612 lexpand();
2613 /* stack: H1 H2 L1 L2 ML MH */
2614 for(i=0;i<4;i++)
2615 vrotb(6);
2616 /* stack: ML MH H1 H2 L1 L2 */
2617 tmp = vtop[0];
2618 vtop[0] = vtop[-2];
2619 vtop[-2] = tmp;
2620 /* stack: ML MH H1 L2 H2 L1 */
2621 gen_op('*');
2622 vrotb(3);
2623 vrotb(3);
2624 gen_op('*');
2625 /* stack: ML MH M1 M2 */
2626 gen_op('+');
2627 gen_op('+');
2628 } else if (op == '+' || op == '-') {
2629 /* XXX: add non carry method too (for MIPS or alpha) */
2630 if (op == '+')
2631 op1 = TOK_ADDC1;
2632 else
2633 op1 = TOK_SUBC1;
2634 gen_op(op1);
2635 /* stack: H1 H2 (L1 op L2) */
2636 vrotb(3);
2637 vrotb(3);
2638 gen_op(op1 + 1); /* TOK_xxxC2 */
2639 } else {
2640 gen_op(op);
2641 /* stack: H1 H2 (L1 op L2) */
2642 vrotb(3);
2643 vrotb(3);
2644 /* stack: (L1 op L2) H1 H2 */
2645 gen_op(op);
2646 /* stack: (L1 op L2) (H1 op H2) */
2648 /* stack: L H */
2649 lbuild(t);
2650 break;
2651 case TOK_SAR:
2652 case TOK_SHR:
2653 case TOK_SHL:
2654 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
2655 t = vtop[-1].type.t;
2656 vswap();
2657 lexpand();
2658 vrotb(3);
2659 /* stack: L H shift */
2660 c = (int)vtop->c.i;
2661 /* constant: simpler */
2662 /* NOTE: all comments are for SHL. The other cases are
2663 done by swapping words */
2664 vpop();
2665 if (op != TOK_SHL)
2666 vswap();
2667 if (c >= 32) {
2668 /* stack: L H */
2669 vpop();
2670 if (c > 32) {
2671 vpushi(c - 32);
2672 gen_op(op);
2674 if (op != TOK_SAR) {
2675 vpushi(0);
2676 } else {
2677 gv_dup();
2678 vpushi(31);
2679 gen_op(TOK_SAR);
2681 vswap();
2682 } else {
2683 vswap();
2684 gv_dup();
2685 /* stack: H L L */
2686 vpushi(c);
2687 gen_op(op);
2688 vswap();
2689 vpushi(32 - c);
2690 if (op == TOK_SHL)
2691 gen_op(TOK_SHR);
2692 else
2693 gen_op(TOK_SHL);
2694 vrotb(3);
2695 /* stack: L L H */
2696 vpushi(c);
2697 if (op == TOK_SHL)
2698 gen_op(TOK_SHL);
2699 else
2700 gen_op(TOK_SHR);
2701 gen_op('|');
2703 if (op != TOK_SHL)
2704 vswap();
2705 lbuild(t);
2706 } else {
2707 /* XXX: should provide a faster fallback on x86 ? */
2708 switch(op) {
2709 case TOK_SAR:
2710 func = TOK___ashrdi3;
2711 goto gen_func;
2712 case TOK_SHR:
2713 func = TOK___lshrdi3;
2714 goto gen_func;
2715 case TOK_SHL:
2716 func = TOK___ashldi3;
2717 goto gen_func;
2720 break;
2721 default:
2722 /* compare operations */
2723 t = vtop->type.t;
2724 vswap();
2725 lexpand();
2726 vrotb(3);
2727 lexpand();
2728 /* stack: L1 H1 L2 H2 */
2729 tmp = vtop[-1];
2730 vtop[-1] = vtop[-2];
2731 vtop[-2] = tmp;
2732 /* stack: L1 L2 H1 H2 */
2733 save_regs(4);
2734 /* compare high */
2735 op1 = op;
2736 /* when values are equal, we need to compare low words. since
2737 the jump is inverted, we invert the test too. */
2738 if (op1 == TOK_LT)
2739 op1 = TOK_LE;
2740 else if (op1 == TOK_GT)
2741 op1 = TOK_GE;
2742 else if (op1 == TOK_ULT)
2743 op1 = TOK_ULE;
2744 else if (op1 == TOK_UGT)
2745 op1 = TOK_UGE;
2746 a = 0;
2747 b = 0;
2748 gen_op(op1);
2749 if (op == TOK_NE) {
2750 b = gvtst(0, 0);
2751 } else {
2752 a = gvtst(1, 0);
2753 if (op != TOK_EQ) {
2754 /* generate non equal test */
2755 vpushi(0);
2756 vset_VT_CMP(TOK_NE);
2757 b = gvtst(0, 0);
2760 /* compare low. Always unsigned */
2761 op1 = op;
2762 if (op1 == TOK_LT)
2763 op1 = TOK_ULT;
2764 else if (op1 == TOK_LE)
2765 op1 = TOK_ULE;
2766 else if (op1 == TOK_GT)
2767 op1 = TOK_UGT;
2768 else if (op1 == TOK_GE)
2769 op1 = TOK_UGE;
2770 gen_op(op1);
2771 #if 0//def TCC_TARGET_I386
2772 if (op == TOK_NE) { gsym(b); break; }
2773 if (op == TOK_EQ) { gsym(a); break; }
2774 #endif
2775 gvtst_set(1, a);
2776 gvtst_set(0, b);
2777 break;
2780 #endif
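/* Sketch of the 64-bit '*' decomposition in gen_opl() above: with
   x = H1:L1 and y = H2:L2 (high:low 32-bit halves), the low 64 bits
   of x * y are
       UMULL(L1, L2) + ((H1 * L2 + H2 * L1) << 32)
   which is what the UMULL, the two 32-bit '*' ops and the final '+'
   ops compute; H1 * H2 only affects bits >= 64 and is dropped. */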
2782 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2784 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2785 return (a ^ b) >> 63 ? -x : x;
2788 static int gen_opic_lt(uint64_t a, uint64_t b)
2790 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
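/* Why the XOR above works: flipping the sign bit maps the signed
   range onto the unsigned range while preserving order.  In an 8-bit
   analogy, -1 (0xff) ^ 0x80 = 0x7f and 1 (0x01) ^ 0x80 = 0x81, so
   the unsigned compare 0x7f < 0x81 yields the signed result
   -1 < 1. */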
2793 /* handle integer constant optimizations and various machine
2794 independent opt */
2795 static void gen_opic(int op)
2797 SValue *v1 = vtop - 1;
2798 SValue *v2 = vtop;
2799 int t1 = v1->type.t & VT_BTYPE;
2800 int t2 = v2->type.t & VT_BTYPE;
2801 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2802 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2803 uint64_t l1 = c1 ? v1->c.i : 0;
2804 uint64_t l2 = c2 ? v2->c.i : 0;
2805 int shm = (t1 == VT_LLONG) ? 63 : 31;
2807 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2808 l1 = ((uint32_t)l1 |
2809 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2810 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2811 l2 = ((uint32_t)l2 |
2812 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2814 if (c1 && c2) {
2815 switch(op) {
2816 case '+': l1 += l2; break;
2817 case '-': l1 -= l2; break;
2818 case '&': l1 &= l2; break;
2819 case '^': l1 ^= l2; break;
2820 case '|': l1 |= l2; break;
2821 case '*': l1 *= l2; break;
2823 case TOK_PDIV:
2824 case '/':
2825 case '%':
2826 case TOK_UDIV:
2827 case TOK_UMOD:
2828 /* if division by zero, generate explicit division */
2829 if (l2 == 0) {
2830 if (const_wanted && !(nocode_wanted & unevalmask))
2831 tcc_error("division by zero in constant");
2832 goto general_case;
2834 switch(op) {
2835 default: l1 = gen_opic_sdiv(l1, l2); break;
2836 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2837 case TOK_UDIV: l1 = l1 / l2; break;
2838 case TOK_UMOD: l1 = l1 % l2; break;
2840 break;
2841 case TOK_SHL: l1 <<= (l2 & shm); break;
2842 case TOK_SHR: l1 >>= (l2 & shm); break;
2843 case TOK_SAR:
2844 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2845 break;
2846 /* tests */
2847 case TOK_ULT: l1 = l1 < l2; break;
2848 case TOK_UGE: l1 = l1 >= l2; break;
2849 case TOK_EQ: l1 = l1 == l2; break;
2850 case TOK_NE: l1 = l1 != l2; break;
2851 case TOK_ULE: l1 = l1 <= l2; break;
2852 case TOK_UGT: l1 = l1 > l2; break;
2853 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2854 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2855 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2856 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2857 /* logical */
2858 case TOK_LAND: l1 = l1 && l2; break;
2859 case TOK_LOR: l1 = l1 || l2; break;
2860 default:
2861 goto general_case;
2863 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2864 l1 = ((uint32_t)l1 |
2865 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2866 v1->c.i = l1;
2867 vtop--;
2868 } else {
2869 /* if commutative ops, put c2 as constant */
2870 if (c1 && (op == '+' || op == '&' || op == '^' ||
2871 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2872 vswap();
2873 c2 = c1; //c = c1, c1 = c2, c2 = c;
2874 l2 = l1; //l = l1, l1 = l2, l2 = l;
2876 if (!const_wanted &&
2877 c1 && ((l1 == 0 &&
2878 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2879 (l1 == -1 && op == TOK_SAR))) {
2880 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2881 vtop--;
2882 } else if (!const_wanted &&
2883 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2884 (op == '|' &&
2885 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2886 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2887 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2888 if (l2 == 1)
2889 vtop->c.i = 0;
2890 vswap();
2891 vtop--;
2892 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2893 op == TOK_PDIV) &&
2894 l2 == 1) ||
2895 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2896 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2897 l2 == 0) ||
2898 (op == '&' &&
2899 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2900 /* filter out NOP operations like x*1, x-0, x&-1... */
2901 vtop--;
2902 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2903 /* try to use shifts instead of muls or divs */
2904 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2905 int n = -1;
2906 while (l2) {
2907 l2 >>= 1;
2908 n++;
2910 vtop->c.i = n;
2911 if (op == '*')
2912 op = TOK_SHL;
2913 else if (op == TOK_PDIV)
2914 op = TOK_SAR;
2915 else
2916 op = TOK_SHR;
2918 goto general_case;
2919 } else if (c2 && (op == '+' || op == '-') &&
2920 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2921 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2922 /* symbol + constant case */
2923 if (op == '-')
2924 l2 = -l2;
2925 l2 += vtop[-1].c.i;
2926 /* The backends can't always deal with addends to symbols
2927 larger than +-1<<31. Don't construct such. */
2928 if ((int)l2 != l2)
2929 goto general_case;
2930 vtop--;
2931 vtop->c.i = l2;
2932 } else {
2933 general_case:
2934 /* call low level op generator */
2935 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2936 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2937 gen_opl(op);
2938 else
2939 gen_opi(op);
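/* Examples of the folding above, assuming a constant right operand:
   x * 8 becomes x << 3, an unsigned x / 8 (TOK_UDIV) becomes x >> 3
   and a pointer-difference division (TOK_PDIV) uses SAR, while a
   plain signed '/' is left alone since SAR rounds differently;
   x + 0, x * 1 and x & -1 are dropped entirely, and with both
   operands constant the whole expression is evaluated at compile
   time. */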
2944 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2945 # define gen_negf gen_opf
2946 #elif defined TCC_TARGET_ARM
2947 void gen_negf(int op)
2949 /* arm will detect 0-x and replace by vneg */
2950 vpushi(0), vswap(), gen_op('-');
2952 #else
2953 /* XXX: implement in gen_opf() for other backends too */
2954 void gen_negf(int op)
2956 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2957 subtract(-0, x), but with them it's really a sign flip
2958 operation. We implement this with bit manipulation and have
2959 to do some type reinterpretation for this, which TCC can do
2960 only via memory. */
2962 int align, size, bt;
2964 size = type_size(&vtop->type, &align);
2965 bt = vtop->type.t & VT_BTYPE;
2966 save_reg(gv(RC_TYPE(bt)));
2967 vdup();
2968 incr_bf_adr(size - 1);
2969 vdup();
2970 vpushi(0x80); /* flip sign */
2971 gen_op('^');
2972 vstore();
2973 vpop();
2975 #endif
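/* Example of the byte-level sign flip above (little-endian sketch):
   for a double the value is spilled to memory and the byte at offset
   size - 1 = 7, which holds the IEEE sign bit, is XORed with 0x80;
   this negates -0.0 and NaNs correctly, which 0.0 - x would not
   guarantee. */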
2977 /* generate a floating point operation with constant propagation */
2978 static void gen_opif(int op)
2980 int c1, c2;
2981 SValue *v1, *v2;
2982 #if defined _MSC_VER && defined __x86_64__
2983 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2984 volatile
2985 #endif
2986 long double f1, f2;
2988 v1 = vtop - 1;
2989 v2 = vtop;
2990 if (op == TOK_NEG)
2991 v1 = v2;
2993 /* currently, we cannot do computations with forward symbols */
2994 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2995 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2996 if (c1 && c2) {
2997 if (v1->type.t == VT_FLOAT) {
2998 f1 = v1->c.f;
2999 f2 = v2->c.f;
3000 } else if (v1->type.t == VT_DOUBLE) {
3001 f1 = v1->c.d;
3002 f2 = v2->c.d;
3003 } else {
3004 f1 = v1->c.ld;
3005 f2 = v2->c.ld;
3007 /* NOTE: we only do constant propagation for finite numbers (not
3008 NaN or infinity) (ANSI spec) */
3009 if (!(ieee_finite(f1) || !ieee_finite(f2)) && !const_wanted)
3010 goto general_case;
3011 switch(op) {
3012 case '+': f1 += f2; break;
3013 case '-': f1 -= f2; break;
3014 case '*': f1 *= f2; break;
3015 case '/':
3016 if (f2 == 0.0) {
3017 union { float f; unsigned u; } x1, x2, y;
3018 /* If not in initializer we need to potentially generate
3019 FP exceptions at runtime, otherwise we want to fold. */
3020 if (!const_wanted)
3021 goto general_case;
3022 /* the run-time result of 0.0/0.0 on x87 (and of other compilers,
3023 when used to compile the f1 /= f2 below) would be -nan */
3024 x1.f = f1, x2.f = f2;
3025 if (f1 == 0.0)
3026 y.u = 0x7fc00000; /* nan */
3027 else
3028 y.u = 0x7f800000; /* infinity */
3029 y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
3030 f1 = y.f;
3031 break;
3033 f1 /= f2;
3034 break;
3035 case TOK_NEG:
3036 f1 = -f1;
3037 goto unary_result;
3038 /* XXX: also handles tests ? */
3039 default:
3040 goto general_case;
3042 vtop--;
3043 unary_result:
3044 /* XXX: overflow test ? */
3045 if (v1->type.t == VT_FLOAT) {
3046 v1->c.f = f1;
3047 } else if (v1->type.t == VT_DOUBLE) {
3048 v1->c.d = f1;
3049 } else {
3050 v1->c.ld = f1;
3052 } else {
3053 general_case:
3054 if (op == TOK_NEG) {
3055 gen_negf(op);
3056 } else {
3057 gen_opf(op);
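/* Folding examples for the division-by-zero handling above: inside a
   constant expression 1.0f / 0.0f folds to +infinity and 0.0f / 0.0f
   to a quiet NaN (0x7fc00000), with the sign taken from the XOR of
   the operand signs; outside constant expressions the division is
   emitted so that FP exceptions can still occur at run time. */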
3062 /* print a type. If 'varstr' is not NULL, then the variable is also
3063 printed in the type */
3064 /* XXX: union */
3065 /* XXX: add array and function pointers */
3066 static void type_to_str(char *buf, int buf_size,
3067 CType *type, const char *varstr)
3069 int bt, v, t;
3070 Sym *s, *sa;
3071 char buf1[256];
3072 const char *tstr;
3074 t = type->t;
3075 bt = t & VT_BTYPE;
3076 buf[0] = '\0';
3078 if (t & VT_EXTERN)
3079 pstrcat(buf, buf_size, "extern ");
3080 if (t & VT_STATIC)
3081 pstrcat(buf, buf_size, "static ");
3082 if (t & VT_TYPEDEF)
3083 pstrcat(buf, buf_size, "typedef ");
3084 if (t & VT_INLINE)
3085 pstrcat(buf, buf_size, "inline ");
3086 if (bt != VT_PTR) {
3087 if (t & VT_VOLATILE)
3088 pstrcat(buf, buf_size, "volatile ");
3089 if (t & VT_CONSTANT)
3090 pstrcat(buf, buf_size, "const ");
3092 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3093 || ((t & VT_UNSIGNED)
3094 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3095 && !IS_ENUM(t)
3097 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3099 buf_size -= strlen(buf);
3100 buf += strlen(buf);
3102 switch(bt) {
3103 case VT_VOID:
3104 tstr = "void";
3105 goto add_tstr;
3106 case VT_BOOL:
3107 tstr = "_Bool";
3108 goto add_tstr;
3109 case VT_BYTE:
3110 tstr = "char";
3111 goto add_tstr;
3112 case VT_SHORT:
3113 tstr = "short";
3114 goto add_tstr;
3115 case VT_INT:
3116 tstr = "int";
3117 goto maybe_long;
3118 case VT_LLONG:
3119 tstr = "long long";
3120 maybe_long:
3121 if (t & VT_LONG)
3122 tstr = "long";
3123 if (!IS_ENUM(t))
3124 goto add_tstr;
3125 tstr = "enum ";
3126 goto tstruct;
3127 case VT_FLOAT:
3128 tstr = "float";
3129 goto add_tstr;
3130 case VT_DOUBLE:
3131 tstr = "double";
3132 if (!(t & VT_LONG))
3133 goto add_tstr;
3134 case VT_LDOUBLE:
3135 tstr = "long double";
3136 add_tstr:
3137 pstrcat(buf, buf_size, tstr);
3138 break;
3139 case VT_STRUCT:
3140 tstr = "struct ";
3141 if (IS_UNION(t))
3142 tstr = "union ";
3143 tstruct:
3144 pstrcat(buf, buf_size, tstr);
3145 v = type->ref->v & ~SYM_STRUCT;
3146 if (v >= SYM_FIRST_ANOM)
3147 pstrcat(buf, buf_size, "<anonymous>");
3148 else
3149 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3150 break;
3151 case VT_FUNC:
3152 s = type->ref;
3153 buf1[0]=0;
3154 if (varstr && '*' == *varstr) {
3155 pstrcat(buf1, sizeof(buf1), "(");
3156 pstrcat(buf1, sizeof(buf1), varstr);
3157 pstrcat(buf1, sizeof(buf1), ")");
3159 pstrcat(buf1, buf_size, "(");
3160 sa = s->next;
3161 while (sa != NULL) {
3162 char buf2[256];
3163 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3164 pstrcat(buf1, sizeof(buf1), buf2);
3165 sa = sa->next;
3166 if (sa)
3167 pstrcat(buf1, sizeof(buf1), ", ");
3169 if (s->f.func_type == FUNC_ELLIPSIS)
3170 pstrcat(buf1, sizeof(buf1), ", ...");
3171 pstrcat(buf1, sizeof(buf1), ")");
3172 type_to_str(buf, buf_size, &s->type, buf1);
3173 goto no_var;
3174 case VT_PTR:
3175 s = type->ref;
3176 if (t & VT_ARRAY) {
3177 if (varstr && '*' == *varstr)
3178 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3179 else
3180 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3181 type_to_str(buf, buf_size, &s->type, buf1);
3182 goto no_var;
3184 pstrcpy(buf1, sizeof(buf1), "*");
3185 if (t & VT_CONSTANT)
3186 pstrcat(buf1, buf_size, "const ");
3187 if (t & VT_VOLATILE)
3188 pstrcat(buf1, buf_size, "volatile ");
3189 if (varstr)
3190 pstrcat(buf1, sizeof(buf1), varstr);
3191 type_to_str(buf, buf_size, &s->type, buf1);
3192 goto no_var;
3194 if (varstr) {
3195 pstrcat(buf, buf_size, " ");
3196 pstrcat(buf, buf_size, varstr);
3198 no_var: ;
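/* Illustration (hypothetical declaration): called with the type of
       int (*fp)(char *, ...);
   and varstr "fp", the VT_PTR case builds "*fp", the VT_FUNC case
   wraps it as "(*fp)" and appends the parameter list, and the
   recursion on the return type yields
       "int (*fp)(char *, ...)" */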
3201 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
3203 char buf1[256], buf2[256];
3204 type_to_str(buf1, sizeof(buf1), st, NULL);
3205 type_to_str(buf2, sizeof(buf2), dt, NULL);
3206 tcc_error(fmt, buf1, buf2);
3209 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
3211 char buf1[256], buf2[256];
3212 type_to_str(buf1, sizeof(buf1), st, NULL);
3213 type_to_str(buf2, sizeof(buf2), dt, NULL);
3214 tcc_warning(fmt, buf1, buf2);
3217 static int pointed_size(CType *type)
3219 int align;
3220 return type_size(pointed_type(type), &align);
3223 static void vla_runtime_pointed_size(CType *type)
3225 int align;
3226 vla_runtime_type_size(pointed_type(type), &align);
3229 static inline int is_null_pointer(SValue *p)
3231 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
3232 return 0;
3233 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
3234 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
3235 ((p->type.t & VT_BTYPE) == VT_PTR &&
3236 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
3237 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
3238 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
3242 /* compare function types. OLD functions match any new functions */
3243 static int is_compatible_func(CType *type1, CType *type2)
3245 Sym *s1, *s2;
3247 s1 = type1->ref;
3248 s2 = type2->ref;
3249 if (s1->f.func_call != s2->f.func_call)
3250 return 0;
3251 if (s1->f.func_type != s2->f.func_type
3252 && s1->f.func_type != FUNC_OLD
3253 && s2->f.func_type != FUNC_OLD)
3254 return 0;
3255 for (;;) {
3256 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
3257 return 0;
3258 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
3259 return 1;
3260 s1 = s1->next;
3261 s2 = s2->next;
3262 if (!s1)
3263 return !s2;
3264 if (!s2)
3265 return 0;
3269 /* return true if type1 and type2 are the same. If unqualified is
3270 true, qualifiers on the types are ignored.
3272 static int compare_types(CType *type1, CType *type2, int unqualified)
3274 int bt1, t1, t2;
3276 t1 = type1->t & VT_TYPE;
3277 t2 = type2->t & VT_TYPE;
3278 if (unqualified) {
3279 /* strip qualifiers before comparing */
3280 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3281 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3284 /* Default Vs explicit signedness only matters for char */
3285 if ((t1 & VT_BTYPE) != VT_BYTE) {
3286 t1 &= ~VT_DEFSIGN;
3287 t2 &= ~VT_DEFSIGN;
3289 /* XXX: bitfields ? */
3290 if (t1 != t2)
3291 return 0;
3293 if ((t1 & VT_ARRAY)
3294 && !(type1->ref->c < 0
3295 || type2->ref->c < 0
3296 || type1->ref->c == type2->ref->c))
3297 return 0;
3299 /* test more complicated cases */
3300 bt1 = t1 & VT_BTYPE;
3301 if (bt1 == VT_PTR) {
3302 type1 = pointed_type(type1);
3303 type2 = pointed_type(type2);
3304 return is_compatible_types(type1, type2);
3305 } else if (bt1 == VT_STRUCT) {
3306 return (type1->ref == type2->ref);
3307 } else if (bt1 == VT_FUNC) {
3308 return is_compatible_func(type1, type2);
3309 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
3310 /* If both are enums then they must be the same, if only one is then
3311 t1 and t2 must be equal, which was checked above already. */
3312 return type1->ref == type2->ref;
3313 } else {
3314 return 1;
3318 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3319 type is stored in DEST if non-null (except for pointer plus/minus). */
3320 static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
3322 CType *type1 = &op1->type, *type2 = &op2->type, type;
3323 int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
3324 int ret = 1;
3326 type.t = VT_VOID;
3327 type.ref = NULL;
3329 if (bt1 == VT_VOID || bt2 == VT_VOID) {
3330 ret = op == '?' ? 1 : 0;
3331 /* NOTE: as an extension, we accept void on only one side */
3332 type.t = VT_VOID;
3333 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3334 if (op == '+') ; /* Handled in caller */
3335 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3336 /* If one is a null ptr constant the result type is the other. */
3337 else if (is_null_pointer (op2)) type = *type1;
3338 else if (is_null_pointer (op1)) type = *type2;
3339 else if (bt1 != bt2) {
3340 /* accept comparison or cond-expr between pointer and integer
3341 with a warning */
3342 if ((op == '?' || TOK_ISCOND(op))
3343 && (is_integer_btype(bt1) || is_integer_btype(bt2)))
3344 tcc_warning("pointer/integer mismatch in %s",
3345 op == '?' ? "conditional expression" : "comparison");
3346 else if (op != '-' || !is_integer_btype(bt2))
3347 ret = 0;
3348 type = *(bt1 == VT_PTR ? type1 : type2);
3349 } else {
3350 CType *pt1 = pointed_type(type1);
3351 CType *pt2 = pointed_type(type2);
3352 int pbt1 = pt1->t & VT_BTYPE;
3353 int pbt2 = pt2->t & VT_BTYPE;
3354 int newquals, copied = 0;
3355 if (pbt1 != VT_VOID && pbt2 != VT_VOID
3356 && !compare_types(pt1, pt2, 1/*unqualif*/)) {
3357 if (op != '?' && !TOK_ISCOND(op))
3358 ret = 0;
3359 else
3360 type_incompatibility_warning(type1, type2,
3361 op == '?'
3362 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3363 : "pointer type mismatch in comparison('%s' and '%s')");
3365 if (op == '?') {
3366 /* pointers to void get preferred, otherwise the
3367 pointed-to types minus qualifiers should be compatible */
3368 type = *((pbt1 == VT_VOID) ? type1 : type2);
3369 /* combine qualifs */
3370 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
3371 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
3372 & newquals)
3374 /* copy the pointer target symbol */
3375 type.ref = sym_push(SYM_FIELD, &type.ref->type,
3376 0, type.ref->c);
3377 copied = 1;
3378 pointed_type(&type)->t |= newquals;
3380 /* pointers to incomplete arrays get converted to
3381 pointers to completed ones if possible */
3382 if (pt1->t & VT_ARRAY
3383 && pt2->t & VT_ARRAY
3384 && pointed_type(&type)->ref->c < 0
3385 && (pt1->ref->c > 0 || pt2->ref->c > 0))
3387 if (!copied)
3388 type.ref = sym_push(SYM_FIELD, &type.ref->type,
3389 0, type.ref->c);
3390 pointed_type(&type)->ref =
3391 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
3392 0, pointed_type(&type)->ref->c);
3393 pointed_type(&type)->ref->c =
3394 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
3398 if (TOK_ISCOND(op))
3399 type.t = VT_SIZE_T;
3400 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
3401 if (op != '?' || !compare_types(type1, type2, 1))
3402 ret = 0;
3403 type = *type1;
3404 } else if (is_float(bt1) || is_float(bt2)) {
3405 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
3406 type.t = VT_LDOUBLE;
3407 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
3408 type.t = VT_DOUBLE;
3409 } else {
3410 type.t = VT_FLOAT;
3412 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
3413 /* cast to biggest op */
3414 type.t = VT_LLONG | VT_LONG;
3415 if (bt1 == VT_LLONG)
3416 type.t &= t1;
3417 if (bt2 == VT_LLONG)
3418 type.t &= t2;
3419 /* convert to unsigned if it does not fit in a long long */
3420 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
3421 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
3422 type.t |= VT_UNSIGNED;
3423 } else {
3424 /* integer operations */
3425 type.t = VT_INT | (VT_LONG & (t1 | t2));
3426 /* convert to unsigned if it does not fit in an integer */
3427 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
3428 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
3429 type.t |= VT_UNSIGNED;
3431 if (dest)
3432 *dest = type;
3433 return ret;
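/* Examples of the rules above: in 'c ? (void *)p : (int *)q' the
   result type is the void-pointer side; in
   'c ? (const int *)p : (volatile int *)q' the result points to an
   int carrying both qualifiers; and comparing a pointer with a
   non-zero integer is accepted, but only with the pointer/integer
   mismatch warning. */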
3436 /* generic gen_op: handles types problems */
3437 ST_FUNC void gen_op(int op)
3439 int u, t1, t2, bt1, bt2, t;
3440 CType type1, combtype;
3442 redo:
3443 t1 = vtop[-1].type.t;
3444 t2 = vtop[0].type.t;
3445 bt1 = t1 & VT_BTYPE;
3446 bt2 = t2 & VT_BTYPE;
3448 if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
3449 if (bt2 == VT_FUNC) {
3450 mk_pointer(&vtop->type);
3451 gaddrof();
3453 if (bt1 == VT_FUNC) {
3454 vswap();
3455 mk_pointer(&vtop->type);
3456 gaddrof();
3457 vswap();
3459 goto redo;
3460 } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
3461 tcc_error_noabort("invalid operand types for binary operation");
3462 vpop();
3463 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3464 /* at least one operand is a pointer */
3465 /* relational op: must be both pointers */
3466 if (TOK_ISCOND(op))
3467 goto std_op;
3468 /* if both pointers, then it must be the '-' op */
3469 if (bt1 == VT_PTR && bt2 == VT_PTR) {
3470 if (op != '-')
3471 tcc_error("cannot use pointers here");
3472 if (vtop[-1].type.t & VT_VLA) {
3473 vla_runtime_pointed_size(&vtop[-1].type);
3474 } else {
3475 vpushi(pointed_size(&vtop[-1].type));
3477 vrott(3);
3478 gen_opic(op);
3479 vtop->type.t = VT_PTRDIFF_T;
3480 vswap();
3481 gen_op(TOK_PDIV);
3482 } else {
3483 /* exactly one pointer : must be '+' or '-'. */
3484 if (op != '-' && op != '+')
3485 tcc_error("cannot use pointers here");
3486 /* Put pointer as first operand */
3487 if (bt2 == VT_PTR) {
3488 vswap();
3489 t = t1, t1 = t2, t2 = t;
3491 #if PTR_SIZE == 4
3492 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
3493 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3494 gen_cast_s(VT_INT);
3495 #endif
3496 type1 = vtop[-1].type;
3497 if (vtop[-1].type.ref->type.t & VT_VLA)
3498 vla_runtime_pointed_size(&vtop[-1].type);
3499 else {
3500 u = pointed_size(&vtop[-1].type);
3501 if (u < 0)
3502 tcc_error("unknown array element size");
3503 #if PTR_SIZE == 8
3504 vpushll(u);
3505 #else
3506 /* XXX: cast to int ? (long long case) */
3507 vpushi(u);
3508 #endif
3510 gen_op('*');
3511 #ifdef CONFIG_TCC_BCHECK
3512 if (tcc_state->do_bounds_check && !const_wanted) {
3513 /* if bounded pointers, we generate a special code to
3514 test bounds */
3515 if (op == '-') {
3516 vpushi(0);
3517 vswap();
3518 gen_op('-');
3520 gen_bounded_ptr_add();
3521 } else
3522 #endif
3524 gen_opic(op);
3526 type1.t &= ~VT_ARRAY;
3527 /* restore the type in case gen_opic() swapped the operands */
3528 vtop->type = type1;
3530 } else {
3531 /* floats can only be used for a few operations */
3532 if (is_float(combtype.t)
3533 && op != '+' && op != '-' && op != '*' && op != '/'
3534 && !TOK_ISCOND(op))
3535 tcc_error("invalid operands for binary operation");
3536 else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
3537 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
3538 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
3539 t |= VT_UNSIGNED;
3540 t |= (VT_LONG & t1);
3541 combtype.t = t;
3543 std_op:
3544 t = t2 = combtype.t;
3545 /* XXX: currently, some unsigned operations are explicit, so
3546 we modify them here */
3547 if (t & VT_UNSIGNED) {
3548 if (op == TOK_SAR)
3549 op = TOK_SHR;
3550 else if (op == '/')
3551 op = TOK_UDIV;
3552 else if (op == '%')
3553 op = TOK_UMOD;
3554 else if (op == TOK_LT)
3555 op = TOK_ULT;
3556 else if (op == TOK_GT)
3557 op = TOK_UGT;
3558 else if (op == TOK_LE)
3559 op = TOK_ULE;
3560 else if (op == TOK_GE)
3561 op = TOK_UGE;
3563 vswap();
3564 gen_cast_s(t);
3565 vswap();
3566 /* special case for shifts and long long: we keep the shift as
3567 an integer */
3568 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
3569 t2 = VT_INT;
3570 gen_cast_s(t2);
3571 if (is_float(t))
3572 gen_opif(op);
3573 else
3574 gen_opic(op);
3575 if (TOK_ISCOND(op)) {
3576 /* relational op: the result is an int */
3577 vtop->type.t = VT_INT;
3578 } else {
3579 vtop->type.t = t;
3582 // Make sure that we have converted to an rvalue:
3583 if (vtop->r & VT_LVAL)
3584 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
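/* Pointer arithmetic examples for the code above: with 'int *p',
   'p + n' multiplies n by pointed_size() == 4 before the addition,
   while 'p - q' subtracts the two addresses and then divides by 4
   with TOK_PDIV to get the element count. */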
3587 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3588 #define gen_cvt_itof1 gen_cvt_itof
3589 #else
3590 /* generic itof for unsigned long long case */
3591 static void gen_cvt_itof1(int t)
3593 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
3594 (VT_LLONG | VT_UNSIGNED)) {
3596 if (t == VT_FLOAT)
3597 vpush_helper_func(TOK___floatundisf);
3598 #if LDOUBLE_SIZE != 8
3599 else if (t == VT_LDOUBLE)
3600 vpush_helper_func(TOK___floatundixf);
3601 #endif
3602 else
3603 vpush_helper_func(TOK___floatundidf);
3604 vrott(2);
3605 gfunc_call(1);
3606 vpushi(0);
3607 PUT_R_RET(vtop, t);
3608 } else {
3609 gen_cvt_itof(t);
3612 #endif
3614 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3615 #define gen_cvt_ftoi1 gen_cvt_ftoi
3616 #else
3617 /* generic ftoi for unsigned long long case */
3618 static void gen_cvt_ftoi1(int t)
3620 int st;
3621 if (t == (VT_LLONG | VT_UNSIGNED)) {
3622 /* not handled natively */
3623 st = vtop->type.t & VT_BTYPE;
3624 if (st == VT_FLOAT)
3625 vpush_helper_func(TOK___fixunssfdi);
3626 #if LDOUBLE_SIZE != 8
3627 else if (st == VT_LDOUBLE)
3628 vpush_helper_func(TOK___fixunsxfdi);
3629 #endif
3630 else
3631 vpush_helper_func(TOK___fixunsdfdi);
3632 vrott(2);
3633 gfunc_call(1);
3634 vpushi(0);
3635 PUT_R_RET(vtop, t);
3636 } else {
3637 gen_cvt_ftoi(t);
3640 #endif
3642 /* special delayed cast for char/short */
3643 static void force_charshort_cast(void)
3645 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3646 int dbt = vtop->type.t;
3647 vtop->r &= ~VT_MUSTCAST;
3648 vtop->type.t = sbt;
3649 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3650 vtop->type.t = dbt;
3653 static void gen_cast_s(int t)
3655 CType type;
3656 type.t = t;
3657 type.ref = NULL;
3658 gen_cast(&type);
3661 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3662 static void gen_cast(CType *type)
3664 int sbt, dbt, sf, df, c;
3665 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
3667 /* special delayed cast for char/short */
3668 if (vtop->r & VT_MUSTCAST)
3669 force_charshort_cast();
3671 /* bitfields first get cast to ints */
3672 if (vtop->type.t & VT_BITFIELD)
3673 gv(RC_INT);
3675 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
3676 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
3677 if (sbt == VT_FUNC)
3678 sbt = VT_PTR;
3680 again:
3681 if (sbt != dbt) {
3682 sf = is_float(sbt);
3683 df = is_float(dbt);
3684 dbt_bt = dbt & VT_BTYPE;
3685 sbt_bt = sbt & VT_BTYPE;
3686 if (dbt_bt == VT_VOID)
3687 goto done;
3688 if (sbt_bt == VT_VOID) {
3689 error:
3690 cast_error(&vtop->type, type);
3693 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3694 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3695 c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
3696 #endif
3697 if (c) {
3698 /* constant case: we can do it now */
3699 /* XXX: in ISOC, cannot do it if error in convert */
3700 if (sbt == VT_FLOAT)
3701 vtop->c.ld = vtop->c.f;
3702 else if (sbt == VT_DOUBLE)
3703 vtop->c.ld = vtop->c.d;
3705 if (df) {
3706 if (sbt_bt == VT_LLONG) {
3707 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
3708 vtop->c.ld = vtop->c.i;
3709 else
3710 vtop->c.ld = -(long double)-vtop->c.i;
3711 } else if(!sf) {
3712 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
3713 vtop->c.ld = (uint32_t)vtop->c.i;
3714 else
3715 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
3718 if (dbt == VT_FLOAT)
3719 vtop->c.f = (float)vtop->c.ld;
3720 else if (dbt == VT_DOUBLE)
3721 vtop->c.d = (double)vtop->c.ld;
3722 } else if (sf && dbt == VT_BOOL) {
3723 vtop->c.i = (vtop->c.ld != 0);
3724 } else {
3725 if(sf)
3726 vtop->c.i = vtop->c.ld;
3727 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
3729 else if (sbt & VT_UNSIGNED)
3730 vtop->c.i = (uint32_t)vtop->c.i;
3731 else
3732 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
3734 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
3736 else if (dbt == VT_BOOL)
3737 vtop->c.i = (vtop->c.i != 0);
3738 else {
3739 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
3740 dbt_bt == VT_SHORT ? 0xffff :
3741 0xffffffff;
3742 vtop->c.i &= m;
3743 if (!(dbt & VT_UNSIGNED))
3744 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
3747 goto done;
3749 } else if (dbt == VT_BOOL
3750 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
3751 == (VT_CONST | VT_SYM)) {
3752 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3753 vtop->r = VT_CONST;
3754 vtop->c.i = 1;
3755 goto done;
3758 /* cannot generate code for global or static initializers */
3759 if (STATIC_DATA_WANTED)
3760 goto done;
3762 /* non constant case: generate code */
3763 if (dbt == VT_BOOL) {
3764 gen_test_zero(TOK_NE);
3765 goto done;
3768 if (sf || df) {
3769 if (sf && df) {
3770 /* convert from fp to fp */
3771 gen_cvt_ftof(dbt);
3772 } else if (df) {
3773 /* convert int to fp */
3774 gen_cvt_itof1(dbt);
3775 } else {
3776 /* convert fp to int */
3777 sbt = dbt;
3778 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
3779 sbt = VT_INT;
3780 gen_cvt_ftoi1(sbt);
3781 goto again; /* may need char/short cast */
3783 goto done;
3786 ds = btype_size(dbt_bt);
3787 ss = btype_size(sbt_bt);
3788 if (ds == 0 || ss == 0)
3789 goto error;
3791 if (IS_ENUM(type->t) && type->ref->c < 0)
3792 tcc_error("cast to incomplete type");
3794 /* same size and no sign conversion needed */
3795 if (ds == ss && ds >= 4)
3796 goto done;
3797 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
3798 tcc_warning("cast between pointer and integer of different size");
3799 if (sbt_bt == VT_PTR) {
3800 /* put integer type to allow logical operations below */
3801 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
3805 /* processor allows { int a = 0, b = *(char*)&a; }
3806 That means that if we cast to a smaller width, we can just
3807 change the type and still read it later. */
3808 #define ALLOW_SUBTYPE_ACCESS 1
3810 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
3811 /* value still in memory */
3812 if (ds <= ss)
3813 goto done;
3814 /* ss <= 4 here */
3815 if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
3816 gv(RC_INT);
3817 goto done; /* no 64bit involved */
3820 gv(RC_INT);
3822 trunc = 0;
3823 #if PTR_SIZE == 4
3824 if (ds == 8) {
3825 /* generate high word */
3826 if (sbt & VT_UNSIGNED) {
3827 vpushi(0);
3828 gv(RC_INT);
3829 } else {
3830 gv_dup();
3831 vpushi(31);
3832 gen_op(TOK_SAR);
3834 lbuild(dbt);
3835 } else if (ss == 8) {
3836 /* from long long: just take low order word */
3837 lexpand();
3838 vpop();
3840 ss = 4;
3842 #elif PTR_SIZE == 8
3843 if (ds == 8) {
3844 /* need to convert from 32bit to 64bit */
3845 if (sbt & VT_UNSIGNED) {
3846 #if defined(TCC_TARGET_RISCV64)
3847 /* RISC-V keeps 32bit vals in registers sign-extended.
3848 So here we need a zero-extension. */
3849 trunc = 32;
3850 #else
3851 goto done;
3852 #endif
3853 } else {
3854 gen_cvt_sxtw();
3855 goto done;
3857 ss = ds, ds = 4, dbt = sbt;
3858 } else if (ss == 8) {
3859 /* RISC-V keeps 32bit vals in registers sign-extended.
3860 So here we need a sign-extension for signed types and a
3861 zero-extension for unsigned types. */
3862 #if !defined(TCC_TARGET_RISCV64)
3863 trunc = 32; /* zero upper 32 bits for non RISC-V targets */
3864 #endif
3865 } else {
3866 ss = 4;
3868 #endif
3870 if (ds >= ss)
3871 goto done;
3872 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3873 if (ss == 4) {
3874 gen_cvt_csti(dbt);
3875 goto done;
3877 #endif
3878 bits = (ss - ds) * 8;
3879 /* for unsigned, gen_op will convert SAR to SHR */
3880 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
3881 vpushi(bits);
3882 gen_op(TOK_SHL);
3883 vpushi(bits - trunc);
3884 gen_op(TOK_SAR);
3885 vpushi(trunc);
3886 gen_op(TOK_SHR);
3888 done:
3889 vtop->type = *type;
3890 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
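/* Constant-cast example for the code above: casting the constant
   0x12345 to 'signed char' masks with m = 0xff, giving 0x45 (69);
   casting 0x1ff the same way gives 0xff, and the final
   'c.i |= -(c.i & 0x80)' step sign-extends it to -1. */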
3893 /* return type size as known at compile time. Put alignment at 'a' */
3894 ST_FUNC int type_size(CType *type, int *a)
3896 Sym *s;
3897 int bt;
3899 bt = type->t & VT_BTYPE;
3900 if (bt == VT_STRUCT) {
3901 /* struct/union */
3902 s = type->ref;
3903 *a = s->r;
3904 return s->c;
3905 } else if (bt == VT_PTR) {
3906 if (type->t & VT_ARRAY) {
3907 int ts;
3909 s = type->ref;
3910 ts = type_size(&s->type, a);
3912 if (ts < 0 && s->c < 0)
3913 ts = -ts;
3915 return ts * s->c;
3916 } else {
3917 *a = PTR_SIZE;
3918 return PTR_SIZE;
3920 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3921 return -1; /* incomplete enum */
3922 } else if (bt == VT_LDOUBLE) {
3923 *a = LDOUBLE_ALIGN;
3924 return LDOUBLE_SIZE;
3925 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
3926 #ifdef TCC_TARGET_I386
3927 #ifdef TCC_TARGET_PE
3928 *a = 8;
3929 #else
3930 *a = 4;
3931 #endif
3932 #elif defined(TCC_TARGET_ARM)
3933 #ifdef TCC_ARM_EABI
3934 *a = 8;
3935 #else
3936 *a = 4;
3937 #endif
3938 #else
3939 *a = 8;
3940 #endif
3941 return 8;
3942 } else if (bt == VT_INT || bt == VT_FLOAT) {
3943 *a = 4;
3944 return 4;
3945 } else if (bt == VT_SHORT) {
3946 *a = 2;
3947 return 2;
3948 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3949 *a = 8;
3950 return 16;
3951 } else {
3952 /* char, void, function, _Bool */
3953 *a = 1;
3954 return 1;
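/* Examples: for 'int[10]' type_size() recurses to the int element
   (size 4, align 4) and returns 4 * 10 = 40; for an incomplete array
   the stored element count is negative, so a negative size is
   returned and callers must handle it specially. */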
3958 /* push type size as known at run time on top of the value stack. Put
3959 alignment at 'a' */
3960 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
3962 if (type->t & VT_VLA) {
3963 type_size(&type->ref->type, a);
3964 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3965 } else {
3966 vpushi(type_size(type, a));
3970 /* return the pointed type of t */
3971 static inline CType *pointed_type(CType *type)
3973 return &type->ref->type;
3976 /* modify type so that it is a pointer to type. */
3977 ST_FUNC void mk_pointer(CType *type)
3979 Sym *s;
3980 s = sym_push(SYM_FIELD, type, 0, -1);
3981 type->t = VT_PTR | (type->t & VT_STORAGE);
3982 type->ref = s;
3985 /* return true if type1 and type2 are exactly the same (including
3986 qualifiers).
3988 static int is_compatible_types(CType *type1, CType *type2)
3990 return compare_types(type1,type2,0);
3993 /* return true if type1 and type2 are the same (ignoring qualifiers).
3995 static int is_compatible_unqualified_types(CType *type1, CType *type2)
3997 return compare_types(type1,type2,1);
4000 static void cast_error(CType *st, CType *dt)
4002 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
4005 /* verify type compatibility to store vtop in 'dt' type */
4006 static void verify_assign_cast(CType *dt)
4008 CType *st, *type1, *type2;
4009 int dbt, sbt, qualwarn, lvl;
4011 st = &vtop->type; /* source type */
4012 dbt = dt->t & VT_BTYPE;
4013 sbt = st->t & VT_BTYPE;
4014 if (dt->t & VT_CONSTANT)
4015 tcc_warning("assignment of read-only location");
4016 switch(dbt) {
4017 case VT_VOID:
4018 if (sbt != dbt)
4019 tcc_error("assignment to void expression");
4020 break;
4021 case VT_PTR:
4022 /* special cases for pointers */
4023 /* '0' can also be a pointer */
4024 if (is_null_pointer(vtop))
4025 break;
4026 /* accept implicit pointer to integer cast with warning */
4027 if (is_integer_btype(sbt)) {
4028 tcc_warning("assignment makes pointer from integer without a cast");
4029 break;
4031 type1 = pointed_type(dt);
4032 if (sbt == VT_PTR)
4033 type2 = pointed_type(st);
4034 else if (sbt == VT_FUNC)
4035 type2 = st; /* a function is implicitly a function pointer */
4036 else
4037 goto error;
4038 if (is_compatible_types(type1, type2))
4039 break;
4040 for (qualwarn = lvl = 0;; ++lvl) {
4041 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
4042 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
4043 qualwarn = 1;
4044 dbt = type1->t & (VT_BTYPE|VT_LONG);
4045 sbt = type2->t & (VT_BTYPE|VT_LONG);
4046 if (dbt != VT_PTR || sbt != VT_PTR)
4047 break;
4048 type1 = pointed_type(type1);
4049 type2 = pointed_type(type2);
4051 if (!is_compatible_unqualified_types(type1, type2)) {
4052 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
4053 /* void * can match anything */
4054 } else if (dbt == sbt
4055 && is_integer_btype(sbt & VT_BTYPE)
4056 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
4057 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
4058 /* Like GCC, don't warn by default for mere changes
4059 in pointer target signedness. Do warn for different
4060 base types, though, in particular for unsigned enums
4061 and signed int targets. */
4062 } else {
4063 tcc_warning("assignment from incompatible pointer type");
4064 break;
4067 if (qualwarn)
4068 tcc_warning_c(warn_discarded_qualifiers)("assignment discards qualifiers from pointer target type");
4069 break;
4070 case VT_BYTE:
4071 case VT_SHORT:
4072 case VT_INT:
4073 case VT_LLONG:
4074 if (sbt == VT_PTR || sbt == VT_FUNC) {
4075 tcc_warning("assignment makes integer from pointer without a cast");
4076 } else if (sbt == VT_STRUCT) {
4077 goto case_VT_STRUCT;
4079 /* XXX: more tests */
4080 break;
4081 case VT_STRUCT:
4082 case_VT_STRUCT:
4083 if (!is_compatible_unqualified_types(dt, st)) {
4084 error:
4085 cast_error(st, dt);
4087 break;
4091 static void gen_assign_cast(CType *dt)
4093 verify_assign_cast(dt);
4094 gen_cast(dt);
4097 /* store vtop in lvalue pushed on stack */
4098 ST_FUNC void vstore(void)
4100 int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;
4102 ft = vtop[-1].type.t;
4103 sbt = vtop->type.t & VT_BTYPE;
4104 dbt = ft & VT_BTYPE;
4106 verify_assign_cast(&vtop[-1].type);
4108 if (sbt == VT_STRUCT) {
4109 /* if structure, only generate pointer */
4110 /* structure assignment : generate memcpy */
4111 /* XXX: optimize if small size */
4112 size = type_size(&vtop->type, &align);
4114 /* destination */
4115 vswap();
4116 #ifdef CONFIG_TCC_BCHECK
4117 if (vtop->r & VT_MUSTBOUND)
4118 gbound(); /* check would be wrong after gaddrof() */
4119 #endif
4120 vtop->type.t = VT_PTR;
4121 gaddrof();
4123 /* address of memcpy() */
4124 #ifdef TCC_ARM_EABI
4125 if(!(align & 7))
4126 vpush_helper_func(TOK_memmove8);
4127 else if(!(align & 3))
4128 vpush_helper_func(TOK_memmove4);
4129 else
4130 #endif
4131 /* Use memmove, rather than memcpy, as dest and src may be the same: */
4132 vpush_helper_func(TOK_memmove);
4134 vswap();
4135 /* source */
4136 vpushv(vtop - 2);
4137 #ifdef CONFIG_TCC_BCHECK
4138 if (vtop->r & VT_MUSTBOUND)
4139 gbound();
4140 #endif
4141 vtop->type.t = VT_PTR;
4142 gaddrof();
4143 /* type size */
4144 vpushi(size);
4145 gfunc_call(3);
4146 /* leave source on stack */
4148 } else if (ft & VT_BITFIELD) {
4149 /* bitfield store handling */
4151 /* save lvalue as expression result (example: s.b = s.a = n;) */
4152 vdup(), vtop[-1] = vtop[-2];
4154 bit_pos = BIT_POS(ft);
4155 bit_size = BIT_SIZE(ft);
4156 /* remove bit field info to avoid loops */
4157 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
4159 if (dbt == VT_BOOL) {
4160 gen_cast(&vtop[-1].type);
4161 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
4163 r = adjust_bf(vtop - 1, bit_pos, bit_size);
4164 if (dbt != VT_BOOL) {
4165 gen_cast(&vtop[-1].type);
4166 dbt = vtop[-1].type.t & VT_BTYPE;
4168 if (r == VT_STRUCT) {
4169 store_packed_bf(bit_pos, bit_size);
4170 } else {
4171 unsigned long long mask = (1ULL << bit_size) - 1;
4172 if (dbt != VT_BOOL) {
4173 /* mask source */
4174 if (dbt == VT_LLONG)
4175 vpushll(mask);
4176 else
4177 vpushi((unsigned)mask);
4178 gen_op('&');
4180 /* shift source */
4181 vpushi(bit_pos);
4182 gen_op(TOK_SHL);
4183 vswap();
4184 /* duplicate destination */
4185 vdup();
4186 vrott(3);
4187 /* load destination, mask and or with source */
4188 if (dbt == VT_LLONG)
4189 vpushll(~(mask << bit_pos));
4190 else
4191 vpushi(~((unsigned)mask << bit_pos));
4192 gen_op('&');
4193 gen_op('|');
4194 /* store result */
4195 vstore();
4196 /* ... and discard */
4197 vpop();
4199 } else if (dbt == VT_VOID) {
4200 --vtop;
4201 } else {
4202 /* optimize char/short casts */
4203 delayed_cast = 0;
4204 if ((dbt == VT_BYTE || dbt == VT_SHORT)
4205 && is_integer_btype(sbt)
4207 if ((vtop->r & VT_MUSTCAST)
4208 && btype_size(dbt) > btype_size(sbt)
4210 force_charshort_cast();
4211 delayed_cast = 1;
4212 } else {
4213 gen_cast(&vtop[-1].type);
4216 #ifdef CONFIG_TCC_BCHECK
4217 /* bound check case */
4218 if (vtop[-1].r & VT_MUSTBOUND) {
4219 vswap();
4220 gbound();
4221 vswap();
4223 #endif
4224 gv(RC_TYPE(dbt)); /* generate value */
4226 if (delayed_cast) {
4227 vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
4228 //tcc_warning("delayed cast %x -> %x", sbt, dbt);
4229 vtop->type.t = ft & VT_TYPE;
4232 /* if lvalue was saved on stack, must read it */
4233 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
4234 SValue sv;
4235 r = get_reg(RC_INT);
4236 sv.type.t = VT_PTRDIFF_T;
4237 sv.r = VT_LOCAL | VT_LVAL;
4238 sv.c.i = vtop[-1].c.i;
4239 load(r, &sv);
4240 vtop[-1].r = r | VT_LVAL;
4243 r = vtop->r & VT_VALMASK;
4244 /* two word case handling :
4245 store second register at word + 4 (or +8 for x86-64) */
4246 if (USING_TWO_WORDS(dbt)) {
4247 int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
4248 vtop[-1].type.t = load_type;
4249 store(r, vtop - 1);
4250 vswap();
4251 /* convert to int to increment easily */
4252 vtop->type.t = VT_PTRDIFF_T;
4253 gaddrof();
4254 vpushs(PTR_SIZE);
4255 gen_op('+');
4256 vtop->r |= VT_LVAL;
4257 vswap();
4258 vtop[-1].type.t = load_type;
4259 /* XXX: it works because r2 is spilled last ! */
4260 store(vtop->r2, vtop - 1);
4261 } else {
4262 /* single word */
4263 store(r, vtop - 1);
4265 vswap();
4266 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
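/* Bitfield-store sketch for the code above (hypothetical struct):
   for 'struct { int a : 3, b : 5; } s;' the assignment 's.b = v'
   masks v with (1 << 5) - 1 = 0x1f, shifts it left by bit_pos 3,
   clears the old bits in the destination with & ~(0x1f << 3) and
   ORs the two together before the recursive vstore() writes the
   word back. */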
4270 /* post defines POST/PRE add. c is the token ++ or -- */
4271 ST_FUNC void inc(int post, int c)
4273 test_lvalue();
4274 vdup(); /* save lvalue */
4275 if (post) {
4276 gv_dup(); /* duplicate value */
4277 vrotb(3);
4278 vrotb(3);
4280 /* add constant */
4281 vpushi(c - TOK_MID);
4282 gen_op('+');
4283 vstore(); /* store value */
4284 if (post)
4285 vpop(); /* if post op, return saved value */
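/* Usage sketch (illustrative only): for "i++" the lvalue is duplicated and
   the old value is kept, the constant pushed by (c - TOK_MID) is +1 for ++
   and -1 for --, the sum is stored, and the saved old value is left as the
   expression result; for "++i" no extra copy is kept, so the stored (new)
   value itself is the result. */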
4288 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
4290 /* read the string */
4291 if (tok != TOK_STR)
4292 expect(msg);
4293 cstr_new(astr);
4294 while (tok == TOK_STR) {
4295 /* XXX: add \0 handling too ? */
4296 cstr_cat(astr, tokc.str.data, -1);
4297 next();
4299 cstr_ccat(astr, '\0');
4302 /* If I is >= 1 and a power of two, returns log2(i)+1.
4303 If I is 0 returns 0. */
4304 ST_FUNC int exact_log2p1(int i)
4306 int ret;
4307 if (!i)
4308 return 0;
4309 for (ret = 1; i >= 1 << 8; ret += 8)
4310 i >>= 8;
4311 if (i >= 1 << 4)
4312 ret += 4, i >>= 4;
4313 if (i >= 1 << 2)
4314 ret += 2, i >>= 2;
4315 if (i >= 1 << 1)
4316 ret++;
4317 return ret;
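/* Worked examples (not from the original source):
       exact_log2p1(1)  == 1
       exact_log2p1(8)  == 4     (log2(8) + 1)
       exact_log2p1(16) == 5
       exact_log2p1(0)  == 0
   The aligned attribute handling below stores this value so that the
   alignment can later be recovered as 1 << (aligned - 1). */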
4320 /* Parse __attribute__((...)) GNUC extension. */
4321 static void parse_attribute(AttributeDef *ad)
4323 int t, n;
4324 CString astr;
4326 redo:
4327 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
4328 return;
4329 next();
4330 skip('(');
4331 skip('(');
4332 while (tok != ')') {
4333 if (tok < TOK_IDENT)
4334 expect("attribute name");
4335 t = tok;
4336 next();
4337 switch(t) {
4338 case TOK_CLEANUP1:
4339 case TOK_CLEANUP2:
4341 Sym *s;
4343 skip('(');
4344 s = sym_find(tok);
4345 if (!s) {
4346 tcc_warning_c(warn_implicit_function_declaration)(
4347 "implicit declaration of function '%s'", get_tok_str(tok, &tokc));
4348 s = external_global_sym(tok, &func_old_type);
4349 } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
4350 tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
4351 ad->cleanup_func = s;
4352 next();
4353 skip(')');
4354 break;
4356 case TOK_CONSTRUCTOR1:
4357 case TOK_CONSTRUCTOR2:
4358 ad->f.func_ctor = 1;
4359 break;
4360 case TOK_DESTRUCTOR1:
4361 case TOK_DESTRUCTOR2:
4362 ad->f.func_dtor = 1;
4363 break;
4364 case TOK_ALWAYS_INLINE1:
4365 case TOK_ALWAYS_INLINE2:
4366 ad->f.func_alwinl = 1;
4367 break;
4368 case TOK_SECTION1:
4369 case TOK_SECTION2:
4370 skip('(');
4371 parse_mult_str(&astr, "section name");
4372 ad->section = find_section(tcc_state, (char *)astr.data);
4373 skip(')');
4374 cstr_free(&astr);
4375 break;
4376 case TOK_ALIAS1:
4377 case TOK_ALIAS2:
4378 skip('(');
4379 parse_mult_str(&astr, "alias(\"target\")");
4380 ad->alias_target = /* save string as token, for later */
4381 tok_alloc((char*)astr.data, astr.size-1)->tok;
4382 skip(')');
4383 cstr_free(&astr);
4384 break;
4385 case TOK_VISIBILITY1:
4386 case TOK_VISIBILITY2:
4387 skip('(');
4388 parse_mult_str(&astr,
4389 "visibility(\"default|hidden|internal|protected\")");
4390 if (!strcmp (astr.data, "default"))
4391 ad->a.visibility = STV_DEFAULT;
4392 else if (!strcmp (astr.data, "hidden"))
4393 ad->a.visibility = STV_HIDDEN;
4394 else if (!strcmp (astr.data, "internal"))
4395 ad->a.visibility = STV_INTERNAL;
4396 else if (!strcmp (astr.data, "protected"))
4397 ad->a.visibility = STV_PROTECTED;
4398 else
4399 expect("visibility(\"default|hidden|internal|protected\")");
4400 skip(')');
4401 cstr_free(&astr);
4402 break;
4403 case TOK_ALIGNED1:
4404 case TOK_ALIGNED2:
4405 if (tok == '(') {
4406 next();
4407 n = expr_const();
4408 if (n <= 0 || (n & (n - 1)) != 0)
4409 tcc_error("alignment must be a positive power of two");
4410 skip(')');
4411 } else {
4412 n = MAX_ALIGN;
4414 ad->a.aligned = exact_log2p1(n);
4415 if (n != 1 << (ad->a.aligned - 1))
4416 tcc_error("alignment of %d is larger than implemented", n);
4417 break;
4418 case TOK_PACKED1:
4419 case TOK_PACKED2:
4420 ad->a.packed = 1;
4421 break;
4422 case TOK_WEAK1:
4423 case TOK_WEAK2:
4424 ad->a.weak = 1;
4425 break;
4426 case TOK_UNUSED1:
4427 case TOK_UNUSED2:
4428 /* currently, no need to handle it because tcc does not
4429 track unused objects */
4430 break;
4431 case TOK_NORETURN1:
4432 case TOK_NORETURN2:
4433 ad->f.func_noreturn = 1;
4434 break;
4435 case TOK_CDECL1:
4436 case TOK_CDECL2:
4437 case TOK_CDECL3:
4438 ad->f.func_call = FUNC_CDECL;
4439 break;
4440 case TOK_STDCALL1:
4441 case TOK_STDCALL2:
4442 case TOK_STDCALL3:
4443 ad->f.func_call = FUNC_STDCALL;
4444 break;
4445 #ifdef TCC_TARGET_I386
4446 case TOK_REGPARM1:
4447 case TOK_REGPARM2:
4448 skip('(');
4449 n = expr_const();
4450 if (n > 3)
4451 n = 3;
4452 else if (n < 0)
4453 n = 0;
4454 if (n > 0)
4455 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
4456 skip(')');
4457 break;
4458 case TOK_FASTCALL1:
4459 case TOK_FASTCALL2:
4460 case TOK_FASTCALL3:
4461 ad->f.func_call = FUNC_FASTCALLW;
4462 break;
4463 #endif
4464 case TOK_MODE:
4465 skip('(');
4466 switch(tok) {
4467 case TOK_MODE_DI:
4468 ad->attr_mode = VT_LLONG + 1;
4469 break;
4470 case TOK_MODE_QI:
4471 ad->attr_mode = VT_BYTE + 1;
4472 break;
4473 case TOK_MODE_HI:
4474 ad->attr_mode = VT_SHORT + 1;
4475 break;
4476 case TOK_MODE_SI:
4477 case TOK_MODE_word:
4478 ad->attr_mode = VT_INT + 1;
4479 break;
4480 default:
4481 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
4482 break;
4484 next();
4485 skip(')');
4486 break;
4487 case TOK_DLLEXPORT:
4488 ad->a.dllexport = 1;
4489 break;
4490 case TOK_NODECORATE:
4491 ad->a.nodecorate = 1;
4492 break;
4493 case TOK_DLLIMPORT:
4494 ad->a.dllimport = 1;
4495 break;
4496 default:
4497 tcc_warning_c(warn_unsupported)("'%s' attribute ignored", get_tok_str(t, NULL));
4498 /* skip parameters */
4499 if (tok == '(') {
4500 int parenthesis = 0;
4501 do {
4502 if (tok == '(')
4503 parenthesis++;
4504 else if (tok == ')')
4505 parenthesis--;
4506 next();
4507 } while (parenthesis && tok != -1);
4509 break;
4511 if (tok != ',')
4512 break;
4513 next();
4515 skip(')');
4516 skip(')');
4517 goto redo;
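/* Illustrative input (not part of the build): declarations like

       static int buf[64]
           __attribute__((aligned(16), section(".mydata"), unused));
       void handler(void) __attribute__((constructor, weak));

   are handled by the switch above; ".mydata" and the identifiers are only
   example values.  Unknown attributes fall into the default case, emit a
   warning and have their parenthesized arguments skipped. */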
4520 static Sym * find_field (CType *type, int v, int *cumofs)
4522 Sym *s = type->ref;
4523 v |= SYM_FIELD;
4524 while ((s = s->next) != NULL) {
4525 if ((s->v & SYM_FIELD) &&
4526 (s->type.t & VT_BTYPE) == VT_STRUCT &&
4527 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
4528 Sym *ret = find_field (&s->type, v, cumofs);
4529 if (ret) {
4530 *cumofs += s->c;
4531 return ret;
4534 if (s->v == v)
4535 break;
4537 return s;
4540 static void check_fields (CType *type, int check)
4542 Sym *s = type->ref;
4544 while ((s = s->next) != NULL) {
4545 int v = s->v & ~SYM_FIELD;
4546 if (v < SYM_FIRST_ANOM) {
4547 TokenSym *ts = table_ident[v - TOK_IDENT];
4548 if (check && (ts->tok & SYM_FIELD))
4549 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
4550 ts->tok ^= SYM_FIELD;
4551 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
4552 check_fields (&s->type, check);
4556 static void struct_layout(CType *type, AttributeDef *ad)
4558 int size, align, maxalign, offset, c, bit_pos, bit_size;
4559 int packed, a, bt, prevbt, prev_bit_size;
4560 int pcc = !tcc_state->ms_bitfields;
4561 int pragma_pack = *tcc_state->pack_stack_ptr;
4562 Sym *f;
4564 maxalign = 1;
4565 offset = 0;
4566 c = 0;
4567 bit_pos = 0;
4568 prevbt = VT_STRUCT; /* make it never match */
4569 prev_bit_size = 0;
4571 //#define BF_DEBUG
4573 for (f = type->ref->next; f; f = f->next) {
4574 if (f->type.t & VT_BITFIELD)
4575 bit_size = BIT_SIZE(f->type.t);
4576 else
4577 bit_size = -1;
4578 size = type_size(&f->type, &align);
4579 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
4580 packed = 0;
4582 if (pcc && bit_size == 0) {
4583 /* in pcc mode, packing does not affect zero-width bitfields */
4585 } else {
4586 /* in pcc mode, attribute packed overrides if set. */
4587 if (pcc && (f->a.packed || ad->a.packed))
4588 align = packed = 1;
4590 /* pragma pack overrides align if it is smaller, and always packs bitfields */
4591 if (pragma_pack) {
4592 packed = 1;
4593 if (pragma_pack < align)
4594 align = pragma_pack;
4595 /* in pcc mode pragma pack also overrides individual align */
4596 if (pcc && pragma_pack < a)
4597 a = 0;
4600 /* some individual align was specified */
4601 if (a)
4602 align = a;
4604 if (type->ref->type.t == VT_UNION) {
4605 if (pcc && bit_size >= 0)
4606 size = (bit_size + 7) >> 3;
4607 offset = 0;
4608 if (size > c)
4609 c = size;
4611 } else if (bit_size < 0) {
4612 if (pcc)
4613 c += (bit_pos + 7) >> 3;
4614 c = (c + align - 1) & -align;
4615 offset = c;
4616 if (size > 0)
4617 c += size;
4618 bit_pos = 0;
4619 prevbt = VT_STRUCT;
4620 prev_bit_size = 0;
4622 } else {
4623 /* A bit-field. Layout is more complicated. There are two
4624 options: PCC (GCC) compatible and MS compatible */
4625 if (pcc) {
4626 /* In PCC layout a bit-field is placed adjacent to the
4627 preceding bit-fields, except if:
4628 - it has zero-width
4629 - an individual alignment was given
4630 - it would overflow its base type container and
4631 there is no packing */
4632 if (bit_size == 0) {
4633 new_field:
4634 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
4635 bit_pos = 0;
4636 } else if (f->a.aligned) {
4637 goto new_field;
4638 } else if (!packed) {
4639 int a8 = align * 8;
4640 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
4641 if (ofs > size / align)
4642 goto new_field;
4645 /* in pcc mode, long long bitfields have type int if they fit */
4646 if (size == 8 && bit_size <= 32)
4647 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
4649 while (bit_pos >= align * 8)
4650 c += align, bit_pos -= align * 8;
4651 offset = c;
4653 /* In PCC layout named bit-fields influence the alignment
4654 of the containing struct using the base type's alignment,
4655 except for packed fields (which here have correct align). */
4656 if (f->v & SYM_FIRST_ANOM
4657 // && bit_size // ??? gcc on ARM/rpi does that
4659 align = 1;
4661 } else {
4662 bt = f->type.t & VT_BTYPE;
4663 if ((bit_pos + bit_size > size * 8)
4664 || (bit_size > 0) == (bt != prevbt)
4666 c = (c + align - 1) & -align;
4667 offset = c;
4668 bit_pos = 0;
4669 /* In MS bitfield mode a bit-field run always uses
4670 at least as many bits as the underlying type.
4671 To start a new run it's also required that this
4672 or the last bit-field had non-zero width. */
4673 if (bit_size || prev_bit_size)
4674 c += size;
4676 /* In MS layout the record's alignment is normally
4677 influenced by the field, except for a zero-width
4678 field at the start of a run (but by further zero-width
4679 fields it is again). */
4680 if (bit_size == 0 && prevbt != bt)
4681 align = 1;
4682 prevbt = bt;
4683 prev_bit_size = bit_size;
4686 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4687 | (bit_pos << VT_STRUCT_SHIFT);
4688 bit_pos += bit_size;
4690 if (align > maxalign)
4691 maxalign = align;
4693 #ifdef BF_DEBUG
4694 printf("set field %s offset %-2d size %-2d align %-2d",
4695 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
4696 if (f->type.t & VT_BITFIELD) {
4697 printf(" pos %-2d bits %-2d",
4698 BIT_POS(f->type.t),
4699 BIT_SIZE(f->type.t)
4702 printf("\n");
4703 #endif
4705 f->c = offset;
4706 f->r = 0;
4709 if (pcc)
4710 c += (bit_pos + 7) >> 3;
4712 /* store size and alignment */
4713 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4714 if (a < maxalign)
4715 a = maxalign;
4716 type->ref->r = a;
4717 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4718 /* can happen if individual align for some member was given. In
4719 this case MSVC ignores maxalign when aligning the size */
4720 a = pragma_pack;
4721 if (a < bt)
4722 a = bt;
4724 c = (c + a - 1) & -a;
4725 type->ref->c = c;
4727 #ifdef BF_DEBUG
4728 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4729 #endif
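/* Layout example (illustrative; sizes as typically produced by the two
   modes handled above):

       struct bf { char a : 4; int b : 4; };

   In PCC/GCC mode 'b' is packed adjacent to 'a' in the same bytes, the
   named int bit-field raises the struct alignment to 4, and sizeof is 4.
   In MS mode the base type changes from char to int, so a new run starts
   in a fresh int container and sizeof is 8. */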
4731 /* check whether we can access bitfields by their type */
4732 for (f = type->ref->next; f; f = f->next) {
4733 int s, px, cx, c0;
4734 CType t;
4736 if (0 == (f->type.t & VT_BITFIELD))
4737 continue;
4738 f->type.ref = f;
4739 f->auxtype = -1;
4740 bit_size = BIT_SIZE(f->type.t);
4741 if (bit_size == 0)
4742 continue;
4743 bit_pos = BIT_POS(f->type.t);
4744 size = type_size(&f->type, &align);
4746 if (bit_pos + bit_size <= size * 8 && f->c + size <= c
4747 #ifdef TCC_TARGET_ARM
4748 && !(f->c & (align - 1))
4749 #endif
4751 continue;
4753 /* try to access the field using a different type */
4754 c0 = -1, s = align = 1;
4755 t.t = VT_BYTE;
4756 for (;;) {
4757 px = f->c * 8 + bit_pos;
4758 cx = (px >> 3) & -align;
4759 px = px - (cx << 3);
4760 if (c0 == cx)
4761 break;
4762 s = (px + bit_size + 7) >> 3;
4763 if (s > 4) {
4764 t.t = VT_LLONG;
4765 } else if (s > 2) {
4766 t.t = VT_INT;
4767 } else if (s > 1) {
4768 t.t = VT_SHORT;
4769 } else {
4770 t.t = VT_BYTE;
4772 s = type_size(&t, &align);
4773 c0 = cx;
4776 if (px + bit_size <= s * 8 && cx + s <= c
4777 #ifdef TCC_TARGET_ARM
4778 && !(cx & (align - 1))
4779 #endif
4781 /* update offset and bit position */
4782 f->c = cx;
4783 bit_pos = px;
4784 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4785 | (bit_pos << VT_STRUCT_SHIFT);
4786 if (s != size)
4787 f->auxtype = t.t;
4788 #ifdef BF_DEBUG
4789 printf("FIX field %s offset %-2d size %-2d align %-2d "
4790 "pos %-2d bits %-2d\n",
4791 get_tok_str(f->v & ~SYM_FIELD, NULL),
4792 cx, s, align, px, bit_size);
4793 #endif
4794 } else {
4795 /* fall back to load/store single-byte wise */
4796 f->auxtype = VT_STRUCT;
4797 #ifdef BF_DEBUG
4798 printf("FIX field %s : load byte-wise\n",
4799 get_tok_str(f->v & ~SYM_FIELD, NULL));
4800 #endif
4805 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4806 static void struct_decl(CType *type, int u)
4808 int v, c, size, align, flexible;
4809 int bit_size, bsize, bt;
4810 Sym *s, *ss, **ps;
4811 AttributeDef ad, ad1;
4812 CType type1, btype;
4814 memset(&ad, 0, sizeof ad);
4815 next();
4816 parse_attribute(&ad);
4817 if (tok != '{') {
4818 v = tok;
4819 next();
4820 /* struct already defined ? return it */
4821 if (v < TOK_IDENT)
4822 expect("struct/union/enum name");
4823 s = struct_find(v);
4824 if (s && (s->sym_scope == local_scope || tok != '{')) {
4825 if (u == s->type.t)
4826 goto do_decl;
4827 if (u == VT_ENUM && IS_ENUM(s->type.t))
4828 goto do_decl;
4829 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4831 } else {
4832 v = anon_sym++;
4834 /* Record the original enum/struct/union token. */
4835 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4836 type1.ref = NULL;
4837 /* we put an undefined size for struct/union */
4838 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4839 s->r = 0; /* default alignment is zero as gcc */
4840 do_decl:
4841 type->t = s->type.t;
4842 type->ref = s;
4844 if (tok == '{') {
4845 next();
4846 if (s->c != -1)
4847 tcc_error("struct/union/enum already defined");
4848 s->c = -2;
4849 /* cannot be empty */
4850 /* empty enums are not allowed */
4851 ps = &s->next;
4852 if (u == VT_ENUM) {
4853 long long ll = 0, pl = 0, nl = 0;
4854 CType t;
4855 t.ref = s;
4856 /* enum symbols have static storage */
4857 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4858 for(;;) {
4859 v = tok;
4860 if (v < TOK_UIDENT)
4861 expect("identifier");
4862 ss = sym_find(v);
4863 if (ss && !local_stack)
4864 tcc_error("redefinition of enumerator '%s'",
4865 get_tok_str(v, NULL));
4866 next();
4867 if (tok == '=') {
4868 next();
4869 ll = expr_const64();
4871 ss = sym_push(v, &t, VT_CONST, 0);
4872 ss->enum_val = ll;
4873 *ps = ss, ps = &ss->next;
4874 if (ll < nl)
4875 nl = ll;
4876 if (ll > pl)
4877 pl = ll;
4878 if (tok != ',')
4879 break;
4880 next();
4881 ll++;
4882 /* NOTE: we accept a trailing comma */
4883 if (tok == '}')
4884 break;
4886 skip('}');
4887 /* set integral type of the enum */
4888 t.t = VT_INT;
4889 if (nl >= 0) {
4890 if (pl != (unsigned)pl)
4891 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4892 t.t |= VT_UNSIGNED;
4893 } else if (pl != (int)pl || nl != (int)nl)
4894 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4895 s->type.t = type->t = t.t | VT_ENUM;
4896 s->c = 0;
4897 /* set type for enum members */
4898 for (ss = s->next; ss; ss = ss->next) {
4899 ll = ss->enum_val;
4900 if (ll == (int)ll) /* default is int if it fits */
4901 continue;
4902 if (t.t & VT_UNSIGNED) {
4903 ss->type.t |= VT_UNSIGNED;
4904 if (ll == (unsigned)ll)
4905 continue;
4907 ss->type.t = (ss->type.t & ~VT_BTYPE)
4908 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4910 } else {
4911 c = 0;
4912 flexible = 0;
4913 while (tok != '}') {
4914 if (!parse_btype(&btype, &ad1)) {
4915 skip(';');
4916 continue;
4918 while (1) {
4919 if (flexible)
4920 tcc_error("flexible array member '%s' not at the end of struct",
4921 get_tok_str(v, NULL));
4922 bit_size = -1;
4923 v = 0;
4924 type1 = btype;
4925 if (tok != ':') {
4926 if (tok != ';')
4927 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4928 if (v == 0) {
4929 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4930 expect("identifier");
4931 else {
4932 int v = btype.ref->v;
4933 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4934 if (tcc_state->ms_extensions == 0)
4935 expect("identifier");
4939 if (type_size(&type1, &align) < 0) {
4940 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4941 flexible = 1;
4942 else
4943 tcc_error("field '%s' has incomplete type",
4944 get_tok_str(v, NULL));
4946 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4947 (type1.t & VT_BTYPE) == VT_VOID ||
4948 (type1.t & VT_STORAGE))
4949 tcc_error("invalid type for '%s'",
4950 get_tok_str(v, NULL));
4952 if (tok == ':') {
4953 next();
4954 bit_size = expr_const();
4955 /* XXX: handle v = 0 case for messages */
4956 if (bit_size < 0)
4957 tcc_error("negative width in bit-field '%s'",
4958 get_tok_str(v, NULL));
4959 if (v && bit_size == 0)
4960 tcc_error("zero width for bit-field '%s'",
4961 get_tok_str(v, NULL));
4962 parse_attribute(&ad1);
4964 size = type_size(&type1, &align);
4965 if (bit_size >= 0) {
4966 bt = type1.t & VT_BTYPE;
4967 if (bt != VT_INT &&
4968 bt != VT_BYTE &&
4969 bt != VT_SHORT &&
4970 bt != VT_BOOL &&
4971 bt != VT_LLONG)
4972 tcc_error("bitfields must have scalar type");
4973 bsize = size * 8;
4974 if (bit_size > bsize) {
4975 tcc_error("width of '%s' exceeds its type",
4976 get_tok_str(v, NULL));
4977 } else if (bit_size == bsize
4978 && !ad.a.packed && !ad1.a.packed) {
4979 /* no need for bit fields */
4981 } else if (bit_size == 64) {
4982 tcc_error("field width 64 not implemented");
4983 } else {
4984 type1.t = (type1.t & ~VT_STRUCT_MASK)
4985 | VT_BITFIELD
4986 | (bit_size << (VT_STRUCT_SHIFT + 6));
4989 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4990 /* Remember we've seen a real field to check
4991 for placement of flexible array member. */
4992 c = 1;
4994 /* If member is a struct or bit-field, enforce
4995 placing into the struct (as anonymous). */
4996 if (v == 0 &&
4997 ((type1.t & VT_BTYPE) == VT_STRUCT ||
4998 bit_size >= 0)) {
4999 v = anon_sym++;
5001 if (v) {
5002 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
5003 ss->a = ad1.a;
5004 *ps = ss;
5005 ps = &ss->next;
5007 if (tok == ';' || tok == TOK_EOF)
5008 break;
5009 skip(',');
5011 skip(';');
5013 skip('}');
5014 parse_attribute(&ad);
5015 if (ad.cleanup_func) {
5016 tcc_warning("attribute '__cleanup__' ignored on type");
5018 check_fields(type, 1);
5019 check_fields(type, 0);
5020 struct_layout(type, &ad);
5025 static void sym_to_attr(AttributeDef *ad, Sym *s)
5027 merge_symattr(&ad->a, &s->a);
5028 merge_funcattr(&ad->f, &s->f);
5031 /* Add type qualifiers to a type. If the type is an array then the qualifiers
5032 are added to the element type, copied because it could be a typedef. */
5033 static void parse_btype_qualify(CType *type, int qualifiers)
5035 while (type->t & VT_ARRAY) {
5036 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
5037 type = &type->ref->type;
5039 type->t |= qualifiers;
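/* Example (illustrative): given

       typedef int A[4];
       const A x;

   the const is pushed down to the element type, so 'x' is an array of
   const int; the element Sym is copied first so the typedef itself stays
   unmodified. */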
5042 /* return 0 if no type declaration. otherwise, return the basic type
5043 and skip it.
5045 static int parse_btype(CType *type, AttributeDef *ad)
5047 int t, u, bt, st, type_found, typespec_found, g, n;
5048 Sym *s;
5049 CType type1;
5051 memset(ad, 0, sizeof(AttributeDef));
5052 type_found = 0;
5053 typespec_found = 0;
5054 t = VT_INT;
5055 bt = st = -1;
5056 type->ref = NULL;
5058 while(1) {
5059 switch(tok) {
5060 case TOK_EXTENSION:
5061 /* currently, we really ignore extension */
5062 next();
5063 continue;
5065 /* basic types */
5066 case TOK_CHAR:
5067 u = VT_BYTE;
5068 basic_type:
5069 next();
5070 basic_type1:
5071 if (u == VT_SHORT || u == VT_LONG) {
5072 if (st != -1 || (bt != -1 && bt != VT_INT))
5073 tmbt: tcc_error("too many basic types");
5074 st = u;
5075 } else {
5076 if (bt != -1 || (st != -1 && u != VT_INT))
5077 goto tmbt;
5078 bt = u;
5080 if (u != VT_INT)
5081 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
5082 typespec_found = 1;
5083 break;
5084 case TOK_VOID:
5085 u = VT_VOID;
5086 goto basic_type;
5087 case TOK_SHORT:
5088 u = VT_SHORT;
5089 goto basic_type;
5090 case TOK_INT:
5091 u = VT_INT;
5092 goto basic_type;
5093 case TOK_ALIGNAS:
5094 { int n;
5095 AttributeDef ad1;
5096 next();
5097 skip('(');
5098 memset(&ad1, 0, sizeof(AttributeDef));
5099 if (parse_btype(&type1, &ad1)) {
5100 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
5101 if (ad1.a.aligned)
5102 n = 1 << (ad1.a.aligned - 1);
5103 else
5104 type_size(&type1, &n);
5105 } else {
5106 n = expr_const();
5107 if (n <= 0 || (n & (n - 1)) != 0)
5108 tcc_error("alignment must be a positive power of two");
5110 skip(')');
5111 ad->a.aligned = exact_log2p1(n);
5113 continue;
5114 case TOK_LONG:
5115 if ((t & VT_BTYPE) == VT_DOUBLE) {
5116 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
5117 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
5118 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
5119 } else {
5120 u = VT_LONG;
5121 goto basic_type;
5123 next();
5124 break;
5125 #ifdef TCC_TARGET_ARM64
5126 case TOK_UINT128:
5127 /* GCC's __uint128_t appears in some Linux header files. Make it a
5128 synonym for long double to get the size and alignment right. */
5129 u = VT_LDOUBLE;
5130 goto basic_type;
5131 #endif
5132 case TOK_BOOL:
5133 u = VT_BOOL;
5134 goto basic_type;
5135 case TOK_FLOAT:
5136 u = VT_FLOAT;
5137 goto basic_type;
5138 case TOK_DOUBLE:
5139 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
5140 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
5141 } else {
5142 u = VT_DOUBLE;
5143 goto basic_type;
5145 next();
5146 break;
5147 case TOK_ENUM:
5148 struct_decl(&type1, VT_ENUM);
5149 basic_type2:
5150 u = type1.t;
5151 type->ref = type1.ref;
5152 goto basic_type1;
5153 case TOK_STRUCT:
5154 struct_decl(&type1, VT_STRUCT);
5155 goto basic_type2;
5156 case TOK_UNION:
5157 struct_decl(&type1, VT_UNION);
5158 goto basic_type2;
5160 /* type modifiers */
5161 case TOK__Atomic:
5162 next();
5163 type->t = t;
5164 parse_btype_qualify(type, VT_ATOMIC);
5165 t = type->t;
5166 if (tok == '(') {
5167 parse_expr_type(&type1);
5168 /* remove all storage modifiers except typedef */
5169 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
5170 if (type1.ref)
5171 sym_to_attr(ad, type1.ref);
5172 goto basic_type2;
5174 break;
5175 case TOK_CONST1:
5176 case TOK_CONST2:
5177 case TOK_CONST3:
5178 type->t = t;
5179 parse_btype_qualify(type, VT_CONSTANT);
5180 t = type->t;
5181 next();
5182 break;
5183 case TOK_VOLATILE1:
5184 case TOK_VOLATILE2:
5185 case TOK_VOLATILE3:
5186 type->t = t;
5187 parse_btype_qualify(type, VT_VOLATILE);
5188 t = type->t;
5189 next();
5190 break;
5191 case TOK_SIGNED1:
5192 case TOK_SIGNED2:
5193 case TOK_SIGNED3:
5194 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
5195 tcc_error("signed and unsigned modifier");
5196 t |= VT_DEFSIGN;
5197 next();
5198 typespec_found = 1;
5199 break;
5200 case TOK_REGISTER:
5201 case TOK_AUTO:
5202 case TOK_RESTRICT1:
5203 case TOK_RESTRICT2:
5204 case TOK_RESTRICT3:
5205 next();
5206 break;
5207 case TOK_UNSIGNED:
5208 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
5209 tcc_error("signed and unsigned modifier");
5210 t |= VT_DEFSIGN | VT_UNSIGNED;
5211 next();
5212 typespec_found = 1;
5213 break;
5215 /* storage */
5216 case TOK_EXTERN:
5217 g = VT_EXTERN;
5218 goto storage;
5219 case TOK_STATIC:
5220 g = VT_STATIC;
5221 goto storage;
5222 case TOK_TYPEDEF:
5223 g = VT_TYPEDEF;
5224 goto storage;
5225 storage:
5226 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
5227 tcc_error("multiple storage classes");
5228 t |= g;
5229 next();
5230 break;
5231 case TOK_INLINE1:
5232 case TOK_INLINE2:
5233 case TOK_INLINE3:
5234 t |= VT_INLINE;
5235 next();
5236 break;
5237 case TOK_NORETURN3:
5238 next();
5239 ad->f.func_noreturn = 1;
5240 break;
5241 /* GNUC attribute */
5242 case TOK_ATTRIBUTE1:
5243 case TOK_ATTRIBUTE2:
5244 parse_attribute(ad);
5245 if (ad->attr_mode) {
5246 u = ad->attr_mode -1;
5247 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
5249 continue;
5250 /* GNUC typeof */
5251 case TOK_TYPEOF1:
5252 case TOK_TYPEOF2:
5253 case TOK_TYPEOF3:
5254 next();
5255 parse_expr_type(&type1);
5256 /* remove all storage modifiers except typedef */
5257 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
5258 if (type1.ref)
5259 sym_to_attr(ad, type1.ref);
5260 goto basic_type2;
5261 default:
5262 if (typespec_found)
5263 goto the_end;
5264 s = sym_find(tok);
5265 if (!s || !(s->type.t & VT_TYPEDEF))
5266 goto the_end;
5268 n = tok, next();
5269 if (tok == ':' && !in_generic) {
5270 /* ignore if it's a label */
5271 unget_tok(n);
5272 goto the_end;
5275 t &= ~(VT_BTYPE|VT_LONG);
5276 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
5277 type->t = (s->type.t & ~VT_TYPEDEF) | u;
5278 type->ref = s->type.ref;
5279 if (t)
5280 parse_btype_qualify(type, t);
5281 t = type->t;
5282 /* get attributes from typedef */
5283 sym_to_attr(ad, s);
5284 typespec_found = 1;
5285 st = bt = -2;
5286 break;
5288 type_found = 1;
5290 the_end:
5291 if (tcc_state->char_is_unsigned) {
5292 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
5293 t |= VT_UNSIGNED;
5295 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
5296 bt = t & (VT_BTYPE|VT_LONG);
5297 if (bt == VT_LONG)
5298 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
5299 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5300 if (bt == VT_LDOUBLE)
5301 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
5302 #endif
5303 type->t = t;
5304 return type_found;
5307 /* convert a function parameter type (array to pointer and function to
5308 function pointer) */
5309 static inline void convert_parameter_type(CType *pt)
5311 /* remove const and volatile qualifiers (XXX: const could be used
5312 to indicate a const function parameter) */
5313 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
5314 /* array must be transformed to pointer according to ANSI C */
5315 pt->t &= ~VT_ARRAY;
5316 if ((pt->t & VT_BTYPE) == VT_FUNC) {
5317 mk_pointer(pt);
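/* Example (illustrative): in a prototype like

       void f(int a[10], int g(int));

   the parameter types are rewritten as if it had been declared

       void f(int *a, int (*g)(int));

   and any top-level const/volatile on a parameter is dropped. */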
5321 ST_FUNC void parse_asm_str(CString *astr)
5323 skip('(');
5324 parse_mult_str(astr, "string constant");
5327 /* Parse an asm label and return the token */
5328 static int asm_label_instr(void)
5330 int v;
5331 CString astr;
5333 next();
5334 parse_asm_str(&astr);
5335 skip(')');
5336 #ifdef ASM_DEBUG
5337 printf("asm_alias: \"%s\"\n", (char *)astr.data);
5338 #endif
5339 v = tok_alloc(astr.data, astr.size - 1)->tok;
5340 cstr_free(&astr);
5341 return v;
5344 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
5346 int n, l, t1, arg_size, align, unused_align;
5347 Sym **plast, *s, *first;
5348 AttributeDef ad1;
5349 CType pt;
5351 if (tok == '(') {
5352 /* function type, or recursive declarator (return if so) */
5353 next();
5354 if (TYPE_DIRECT == (td & (TYPE_DIRECT|TYPE_ABSTRACT)))
5355 return 0;
5356 if (tok == ')')
5357 l = 0;
5358 else if (parse_btype(&pt, &ad1))
5359 l = FUNC_NEW;
5360 else if (td & (TYPE_DIRECT|TYPE_ABSTRACT)) {
5361 merge_attr (ad, &ad1);
5362 return 0;
5363 } else
5364 l = FUNC_OLD;
5366 first = NULL;
5367 plast = &first;
5368 arg_size = 0;
5369 ++local_scope;
5370 if (l) {
5371 for(;;) {
5372 /* read param name and compute offset */
5373 if (l != FUNC_OLD) {
5374 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
5375 break;
5376 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT | TYPE_PARAM);
5377 if ((pt.t & VT_BTYPE) == VT_VOID)
5378 tcc_error("parameter declared as void");
5379 if (n == 0)
5380 n = SYM_FIELD;
5381 } else {
5382 n = tok;
5383 pt.t = VT_VOID; /* invalid type */
5384 pt.ref = NULL;
5385 next();
5387 if (n < TOK_UIDENT)
5388 expect("identifier");
5389 convert_parameter_type(&pt);
5390 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
5391 s = sym_push(n, &pt, 0, 0);
5392 *plast = s;
5393 plast = &s->next;
5394 if (tok == ')')
5395 break;
5396 skip(',');
5397 if (l == FUNC_NEW && tok == TOK_DOTS) {
5398 l = FUNC_ELLIPSIS;
5399 next();
5400 break;
5402 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
5403 tcc_error("invalid type");
5405 } else
5406 /* if no parameters, then old type prototype */
5407 l = FUNC_OLD;
5408 skip(')');
5409 /* remove parameter symbols from token table, keep on stack */
5410 if (first) {
5411 sym_pop(local_stack ? &local_stack : &global_stack, first->prev, 1);
5412 for (s = first; s; s = s->next)
5413 s->v |= SYM_FIELD;
5415 --local_scope;
5416 /* NOTE: const is ignored in returned type as it has a special
5417 meaning in gcc / C++ */
5418 type->t &= ~VT_CONSTANT;
5419 /* some ancient pre-K&R C allows a function to return an array
5420 and the array brackets to be put after the arguments, such
5421 that "int c()[]" means something like "int[] c()" */
5422 if (tok == '[') {
5423 next();
5424 skip(']'); /* only handle simple "[]" */
5425 mk_pointer(type);
5427 /* we push an anonymous symbol which will contain the function prototype */
5428 ad->f.func_args = arg_size;
5429 ad->f.func_type = l;
5430 s = sym_push(SYM_FIELD, type, 0, 0);
5431 s->a = ad->a;
5432 s->f = ad->f;
5433 s->next = first;
5434 type->t = VT_FUNC;
5435 type->ref = s;
5436 } else if (tok == '[') {
5437 int saved_nocode_wanted = nocode_wanted;
5438 /* array definition */
5439 next();
5440 n = -1;
5441 t1 = 0;
5442 if (td & TYPE_PARAM) while (1) {
5443 /* XXX The optional type-quals and static should only be accepted
5444 in parameter decls. The '*' as well, and then even only
5445 in prototypes (not function defs). */
5446 switch (tok) {
5447 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
5448 case TOK_CONST1:
5449 case TOK_VOLATILE1:
5450 case TOK_STATIC:
5451 case '*':
5452 next();
5453 continue;
5454 default:
5455 break;
5457 if (tok != ']') {
5458 nocode_wanted = 1;
5459 gexpr(), vpop();
5461 break;
5463 } else if (tok != ']') {
5464 if (!local_stack || (storage & VT_STATIC))
5465 vpushi(expr_const());
5466 else {
5467 /* The length of a VLA (which can only happen with local_stack &&
5468 !VT_STATIC) must always be evaluated, even under nocode_wanted,
5469 so that its size slot is initialized (e.g. under sizeof
5470 or typeof). */
5471 nocode_wanted = 0;
5472 gexpr();
5474 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5475 n = vtop->c.i;
5476 if (n < 0)
5477 tcc_error("invalid array size");
5478 } else {
5479 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5480 tcc_error("size of variable length array should be an integer");
5481 n = 0;
5482 t1 = VT_VLA;
5485 skip(']');
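/* Examples (illustrative) of the size forms accepted above:

       void f(int n, char buf[n * 2]);  // parameter: the size expression is
                                        // parsed, then discarded (nocode_wanted)
       void g(char buf[static 10]);     // 'static', qualifiers and '*' are skipped
       void h(int n) { int v[n]; }      // local VLA: size evaluated at run time

   Constant sizes outside parameters go through expr_const(); a non-constant
   size turns the type into a VLA (VT_VLA). */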
5486 /* parse next post type */
5487 post_type(type, ad, storage, td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
5489 if ((type->t & VT_BTYPE) == VT_FUNC)
5490 tcc_error("declaration of an array of functions");
5491 if ((type->t & VT_BTYPE) == VT_VOID
5492 || type_size(type, &unused_align) < 0)
5493 tcc_error("declaration of an array of incomplete type elements");
5495 t1 |= type->t & VT_VLA;
5497 if (t1 & VT_VLA) {
5498 if (n < 0)
5499 tcc_error("need explicit inner array size in VLAs");
5500 loc -= type_size(&int_type, &align);
5501 loc &= -align;
5502 n = loc;
5504 vla_runtime_type_size(type, &align);
5505 gen_op('*');
5506 vset(&int_type, VT_LOCAL|VT_LVAL, n);
5507 vswap();
5508 vstore();
5510 if (n != -1)
5511 vpop();
5512 nocode_wanted = saved_nocode_wanted;
5514 /* we push an anonymous symbol which will contain the array
5515 element type */
5516 s = sym_push(SYM_FIELD, type, 0, n);
5517 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
5518 type->ref = s;
5520 return 1;
5523 /* Parse a type declarator (except basic type), and return the type
5524 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5525 expected. 'type' should contain the basic type. 'ad' is the
5526 attribute definition of the basic type. It can be modified by
5527 type_decl(). If this (possibly abstract) declarator is a pointer chain
5528 it returns the innermost pointed to type (equals *type, but is a different
5529 pointer), otherwise returns type itself, that's used for recursive calls. */
5530 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
5532 CType *post, *ret;
5533 int qualifiers, storage;
5535 /* recursive type, remove storage bits first, apply them later again */
5536 storage = type->t & VT_STORAGE;
5537 type->t &= ~VT_STORAGE;
5538 post = ret = type;
5540 while (tok == '*') {
5541 qualifiers = 0;
5542 redo:
5543 next();
5544 switch(tok) {
5545 case TOK__Atomic:
5546 qualifiers |= VT_ATOMIC;
5547 goto redo;
5548 case TOK_CONST1:
5549 case TOK_CONST2:
5550 case TOK_CONST3:
5551 qualifiers |= VT_CONSTANT;
5552 goto redo;
5553 case TOK_VOLATILE1:
5554 case TOK_VOLATILE2:
5555 case TOK_VOLATILE3:
5556 qualifiers |= VT_VOLATILE;
5557 goto redo;
5558 case TOK_RESTRICT1:
5559 case TOK_RESTRICT2:
5560 case TOK_RESTRICT3:
5561 goto redo;
5562 /* XXX: clarify attribute handling */
5563 case TOK_ATTRIBUTE1:
5564 case TOK_ATTRIBUTE2:
5565 parse_attribute(ad);
5566 break;
5568 mk_pointer(type);
5569 type->t |= qualifiers;
5570 if (ret == type)
5571 /* innermost pointed to type is the one for the first derivation */
5572 ret = pointed_type(type);
5575 if (tok == '(') {
5576 /* This is possibly a parameter type list for abstract declarators
5577 ('int ()'), use post_type for testing this. */
5578 if (!post_type(type, ad, 0, td)) {
5579 /* It's not, so it's a nested declarator, and the post operations
5580 apply to the innermost pointed to type (if any). */
5581 /* XXX: this is not correct to modify 'ad' at this point, but
5582 the syntax is not clear */
5583 parse_attribute(ad);
5584 post = type_decl(type, ad, v, td);
5585 skip(')');
5586 } else
5587 goto abstract;
5588 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5589 /* type identifier */
5590 *v = tok;
5591 next();
5592 } else {
5593 abstract:
5594 if (!(td & TYPE_ABSTRACT))
5595 expect("identifier");
5596 *v = 0;
5598 post_type(post, ad, storage, td & ~(TYPE_DIRECT|TYPE_ABSTRACT));
5599 parse_attribute(ad);
5600 type->t |= storage;
5601 return ret;
5604 /* indirection with full error checking and bound check */
5605 ST_FUNC void indir(void)
5607 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5608 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5609 return;
5610 expect("pointer");
5612 if (vtop->r & VT_LVAL)
5613 gv(RC_INT);
5614 vtop->type = *pointed_type(&vtop->type);
5615 /* Arrays and functions are never lvalues */
5616 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5617 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5618 vtop->r |= VT_LVAL;
5619 /* if bound checking, the referenced pointer must be checked */
5620 #ifdef CONFIG_TCC_BCHECK
5621 if (tcc_state->do_bounds_check)
5622 vtop->r |= VT_MUSTBOUND;
5623 #endif
5627 /* pass a parameter to a function and do type checking and casting */
5628 static void gfunc_param_typed(Sym *func, Sym *arg)
5630 int func_type;
5631 CType type;
5633 func_type = func->f.func_type;
5634 if (func_type == FUNC_OLD ||
5635 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5636 /* default casting : only need to convert float to double */
5637 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5638 gen_cast_s(VT_DOUBLE);
5639 } else if (vtop->type.t & VT_BITFIELD) {
5640 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5641 type.ref = vtop->type.ref;
5642 gen_cast(&type);
5643 } else if (vtop->r & VT_MUSTCAST) {
5644 force_charshort_cast();
5646 } else if (arg == NULL) {
5647 tcc_error("too many arguments to function");
5648 } else {
5649 type = arg->type;
5650 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5651 gen_assign_cast(&type);
5655 /* parse an expression and return its type without any side effect. */
5656 static void expr_type(CType *type, void (*expr_fn)(void))
5658 nocode_wanted++;
5659 expr_fn();
5660 *type = vtop->type;
5661 vpop();
5662 nocode_wanted--;
5665 /* parse an expression of the form '(type)' or '(expr)' and return its
5666 type */
5667 static void parse_expr_type(CType *type)
5669 int n;
5670 AttributeDef ad;
5672 skip('(');
5673 if (parse_btype(type, &ad)) {
5674 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5675 } else {
5676 expr_type(type, gexpr);
5678 skip(')');
5681 static void parse_type(CType *type)
5683 AttributeDef ad;
5684 int n;
5686 if (!parse_btype(type, &ad)) {
5687 expect("type");
5689 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5692 static void parse_builtin_params(int nc, const char *args)
5694 char c, sep = '(';
5695 CType type;
5696 if (nc)
5697 nocode_wanted++;
5698 next();
5699 if (*args == 0)
5700 skip(sep);
5701 while ((c = *args++)) {
5702 skip(sep);
5703 sep = ',';
5704 if (c == 't') {
5705 parse_type(&type);
5706 vpush(&type);
5707 continue;
5709 expr_eq();
5710 type.ref = NULL;
5711 type.t = 0;
5712 switch (c) {
5713 case 'e':
5714 continue;
5715 case 'V':
5716 type.t = VT_CONSTANT;
5717 case 'v':
5718 type.t |= VT_VOID;
5719 mk_pointer (&type);
5720 break;
5721 case 'S':
5722 type.t = VT_CONSTANT;
5723 case 's':
5724 type.t |= char_type.t;
5725 mk_pointer (&type);
5726 break;
5727 case 'i':
5728 type.t = VT_INT;
5729 break;
5730 case 'l':
5731 type.t = VT_SIZE_T;
5732 break;
5733 default:
5734 break;
5736 gen_assign_cast(&type);
5738 skip(')');
5739 if (nc)
5740 nocode_wanted--;
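/* Template letters used by the callers of parse_builtin_params() in this
   file: 'e' = expression, 't' = type name, 'v'/'V' = (const) void pointer,
   's'/'S' = (const) char pointer, 'i' = int, 'l' = size_t; a non-zero 'nc'
   parses the arguments with code generation suppressed.  For instance
   __builtin_expect below uses "ee" and __builtin_constant_p uses "e" with
   nc == 1. */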
5743 static void parse_atomic(int atok)
5745 int size, align, arg;
5746 CType *atom, *atom_ptr, ct = {0};
5747 char buf[40];
5748 static const char *const templates[] = {
5750 * Each entry consists of callback and function template.
5751 * The template represents argument types and return type.
5753 * ? void (return-only)
5754 * b bool
5755 * a atomic
5756 * A read-only atomic
5757 * p pointer to memory
5758 * v value
5759 * m memory model
5762 /* keep in order of appearance in tcctok.h: */
5763 /* __atomic_store */ "avm.?",
5764 /* __atomic_load */ "Am.v",
5765 /* __atomic_exchange */ "avm.v",
5766 /* __atomic_compare_exchange */ "apvbmm.b",
5767 /* __atomic_fetch_add */ "avm.v",
5768 /* __atomic_fetch_sub */ "avm.v",
5769 /* __atomic_fetch_or */ "avm.v",
5770 /* __atomic_fetch_xor */ "avm.v",
5771 /* __atomic_fetch_and */ "avm.v"
5773 const char *template = templates[(atok - TOK___atomic_store)];
5775 atom = atom_ptr = NULL;
5776 size = 0; /* pacify compiler */
5777 next();
5778 skip('(');
5779 for (arg = 0;;) {
5780 expr_eq();
5781 switch (template[arg]) {
5782 case 'a':
5783 case 'A':
5784 atom_ptr = &vtop->type;
5785 if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
5786 expect("pointer");
5787 atom = pointed_type(atom_ptr);
5788 size = type_size(atom, &align);
5789 if (size > 8
5790 || (size & (size - 1))
5791 || (atok > TOK___atomic_compare_exchange
5792 && (0 == btype_size(atom->t & VT_BTYPE)
5793 || (atom->t & VT_BTYPE) == VT_PTR)))
5794 expect("integral or integer-sized pointer target type");
5795 /* GCC does not care either: */
5796 /* if (!(atom->t & VT_ATOMIC))
5797 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5798 break;
5800 case 'p':
5801 if ((vtop->type.t & VT_BTYPE) != VT_PTR
5802 || type_size(pointed_type(&vtop->type), &align) != size)
5803 tcc_error("pointer target type mismatch in argument %d", arg + 1);
5804 gen_assign_cast(atom_ptr);
5805 break;
5806 case 'v':
5807 gen_assign_cast(atom);
5808 break;
5809 case 'm':
5810 gen_assign_cast(&int_type);
5811 break;
5812 case 'b':
5813 ct.t = VT_BOOL;
5814 gen_assign_cast(&ct);
5815 break;
5817 if ('.' == template[++arg])
5818 break;
5819 skip(',');
5821 skip(')');
5823 ct.t = VT_VOID;
5824 switch (template[arg + 1]) {
5825 case 'b':
5826 ct.t = VT_BOOL;
5827 break;
5828 case 'v':
5829 ct = *atom;
5830 break;
5833 sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
5834 vpush_helper_func(tok_alloc_const(buf));
5835 vrott(arg + 1);
5836 gfunc_call(arg);
5838 vpush(&ct);
5839 PUT_R_RET(vtop, ct.t);
5840 if (ct.t == VT_BOOL) {
5841 #ifdef PROMOTE_RET
5842 vtop->r |= BFVAL(VT_MUSTCAST, 1);
5843 #else
5844 vtop->type.t = VT_INT;
5845 #endif
5849 ST_FUNC void unary(void)
5851 int n, t, align, size, r, sizeof_caller;
5852 CType type;
5853 Sym *s;
5854 AttributeDef ad;
5856 /* generate line number info */
5857 if (debug_modes)
5858 tcc_debug_line(tcc_state), tcc_tcov_check_line (1);
5860 sizeof_caller = in_sizeof;
5861 in_sizeof = 0;
5862 type.ref = NULL;
5863 /* XXX: GCC 2.95.3 does not generate a jump table for this switch,
5864 although it would be better here */
5865 tok_next:
5866 switch(tok) {
5867 case TOK_EXTENSION:
5868 next();
5869 goto tok_next;
5870 case TOK_LCHAR:
5871 #ifdef TCC_TARGET_PE
5872 t = VT_SHORT|VT_UNSIGNED;
5873 goto push_tokc;
5874 #endif
5875 case TOK_CINT:
5876 case TOK_CCHAR:
5877 t = VT_INT;
5878 push_tokc:
5879 type.t = t;
5880 vsetc(&type, VT_CONST, &tokc);
5881 next();
5882 break;
5883 case TOK_CUINT:
5884 t = VT_INT | VT_UNSIGNED;
5885 goto push_tokc;
5886 case TOK_CLLONG:
5887 t = VT_LLONG;
5888 goto push_tokc;
5889 case TOK_CULLONG:
5890 t = VT_LLONG | VT_UNSIGNED;
5891 goto push_tokc;
5892 case TOK_CFLOAT:
5893 t = VT_FLOAT;
5894 goto push_tokc;
5895 case TOK_CDOUBLE:
5896 t = VT_DOUBLE;
5897 goto push_tokc;
5898 case TOK_CLDOUBLE:
5899 t = VT_LDOUBLE;
5900 goto push_tokc;
5901 case TOK_CLONG:
5902 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5903 goto push_tokc;
5904 case TOK_CULONG:
5905 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5906 goto push_tokc;
5907 case TOK___FUNCTION__:
5908 if (!gnu_ext)
5909 goto tok_identifier;
5910 /* fall thru */
5911 case TOK___FUNC__:
5913 Section *sec;
5914 int len;
5915 /* special function name identifier */
5916 len = strlen(funcname) + 1;
5917 /* generate char[len] type */
5918 type.t = char_type.t;
5919 if (tcc_state->warn_write_strings & WARN_ON)
5920 type.t |= VT_CONSTANT;
5921 mk_pointer(&type);
5922 type.t |= VT_ARRAY;
5923 type.ref->c = len;
5924 sec = rodata_section;
5925 vpush_ref(&type, sec, sec->data_offset, len);
5926 if (!NODATA_WANTED)
5927 memcpy(section_ptr_add(sec, len), funcname, len);
5928 next();
5930 break;
5931 case TOK_LSTR:
5932 #ifdef TCC_TARGET_PE
5933 t = VT_SHORT | VT_UNSIGNED;
5934 #else
5935 t = VT_INT;
5936 #endif
5937 goto str_init;
5938 case TOK_STR:
5939 /* string parsing */
5940 t = char_type.t;
5941 str_init:
5942 if (tcc_state->warn_write_strings & WARN_ON)
5943 t |= VT_CONSTANT;
5944 type.t = t;
5945 mk_pointer(&type);
5946 type.t |= VT_ARRAY;
5947 memset(&ad, 0, sizeof(AttributeDef));
5948 ad.section = rodata_section;
5949 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5950 break;
5951 case '(':
5952 next();
5953 /* cast ? */
5954 if (parse_btype(&type, &ad)) {
5955 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5956 skip(')');
5957 /* check ISOC99 compound literal */
5958 if (tok == '{') {
5959 /* data is allocated locally by default */
5960 if (global_expr)
5961 r = VT_CONST;
5962 else
5963 r = VT_LOCAL;
5964 /* all except arrays are lvalues */
5965 if (!(type.t & VT_ARRAY))
5966 r |= VT_LVAL;
5967 memset(&ad, 0, sizeof(AttributeDef));
5968 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5969 } else {
5970 if (sizeof_caller) {
5971 vpush(&type);
5972 return;
5974 unary();
5975 gen_cast(&type);
5977 } else if (tok == '{') {
5978 int saved_nocode_wanted = nocode_wanted;
5979 if (const_wanted && !(nocode_wanted & unevalmask))
5980 expect("constant");
5981 if (0 == local_scope)
5982 tcc_error("statement expression outside of function");
5983 /* save all registers */
5984 save_regs(0);
5985 /* statement expression : we do not accept break/continue
5986 inside as GCC does. We do retain the nocode_wanted state,
5987 as statement expressions can't ever be entered from the
5988 outside, so any reactivation of code emission (from labels
5989 or loop heads) can be disabled again after the end of it. */
5990 block(1);
5991 /* or'ing to keep however possible CODE_OFF() from e.g. "return 0;"
5992 in the statement expression */
5993 nocode_wanted |= saved_nocode_wanted;
5994 skip(')');
5995 } else {
5996 gexpr();
5997 skip(')');
5999 break;
6000 case '*':
6001 next();
6002 unary();
6003 indir();
6004 break;
6005 case '&':
6006 next();
6007 unary();
6008 /* functions names must be treated as function pointers,
6009 except for unary '&' and sizeof. Since we consider that
6010 functions are not lvalues, we only have to handle it
6011 there and in function calls. */
6012 /* arrays can also be used although they are not lvalues */
6013 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
6014 !(vtop->type.t & VT_ARRAY))
6015 test_lvalue();
6016 if (vtop->sym)
6017 vtop->sym->a.addrtaken = 1;
6018 mk_pointer(&vtop->type);
6019 gaddrof();
6020 break;
6021 case '!':
6022 next();
6023 unary();
6024 gen_test_zero(TOK_EQ);
6025 break;
6026 case '~':
6027 next();
6028 unary();
6029 vpushi(-1);
6030 gen_op('^');
6031 break;
6032 case '+':
6033 next();
6034 unary();
6035 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
6036 tcc_error("pointer not accepted for unary plus");
6037 /* In order to force a cast, we add zero, except for floating point
6038 where we really need a noop (otherwise -0.0 will be transformed
6039 into +0.0). */
6040 if (!is_float(vtop->type.t)) {
6041 vpushi(0);
6042 gen_op('+');
6044 break;
6045 case TOK_SIZEOF:
6046 case TOK_ALIGNOF1:
6047 case TOK_ALIGNOF2:
6048 case TOK_ALIGNOF3:
6049 t = tok;
6050 next();
6051 in_sizeof++;
6052 expr_type(&type, unary); /* the nested unary() resets in_sizeof to 0 */
6053 s = NULL;
6054 if (vtop[1].r & VT_SYM)
6055 s = vtop[1].sym; /* hack: accessing previous vtop */
6056 size = type_size(&type, &align);
6057 if (s && s->a.aligned)
6058 align = 1 << (s->a.aligned - 1);
6059 if (t == TOK_SIZEOF) {
6060 if (!(type.t & VT_VLA)) {
6061 if (size < 0)
6062 tcc_error("sizeof applied to an incomplete type");
6063 vpushs(size);
6064 } else {
6065 vla_runtime_type_size(&type, &align);
6067 } else {
6068 vpushs(align);
6070 vtop->type.t |= VT_UNSIGNED;
6071 break;
6073 case TOK_builtin_expect:
6074 /* __builtin_expect is a no-op for now */
6075 parse_builtin_params(0, "ee");
6076 vpop();
6077 break;
6078 case TOK_builtin_types_compatible_p:
6079 parse_builtin_params(0, "tt");
6080 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
6081 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
6082 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
6083 vtop -= 2;
6084 vpushi(n);
6085 break;
6086 case TOK_builtin_choose_expr:
6088 int64_t c;
6089 next();
6090 skip('(');
6091 c = expr_const64();
6092 skip(',');
6093 if (!c) {
6094 nocode_wanted++;
6096 expr_eq();
6097 if (!c) {
6098 vpop();
6099 nocode_wanted--;
6101 skip(',');
6102 if (c) {
6103 nocode_wanted++;
6105 expr_eq();
6106 if (c) {
6107 vpop();
6108 nocode_wanted--;
6110 skip(')');
6112 break;
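/* Example (illustrative, f64/f32 are placeholder names): in

       __builtin_choose_expr(sizeof(long) == 8, f64(x), f32(x))

   both branches are parsed, but only the one selected by the constant
   condition generates code; the other is evaluated under nocode_wanted
   and popped. */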
6113 case TOK_builtin_constant_p:
6114 parse_builtin_params(1, "e");
6115 n = (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6116 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
6117 vtop--;
6118 vpushi(n);
6119 break;
6120 case TOK_builtin_frame_address:
6121 case TOK_builtin_return_address:
6123 int tok1 = tok;
6124 int level;
6125 next();
6126 skip('(');
6127 if (tok != TOK_CINT) {
6128 tcc_error("%s only takes positive integers",
6129 tok1 == TOK_builtin_return_address ?
6130 "__builtin_return_address" :
6131 "__builtin_frame_address");
6133 level = (uint32_t)tokc.i;
6134 next();
6135 skip(')');
6136 type.t = VT_VOID;
6137 mk_pointer(&type);
6138 vset(&type, VT_LOCAL, 0); /* local frame */
6139 while (level--) {
6140 #ifdef TCC_TARGET_RISCV64
6141 vpushi(2*PTR_SIZE);
6142 gen_op('-');
6143 #endif
6144 mk_pointer(&vtop->type);
6145 indir(); /* -> parent frame */
6147 if (tok1 == TOK_builtin_return_address) {
6148 // assume return address is just above frame pointer on stack
6149 #ifdef TCC_TARGET_ARM
6150 vpushi(2*PTR_SIZE);
6151 gen_op('+');
6152 #elif defined TCC_TARGET_RISCV64
6153 vpushi(PTR_SIZE);
6154 gen_op('-');
6155 #else
6156 vpushi(PTR_SIZE);
6157 gen_op('+');
6158 #endif
6159 mk_pointer(&vtop->type);
6160 indir();
6163 break;
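/* Sketch (illustrative): __builtin_frame_address(0) yields the current
   frame pointer; each extra level dereferences the saved frame pointer of
   the parent frame.  __builtin_return_address(n) then loads the word next
   to that frame pointer (fp + PTR_SIZE on most targets, fp + 2*PTR_SIZE on
   ARM, fp - PTR_SIZE on RISC-V), assuming the standard frame layout. */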
6164 #ifdef TCC_TARGET_RISCV64
6165 case TOK_builtin_va_start:
6166 parse_builtin_params(0, "ee");
6167 r = vtop->r & VT_VALMASK;
6168 if (r == VT_LLOCAL)
6169 r = VT_LOCAL;
6170 if (r != VT_LOCAL)
6171 tcc_error("__builtin_va_start expects a local variable");
6172 gen_va_start();
6173 vstore();
6174 break;
6175 #endif
6176 #ifdef TCC_TARGET_X86_64
6177 #ifdef TCC_TARGET_PE
6178 case TOK_builtin_va_start:
6179 parse_builtin_params(0, "ee");
6180 r = vtop->r & VT_VALMASK;
6181 if (r == VT_LLOCAL)
6182 r = VT_LOCAL;
6183 if (r != VT_LOCAL)
6184 tcc_error("__builtin_va_start expects a local variable");
6185 vtop->r = r;
6186 vtop->type = char_pointer_type;
6187 vtop->c.i += 8;
6188 vstore();
6189 break;
6190 #else
6191 case TOK_builtin_va_arg_types:
6192 parse_builtin_params(0, "t");
6193 vpushi(classify_x86_64_va_arg(&vtop->type));
6194 vswap();
6195 vpop();
6196 break;
6197 #endif
6198 #endif
6200 #ifdef TCC_TARGET_ARM64
6201 case TOK_builtin_va_start: {
6202 parse_builtin_params(0, "ee");
6203 //xx check types
6204 gen_va_start();
6205 vpushi(0);
6206 vtop->type.t = VT_VOID;
6207 break;
6209 case TOK_builtin_va_arg: {
6210 parse_builtin_params(0, "et");
6211 type = vtop->type;
6212 vpop();
6213 //xx check types
6214 gen_va_arg(&type);
6215 vtop->type = type;
6216 break;
6218 case TOK___arm64_clear_cache: {
6219 parse_builtin_params(0, "ee");
6220 gen_clear_cache();
6221 vpushi(0);
6222 vtop->type.t = VT_VOID;
6223 break;
6225 #endif
6227 /* atomic operations */
6228 case TOK___atomic_store:
6229 case TOK___atomic_load:
6230 case TOK___atomic_exchange:
6231 case TOK___atomic_compare_exchange:
6232 case TOK___atomic_fetch_add:
6233 case TOK___atomic_fetch_sub:
6234 case TOK___atomic_fetch_or:
6235 case TOK___atomic_fetch_xor:
6236 case TOK___atomic_fetch_and:
6237 parse_atomic(tok);
6238 break;
6240 /* pre operations */
6241 case TOK_INC:
6242 case TOK_DEC:
6243 t = tok;
6244 next();
6245 unary();
6246 inc(0, t);
6247 break;
6248 case '-':
6249 next();
6250 unary();
6251 if (is_float(vtop->type.t)) {
6252 gen_opif(TOK_NEG);
6253 } else {
6254 vpushi(0);
6255 vswap();
6256 gen_op('-');
6258 break;
6259 case TOK_LAND:
6260 if (!gnu_ext)
6261 goto tok_identifier;
6262 next();
6263 /* allow to take the address of a label */
6264 if (tok < TOK_UIDENT)
6265 expect("label identifier");
6266 s = label_find(tok);
6267 if (!s) {
6268 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6269 } else {
6270 if (s->r == LABEL_DECLARED)
6271 s->r = LABEL_FORWARD;
6273 if (!s->type.t) {
6274 s->type.t = VT_VOID;
6275 mk_pointer(&s->type);
6276 s->type.t |= VT_STATIC;
6278 vpushsym(&s->type, s);
6279 next();
6280 break;
6282 case TOK_GENERIC:
6284 CType controlling_type;
6285 int has_default = 0;
6286 int has_match = 0;
6287 int learn = 0;
6288 TokenString *str = NULL;
6289 int saved_const_wanted = const_wanted;
6291 next();
6292 skip('(');
6293 const_wanted = 0;
6294 expr_type(&controlling_type, expr_eq);
6295 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
6296 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
6297 mk_pointer(&controlling_type);
6298 const_wanted = saved_const_wanted;
6299 for (;;) {
6300 learn = 0;
6301 skip(',');
6302 if (tok == TOK_DEFAULT) {
6303 if (has_default)
6304 tcc_error("too many 'default'");
6305 has_default = 1;
6306 if (!has_match)
6307 learn = 1;
6308 next();
6309 } else {
6310 AttributeDef ad_tmp;
6311 int itmp;
6312 CType cur_type;
6314 in_generic++;
6315 parse_btype(&cur_type, &ad_tmp);
6316 in_generic--;
6318 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
6319 if (compare_types(&controlling_type, &cur_type, 0)) {
6320 if (has_match) {
6321 tcc_error("type match twice");
6323 has_match = 1;
6324 learn = 1;
6327 skip(':');
6328 if (learn) {
6329 if (str)
6330 tok_str_free(str);
6331 skip_or_save_block(&str);
6332 } else {
6333 skip_or_save_block(NULL);
6335 if (tok == ')')
6336 break;
6338 if (!str) {
6339 char buf[60];
6340 type_to_str(buf, sizeof buf, &controlling_type, NULL);
6341 tcc_error("type '%s' does not match any association", buf);
6343 begin_macro(str, 1);
6344 next();
6345 expr_eq();
6346 if (tok != TOK_EOF)
6347 expect(",");
6348 end_macro();
6349 next();
6350 break;
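/* Example (illustrative, names are placeholders): with

       #define describe(x) _Generic((x), int: "int", \
                                          double: "double", \
                                          default: "other")

   the controlling type of (x) -- with qualifiers and array-ness dropped and
   functions decayed to pointers, as done above -- selects one association;
   its tokens were saved by skip_or_save_block() and are replayed here as a
   macro body. */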
6352 // special qNaN, sNaN and infinity values
6353 case TOK___NAN__:
6354 n = 0x7fc00000;
6355 special_math_val:
6356 vpushi(n);
6357 vtop->type.t = VT_FLOAT;
6358 next();
6359 break;
6360 case TOK___SNAN__:
6361 n = 0x7f800001;
6362 goto special_math_val;
6363 case TOK___INF__:
6364 n = 0x7f800000;
6365 goto special_math_val;
6367 default:
6368 tok_identifier:
6369 t = tok;
6370 next();
6371 if (t < TOK_UIDENT)
6372 expect("identifier");
6373 s = sym_find(t);
6374 if (!s || IS_ASM_SYM(s)) {
6375 const char *name = get_tok_str(t, NULL);
6376 if (tok != '(')
6377 tcc_error("'%s' undeclared", name);
6378 /* for simple function calls, we tolerate an undeclared
6379 external reference to an int() function */
6380 tcc_warning_c(warn_implicit_function_declaration)(
6381 "implicit declaration of function '%s'", name);
6382 s = external_global_sym(t, &func_old_type);
6385 r = s->r;
6386 /* A symbol that has a register is a local register variable,
6387 which starts out as VT_LOCAL value. */
6388 if ((r & VT_VALMASK) < VT_CONST)
6389 r = (r & ~VT_VALMASK) | VT_LOCAL;
6391 vset(&s->type, r, s->c);
6392 /* Point to s as backpointer (even without r&VT_SYM).
6393 Will be used by at least the x86 inline asm parser for
6394 regvars. */
6395 vtop->sym = s;
6397 if (r & VT_SYM) {
6398 vtop->c.i = 0;
6399 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
6400 vtop->c.i = s->enum_val;
6402 break;
6405 /* post operations */
6406 while (1) {
6407 if (tok == TOK_INC || tok == TOK_DEC) {
6408 inc(1, tok);
6409 next();
6410 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
6411 int qualifiers, cumofs = 0;
6412 /* field */
6413 if (tok == TOK_ARROW)
6414 indir();
6415 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
6416 test_lvalue();
6417 gaddrof();
6418 /* expect pointer on structure */
6419 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
6420 expect("struct or union");
6421 if (tok == TOK_CDOUBLE)
6422 expect("field name");
6423 next();
6424 if (tok == TOK_CINT || tok == TOK_CUINT)
6425 expect("field name");
6426 s = find_field(&vtop->type, tok, &cumofs);
6427 if (!s)
6428 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
6429 /* add field offset to pointer */
6430 vtop->type = char_pointer_type; /* change type to 'char *' */
6431 vpushi(cumofs + s->c);
6432 gen_op('+');
6433 /* change type to field type, and set to lvalue */
6434 vtop->type = s->type;
6435 vtop->type.t |= qualifiers;
6436 /* an array is never an lvalue */
6437 if (!(vtop->type.t & VT_ARRAY)) {
6438 vtop->r |= VT_LVAL;
6439 #ifdef CONFIG_TCC_BCHECK
6440 /* if bound checking, the referenced pointer must be checked */
6441 if (tcc_state->do_bounds_check)
6442 vtop->r |= VT_MUSTBOUND;
6443 #endif
6445 next();
6446 } else if (tok == '[') {
6447 next();
6448 gexpr();
6449 gen_op('+');
6450 indir();
6451 skip(']');
6452 } else if (tok == '(') {
6453 SValue ret;
6454 Sym *sa;
6455 int nb_args, ret_nregs, ret_align, regsize, variadic;
6457 /* function call */
6458 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
6459 /* pointer test (no array accepted) */
6460 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
6461 vtop->type = *pointed_type(&vtop->type);
6462 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
6463 goto error_func;
6464 } else {
6465 error_func:
6466 expect("function pointer");
6468 } else {
6469 vtop->r &= ~VT_LVAL; /* no lvalue */
6471 /* get return type */
6472 s = vtop->type.ref;
6473 next();
6474 sa = s->next; /* first parameter */
6475 nb_args = regsize = 0;
6476 ret.r2 = VT_CONST;
6477 /* compute first implicit argument if a structure is returned */
6478 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
6479 variadic = (s->f.func_type == FUNC_ELLIPSIS);
6480 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
6481 &ret_align, &regsize);
6482 if (ret_nregs <= 0) {
6483 /* get some space for the returned structure */
6484 size = type_size(&s->type, &align);
6485 #ifdef TCC_TARGET_ARM64
6486 /* On arm64, a small struct is returned in registers.
6487 It is much easier to write it to memory if we know
6488 that we are allowed to write some extra bytes, so
6489 round the allocated space up to a power of 2: */
6490 if (size < 16)
6491 while (size & (size - 1))
6492 size = (size | (size - 1)) + 1;
6493 #endif
6494 loc = (loc - size) & -align;
6495 ret.type = s->type;
6496 ret.r = VT_LOCAL | VT_LVAL;
6497 /* pass it as 'int' to avoid structure arg passing
6498 problems */
6499 vseti(VT_LOCAL, loc);
6500 #ifdef CONFIG_TCC_BCHECK
6501 if (tcc_state->do_bounds_check)
6502 --loc;
6503 #endif
6504 ret.c = vtop->c;
6505 if (ret_nregs < 0)
6506 vtop--;
6507 else
6508 nb_args++;
6510 } else {
6511 ret_nregs = 1;
6512 ret.type = s->type;
6515 if (ret_nregs > 0) {
6516 /* return in register */
6517 ret.c.i = 0;
6518 PUT_R_RET(&ret, ret.type.t);
6520 if (tok != ')') {
6521 for(;;) {
6522 expr_eq();
6523 gfunc_param_typed(s, sa);
6524 nb_args++;
6525 if (sa)
6526 sa = sa->next;
6527 if (tok == ')')
6528 break;
6529 skip(',');
6532 if (sa)
6533 tcc_error("too few arguments to function");
6534 skip(')');
6535 gfunc_call(nb_args);
6537 if (ret_nregs < 0) {
6538 vsetc(&ret.type, ret.r, &ret.c);
6539 #ifdef TCC_TARGET_RISCV64
6540 arch_transfer_ret_regs(1);
6541 #endif
6542 } else {
6543 /* return value */
6544 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
6545 vsetc(&ret.type, r, &ret.c);
6546 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
6549 /* handle packed struct return */
6550 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6551 int addr, offset;
6553 size = type_size(&s->type, &align);
6554 /* We often write whole registers, so make sure there's enough
6555 space. Assume the register size is a power of 2. */
6556 if (regsize > align)
6557 align = regsize;
6558 loc = (loc - size) & -align;
6559 addr = loc;
6560 offset = 0;
6561 for (;;) {
6562 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6563 vswap();
6564 vstore();
6565 vtop--;
6566 if (--ret_nregs == 0)
6567 break;
6568 offset += regsize;
6570 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
6573 /* Promote char/short return values. This matters only
6574 for calling functions that were not compiled by TCC and
6575 only on some architectures. For those where it doesn't
6576 matter we expect things to be already promoted to int,
6577 but not larger. */
6578 t = s->type.t & VT_BTYPE;
6579 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6580 #ifdef PROMOTE_RET
6581 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6582 #else
6583 vtop->type.t = VT_INT;
6584 #endif
6587 if (s->f.func_noreturn) {
6588 if (debug_modes)
6589 tcc_tcov_block_end (tcov_data.line);
6590 CODE_OFF();
6592 } else {
6593 break;
6598 #ifndef precedence_parser /* original top-down parser */
6600 static void expr_prod(void)
6602 int t;
6604 unary();
6605 while ((t = tok) == '*' || t == '/' || t == '%') {
6606 next();
6607 unary();
6608 gen_op(t);
6612 static void expr_sum(void)
6614 int t;
6616 expr_prod();
6617 while ((t = tok) == '+' || t == '-') {
6618 next();
6619 expr_prod();
6620 gen_op(t);
6624 static void expr_shift(void)
6626 int t;
6628 expr_sum();
6629 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6630 next();
6631 expr_sum();
6632 gen_op(t);
6636 static void expr_cmp(void)
6638 int t;
6640 expr_shift();
6641 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6642 t == TOK_ULT || t == TOK_UGE) {
6643 next();
6644 expr_shift();
6645 gen_op(t);
6649 static void expr_cmpeq(void)
6651 int t;
6653 expr_cmp();
6654 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6655 next();
6656 expr_cmp();
6657 gen_op(t);
6661 static void expr_and(void)
6663 expr_cmpeq();
6664 while (tok == '&') {
6665 next();
6666 expr_cmpeq();
6667 gen_op('&');
6671 static void expr_xor(void)
6673 expr_and();
6674 while (tok == '^') {
6675 next();
6676 expr_and();
6677 gen_op('^');
6681 static void expr_or(void)
6683 expr_xor();
6684 while (tok == '|') {
6685 next();
6686 expr_xor();
6687 gen_op('|');
6691 static void expr_landor(int op);
6693 static void expr_land(void)
6695 expr_or();
6696 if (tok == TOK_LAND)
6697 expr_landor(tok);
6700 static void expr_lor(void)
6702 expr_land();
6703 if (tok == TOK_LOR)
6704 expr_landor(tok);
6707 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6708 #else /* defined precedence_parser */
6709 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6710 # define expr_lor() unary(), expr_infix(1)
6712 static int precedence(int tok)
6714 switch (tok) {
6715 case TOK_LOR: return 1;
6716 case TOK_LAND: return 2;
6717 case '|': return 3;
6718 case '^': return 4;
6719 case '&': return 5;
6720 case TOK_EQ: case TOK_NE: return 6;
6721 relat: case TOK_ULT: case TOK_UGE: return 7;
6722 case TOK_SHL: case TOK_SAR: return 8;
6723 case '+': case '-': return 9;
6724 case '*': case '/': case '%': return 10;
6725 default:
6726 if (tok >= TOK_ULE && tok <= TOK_GT)
6727 goto relat;
6728 return 0;
6731 static unsigned char prec[256];
6732 static void init_prec(void)
6734 int i;
6735 for (i = 0; i < 256; i++)
6736 prec[i] = precedence(i);
6738 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
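/* Example for expr_infix() below (illustrative): for "a + b * c - d",
   after unary() has parsed 'a', expr_infix(1) sees '+' (prec 9), parses
   'b', notices '*' binds tighter (prec 10) and recurses via
   expr_infix(10) to fold "b * c" first, then applies '+' and finally '-'. */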
6740 static void expr_landor(int op);
6742 static void expr_infix(int p)
6744 int t = tok, p2;
6745 while ((p2 = precedence(t)) >= p) {
6746 if (t == TOK_LOR || t == TOK_LAND) {
6747 expr_landor(t);
6748 } else {
6749 next();
6750 unary();
6751 if (precedence(tok) > p2)
6752 expr_infix(p2 + 1);
6753 gen_op(t);
6755 t = tok;
6758 #endif
6760 /* Assuming vtop is a value used in a conditional context
6761 (i.e. compared with zero), return 0 if it's false, 1 if it's
6762 true, and -1 if it can't be statically determined. */
6763 static int condition_3way(void)
6765 int c = -1;
6766 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6767 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6768 vdup();
6769 gen_cast_s(VT_BOOL);
6770 c = vtop->c.i;
6771 vpop();
6773 return c;
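/* expr_landor() below evaluates '&&'/'||' operands left to right: operands
   whose outcome is statically known (via condition_3way) are folded and the
   code for operands that can no longer affect the result is suppressed,
   e.g. "0 && f()" generates no call to f(); otherwise conditional jumps
   are chained with gvtst(). */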
6776 static void expr_landor(int op)
6778 int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
6779 for(;;) {
6780 c = f ? i : condition_3way();
6781 if (c < 0)
6782 save_regs(1), cc = 0;
6783 else if (c != i)
6784 nocode_wanted++, f = 1;
6785 if (tok != op)
6786 break;
6787 if (c < 0)
6788 t = gvtst(i, t);
6789 else
6790 vpop();
6791 next();
6792 expr_landor_next(op);
6794 if (cc || f) {
6795 vpop();
6796 vpushi(i ^ f);
6797 gsym(t);
6798 nocode_wanted -= f;
6799 } else {
6800 gvtst_set(i, t);
6804 static int is_cond_bool(SValue *sv)
6806 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6807 && (sv->type.t & VT_BTYPE) == VT_INT)
6808 return (unsigned)sv->c.i < 2;
6809 if (sv->r == VT_CMP)
6810 return 1;
6811 return 0;
6814 static void expr_cond(void)
6816 int tt, u, r1, r2, rc, t1, t2, islv, c, g;
6817 SValue sv;
6818 CType type;
6819 int ncw_prev;
6821 expr_lor();
6822 if (tok == '?') {
6823 next();
6824 c = condition_3way();
6825 g = (tok == ':' && gnu_ext);
6826 tt = 0;
6827 if (!g) {
6828 if (c < 0) {
6829 save_regs(1);
6830 tt = gvtst(1, 0);
6831 } else {
6832 vpop();
6834 } else if (c < 0) {
6835 /* needed to avoid having different registers saved in
6836 each branch */
6837 save_regs(1);
6838 gv_dup();
6839 tt = gvtst(0, 0);
6842 ncw_prev = nocode_wanted;
6843 if (c == 0)
6844 nocode_wanted++;
6845 if (!g)
6846 gexpr();
6848 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6849 mk_pointer(&vtop->type);
6850 sv = *vtop; /* save value to handle it later */
6851 vtop--; /* no vpop so that FP stack is not flushed */
6853 if (g) {
6854 u = tt;
6855 } else if (c < 0) {
6856 u = gjmp(0);
6857 gsym(tt);
6858 } else
6859 u = 0;
6861 nocode_wanted = ncw_prev;
6862 if (c == 1)
6863 nocode_wanted++;
6864 skip(':');
6865 expr_cond();
6867 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
6868 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6869 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6870 this code jumps directly to the if's then/else branches. */
6871 t1 = gvtst(0, 0);
6872 t2 = gjmp(0);
6873 gsym(u);
6874 vpushv(&sv);
6875 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6876 gvtst_set(0, t1);
6877 gvtst_set(1, t2);
6878 nocode_wanted = ncw_prev;
6879 // tcc_warning("two conditions expr_cond");
6880 return;
6883 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6884 mk_pointer(&vtop->type);
6886 /* cast operands to correct type according to ISOC rules */
6887 if (!combine_types(&type, &sv, vtop, '?'))
6888 type_incompatibility_error(&sv.type, &vtop->type,
6889 "type mismatch in conditional expression (have '%s' and '%s')");
6890 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6891 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6892 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
6894 /* now we convert second operand */
6895 if (c != 1) {
6896 gen_cast(&type);
6897 if (islv) {
6898 mk_pointer(&vtop->type);
6899 gaddrof();
6900 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6901 gaddrof();
6904 rc = RC_TYPE(type.t);
6905 /* for long longs, we use fixed registers to avoid having
6906 to handle a complicated move */
6907 if (USING_TWO_WORDS(type.t))
6908 rc = RC_RET(type.t);
6910 tt = r2 = 0;
6911 if (c < 0) {
6912 r2 = gv(rc);
6913 tt = gjmp(0);
6915 gsym(u);
6916 nocode_wanted = ncw_prev;
6918 /* this is horrible, but we must also convert first
6919 operand */
6920 if (c != 0) {
6921 *vtop = sv;
6922 gen_cast(&type);
6923 if (islv) {
6924 mk_pointer(&vtop->type);
6925 gaddrof();
6926 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6927 gaddrof();
6930 if (c < 0) {
6931 r1 = gv(rc);
6932 move_reg(r2, r1, islv ? VT_PTR : type.t);
6933 vtop->r = r2;
6934 gsym(tt);
6937 if (islv)
6938 indir();
6942 static void expr_eq(void)
6944 int t;
6946 expr_cond();
6947 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6948 test_lvalue();
6949 next();
6950 if (t == '=') {
6951 expr_eq();
6952 } else {
6953 vdup();
6954 expr_eq();
6955 gen_op(TOK_ASSIGN_OP(t));
6957 vstore();
6961 ST_FUNC void gexpr(void)
6963 while (1) {
6964 expr_eq();
6965 if (tok != ',')
6966 break;
6967 vpop();
6968 next();
6972 /* parse a constant expression and return value in vtop. */
6973 static void expr_const1(void)
6975 const_wanted++;
6976 nocode_wanted += unevalmask + 1;
6977 expr_cond();
6978 nocode_wanted -= unevalmask + 1;
6979 const_wanted--;
6982 /* parse an integer constant and return its value. */
6983 static inline int64_t expr_const64(void)
6985 int64_t c;
6986 expr_const1();
6987 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6988 expect("constant expression");
6989 c = vtop->c.i;
6990 vpop();
6991 return c;
6994 /* parse an integer constant and return its value.
6995 Complain if it doesn't fit 32bit (signed or unsigned). */
6996 ST_FUNC int expr_const(void)
6998 int c;
6999 int64_t wc = expr_const64();
7000 c = wc;
7001 if (c != wc && (unsigned)c != wc)
7002 tcc_error("constant exceeds 32 bit");
7003 return c;
7006 /* ------------------------------------------------------------------------- */
7007 /* return from function */
7009 #ifndef TCC_TARGET_ARM64
7010 static void gfunc_return(CType *func_type)
7012 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
7013 CType type, ret_type;
7014 int ret_align, ret_nregs, regsize;
7015 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
7016 &ret_align, &regsize);
7017 if (ret_nregs < 0) {
7018 #ifdef TCC_TARGET_RISCV64
7019 arch_transfer_ret_regs(0);
7020 #endif
7021 } else if (0 == ret_nregs) {
7022 /* if returning structure, must copy it to implicit
7023 first pointer arg location */
7024 type = *func_type;
7025 mk_pointer(&type);
7026 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
7027 indir();
7028 vswap();
7029 /* copy structure value to pointer */
7030 vstore();
7031 } else {
7032 /* returning structure packed into registers */
7033 int size, addr, align, rc;
7034 size = type_size(func_type,&align);
7035 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
7036 (vtop->c.i & (ret_align-1)))
7037 && (align & (ret_align-1))) {
7038 loc = (loc - size) & -ret_align;
7039 addr = loc;
7040 type = *func_type;
7041 vset(&type, VT_LOCAL | VT_LVAL, addr);
7042 vswap();
7043 vstore();
7044 vpop();
7045 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
7047 vtop->type = ret_type;
7048 rc = RC_RET(ret_type.t);
7049 if (ret_nregs == 1)
7050 gv(rc);
7051 else {
7052 for (;;) {
7053 vdup();
7054 gv(rc);
7055 vpop();
7056 if (--ret_nregs == 0)
7057 break;
7058 /* We assume that when a structure is returned in multiple
7059 registers, their classes are consecutive values of the
7060 sequence s(n) = 2^n */
7061 rc <<= 1;
7062 vtop->c.i += regsize;
7066 } else {
7067 gv(RC_RET(func_type->t));
7069 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
7071 #endif
7073 static void check_func_return(void)
7075 if ((func_vt.t & VT_BTYPE) == VT_VOID)
7076 return;
7077 if (!strcmp (funcname, "main")
7078 && (func_vt.t & VT_BTYPE) == VT_INT) {
7079 /* main returns 0 by default */
7080 vpushi(0);
7081 gen_assign_cast(&func_vt);
7082 gfunc_return(&func_vt);
7083 } else {
7084 tcc_warning("function might return no value: '%s'", funcname);
7088 /* ------------------------------------------------------------------------- */
7089 /* switch/case */
7091 static int case_cmpi(const void *pa, const void *pb)
7093 int64_t a = (*(struct case_t**) pa)->v1;
7094 int64_t b = (*(struct case_t**) pb)->v1;
7095 return a < b ? -1 : a > b;
7098 static int case_cmpu(const void *pa, const void *pb)
7100 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
7101 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
7102 return a < b ? -1 : a > b;
7105 static void gtst_addr(int t, int a)
7107 gsym_addr(gvtst(0, t), a);
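/* gcase(): while more than 8 sorted cases remain, compare the switch value
   against the middle entry's [v1,v2] range (jump to it on a hit), recurse
   into the lower half and continue with the upper half; the remaining
   cases are handled by a linear scan of equality/range tests. */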
7110 static void gcase(struct case_t **base, int len, int *bsym)
7112 struct case_t *p;
7113 int e;
7114 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
7115 while (len > 8) {
7116 /* binary search */
7117 p = base[len/2];
7118 vdup();
7119 if (ll)
7120 vpushll(p->v2);
7121 else
7122 vpushi(p->v2);
7123 gen_op(TOK_LE);
7124 e = gvtst(1, 0);
7125 vdup();
7126 if (ll)
7127 vpushll(p->v1);
7128 else
7129 vpushi(p->v1);
7130 gen_op(TOK_GE);
7131 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
7132 /* x < v1 */
7133 gcase(base, len/2, bsym);
7134 /* x > v2 */
7135 gsym(e);
7136 e = len/2 + 1;
7137 base += e; len -= e;
7139 /* linear scan */
7140 while (len--) {
7141 p = *base++;
7142 vdup();
7143 if (ll)
7144 vpushll(p->v2);
7145 else
7146 vpushi(p->v2);
7147 if (p->v1 == p->v2) {
7148 gen_op(TOK_EQ);
7149 gtst_addr(0, p->sym);
7150 } else {
7151 gen_op(TOK_LE);
7152 e = gvtst(1, 0);
7153 vdup();
7154 if (ll)
7155 vpushll(p->v1);
7156 else
7157 vpushi(p->v1);
7158 gen_op(TOK_GE);
7159 gtst_addr(0, p->sym);
7160 gsym(e);
7163 *bsym = gjmp(*bsym);
7166 /* ------------------------------------------------------------------------- */
7167 /* __attribute__((cleanup(fn))) */
7169 static void try_call_scope_cleanup(Sym *stop)
7171 Sym *cls = cur_scope->cl.s;
7173 for (; cls != stop; cls = cls->ncl) {
7174 Sym *fs = cls->next;
7175 Sym *vs = cls->prev_tok;
7177 vpushsym(&fs->type, fs);
7178 vset(&vs->type, vs->r, vs->c);
7179 vtop->sym = vs;
7180 mk_pointer(&vtop->type);
7181 gaddrof();
7182 gfunc_call(1);
7186 static void try_call_cleanup_goto(Sym *cleanupstate)
7188 Sym *oc, *cc;
7189 int ocd, ccd;
7191 if (!cur_scope->cl.s)
7192 return;
7194 /* search NCA of both cleanup chains given parents and initial depth */
7195 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
7196 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
7198 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
7200 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
7203 try_call_scope_cleanup(cc);
7206 /* call 'func' for each __attribute__((cleanup(func))) */
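/* Example (illustrative):
     void unlock(int *p);
     { int x __attribute__((cleanup(unlock))) = 0; ... }
   calls unlock(&x) whenever the block is left, including via break,
   continue, goto and return. */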
7207 static void block_cleanup(struct scope *o)
7209 int jmp = 0;
7210 Sym *g, **pg;
7211 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
7212 if (g->prev_tok->r & LABEL_FORWARD) {
7213 Sym *pcl = g->next;
7214 if (!jmp)
7215 jmp = gjmp(0);
7216 gsym(pcl->jnext);
7217 try_call_scope_cleanup(o->cl.s);
7218 pcl->jnext = gjmp(0);
7219 if (!o->cl.n)
7220 goto remove_pending;
7221 g->c = o->cl.n;
7222 pg = &g->prev;
7223 } else {
7224 remove_pending:
7225 *pg = g->prev;
7226 sym_free(g);
7229 gsym(jmp);
7230 try_call_scope_cleanup(o->cl.s);
7233 /* ------------------------------------------------------------------------- */
7234 /* VLA */
7236 static void vla_restore(int loc)
7238 if (loc)
7239 gen_vla_sp_restore(loc);
7242 static void vla_leave(struct scope *o)
7244 struct scope *c = cur_scope, *v = NULL;
7245 for (; c != o && c; c = c->prev)
7246 if (c->vla.num)
7247 v = c;
7248 if (v)
7249 vla_restore(v->vla.locorig);
7252 /* ------------------------------------------------------------------------- */
7253 /* local scopes */
7255 static void new_scope(struct scope *o)
7257 /* copy and link previous scope */
7258 *o = *cur_scope;
7259 o->prev = cur_scope;
7260 cur_scope = o;
7261 cur_scope->vla.num = 0;
7263 /* record local declaration stack position */
7264 o->lstk = local_stack;
7265 o->llstk = local_label_stack;
7266 ++local_scope;
7268 if (debug_modes)
7269 tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
7272 static void prev_scope(struct scope *o, int is_expr)
7274 vla_leave(o->prev);
7276 if (o->cl.s != o->prev->cl.s)
7277 block_cleanup(o->prev);
7279 /* pop locally defined labels */
7280 label_pop(&local_label_stack, o->llstk, is_expr);
7282 /* In the is_expr case (a statement expression is finished here),
7283 vtop might refer to symbols on the local_stack. Either via the
7284 type or via vtop->sym. We can't pop those nor any that in turn
7285 might be referred to. To make it easier we don't roll back
7286 any symbols in that case; some upper level call to block() will
7287 do that. We do have to remove such symbols from the lookup
7288 tables, though. sym_pop will do that. */
7290 /* pop locally defined symbols */
7291 pop_local_syms(o->lstk, is_expr);
7292 cur_scope = o->prev;
7293 --local_scope;
7295 if (debug_modes)
7296 tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
7299 /* leave a scope via break/continue(/goto) */
7300 static void leave_scope(struct scope *o)
7302 if (!o)
7303 return;
7304 try_call_scope_cleanup(o->cl.s);
7305 vla_leave(o);
7308 /* ------------------------------------------------------------------------- */
7309 /* call block from 'for do while' loops */
7311 static void lblock(int *bsym, int *csym)
7313 struct scope *lo = loop_scope, *co = cur_scope;
7314 int *b = co->bsym, *c = co->csym;
7315 if (csym) {
7316 co->csym = csym;
7317 loop_scope = co;
7319 co->bsym = bsym;
7320 block(0);
7321 co->bsym = b;
7322 if (csym) {
7323 co->csym = c;
7324 loop_scope = lo;
7328 static void block(int is_expr)
7330 int a, b, c, d, e, t;
7331 struct scope o;
7332 Sym *s;
7334 if (is_expr) {
7335 /* default return value is (void) */
7336 vpushi(0);
7337 vtop->type.t = VT_VOID;
7340 again:
7341 t = tok;
7342 /* If the token carries a value, next() might destroy it. This only
7343 happens with invalid code such as f(){"123"4;} */
7344 if (TOK_HAS_VALUE(t))
7345 goto expr;
7346 next();
7348 if (debug_modes)
7349 tcc_tcov_check_line (0), tcc_tcov_block_begin ();
7351 if (t == TOK_IF) {
7352 skip('(');
7353 gexpr();
7354 skip(')');
7355 a = gvtst(1, 0);
7356 block(0);
7357 if (tok == TOK_ELSE) {
7358 d = gjmp(0);
7359 gsym(a);
7360 next();
7361 block(0);
7362 gsym(d); /* patch else jmp */
7363 } else {
7364 gsym(a);
7367 } else if (t == TOK_WHILE) {
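/* code layout: d: <test>; jump-if-false -> exit; <body>; jmp d.
   'continue' jumps (b) are patched back to d, 'break' and the false
   test (a) are resolved after the loop. */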
7368 d = gind();
7369 skip('(');
7370 gexpr();
7371 skip(')');
7372 a = gvtst(1, 0);
7373 b = 0;
7374 lblock(&a, &b);
7375 gjmp_addr(d);
7376 gsym_addr(b, d);
7377 gsym(a);
7379 } else if (t == '{') {
7380 new_scope(&o);
7382 /* handle local labels declarations */
7383 while (tok == TOK_LABEL) {
7384 do {
7385 next();
7386 if (tok < TOK_UIDENT)
7387 expect("label identifier");
7388 label_push(&local_label_stack, tok, LABEL_DECLARED);
7389 next();
7390 } while (tok == ',');
7391 skip(';');
7394 while (tok != '}') {
7395 decl(VT_LOCAL);
7396 if (tok != '}') {
7397 if (is_expr)
7398 vpop();
7399 block(is_expr);
7403 prev_scope(&o, is_expr);
7404 if (local_scope)
7405 next();
7406 else if (!nocode_wanted)
7407 check_func_return();
7409 } else if (t == TOK_RETURN) {
7410 b = (func_vt.t & VT_BTYPE) != VT_VOID;
7411 if (tok != ';') {
7412 gexpr();
7413 if (b) {
7414 gen_assign_cast(&func_vt);
7415 } else {
7416 if (vtop->type.t != VT_VOID)
7417 tcc_warning("void function returns a value");
7418 vtop--;
7420 } else if (b) {
7421 tcc_warning("'return' with no value");
7422 b = 0;
7424 leave_scope(root_scope);
7425 if (b)
7426 gfunc_return(&func_vt);
7427 skip(';');
7428 /* jump unless last stmt in top-level block */
7429 if (tok != '}' || local_scope != 1)
7430 rsym = gjmp(rsym);
7431 if (debug_modes)
7432 tcc_tcov_block_end (tcov_data.line);
7433 CODE_OFF();
7435 } else if (t == TOK_BREAK) {
7436 /* compute jump */
7437 if (!cur_scope->bsym)
7438 tcc_error("cannot break");
7439 if (cur_switch && cur_scope->bsym == cur_switch->bsym)
7440 leave_scope(cur_switch->scope);
7441 else
7442 leave_scope(loop_scope);
7443 *cur_scope->bsym = gjmp(*cur_scope->bsym);
7444 skip(';');
7446 } else if (t == TOK_CONTINUE) {
7447 /* compute jump */
7448 if (!cur_scope->csym)
7449 tcc_error("cannot continue");
7450 leave_scope(loop_scope);
7451 *cur_scope->csym = gjmp(*cur_scope->csym);
7452 skip(';');
7454 } else if (t == TOK_FOR) {
7455 new_scope(&o);
7457 skip('(');
7458 if (tok != ';') {
7459 /* c99 for-loop init decl? */
7460 if (!decl0(VT_LOCAL, 1, NULL)) {
7461 /* no, regular for-loop init expr */
7462 gexpr();
7463 vpop();
7466 skip(';');
7467 a = b = 0;
7468 c = d = gind();
7469 if (tok != ';') {
7470 gexpr();
7471 a = gvtst(1, 0);
7473 skip(';');
7474 if (tok != ')') {
7475 e = gjmp(0);
7476 d = gind();
7477 gexpr();
7478 vpop();
7479 gjmp_addr(c);
7480 gsym(e);
7482 skip(')');
7483 lblock(&a, &b);
7484 gjmp_addr(d);
7485 gsym_addr(b, d);
7486 gsym(a);
7487 prev_scope(&o, 0);
7489 } else if (t == TOK_DO) {
7490 a = b = 0;
7491 d = gind();
7492 lblock(&a, &b);
7493 gsym(b);
7494 skip(TOK_WHILE);
7495 skip('(');
7496 gexpr();
7497 skip(')');
7498 skip(';');
7499 c = gvtst(0, 0);
7500 gsym_addr(c, d);
7501 gsym(a);
7503 } else if (t == TOK_SWITCH) {
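/* switch codegen: save the controlling value, jump over the body to the
   dispatch code, compile the body (each 'case' records its label in
   sw->p), then sort the case table, check for duplicates and let gcase()
   emit the compare/jump tree. */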
7504 struct switch_t *sw;
7506 sw = tcc_mallocz(sizeof *sw);
7507 sw->bsym = &a;
7508 sw->scope = cur_scope;
7509 sw->prev = cur_switch;
7510 cur_switch = sw;
7512 skip('(');
7513 gexpr();
7514 skip(')');
7515 sw->sv = *vtop--; /* save switch value */
7517 a = 0;
7518 b = gjmp(0); /* jump to first case */
7519 lblock(&a, NULL);
7520 a = gjmp(a); /* add implicit break */
7521 /* case lookup */
7522 gsym(b);
7524 if (sw->sv.type.t & VT_UNSIGNED)
7525 qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
7526 else
7527 qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
7529 for (b = 1; b < sw->n; b++)
7530 if (sw->sv.type.t & VT_UNSIGNED
7531 ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
7532 : sw->p[b - 1]->v2 >= sw->p[b]->v1)
7533 tcc_error("duplicate case value");
7535 vpushv(&sw->sv);
7536 gv(RC_INT);
7537 d = 0, gcase(sw->p, sw->n, &d);
7538 vpop();
7539 if (sw->def_sym)
7540 gsym_addr(d, sw->def_sym);
7541 else
7542 gsym(d);
7543 /* break label */
7544 gsym(a);
7546 dynarray_reset(&sw->p, &sw->n);
7547 cur_switch = sw->prev;
7548 tcc_free(sw);
7550 } else if (t == TOK_CASE) {
7551 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
7552 if (!cur_switch)
7553 expect("switch");
7554 cr->v1 = cr->v2 = expr_const64();
7555 if (gnu_ext && tok == TOK_DOTS) {
7556 next();
7557 cr->v2 = expr_const64();
7558 if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
7559 || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
7560 tcc_warning("empty case range");
7562 tcov_data.ind = 0;
7563 cr->sym = gind();
7564 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
7565 skip(':');
7566 is_expr = 0;
7567 goto block_after_label;
7569 } else if (t == TOK_DEFAULT) {
7570 if (!cur_switch)
7571 expect("switch");
7572 if (cur_switch->def_sym)
7573 tcc_error("too many 'default'");
7574 tcov_data.ind = 0;
7575 cur_switch->def_sym = gind();
7576 skip(':');
7577 is_expr = 0;
7578 goto block_after_label;
7580 } else if (t == TOK_GOTO) {
7581 if (cur_scope->vla.num)
7582 vla_restore(cur_scope->vla.locorig);
7583 if (tok == '*' && gnu_ext) {
7584 /* computed goto */
7585 next();
7586 gexpr();
7587 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
7588 expect("pointer");
7589 ggoto();
7591 } else if (tok >= TOK_UIDENT) {
7592 s = label_find(tok);
7593 /* put forward definition if needed */
7594 if (!s)
7595 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
7596 else if (s->r == LABEL_DECLARED)
7597 s->r = LABEL_FORWARD;
7599 if (s->r & LABEL_FORWARD) {
7600 /* start new goto chain for cleanups, linked via label->next */
7601 if (cur_scope->cl.s && !nocode_wanted) {
7602 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
7603 pending_gotos->prev_tok = s;
7604 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
7605 pending_gotos->next = s;
7607 s->jnext = gjmp(s->jnext);
7608 } else {
7609 try_call_cleanup_goto(s->cleanupstate);
7610 gjmp_addr(s->jnext);
7612 next();
7614 } else {
7615 expect("label identifier");
7617 skip(';');
7619 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
7620 asm_instr();
7622 } else {
7623 if (tok == ':' && t >= TOK_UIDENT) {
7624 /* label case */
7625 next();
7626 s = label_find(t);
7627 if (s) {
7628 if (s->r == LABEL_DEFINED)
7629 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
7630 s->r = LABEL_DEFINED;
7631 if (s->next) {
7632 Sym *pcl; /* pending cleanup goto */
7633 for (pcl = s->next; pcl; pcl = pcl->prev)
7634 gsym(pcl->jnext);
7635 sym_pop(&s->next, NULL, 0);
7636 } else
7637 gsym(s->jnext);
7638 } else {
7639 s = label_push(&global_label_stack, t, LABEL_DEFINED);
7641 s->jnext = gind();
7642 s->cleanupstate = cur_scope->cl.s;
7644 block_after_label:
7645 vla_restore(cur_scope->vla.loc);
7646 if (tok != '}')
7647 goto again;
7648 /* we accept this, but it is a mistake */
7649 tcc_warning_c(warn_all)("deprecated use of label at end of compound statement");
7651 } else {
7652 /* expression case */
7653 if (t != ';') {
7654 unget_tok(t);
7655 expr:
7656 if (is_expr) {
7657 vpop();
7658 gexpr();
7659 } else {
7660 gexpr();
7661 vpop();
7663 skip(';');
7668 if (debug_modes)
7669 tcc_tcov_check_line (0), tcc_tcov_block_end (0);
7672 /* This skips over a stream of tokens containing balanced {} and ()
7673 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7674 with a '{'). If STR is non-NULL, allocate and store the skipped tokens
7675 in *STR. This doesn't check if () and {} are nested correctly,
7676 i.e. "({)}" is accepted. */
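/* Example (illustrative): started on "{ 1, (2,3) }, x" it consumes up to
   and including the matching '}'; started on "1, 2" it stops at the ','. */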
7677 static void skip_or_save_block(TokenString **str)
7679 int braces = tok == '{';
7680 int level = 0;
7681 if (str)
7682 *str = tok_str_alloc();
7684 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
7685 int t;
7686 if (tok == TOK_EOF) {
7687 if (str || level > 0)
7688 tcc_error("unexpected end of file");
7689 else
7690 break;
7692 if (str)
7693 tok_str_add_tok(*str);
7694 t = tok;
7695 next();
7696 if (t == '{' || t == '(') {
7697 level++;
7698 } else if (t == '}' || t == ')') {
7699 level--;
7700 if (level == 0 && braces && t == '}')
7701 break;
7704 if (str) {
7705 tok_str_add(*str, -1);
7706 tok_str_add(*str, 0);
7710 #define EXPR_CONST 1
7711 #define EXPR_ANY 2
7713 static void parse_init_elem(int expr_type)
7715 int saved_global_expr;
7716 switch(expr_type) {
7717 case EXPR_CONST:
7718 /* compound literals must be allocated globally in this case */
7719 saved_global_expr = global_expr;
7720 global_expr = 1;
7721 expr_const1();
7722 global_expr = saved_global_expr;
7723 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7724 (compound literals). */
7725 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
7726 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
7727 || vtop->sym->v < SYM_FIRST_ANOM))
7728 #ifdef TCC_TARGET_PE
7729 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
7730 #endif
7732 tcc_error("initializer element is not constant");
7733 break;
7734 case EXPR_ANY:
7735 expr_eq();
7736 break;
7740 #if 1
7741 static void init_assert(init_params *p, int offset)
7743 if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
7744 : !nocode_wanted && offset > p->local_offset)
7745 tcc_internal_error("initializer overflow");
7747 #else
7748 #define init_assert(sec, offset)
7749 #endif
7751 /* put zeros for variable based init */
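/* For stack objects this emits the equivalent of
   memset((char *)frame + c, 0, size); globals need nothing because
   their sections are zero-initialized already. */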
7752 static void init_putz(init_params *p, unsigned long c, int size)
7754 init_assert(p, c + size);
7755 if (p->sec) {
7756 /* nothing to do because globals are already set to zero */
7757 } else {
7758 vpush_helper_func(TOK_memset);
7759 vseti(VT_LOCAL, c);
7760 #ifdef TCC_TARGET_ARM
7761 vpushs(size);
7762 vpushi(0);
7763 #else
7764 vpushi(0);
7765 vpushs(size);
7766 #endif
7767 gfunc_call(3);
7771 #define DIF_FIRST 1
7772 #define DIF_SIZE_ONLY 2
7773 #define DIF_HAVE_ELEM 4
7774 #define DIF_CLEAR 8
7776 /* delete relocations for specified range c ... c + size. Unfortunately
7777 in very special cases, relocations may occur unordered */
7778 static void decl_design_delrels(Section *sec, int c, int size)
7780 ElfW_Rel *rel, *rel2, *rel_end;
7781 if (!sec || !sec->reloc)
7782 return;
7783 rel = rel2 = (ElfW_Rel*)sec->reloc->data;
7784 rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
7785 while (rel < rel_end) {
7786 if (rel->r_offset >= c && rel->r_offset < c + size) {
7787 sec->reloc->data_offset -= sizeof *rel;
7788 } else {
7789 if (rel2 != rel)
7790 memcpy(rel2, rel, sizeof *rel);
7791 ++rel2;
7793 ++rel;
7797 static void decl_design_flex(init_params *p, Sym *ref, int index)
7799 if (ref == p->flex_array_ref) {
7800 if (index >= ref->c)
7801 ref->c = index + 1;
7802 } else if (ref->c < 0)
7803 tcc_error("flexible array has zero size in this context");
7806 /* type is the array or struct type. c is the array or struct
7807 address. cur_field is the pointer to the current
7808 field, for arrays the 'c' member contains the current start
7809 index. 'flags' is as in decl_initializer.
7810 'al' contains the already initialized length of the
7811 current container (starting at c). This returns the new length of that. */
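/* Accepted designator forms (illustrative):
     .field = expr          [index] = expr
     [lo ... hi] = expr     (GNU range extension)
     field: expr            (old GNU syntax, gnu_ext only) */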
7812 static int decl_designator(init_params *p, CType *type, unsigned long c,
7813 Sym **cur_field, int flags, int al)
7815 Sym *s, *f;
7816 int index, index_last, align, l, nb_elems, elem_size;
7817 unsigned long corig = c;
7819 elem_size = 0;
7820 nb_elems = 1;
7822 if (flags & DIF_HAVE_ELEM)
7823 goto no_designator;
7825 if (gnu_ext && tok >= TOK_UIDENT) {
7826 l = tok, next();
7827 if (tok == ':')
7828 goto struct_field;
7829 unget_tok(l);
7832 /* NOTE: we only support ranges for last designator */
7833 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
7834 if (tok == '[') {
7835 if (!(type->t & VT_ARRAY))
7836 expect("array type");
7837 next();
7838 index = index_last = expr_const();
7839 if (tok == TOK_DOTS && gnu_ext) {
7840 next();
7841 index_last = expr_const();
7843 skip(']');
7844 s = type->ref;
7845 decl_design_flex(p, s, index_last);
7846 if (index < 0 || index_last >= s->c || index_last < index)
7847 tcc_error("index exceeds array bounds or range is empty");
7848 if (cur_field)
7849 (*cur_field)->c = index_last;
7850 type = pointed_type(type);
7851 elem_size = type_size(type, &align);
7852 c += index * elem_size;
7853 nb_elems = index_last - index + 1;
7854 } else {
7855 int cumofs;
7856 next();
7857 l = tok;
7858 struct_field:
7859 next();
7860 if ((type->t & VT_BTYPE) != VT_STRUCT)
7861 expect("struct/union type");
7862 cumofs = 0;
7863 f = find_field(type, l, &cumofs);
7864 if (!f)
7865 expect("field");
7866 if (cur_field)
7867 *cur_field = f;
7868 type = &f->type;
7869 c += cumofs + f->c;
7871 cur_field = NULL;
7873 if (!cur_field) {
7874 if (tok == '=') {
7875 next();
7876 } else if (!gnu_ext) {
7877 expect("=");
7879 } else {
7880 no_designator:
7881 if (type->t & VT_ARRAY) {
7882 index = (*cur_field)->c;
7883 s = type->ref;
7884 decl_design_flex(p, s, index);
7885 if (index >= s->c)
7886 tcc_error("too many initializers");
7887 type = pointed_type(type);
7888 elem_size = type_size(type, &align);
7889 c += index * elem_size;
7890 } else {
7891 f = *cur_field;
7892 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
7893 *cur_field = f = f->next;
7894 if (!f)
7895 tcc_error("too many initializers");
7896 type = &f->type;
7897 c += f->c;
7901 if (!elem_size) /* for structs */
7902 elem_size = type_size(type, &align);
7904 /* Using designators the same element can be initialized more
7905 than once. In that case we need to delete possibly already
7906 existing relocations. */
7907 if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
7908 decl_design_delrels(p->sec, c, elem_size * nb_elems);
7909 flags &= ~DIF_CLEAR; /* mark stack dirty too */
7912 decl_initializer(p, type, c, flags & ~DIF_FIRST);
7914 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
7915 Sym aref = {0};
7916 CType t1;
7917 int i;
7918 if (p->sec || (type->t & VT_ARRAY)) {
7919 /* make init_putv/vstore believe it is a struct */
7920 aref.c = elem_size;
7921 t1.t = VT_STRUCT, t1.ref = &aref;
7922 type = &t1;
7924 if (p->sec)
7925 vpush_ref(type, p->sec, c, elem_size);
7926 else
7927 vset(type, VT_LOCAL|VT_LVAL, c);
7928 for (i = 1; i < nb_elems; i++) {
7929 vdup();
7930 init_putv(p, type, c + elem_size * i);
7932 vpop();
7935 c += nb_elems * elem_size;
7936 if (c - corig > al)
7937 al = c - corig;
7938 return al;
7941 /* store a value or an expression directly in global data or in local array */
7942 static void init_putv(init_params *p, CType *type, unsigned long c)
7944 int bt;
7945 void *ptr;
7946 CType dtype;
7947 int size, align;
7948 Section *sec = p->sec;
7949 uint64_t val;
7951 dtype = *type;
7952 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7954 size = type_size(type, &align);
7955 if (type->t & VT_BITFIELD)
7956 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7957 init_assert(p, c + size);
7959 if (sec) {
7960 /* XXX: not portable */
7961 /* XXX: generate error if incorrect relocation */
7962 gen_assign_cast(&dtype);
7963 bt = type->t & VT_BTYPE;
7965 if ((vtop->r & VT_SYM)
7966 && bt != VT_PTR
7967 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7968 || (type->t & VT_BITFIELD))
7969 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7971 tcc_error("initializer element is not computable at load time");
7973 if (NODATA_WANTED) {
7974 vtop--;
7975 return;
7978 ptr = sec->data + c;
7979 val = vtop->c.i;
7981 /* XXX: make code faster ? */
7982 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7983 vtop->sym->v >= SYM_FIRST_ANOM &&
7984 /* XXX This rejects compound literals like
7985 '(void *){ptr}'. The problem is that '&sym' is
7986 represented the same way, which would be ruled out
7987 by the SYM_FIRST_ANOM check above, but also '"string"'
7988 in 'char *p = "string"' is represented the same
7989 with the type being VT_PTR and the symbol being an
7990 anonymous one. That is, there's no difference in vtop
7991 between '(void *){x}' and '&(void *){x}'. Ignore
7992 pointer typed entities here. Hopefully no real code
7993 will ever use compound literals with scalar type. */
7994 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7995 /* These come from compound literals, memcpy stuff over. */
7996 Section *ssec;
7997 ElfSym *esym;
7998 ElfW_Rel *rel;
7999 esym = elfsym(vtop->sym);
8000 ssec = tcc_state->sections[esym->st_shndx];
8001 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
8002 if (ssec->reloc) {
8003 /* We need to copy over all memory contents, and that
8004 includes relocations. Use the fact that relocs are
8005 created in order, so look from the end of relocs
8006 until we hit one before the copied region. */
8007 unsigned long relofs = ssec->reloc->data_offset;
8008 while (relofs >= sizeof(*rel)) {
8009 relofs -= sizeof(*rel);
8010 rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
8011 if (rel->r_offset >= esym->st_value + size)
8012 continue;
8013 if (rel->r_offset < esym->st_value)
8014 break;
8015 put_elf_reloca(symtab_section, sec,
8016 c + rel->r_offset - esym->st_value,
8017 ELFW(R_TYPE)(rel->r_info),
8018 ELFW(R_SYM)(rel->r_info),
8019 #if PTR_SIZE == 8
8020 rel->r_addend
8021 #else
8023 #endif
8027 } else {
8028 if (type->t & VT_BITFIELD) {
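/* store bit_size bits of the constant at bit_pos, byte by byte: e.g.
   bit_pos=3, bit_size=7 writes 5 bits into the first byte (mask 0xf8)
   and the remaining 2 bits into the low bits of the next byte. */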
8029 int bit_pos, bit_size, bits, n;
8030 unsigned char *p, v, m;
8031 bit_pos = BIT_POS(vtop->type.t);
8032 bit_size = BIT_SIZE(vtop->type.t);
8033 p = (unsigned char*)ptr + (bit_pos >> 3);
8034 bit_pos &= 7, bits = 0;
8035 while (bit_size) {
8036 n = 8 - bit_pos;
8037 if (n > bit_size)
8038 n = bit_size;
8039 v = val >> bits << bit_pos;
8040 m = ((1 << n) - 1) << bit_pos;
8041 *p = (*p & ~m) | (v & m);
8042 bits += n, bit_size -= n, bit_pos = 0, ++p;
8044 } else
8045 switch(bt) {
8046 case VT_BOOL:
8047 *(char *)ptr = val != 0;
8048 break;
8049 case VT_BYTE:
8050 *(char *)ptr = val;
8051 break;
8052 case VT_SHORT:
8053 write16le(ptr, val);
8054 break;
8055 case VT_FLOAT:
8056 write32le(ptr, val);
8057 break;
8058 case VT_DOUBLE:
8059 write64le(ptr, val);
8060 break;
8061 case VT_LDOUBLE:
8062 #if defined TCC_IS_NATIVE_387
8063 /* Host and target platform may be different but both have x87.
8064 On windows, tcc does not use VT_LDOUBLE, except when it is a
8065 cross compiler. In this case a mingw gcc as host compiler
8066 comes here with 10-byte long doubles, while msvc or tcc won't.
8067 tcc itself can still translate by asm.
8068 In any case we avoid possibly random bytes 11 and 12.
8070 if (sizeof (long double) >= 10)
8071 memcpy(ptr, &vtop->c.ld, 10);
8072 #ifdef __TINYC__
8073 else if (sizeof (long double) == sizeof (double))
8074 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
8075 #endif
8076 else if (vtop->c.ld == 0.0)
8078 else
8079 #endif
8080 /* For other platforms it should work natively, but may not work
8081 for cross compilers */
8082 if (sizeof(long double) == LDOUBLE_SIZE)
8083 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
8084 else if (sizeof(double) == LDOUBLE_SIZE)
8085 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
8086 #ifndef TCC_CROSS_TEST
8087 else
8088 tcc_error("can't cross compile long double constants");
8089 #endif
8090 break;
8092 #if PTR_SIZE == 8
8093 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
8094 case VT_LLONG:
8095 case VT_PTR:
8096 if (vtop->r & VT_SYM)
8097 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
8098 else
8099 write64le(ptr, val);
8100 break;
8101 case VT_INT:
8102 write32le(ptr, val);
8103 break;
8104 #else
8105 case VT_LLONG:
8106 write64le(ptr, val);
8107 break;
8108 case VT_PTR:
8109 case VT_INT:
8110 if (vtop->r & VT_SYM)
8111 greloc(sec, vtop->sym, c, R_DATA_PTR);
8112 write32le(ptr, val);
8113 break;
8114 #endif
8115 default:
8116 //tcc_internal_error("unexpected type");
8117 break;
8120 vtop--;
8121 } else {
8122 vset(&dtype, VT_LOCAL|VT_LVAL, c);
8123 vswap();
8124 vstore();
8125 vpop();
8129 /* 't' contains the type and storage info. 'c' is the offset of the
8130 object in section 'sec'. If 'sec' is NULL, it means stack based
8131 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
8132 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
8133 size only evaluation is wanted (only for arrays). */
8134 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
8136 int len, n, no_oblock, i;
8137 int size1, align1;
8138 Sym *s, *f;
8139 Sym indexsym;
8140 CType *t1;
8142 /* generate line number info */
8143 if (debug_modes && !p->sec)
8144 tcc_debug_line(tcc_state), tcc_tcov_check_line (1);
8146 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
8147 /* In case of strings we have special handling for arrays, so
8148 don't consume them as initializer value (which would commit them
8149 to some anonymous symbol). */
8150 tok != TOK_LSTR && tok != TOK_STR &&
8151 !(flags & DIF_SIZE_ONLY)) {
8152 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
8153 flags |= DIF_HAVE_ELEM;
8156 if ((flags & DIF_HAVE_ELEM) &&
8157 !(type->t & VT_ARRAY) &&
8158 /* Use i_c_parameter_t, to strip toplevel qualifiers.
8159 The source type might have VT_CONSTANT set, which is
8160 of course assignable to non-const elements. */
8161 is_compatible_unqualified_types(type, &vtop->type)) {
8162 goto init_putv;
8164 } else if (type->t & VT_ARRAY) {
8165 no_oblock = 1;
8166 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
8167 tok == '{') {
8168 skip('{');
8169 no_oblock = 0;
8172 s = type->ref;
8173 n = s->c;
8174 t1 = pointed_type(type);
8175 size1 = type_size(t1, &align1);
8177 /* only parse strings here if correct type (otherwise: handle
8178 them as ((w)char *) expressions */
8179 if ((tok == TOK_LSTR &&
8180 #ifdef TCC_TARGET_PE
8181 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
8182 #else
8183 (t1->t & VT_BTYPE) == VT_INT
8184 #endif
8185 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
8186 len = 0;
8187 cstr_reset(&initstr);
8188 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
8189 tcc_error("unhandled string literal merging");
8190 while (tok == TOK_STR || tok == TOK_LSTR) {
8191 if (initstr.size)
8192 initstr.size -= size1;
8193 if (tok == TOK_STR)
8194 len += tokc.str.size;
8195 else
8196 len += tokc.str.size / sizeof(nwchar_t);
8197 len--;
8198 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
8199 next();
8201 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
8202 && tok != TOK_EOF) {
8203 /* Not a lone literal but part of a bigger expression. */
8204 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
8205 tokc.str.size = initstr.size;
8206 tokc.str.data = initstr.data;
8207 goto do_init_array;
8210 decl_design_flex(p, s, len);
8211 if (!(flags & DIF_SIZE_ONLY)) {
8212 int nb = n;
8213 if (len < nb)
8214 nb = len;
8215 if (len > nb)
8216 tcc_warning("initializer-string for array is too long");
8217 /* to go faster for the common case (a char
8218 string in a global variable), we handle it
8219 specifically */
8220 if (p->sec && size1 == 1) {
8221 init_assert(p, c + nb);
8222 if (!NODATA_WANTED)
8223 memcpy(p->sec->data + c, initstr.data, nb);
8224 } else {
8225 for(i=0;i<n;i++) {
8226 if (i >= nb) {
8227 /* only add trailing zero if enough storage (no
8228 warning in this case since it is standard) */
8229 if (flags & DIF_CLEAR)
8230 break;
8231 if (n - i >= 4) {
8232 init_putz(p, c + i * size1, (n - i) * size1);
8233 break;
8235 ch = 0;
8236 } else if (size1 == 1)
8237 ch = ((unsigned char *)initstr.data)[i];
8238 else
8239 ch = ((nwchar_t *)initstr.data)[i];
8240 vpushi(ch);
8241 init_putv(p, t1, c + i * size1);
8245 } else {
8247 do_init_array:
8248 indexsym.c = 0;
8249 f = &indexsym;
8251 do_init_list:
8252 /* zero memory once in advance */
8253 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
8254 init_putz(p, c, n*size1);
8255 flags |= DIF_CLEAR;
8258 len = 0;
8259 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
8260 len = decl_designator(p, type, c, &f, flags, len);
8261 flags &= ~DIF_HAVE_ELEM;
8262 if (type->t & VT_ARRAY) {
8263 ++indexsym.c;
8264 /* special test for multi dimensional arrays (may not
8265 be strictly correct if designators are used at the
8266 same time) */
8267 if (no_oblock && len >= n*size1)
8268 break;
8269 } else {
8270 if (s->type.t == VT_UNION)
8271 f = NULL;
8272 else
8273 f = f->next;
8274 if (no_oblock && f == NULL)
8275 break;
8278 if (tok == '}')
8279 break;
8280 skip(',');
8283 if (!no_oblock)
8284 skip('}');
8285 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
8286 no_oblock = 1;
8287 if ((flags & DIF_FIRST) || tok == '{') {
8288 skip('{');
8289 no_oblock = 0;
8291 s = type->ref;
8292 f = s->next;
8293 n = s->c;
8294 size1 = 1;
8295 goto do_init_list;
8296 } else if (tok == '{') {
8297 if (flags & DIF_HAVE_ELEM)
8298 skip(';');
8299 next();
8300 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
8301 skip('}');
8302 } else if ((flags & DIF_SIZE_ONLY)) {
8303 /* If we supported only ISO C we wouldn't have to accept calling
8304 this on anything other than an array if DIF_SIZE_ONLY (and even then
8305 only on the outermost level, so no recursion would be needed),
8306 because initializing a flex array member isn't supported.
8307 But GNU C supports it, so we need to recurse even into
8308 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
8309 /* just skip expression */
8310 skip_or_save_block(NULL);
8311 } else {
8312 if (!(flags & DIF_HAVE_ELEM)) {
8313 /* This should happen only when we haven't parsed
8314 the init element above for fear of committing a
8315 string constant to memory too early. */
8316 if (tok != TOK_STR && tok != TOK_LSTR)
8317 expect("string constant");
8318 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
8320 init_putv:
8321 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
8322 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
8323 && vtop->c.i == 0
8324 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
8326 vpop();
8327 else
8328 init_putv(p, type, c);
8332 /* parse an initializer for type 't' if 'has_init' is non zero, and
8333 allocate space in local or global data space ('r' is either
8334 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
8335 variable 'v' of scope 'scope' is declared before initializers
8336 are parsed. If 'v' is zero, then a reference to the new object
8337 is put in the value stack. If 'has_init' is 2, a special parsing
8338 is done to handle string constants. */
8339 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
8340 int has_init, int v, int scope)
8342 int size, align, addr;
8343 TokenString *init_str = NULL;
8345 Section *sec;
8346 Sym *flexible_array;
8347 Sym *sym;
8348 int saved_nocode_wanted = nocode_wanted;
8349 #ifdef CONFIG_TCC_BCHECK
8350 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
8351 #endif
8352 init_params p = {0};
8354 /* Always allocate static or global variables */
8355 if (v && (r & VT_VALMASK) == VT_CONST)
8356 nocode_wanted |= 0x80000000;
8358 flexible_array = NULL;
8359 size = type_size(type, &align);
8361 /* exactly one flexible array may be initialized, either the
8362 toplevel array or the last member of the toplevel struct */
8364 if (size < 0) {
8365 /* If the base type itself was an array type of unspecified size
8366 (like in 'typedef int arr[]; arr x = {1};') then we will
8367 overwrite the unknown size with the real one for this decl.
8368 We need to unshare the ref symbol holding that size. */
8369 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
8370 p.flex_array_ref = type->ref;
8372 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
8373 Sym *field = type->ref->next;
8374 if (field) {
8375 while (field->next)
8376 field = field->next;
8377 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
8378 flexible_array = field;
8379 p.flex_array_ref = field->type.ref;
8380 size = -1;
8385 if (size < 0) {
8386 /* If unknown size, do a dry-run 1st pass */
8387 if (!has_init)
8388 tcc_error("unknown type size");
8389 if (has_init == 2) {
8390 /* only get strings */
8391 init_str = tok_str_alloc();
8392 while (tok == TOK_STR || tok == TOK_LSTR) {
8393 tok_str_add_tok(init_str);
8394 next();
8396 tok_str_add(init_str, -1);
8397 tok_str_add(init_str, 0);
8398 } else
8399 skip_or_save_block(&init_str);
8400 unget_tok(0);
8402 /* compute size */
8403 begin_macro(init_str, 1);
8404 next();
8405 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
8406 /* prepare second initializer parsing */
8407 macro_ptr = init_str->str;
8408 next();
8410 /* if still unknown size, error */
8411 size = type_size(type, &align);
8412 if (size < 0)
8413 tcc_error("unknown type size");
8415 /* If there's a flex member and it was used in the initializer
8416 adjust size. */
8417 if (flexible_array && flexible_array->type.ref->c > 0)
8418 size += flexible_array->type.ref->c
8419 * pointed_size(&flexible_array->type);
8422 /* take into account specified alignment if bigger */
8423 if (ad->a.aligned) {
8424 int speca = 1 << (ad->a.aligned - 1);
8425 if (speca > align)
8426 align = speca;
8427 } else if (ad->a.packed) {
8428 align = 1;
8431 if (!v && NODATA_WANTED)
8432 size = 0, align = 1;
8434 if ((r & VT_VALMASK) == VT_LOCAL) {
8435 sec = NULL;
8436 #ifdef CONFIG_TCC_BCHECK
8437 if (bcheck && v) {
8438 /* add padding between stack variables for bound checking */
8439 loc -= align;
8441 #endif
8442 loc = (loc - size) & -align;
8443 addr = loc;
8444 p.local_offset = addr + size;
8445 #ifdef CONFIG_TCC_BCHECK
8446 if (bcheck && v) {
8447 /* add padding between stack variables for bound checking */
8448 loc -= align;
8450 #endif
8451 if (v) {
8452 /* local variable */
8453 #ifdef CONFIG_TCC_ASM
8454 if (ad->asm_label) {
8455 int reg = asm_parse_regvar(ad->asm_label);
8456 if (reg >= 0)
8457 r = (r & ~VT_VALMASK) | reg;
8459 #endif
8460 sym = sym_push(v, type, r, addr);
8461 if (ad->cleanup_func) {
8462 Sym *cls = sym_push2(&all_cleanups,
8463 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
8464 cls->prev_tok = sym;
8465 cls->next = ad->cleanup_func;
8466 cls->ncl = cur_scope->cl.s;
8467 cur_scope->cl.s = cls;
8470 sym->a = ad->a;
8471 } else {
8472 /* push local reference */
8473 vset(type, r, addr);
8475 } else {
8476 sym = NULL;
8477 if (v && scope == VT_CONST) {
8478 /* see if the symbol was already defined */
8479 sym = sym_find(v);
8480 if (sym) {
8481 if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
8482 && sym->type.ref->c > type->ref->c) {
8483 /* flex array was already declared with explicit size
8484 extern int arr[10];
8485 int arr[] = { 1,2,3 }; */
8486 type->ref->c = sym->type.ref->c;
8487 size = type_size(type, &align);
8489 patch_storage(sym, ad, type);
8490 /* we accept several definitions of the same global variable. */
8491 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8492 goto no_alloc;
8496 /* allocate symbol in corresponding section */
8497 sec = ad->section;
8498 if (!sec) {
8499 CType *tp = type;
8500 while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
8501 tp = &tp->ref->type;
8502 if (tp->t & VT_CONSTANT) {
8503 sec = rodata_section;
8504 } else if (has_init) {
8505 sec = data_section;
8506 /*if (tcc_state->g_debug & 4)
8507 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8508 } else if (tcc_state->nocommon)
8509 sec = bss_section;
8512 if (sec) {
8513 addr = section_add(sec, size, align);
8514 #ifdef CONFIG_TCC_BCHECK
8515 /* add padding if bound check */
8516 if (bcheck)
8517 section_add(sec, 1, 1);
8518 #endif
8519 } else {
8520 addr = align; /* SHN_COMMON is special, symbol value is align */
8521 sec = common_section;
8524 if (v) {
8525 if (!sym) {
8526 sym = sym_push(v, type, r | VT_SYM, 0);
8527 patch_storage(sym, ad, NULL);
8529 /* update symbol definition */
8530 put_extern_sym(sym, sec, addr, size);
8531 } else {
8532 /* push global reference */
8533 vpush_ref(type, sec, addr, size);
8534 sym = vtop->sym;
8535 vtop->r |= r;
8538 #ifdef CONFIG_TCC_BCHECK
8539 /* handle bounds now because the symbol must be defined
8540 before the relocation */
8541 if (bcheck) {
8542 addr_t *bounds_ptr;
8544 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8545 /* then add global bound info */
8546 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8547 bounds_ptr[0] = 0; /* relocated */
8548 bounds_ptr[1] = size;
8550 #endif
8553 if (type->t & VT_VLA) {
8554 int a;
8556 if (NODATA_WANTED)
8557 goto no_alloc;
8559 /* save before-VLA stack pointer if needed */
8560 if (cur_scope->vla.num == 0) {
8561 if (cur_scope->prev && cur_scope->prev->vla.num) {
8562 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
8563 } else {
8564 gen_vla_sp_save(loc -= PTR_SIZE);
8565 cur_scope->vla.locorig = loc;
8569 vla_runtime_type_size(type, &a);
8570 gen_vla_alloc(type, a);
8571 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8572 /* on _WIN64, because of the function args scratch area, the
8573 result of alloca differs from RSP and is returned in RAX. */
8574 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8575 #endif
8576 gen_vla_sp_save(addr);
8577 cur_scope->vla.loc = addr;
8578 cur_scope->vla.num++;
8579 } else if (has_init) {
8580 p.sec = sec;
8581 decl_initializer(&p, type, addr, DIF_FIRST);
8582 /* patch flexible array member size back to -1, */
8583 /* for possible subsequent similar declarations */
8584 if (flexible_array)
8585 flexible_array->type.ref->c = -1;
8588 no_alloc:
8589 /* restore parse state if needed */
8590 if (init_str) {
8591 end_macro();
8592 next();
8595 nocode_wanted = saved_nocode_wanted;
8598 /* parse a function defined by symbol 'sym' and generate its code in
8599 'cur_text_section' */
8600 static void gen_function(Sym *sym)
8602 struct scope f = { 0 };
8603 cur_scope = root_scope = &f;
8604 nocode_wanted = 0;
8605 ind = cur_text_section->data_offset;
8606 if (sym->a.aligned) {
8607 size_t newoff = section_add(cur_text_section, 0,
8608 1 << (sym->a.aligned - 1));
8609 gen_fill_nops(newoff - ind);
8611 /* NOTE: we patch the symbol size later */
8612 put_extern_sym(sym, cur_text_section, ind, 0);
8613 if (sym->type.ref->f.func_ctor)
8614 add_array (tcc_state, ".init_array", sym->c);
8615 if (sym->type.ref->f.func_dtor)
8616 add_array (tcc_state, ".fini_array", sym->c);
8618 funcname = get_tok_str(sym->v, NULL);
8619 func_ind = ind;
8620 func_vt = sym->type.ref->type;
8621 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8623 /* put debug symbol */
8624 tcc_debug_funcstart(tcc_state, sym);
8625 /* push a dummy symbol to enable local sym storage */
8626 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8627 local_scope = 1; /* for function parameters */
8628 gfunc_prolog(sym);
8629 local_scope = 0;
8630 rsym = 0;
8631 clear_temp_local_var_list();
8632 block(0);
8633 gsym(rsym);
8634 nocode_wanted = 0;
8635 /* reset local stack */
8636 pop_local_syms(NULL, 0);
8637 gfunc_epilog();
8638 cur_text_section->data_offset = ind;
8639 local_scope = 0;
8640 label_pop(&global_label_stack, NULL, 0);
8641 sym_pop(&all_cleanups, NULL, 0);
8642 /* patch symbol size */
8643 elfsym(sym)->st_size = ind - func_ind;
8644 /* end of function */
8645 tcc_debug_funcend(tcc_state, ind - func_ind);
8646 /* It's better to crash than to generate wrong code */
8647 cur_text_section = NULL;
8648 funcname = ""; /* for safety */
8649 func_vt.t = VT_VOID; /* for safety */
8650 func_var = 0; /* for safety */
8651 ind = 0; /* for safety */
8652 nocode_wanted = 0x80000000;
8653 check_vstack();
8654 /* do this after funcend debug info */
8655 next();
8658 static void gen_inline_functions(TCCState *s)
8660 Sym *sym;
8661 int inline_generated, i;
8662 struct InlineFunc *fn;
8664 tcc_open_bf(s, ":inline:", 0);
8665 /* iterate while inline functions are referenced */
8666 do {
8667 inline_generated = 0;
8668 for (i = 0; i < s->nb_inline_fns; ++i) {
8669 fn = s->inline_fns[i];
8670 sym = fn->sym;
8671 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8672 /* the function was used or forced (and then not internal):
8673 generate its code and convert it to a normal function */
8674 fn->sym = NULL;
8675 tcc_debug_putfile(s, fn->filename);
8676 begin_macro(fn->func_str, 1);
8677 next();
8678 cur_text_section = text_section;
8679 gen_function(sym);
8680 end_macro();
8682 inline_generated = 1;
8685 } while (inline_generated);
8686 tcc_close();
8689 static void free_inline_functions(TCCState *s)
8691 int i;
8692 /* free tokens of unused inline functions */
8693 for (i = 0; i < s->nb_inline_fns; ++i) {
8694 struct InlineFunc *fn = s->inline_fns[i];
8695 if (fn->sym)
8696 tok_str_free(fn->func_str);
8698 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8701 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8702 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8703 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
8705 int v, has_init, r, oldint;
8706 CType type, btype;
8707 Sym *sym;
8708 AttributeDef ad, adbase;
8710 while (1) {
8711 if (tok == TOK_STATIC_ASSERT) {
8712 CString error_str;
8713 int c;
8715 next();
8716 skip('(');
8717 c = expr_const();
8719 if (tok == ')') {
8720 if (!c)
8721 tcc_error("_Static_assert fail");
8722 next();
8723 goto static_assert_out;
8726 skip(',');
8727 parse_mult_str(&error_str, "string constant");
8728 if (c == 0)
8729 tcc_error("%s", (char *)error_str.data);
8730 cstr_free(&error_str);
8731 skip(')');
8732 static_assert_out:
8733 skip(';');
8734 continue;
8737 oldint = 0;
8738 if (!parse_btype(&btype, &adbase)) {
8739 if (is_for_loop_init)
8740 return 0;
8741 /* skip redundant ';' if not in old parameter decl scope */
8742 if (tok == ';' && l != VT_CMP) {
8743 next();
8744 continue;
8746 if (l != VT_CONST)
8747 break;
8748 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8749 /* global asm block */
8750 asm_global_instr();
8751 continue;
8753 if (tok >= TOK_UIDENT) {
8754 /* special test for old K&R protos without explicit int
8755 type. Only accepted when defining global data */
8756 btype.t = VT_INT;
8757 oldint = 1;
8758 } else {
8759 if (tok != TOK_EOF)
8760 expect("declaration");
8761 break;
8765 if (tok == ';') {
8766 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8767 v = btype.ref->v;
8768 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8769 tcc_warning("unnamed struct/union that defines no instances");
8770 next();
8771 continue;
8773 if (IS_ENUM(btype.t)) {
8774 next();
8775 continue;
        while (1) { /* iterate thru each declaration */
            type = btype;
            ad = adbase;
            type_decl(&type, &ad, &v, TYPE_DIRECT);
#if 0
            {
                char buf[500];
                type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
                printf("type = '%s'\n", buf);
            }
#endif
            if ((type.t & VT_BTYPE) == VT_FUNC) {
                if ((type.t & VT_STATIC) && (l == VT_LOCAL))
                    tcc_error("function without file scope cannot be static");
                /* if old style function prototype, we accept a
                   declaration list */
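                /* e.g.  int f(a, b) int a; char *b; { ... }
                   the declarations between ')' and '{' are parsed by the
                   recursive decl0(VT_CMP, ...) call below */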
                sym = type.ref;
                if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
                    decl0(VT_CMP, 0, sym);
#ifdef TCC_TARGET_MACHO
                if (sym->f.func_alwinl
                    && ((type.t & (VT_EXTERN | VT_INLINE))
                        == (VT_EXTERN | VT_INLINE))) {
                    /* always_inline functions must be handled as if they
                       don't generate multiple global defs, even if extern
                       inline, i.e. GNU inline semantics for those.  Rewrite
                       them into static inline.  */
                    type.t &= ~VT_EXTERN;
                    type.t |= VT_STATIC;
                }
#endif
                /* always compile 'extern inline' */
                if (type.t & VT_EXTERN)
                    type.t &= ~VT_INLINE;

            } else if (oldint) {
                tcc_warning("type defaults to int");
            }

            if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
                ad.asm_label = asm_label_instr();
                /* parse one last attribute list, after asm label */
                parse_attribute(&ad);
#if 0
                /* gcc does not allow __asm__("label") with function definition,
                   but why not ... */
                if (tok == '{')
                    expect(";");
#endif
            }

#ifdef TCC_TARGET_PE
            if (ad.a.dllimport || ad.a.dllexport) {
                if (type.t & VT_STATIC)
                    tcc_error("cannot have dll linkage with static");
                if (type.t & VT_TYPEDEF) {
                    tcc_warning("'%s' attribute ignored for typedef",
                        ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
                        (ad.a.dllexport = 0, "dllexport"));
                } else if (ad.a.dllimport) {
                    if ((type.t & VT_BTYPE) == VT_FUNC)
                        ad.a.dllimport = 0;
                    else
                        type.t |= VT_EXTERN;
                }
            }
#endif
            if (tok == '{') {
                if (l != VT_CONST)
                    tcc_error("cannot use local functions");
                if ((type.t & VT_BTYPE) != VT_FUNC)
                    expect("function definition");

                /* reject abstract declarators in function definition
                   make old style params without decl have int type */
                sym = type.ref;
                while ((sym = sym->next) != NULL) {
                    if (!(sym->v & ~SYM_FIELD))
                        expect("identifier");
                    if (sym->type.t == VT_VOID)
                        sym->type = int_type;
                }

                /* apply post-declaration attributes */
                merge_funcattr(&type.ref->f, &ad.f);

                /* put function symbol */
                type.t &= ~VT_EXTERN;
                sym = external_sym(v, &type, 0, &ad);

                /* static inline functions are just recorded as a kind
                   of macro. Their code will be emitted at the end of
                   the compilation unit only if they are used */
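                /* (the saved tokens are replayed and compiled by
                   gen_inline_functions() once the whole unit is parsed) */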
                if (sym->type.t & VT_INLINE) {
                    struct InlineFunc *fn;
                    fn = tcc_malloc(sizeof *fn + strlen(file->filename));
                    strcpy(fn->filename, file->filename);
                    fn->sym = sym;
                    skip_or_save_block(&fn->func_str);
                    dynarray_add(&tcc_state->inline_fns,
                                 &tcc_state->nb_inline_fns, fn);
                } else {
                    /* compute text section */
                    cur_text_section = ad.section;
                    if (!cur_text_section)
                        cur_text_section = text_section;
                    gen_function(sym);
                }
                break;
            } else {
                if (l == VT_CMP) {
                    /* find parameter in function parameter list */
                    for (sym = func_sym->next; sym; sym = sym->next)
                        if ((sym->v & ~SYM_FIELD) == v)
                            goto found;
                    tcc_error("declaration for parameter '%s' but no such parameter",
                              get_tok_str(v, NULL));
                found:
                    if (type.t & VT_STORAGE) /* 'register' is okay */
                        tcc_error("storage class specified for '%s'",
                                  get_tok_str(v, NULL));
                    if (sym->type.t != VT_VOID)
                        tcc_error("redefinition of parameter '%s'",
                                  get_tok_str(v, NULL));
                    convert_parameter_type(&type);
                    sym->type = type;
                } else if (type.t & VT_TYPEDEF) {
                    /* save typedefed type */
                    /* XXX: test storage specifiers ? */
                    sym = sym_find(v);
                    if (sym && sym->sym_scope == local_scope) {
                        if (!is_compatible_types(&sym->type, &type)
                            || !(sym->type.t & VT_TYPEDEF))
                            tcc_error("incompatible redefinition of '%s'",
                                get_tok_str(v, NULL));
                        sym->type = type;
                    } else {
                        sym = sym_push(v, &type, 0, 0);
                    }
                    sym->a = ad.a;
                    sym->f = ad.f;
                    if (debug_modes)
                        tcc_debug_typedef (tcc_state, sym);
                } else if ((type.t & VT_BTYPE) == VT_VOID
                           && !(type.t & VT_EXTERN)) {
                    tcc_error("declaration of void object");
                } else {
                    r = 0;
                    if ((type.t & VT_BTYPE) == VT_FUNC) {
                        /* external function definition */
                        /* specific case for func_call attribute */
                        type.ref->f = ad.f;
                    } else if (!(type.t & VT_ARRAY)) {
                        /* not an lvalue if array */
                        r |= VT_LVAL;
                    }
                    has_init = (tok == '=');
                    if (has_init && (type.t & VT_VLA))
                        tcc_error("variable length array cannot be initialized");
                    if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
                        || (type.t & VT_BTYPE) == VT_FUNC
                        /* as with GCC, uninitialized global arrays with no size
                           are considered extern: */
                        || ((type.t & VT_ARRAY) && !has_init
                            && l == VT_CONST && type.ref->c < 0)
                        ) {
                        /* external variable or function */
                        type.t |= VT_EXTERN;
                        sym = external_sym(v, &type, r, &ad);
                        if (ad.alias_target) {
                            /* Aliases need to be emitted when their target
                               symbol is emitted, even if perhaps unreferenced.
                               We only support the case where the base is
                               already defined; otherwise we would need to
                               defer emitting the aliases until the end of
                               the compile unit. */
                            Sym *alias_target = sym_find(ad.alias_target);
                            ElfSym *esym = elfsym(alias_target);
                            if (!esym)
                                tcc_error("unsupported forward __alias__ attribute");
                            put_extern_sym2(sym, esym->st_shndx,
                                            esym->st_value, esym->st_size, 1);
                        }
                    } else {
                        if (type.t & VT_STATIC)
                            r |= VT_CONST;
                        else
                            r |= l;
                        if (has_init)
                            next();
                        else if (l == VT_CONST)
                            /* uninitialized global variables may be overridden */
                            type.t |= VT_EXTERN;
                        decl_initializer_alloc(&type, &ad, r, has_init, v, l);
                    }
                }
                if (tok != ',') {
                    if (is_for_loop_init)
                        return 1;
                    skip(';');
                    break;
                }
                next();
            }
        }
    }
    return 0;
}
static void decl(int l)
{
    decl0(l, 0, NULL);
}
/* ------------------------------------------------------------------------- */
#undef gjmp_addr
#undef gjmp
/* ------------------------------------------------------------------------- */