[tinycc.git] / tccgen.c
/*
 *  TCC - Tiny C Compiler
 *
 *  Copyright (c) 2001-2004 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

#define USING_GLOBALS
#include "tcc.h"
/********************************************************/
/* global variables */

/* loc : local variable index
   ind : output code index
   rsym: return symbol
   anon_sym: anonymous symbol index
*/
ST_DATA int rsym, anon_sym, ind, loc;

ST_DATA Sym *global_stack;
ST_DATA Sym *local_stack;
ST_DATA Sym *define_stack;
ST_DATA Sym *global_label_stack;
ST_DATA Sym *local_label_stack;

static Sym *sym_free_first;
static void **sym_pools;
static int nb_sym_pools;

static Sym *all_cleanups, *pending_gotos;
static int local_scope;
static int in_sizeof;
static int in_generic;
static int section_sym;
ST_DATA char debug_modes;

ST_DATA SValue *vtop;
static SValue _vstack[1 + VSTACK_SIZE];
#define vstack (_vstack + 1)

ST_DATA int const_wanted; /* true if constant wanted */
ST_DATA int nocode_wanted; /* no code generation wanted */
#define unevalmask 0xffff /* unevaluated subexpression */
#define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
#define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */

/* Automagical code suppression ----> */
#define CODE_OFF() (nocode_wanted |= 0x20000000)
#define CODE_ON() (nocode_wanted &= ~0x20000000)
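/* A rough map of the nocode_wanted bits, as suggested by the macros above and
   by tccgen_compile() below: the low 16 bits (unevalmask) track unevaluated
   sub-expressions (e.g. inside sizeof), CODE_OFF()/CODE_ON() toggle bit
   0x20000000 for statement-level suppression, and 0x80000000 is set while
   parsing at file scope, where only static data is emitted. */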
static void tcc_tcov_block_begin(void);

/* Clear 'nocode_wanted' at label if it was used */
ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
static int gind(void) { int t = ind; CODE_ON(); if (debug_modes) tcc_tcov_block_begin(); return t; }

/* Set 'nocode_wanted' after unconditional jumps */
static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }

/* These are #undef'd at the end of this file */
#define gjmp_addr gjmp_addr_acs
#define gjmp gjmp_acs
/* <---- */
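/* Net effect: dead-code suppression is largely automatic.  An unconditional
   jump turns code generation off, and it stays off until a jump target that
   is actually referenced is resolved via gsym()/gind(). */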
ST_DATA int global_expr;  /* true if compound literals must be allocated globally (used during initializers parsing) */
ST_DATA CType func_vt; /* current function return type (used by return instruction) */
ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
ST_DATA int func_vc;
static int last_line_num, new_file, func_ind; /* debug info control */
ST_DATA const char *funcname;
ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
static CString initstr;

#if PTR_SIZE == 4
#define VT_SIZE_T (VT_INT | VT_UNSIGNED)
#define VT_PTRDIFF_T VT_INT
#elif LONG_SIZE == 4
#define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
#define VT_PTRDIFF_T VT_LLONG
#else
#define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
#define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
#endif
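/* Example: on a 64-bit target with 64-bit 'long' (neither PTR_SIZE == 4 nor
   LONG_SIZE == 4), size_t becomes VT_LONG | VT_LLONG | VT_UNSIGNED, i.e. an
   unsigned 64-bit integer additionally tagged as 'long'. */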
ST_DATA struct switch_t {
    struct case_t {
        int64_t v1, v2;
        int sym;
    } **p; int n; /* list of case ranges */
    int def_sym; /* default symbol */
    int *bsym;
    struct scope *scope;
    struct switch_t *prev;
    SValue sv;
} *cur_switch; /* current switch */

#define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
/* list of temporary local variables on the stack in current function. */
ST_DATA struct temp_local_variable {
    int location; // offset on stack. Svalue.c.i
    short size;
    short align;
} arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
short nb_temp_local_vars;
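/* These slots are handed out by get_temp_local_var() and used by
   save_reg_upstack() to spill registers to the stack;
   clear_temp_local_var_list() makes them available for reuse. */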
static struct scope {
    struct scope *prev;
    struct { int loc, locorig, num; } vla;
    struct { Sym *s; int n; } cl;
    int *bsym, *csym;
    Sym *lstk, *llstk;
} *cur_scope, *loop_scope, *root_scope;

typedef struct {
    Section *sec;
    int local_offset;
    Sym *flex_array_ref;
} init_params;
/********************************************************/
/* stab debug support */

static const struct {
    int type;
    const char *name;
} default_debug[] = {
    { VT_INT, "int:t1=r1;-2147483648;2147483647;" },
    { VT_BYTE, "char:t2=r2;0;127;" },
#if LONG_SIZE == 4
    { VT_LONG | VT_INT, "long int:t3=r3;-2147483648;2147483647;" },
#else
    { VT_LLONG | VT_LONG, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
#endif
    { VT_INT | VT_UNSIGNED, "unsigned int:t4=r4;0;037777777777;" },
#if LONG_SIZE == 4
    { VT_LONG | VT_INT | VT_UNSIGNED, "long unsigned int:t5=r5;0;037777777777;" },
#else
    /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
    { VT_LLONG | VT_LONG | VT_UNSIGNED, "long unsigned int:t5=r5;0;01777777777777777777777;" },
#endif
    { VT_QLONG, "__int128:t6=r6;0;-1;" },
    { VT_QLONG | VT_UNSIGNED, "__int128 unsigned:t7=r7;0;-1;" },
    { VT_LLONG, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
    { VT_LLONG | VT_UNSIGNED, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
    { VT_SHORT, "short int:t10=r10;-32768;32767;" },
    { VT_SHORT | VT_UNSIGNED, "short unsigned int:t11=r11;0;65535;" },
    { VT_BYTE | VT_DEFSIGN, "signed char:t12=r12;-128;127;" },
    { VT_BYTE | VT_DEFSIGN | VT_UNSIGNED, "unsigned char:t13=r13;0;255;" },
    { VT_FLOAT, "float:t14=r1;4;0;" },
    { VT_DOUBLE, "double:t15=r1;8;0;" },
#ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
    { VT_DOUBLE | VT_LONG, "long double:t16=r1;8;0;" },
#else
    { VT_LDOUBLE, "long double:t16=r1;16;0;" },
#endif
    { -1, "_Float32:t17=r1;4;0;" },
    { -1, "_Float64:t18=r1;8;0;" },
    { -1, "_Float128:t19=r1;16;0;" },
    { -1, "_Float32x:t20=r1;8;0;" },
    { -1, "_Float64x:t21=r1;16;0;" },
    { -1, "_Decimal32:t22=r1;4;0;" },
    { -1, "_Decimal64:t23=r1;8;0;" },
    { -1, "_Decimal128:t24=r1;16;0;" },
    /* if default char is unsigned */
    { VT_BYTE | VT_UNSIGNED, "unsigned char:t25=r25;0;255;" },
    /* boolean type */
    { VT_BOOL, "bool:t26=r26;0;255;" },
    { VT_VOID, "void:t27=27" },
};
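/* The strings above are stabs type definitions of the form
   "name:tN=rM;low;high;" (type N defined as a range of type M); for the
   floating-point entries the first bound encodes the byte size instead. */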
static int debug_next_type;

static struct debug_hash {
    int debug_type;
    Sym *type;
} *debug_hash;

static int n_debug_hash;

static struct debug_info {
    int start;
    int end;
    int n_sym;
    struct debug_sym {
        int type;
        unsigned long value;
        char *str;
        Section *sec;
        int sym_index;
    } *sym;
    struct debug_info *child, *next, *last, *parent;
} *debug_info, *debug_info_root;

static struct {
    unsigned long offset;
    unsigned long last_file_name;
    unsigned long last_func_name;
    int ind;
    int line;
} tcov_data;
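/* State of the .tcov section writer below: 'offset' is the last counter slot
   emitted, and 'ind'/'line' let tcc_tcov_block_begin() reuse that slot when a
   new block starts at the same code position and source line. */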
/********************************************************/
#if 1
#define precedence_parser
static void init_prec(void);
#endif
/********************************************************/

#ifndef CONFIG_TCC_ASM
ST_FUNC void asm_instr(void)
{
    tcc_error("inline asm() not supported");
}
ST_FUNC void asm_global_instr(void)
{
    tcc_error("inline asm() not supported");
}
#endif
/* ------------------------------------------------------------------------- */
static void gen_cast(CType *type);
static void gen_cast_s(int t);
static inline CType *pointed_type(CType *type);
static int is_compatible_types(CType *type1, CType *type2);
static int parse_btype(CType *type, AttributeDef *ad);
static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
static void parse_expr_type(CType *type);
static void init_putv(init_params *p, CType *type, unsigned long c);
static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
static void block(int is_expr);
static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
static void decl(int l);
static int decl0(int l, int is_for_loop_init, Sym *);
static void expr_eq(void);
static void vla_runtime_type_size(CType *type, int *a);
static int is_compatible_unqualified_types(CType *type1, CType *type2);
static inline int64_t expr_const64(void);
static void vpush64(int ty, unsigned long long v);
static void vpush(CType *type);
static int gvtst(int inv, int t);
static void gen_inline_functions(TCCState *s);
static void free_inline_functions(TCCState *s);
static void skip_or_save_block(TokenString **str);
static void gv_dup(void);
static int get_temp_local_var(int size,int align);
static void clear_temp_local_var_list();
static void cast_error(CType *st, CType *dt);
ST_INLN int is_float(int t)
{
    int bt = t & VT_BTYPE;
    return bt == VT_LDOUBLE
        || bt == VT_DOUBLE
        || bt == VT_FLOAT
        || bt == VT_QFLOAT;
}

static inline int is_integer_btype(int bt)
{
    return bt == VT_BYTE
        || bt == VT_BOOL
        || bt == VT_SHORT
        || bt == VT_INT
        || bt == VT_LLONG;
}

static int btype_size(int bt)
{
    return bt == VT_BYTE || bt == VT_BOOL ? 1 :
        bt == VT_SHORT ? 2 :
        bt == VT_INT ? 4 :
        bt == VT_LLONG ? 8 :
        bt == VT_PTR ? PTR_SIZE : 0;
}
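/* Note: btype_size() returns 0 for any basic type not listed above
   (structs, unions, floating-point types, ...). */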
/* returns function return register from type */
static int R_RET(int t)
{
    if (!is_float(t))
        return REG_IRET;
#ifdef TCC_TARGET_X86_64
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return TREG_ST0;
#elif defined TCC_TARGET_RISCV64
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return REG_IRET;
#endif
    return REG_FRET;
}

/* returns 2nd function return register, if any */
static int R2_RET(int t)
{
    t &= VT_BTYPE;
#if PTR_SIZE == 4
    if (t == VT_LLONG)
        return REG_IRE2;
#elif defined TCC_TARGET_X86_64
    if (t == VT_QLONG)
        return REG_IRE2;
    if (t == VT_QFLOAT)
        return REG_FRE2;
#elif defined TCC_TARGET_RISCV64
    if (t == VT_LDOUBLE)
        return REG_IRE2;
#endif
    return VT_CONST;
}

/* returns true for two-word types */
#define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
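/* Two-word examples, matching the cases in R2_RET(): long long on 32-bit
   targets, __int128 and qfloat on x86-64, long double on riscv64. */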
/* put function return registers to stack value */
static void PUT_R_RET(SValue *sv, int t)
{
    sv->r = R_RET(t), sv->r2 = R2_RET(t);
}

/* returns function return register class for type t */
static int RC_RET(int t)
{
    return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
}

/* returns generic register class for type t */
static int RC_TYPE(int t)
{
    if (!is_float(t))
        return RC_INT;
#ifdef TCC_TARGET_X86_64
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return RC_ST0;
    if ((t & VT_BTYPE) == VT_QFLOAT)
        return RC_FRET;
#elif defined TCC_TARGET_RISCV64
    if ((t & VT_BTYPE) == VT_LDOUBLE)
        return RC_INT;
#endif
    return RC_FLOAT;
}

/* returns 2nd register class corresponding to t and rc */
static int RC2_TYPE(int t, int rc)
{
    if (!USING_TWO_WORDS(t))
        return 0;
#ifdef RC_IRE2
    if (rc == RC_IRET)
        return RC_IRE2;
#endif
#ifdef RC_FRE2
    if (rc == RC_FRET)
        return RC_FRE2;
#endif
    if (rc & RC_FLOAT)
        return RC_FLOAT;
    return RC_INT;
}
/* we use our own 'finite' function to avoid potential problems with
   non standard math libs */
/* XXX: endianness dependent */
ST_FUNC int ieee_finite(double d)
{
    int p[4];
    memcpy(p, &d, sizeof(double));
    return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
}

/* compiling intel long double natively */
#if (defined __i386__ || defined __x86_64__) \
    && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
# define TCC_IS_NATIVE_387
#endif

ST_FUNC void test_lvalue(void)
{
    if (!(vtop->r & VT_LVAL))
        expect("lvalue");
}

ST_FUNC void check_vstack(void)
{
    if (vtop != vstack - 1)
        tcc_error("internal compiler error: vstack leak (%d)",
                  (int)(vtop - vstack + 1));
}

/* ------------------------------------------------------------------------- */
/* vstack debugging aid */

#if 0
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
            lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
418 /* ------------------------------------------------------------------------- */
419 /* start of translation unit info */
420 ST_FUNC void tcc_debug_start(TCCState *s1)
422 if (s1->do_debug) {
423 int i;
424 char buf[512];
426 /* file info: full path + filename */
427 section_sym = put_elf_sym(symtab_section, 0, 0,
428 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
429 text_section->sh_num, NULL);
430 getcwd(buf, sizeof(buf));
431 #ifdef _WIN32
432 normalize_slashes(buf);
433 #endif
434 pstrcat(buf, sizeof(buf), "/");
435 put_stabs_r(s1, buf, N_SO, 0, 0,
436 text_section->data_offset, text_section, section_sym);
437 put_stabs_r(s1, file->prev ? file->prev->filename : file->filename,
438 N_SO, 0, 0,
439 text_section->data_offset, text_section, section_sym);
440 for (i = 0; i < sizeof (default_debug) / sizeof (default_debug[0]); i++)
441 put_stabs(s1, default_debug[i].name, N_LSYM, 0, 0, 0);
443 new_file = last_line_num = 0;
444 func_ind = -1;
445 debug_next_type = sizeof(default_debug) / sizeof(default_debug[0]);
446 debug_hash = NULL;
447 n_debug_hash = 0;
449 /* we're currently 'including' the <command line> */
450 tcc_debug_bincl(s1);
453 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
454 symbols can be safely used */
455 put_elf_sym(symtab_section, 0, 0,
456 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
457 SHN_ABS, file->filename);
460 /* put end of translation unit info */
461 ST_FUNC void tcc_debug_end(TCCState *s1)
463 if (!s1->do_debug)
464 return;
465 put_stabs_r(s1, NULL, N_SO, 0, 0,
466 text_section->data_offset, text_section, section_sym);
467 tcc_free(debug_hash);
470 static BufferedFile* put_new_file(TCCState *s1)
472 BufferedFile *f = file;
473 /* use upper file if from inline ":asm:" */
474 if (f->filename[0] == ':')
475 f = f->prev;
476 if (f && new_file) {
477 put_stabs_r(s1, f->filename, N_SOL, 0, 0, ind, text_section, section_sym);
478 new_file = last_line_num = 0;
480 return f;
483 /* put alternative filename */
484 ST_FUNC void tcc_debug_putfile(TCCState *s1, const char *filename)
486 if (0 == strcmp(file->filename, filename))
487 return;
488 pstrcpy(file->filename, sizeof(file->filename), filename);
489 new_file = 1;
492 /* begin of #include */
493 ST_FUNC void tcc_debug_bincl(TCCState *s1)
495 if (!s1->do_debug)
496 return;
497 put_stabs(s1, file->filename, N_BINCL, 0, 0, 0);
498 new_file = 1;
501 /* end of #include */
502 ST_FUNC void tcc_debug_eincl(TCCState *s1)
504 if (!s1->do_debug)
505 return;
506 put_stabn(s1, N_EINCL, 0, 0, 0);
507 new_file = 1;
510 /* generate line number info */
511 static void tcc_debug_line(TCCState *s1)
513 BufferedFile *f;
514 if (!s1->do_debug
515 || cur_text_section != text_section
516 || !(f = put_new_file(s1))
517 || last_line_num == f->line_num)
518 return;
519 if (func_ind != -1) {
520 put_stabn(s1, N_SLINE, 0, f->line_num, ind - func_ind);
521 } else {
522 /* from tcc_assemble */
523 put_stabs_r(s1, NULL, N_SLINE, 0, f->line_num, ind, text_section, section_sym);
525 last_line_num = f->line_num;
528 static void tcc_debug_stabs (TCCState *s1, const char *str, int type, unsigned long value,
529 Section *sec, int sym_index)
531 struct debug_sym *s;
533 if (debug_info) {
534 debug_info->sym =
535 (struct debug_sym *)tcc_realloc (debug_info->sym,
536 sizeof(struct debug_sym) *
537 (debug_info->n_sym + 1));
538 s = debug_info->sym + debug_info->n_sym++;
539 s->type = type;
540 s->value = value;
541 s->str = tcc_strdup(str);
542 s->sec = sec;
543 s->sym_index = sym_index;
545 else if (sec)
546 put_stabs_r (s1, str, type, 0, 0, value, sec, sym_index);
547 else
548 put_stabs (s1, str, type, 0, 0, value);
551 static void tcc_debug_stabn(TCCState *s1, int type, int value)
553 if (!s1->do_debug)
554 return;
555 if (type == N_LBRAC) {
556 struct debug_info *info =
557 (struct debug_info *) tcc_mallocz(sizeof (*info));
559 info->start = value;
560 info->parent = debug_info;
561 if (debug_info) {
562 if (debug_info->child) {
563 if (debug_info->child->last)
564 debug_info->child->last->next = info;
565 else
566 debug_info->child->next = info;
567 debug_info->child->last = info;
569 else
570 debug_info->child = info;
572 else
573 debug_info_root = info;
574 debug_info = info;
576 else {
577 debug_info->end = value;
578 debug_info = debug_info->parent;
582 static void tcc_get_debug_info(TCCState *s1, Sym *s, CString *result)
584 int type;
585 int n = 0;
586 int debug_type = -1;
587 Sym *t = s;
588 CString str;
590 for (;;) {
591 type = t->type.t & ~(VT_STORAGE | VT_CONSTANT | VT_VOLATILE);
592 if ((type & VT_BTYPE) != VT_BYTE)
593 type &= ~VT_DEFSIGN;
594 if (type == VT_PTR || type == (VT_PTR | VT_ARRAY))
595 n++, t = t->type.ref;
596 else
597 break;
599 if ((type & VT_BTYPE) == VT_STRUCT) {
600 int i;
602 t = t->type.ref;
603 for (i = 0; i < n_debug_hash; i++) {
604 if (t == debug_hash[i].type) {
605 debug_type = debug_hash[i].debug_type;
606 break;
609 if (debug_type == -1) {
610 debug_type = ++debug_next_type;
611 debug_hash = (struct debug_hash *)
612 tcc_realloc (debug_hash,
613 (n_debug_hash + 1) * sizeof(*debug_hash));
614 debug_hash[n_debug_hash].debug_type = debug_type;
615 debug_hash[n_debug_hash++].type = t;
616 cstr_new (&str);
617 cstr_printf (&str, "%s:T%d=%c%d",
618 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
619 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
620 debug_type,
621 IS_UNION (t->type.t) ? 'u' : 's',
622 t->c);
623 while (t->next) {
624 int pos, size, align;
626 t = t->next;
627 cstr_printf (&str, "%s:",
628 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
629 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
630 tcc_get_debug_info (s1, t, &str);
631 if (t->type.t & VT_BITFIELD) {
632 pos = t->c * 8 + BIT_POS(t->type.t);
633 size = BIT_SIZE(t->type.t);
635 else {
636 pos = t->c * 8;
637 size = type_size(&t->type, &align) * 8;
639 cstr_printf (&str, ",%d,%d;", pos, size);
641 cstr_printf (&str, ";");
642 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
643 cstr_free (&str);
646 else if (IS_ENUM(type)) {
647 Sym *e = t = t->type.ref;
649 debug_type = ++debug_next_type;
650 cstr_new (&str);
651 cstr_printf (&str, "%s:T%d=e",
652 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
653 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
654 debug_type);
655 while (t->next) {
656 t = t->next;
657 cstr_printf (&str, "%s:",
658 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
659 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
660 cstr_printf (&str, e->type.t & VT_UNSIGNED ? "%u," : "%d,",
661 (int)t->enum_val);
663 cstr_printf (&str, ";");
664 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
665 cstr_free (&str);
667 else if ((type & VT_BTYPE) != VT_FUNC) {
668 type &= ~VT_STRUCT_MASK;
669 for (debug_type = 1;
670 debug_type <= sizeof(default_debug) / sizeof(default_debug[0]);
671 debug_type++)
672 if (default_debug[debug_type - 1].type == type)
673 break;
674 if (debug_type > sizeof(default_debug) / sizeof(default_debug[0]))
675 return;
677 if (n > 0)
678 cstr_printf (result, "%d=", ++debug_next_type);
679 t = s;
680 for (;;) {
681 type = t->type.t & ~(VT_STORAGE | VT_CONSTANT | VT_VOLATILE);
682 if ((type & VT_BTYPE) != VT_BYTE)
683 type &= ~VT_DEFSIGN;
684 if (type == VT_PTR)
685 cstr_printf (result, "%d=*", ++debug_next_type);
686 else if (type == (VT_PTR | VT_ARRAY))
687 cstr_printf (result, "%d=ar1;0;%d;",
688 ++debug_next_type, t->type.ref->c - 1);
689 else if (type == VT_FUNC) {
690 cstr_printf (result, "%d=f", ++debug_next_type);
691 tcc_get_debug_info (s1, t->type.ref, result);
692 return;
694 else
695 break;
696 t = t->type.ref;
698 cstr_printf (result, "%d", debug_type);
701 static void tcc_debug_finish (TCCState *s1, struct debug_info *cur)
703 while (cur) {
704 int i;
705 struct debug_info *next = cur->next;
707 for (i = 0; i < cur->n_sym; i++) {
708 struct debug_sym *s = &cur->sym[i];
710 if (s->sec)
711 put_stabs_r(s1, s->str, s->type, 0, 0, s->value,
712 s->sec, s->sym_index);
713 else
714 put_stabs(s1, s->str, s->type, 0, 0, s->value);
715 tcc_free (s->str);
717 tcc_free (cur->sym);
718 put_stabn(s1, N_LBRAC, 0, 0, cur->start);
719 tcc_debug_finish (s1, cur->child);
720 put_stabn(s1, N_RBRAC, 0, 0, cur->end);
721 tcc_free (cur);
722 cur = next;
726 static void tcc_add_debug_info(TCCState *s1, int param, Sym *s, Sym *e)
728 CString debug_str;
729 if (!s1->do_debug)
730 return;
731 cstr_new (&debug_str);
732 for (; s != e; s = s->prev) {
733 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
734 continue;
735 cstr_reset (&debug_str);
736 cstr_printf (&debug_str, "%s:%s", get_tok_str(s->v, NULL), param ? "p" : "");
737 tcc_get_debug_info(s1, s, &debug_str);
738 tcc_debug_stabs(s1, debug_str.data, param ? N_PSYM : N_LSYM, s->c, NULL, 0);
740 cstr_free (&debug_str);
743 /* put function symbol */
744 static void tcc_debug_funcstart(TCCState *s1, Sym *sym)
746 CString debug_str;
747 BufferedFile *f;
748 if (!s1->do_debug)
749 return;
750 debug_info_root = NULL;
751 debug_info = NULL;
752 tcc_debug_stabn(s1, N_LBRAC, ind - func_ind);
753 if (!(f = put_new_file(s1)))
754 return;
755 cstr_new (&debug_str);
756 cstr_printf(&debug_str, "%s:%c", funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
757 tcc_get_debug_info(s1, sym->type.ref, &debug_str);
758 put_stabs_r(s1, debug_str.data, N_FUN, 0, f->line_num, 0, cur_text_section, sym->c);
759 cstr_free (&debug_str);
761 tcc_debug_line(s1);
764 /* put function size */
765 static void tcc_debug_funcend(TCCState *s1, int size)
767 if (!s1->do_debug)
768 return;
769 tcc_debug_stabn(s1, N_RBRAC, size);
770 tcc_debug_finish (s1, debug_info_root);
774 static void tcc_debug_extern_sym(TCCState *s1, Sym *sym, int sh_num, int sym_bind, int sym_type)
776 Section *s;
777 CString str;
779 if (!s1->do_debug)
780 return;
781 if (sym_type == STT_FUNC || sym->v >= SYM_FIRST_ANOM)
782 return;
783 s = s1->sections[sh_num];
785 cstr_new (&str);
786 cstr_printf (&str, "%s:%c",
787 get_tok_str(sym->v, NULL),
788 sym_bind == STB_GLOBAL ? 'G' : local_scope ? 'V' : 'S'
790 tcc_get_debug_info(s1, sym, &str);
791 if (sym_bind == STB_GLOBAL)
792 tcc_debug_stabs(s1, str.data, N_GSYM, 0, NULL, 0);
793 else
794 tcc_debug_stabs(s1, str.data,
795 (sym->type.t & VT_STATIC) && data_section == s
796 ? N_STSYM : N_LCSYM, 0, s, sym->c);
797 cstr_free (&str);
800 static void tcc_debug_typedef(TCCState *s1, Sym *sym)
802 CString str;
804 if (!s1->do_debug)
805 return;
806 cstr_new (&str);
807 cstr_printf (&str, "%s:t",
808 (sym->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
809 ? "" : get_tok_str(sym->v & ~SYM_FIELD, NULL));
810 tcc_get_debug_info(s1, sym, &str);
811 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
812 cstr_free (&str);
815 /* ------------------------------------------------------------------------- */
816 /* for section layout see lib/tcov.c */
818 static void tcc_tcov_block_end(int line);
820 static void tcc_tcov_block_begin(void)
822 SValue sv;
823 void *ptr;
824 unsigned long last_offset = tcov_data.offset;
826 tcc_tcov_block_end (0);
827 if (tcc_state->test_coverage == 0 || nocode_wanted)
828 return;
830 if (tcov_data.last_file_name == 0 ||
831 strcmp ((const char *)(tcov_section->data + tcov_data.last_file_name),
832 file->true_filename) != 0) {
833 char wd[1024];
834 CString cstr;
836 if (tcov_data.last_func_name)
837 section_ptr_add(tcov_section, 1);
838 if (tcov_data.last_file_name)
839 section_ptr_add(tcov_section, 1);
840 tcov_data.last_func_name = 0;
841 cstr_new (&cstr);
842 if (file->true_filename[0] == '/') {
843 tcov_data.last_file_name = tcov_section->data_offset;
844 cstr_printf (&cstr, "%s", file->true_filename);
846 else {
847 getcwd (wd, sizeof(wd));
848 tcov_data.last_file_name = tcov_section->data_offset + strlen(wd) + 1;
849 cstr_printf (&cstr, "%s/%s", wd, file->true_filename);
851 ptr = section_ptr_add(tcov_section, cstr.size + 1);
852 strcpy((char *)ptr, cstr.data);
853 #ifdef _WIN32
854 normalize_slashes((char *)ptr);
855 #endif
856 cstr_free (&cstr);
858 if (tcov_data.last_func_name == 0 ||
859 strcmp ((const char *)(tcov_section->data + tcov_data.last_func_name),
860 funcname) != 0) {
861 size_t len;
863 if (tcov_data.last_func_name)
864 section_ptr_add(tcov_section, 1);
865 tcov_data.last_func_name = tcov_section->data_offset;
866 len = strlen (funcname);
867 ptr = section_ptr_add(tcov_section, len + 1);
868 strcpy((char *)ptr, funcname);
869 section_ptr_add(tcov_section, -tcov_section->data_offset & 7);
870 ptr = section_ptr_add(tcov_section, 8);
871 write64le (ptr, file->line_num);
873 if (ind == tcov_data.ind && tcov_data.line == file->line_num)
874 tcov_data.offset = last_offset;
875 else {
876 Sym label = {0};
877 label.type.t = VT_LLONG | VT_STATIC;
879 ptr = section_ptr_add(tcov_section, 16);
880 tcov_data.line = file->line_num;
881 write64le (ptr, (tcov_data.line << 8) | 0xff);
882 put_extern_sym(&label, tcov_section,
883 ((unsigned char *)ptr - tcov_section->data) + 8, 0);
884 sv.type = label.type;
885 sv.r = VT_SYM | VT_LVAL | VT_CONST;
886 sv.r2 = VT_CONST;
887 sv.c.i = 0;
888 sv.sym = &label;
889 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || \
890 defined TCC_TARGET_ARM || defined TCC_TARGET_ARM64 || \
891 defined TCC_TARGET_RISCV64
892 gen_increment_tcov (&sv);
893 #else
894 vpushv(&sv);
895 inc(0, TOK_INC);
896 vpop();
897 #endif
898 tcov_data.offset = (unsigned char *)ptr - tcov_section->data;
899 tcov_data.ind = ind;
903 static void tcc_tcov_block_end(int line)
905 if (tcc_state->test_coverage == 0)
906 return;
907 if (tcov_data.offset) {
908 void *ptr = tcov_section->data + tcov_data.offset;
909 unsigned long long nline = line ? line : file->line_num;
911 write64le (ptr, (read64le (ptr) & 0xfffffffffull) | (nline << 36));
912 tcov_data.offset = 0;
916 static void tcc_tcov_check_line(int start)
918 if (tcc_state->test_coverage == 0)
919 return;
920 if (tcov_data.line != file->line_num) {
921 if ((tcov_data.line + 1) != file->line_num) {
922 tcc_tcov_block_end (tcov_data.line);
923 if (start)
924 tcc_tcov_block_begin ();
926 else
927 tcov_data.line = file->line_num;
931 static void tcc_tcov_start(void)
933 if (tcc_state->test_coverage == 0)
934 return;
935 memset (&tcov_data, 0, sizeof (tcov_data));
936 if (tcov_section == NULL) {
937 tcov_section = new_section(tcc_state, ".tcov", SHT_PROGBITS,
938 SHF_ALLOC | SHF_WRITE);
939 section_ptr_add(tcov_section, 4); // pointer to executable name
943 static void tcc_tcov_end(void)
945 if (tcc_state->test_coverage == 0)
946 return;
947 if (tcov_data.last_func_name)
948 section_ptr_add(tcov_section, 1);
949 if (tcov_data.last_file_name)
950 section_ptr_add(tcov_section, 1);
953 /* ------------------------------------------------------------------------- */
954 /* initialize vstack and types. This must be done also for tcc -E */
955 ST_FUNC void tccgen_init(TCCState *s1)
957 vtop = vstack - 1;
958 memset(vtop, 0, sizeof *vtop);
960 /* define some often used types */
961 int_type.t = VT_INT;
963 char_type.t = VT_BYTE;
964 if (s1->char_is_unsigned)
965 char_type.t |= VT_UNSIGNED;
966 char_pointer_type = char_type;
967 mk_pointer(&char_pointer_type);
969 func_old_type.t = VT_FUNC;
970 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
971 func_old_type.ref->f.func_call = FUNC_CDECL;
972 func_old_type.ref->f.func_type = FUNC_OLD;
973 #ifdef precedence_parser
974 init_prec();
975 #endif
976 cstr_new(&initstr);
979 ST_FUNC int tccgen_compile(TCCState *s1)
981 cur_text_section = NULL;
982 funcname = "";
983 anon_sym = SYM_FIRST_ANOM;
984 section_sym = 0;
985 const_wanted = 0;
986 nocode_wanted = 0x80000000;
987 local_scope = 0;
988 debug_modes = s1->do_debug | s1->test_coverage << 1;
990 tcc_debug_start(s1);
991 tcc_tcov_start ();
992 #ifdef TCC_TARGET_ARM
993 arm_init(s1);
994 #endif
995 #ifdef INC_DEBUG
996 printf("%s: **** new file\n", file->filename);
997 #endif
998 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
999 next();
1000 decl(VT_CONST);
1001 gen_inline_functions(s1);
1002 check_vstack();
1003 /* end of translation unit info */
1004 tcc_debug_end(s1);
1005 tcc_tcov_end ();
1006 return 0;
1009 ST_FUNC void tccgen_finish(TCCState *s1)
1011 cstr_free(&initstr);
1012 free_inline_functions(s1);
1013 sym_pop(&global_stack, NULL, 0);
1014 sym_pop(&local_stack, NULL, 0);
1015 /* free preprocessor macros */
1016 free_defines(NULL);
1017 /* free sym_pools */
1018 dynarray_reset(&sym_pools, &nb_sym_pools);
1019 sym_free_first = NULL;
1022 /* ------------------------------------------------------------------------- */
1023 ST_FUNC ElfSym *elfsym(Sym *s)
1025 if (!s || !s->c)
1026 return NULL;
1027 return &((ElfSym *)symtab_section->data)[s->c];
1030 /* apply storage attributes to Elf symbol */
1031 ST_FUNC void update_storage(Sym *sym)
1033 ElfSym *esym;
1034 int sym_bind, old_sym_bind;
1036 esym = elfsym(sym);
1037 if (!esym)
1038 return;
1040 if (sym->a.visibility)
1041 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
1042 | sym->a.visibility;
1044 if (sym->type.t & (VT_STATIC | VT_INLINE))
1045 sym_bind = STB_LOCAL;
1046 else if (sym->a.weak)
1047 sym_bind = STB_WEAK;
1048 else
1049 sym_bind = STB_GLOBAL;
1050 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
1051 if (sym_bind != old_sym_bind) {
1052 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
1055 #ifdef TCC_TARGET_PE
1056 if (sym->a.dllimport)
1057 esym->st_other |= ST_PE_IMPORT;
1058 if (sym->a.dllexport)
1059 esym->st_other |= ST_PE_EXPORT;
1060 #endif
1062 #if 0
1063 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
1064 get_tok_str(sym->v, NULL),
1065 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
1066 sym->a.visibility,
1067 sym->a.dllexport,
1068 sym->a.dllimport
1070 #endif
1073 /* ------------------------------------------------------------------------- */
1074 /* update sym->c so that it points to an external symbol in section
1075 'section' with value 'value' */
1077 ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
1078 addr_t value, unsigned long size,
1079 int can_add_underscore)
1081 int sym_type, sym_bind, info, other, t;
1082 ElfSym *esym;
1083 const char *name;
1084 char buf1[256];
1086 if (!sym->c) {
1087 name = get_tok_str(sym->v, NULL);
1088 t = sym->type.t;
1089 if ((t & VT_BTYPE) == VT_FUNC) {
1090 sym_type = STT_FUNC;
1091 } else if ((t & VT_BTYPE) == VT_VOID) {
1092 sym_type = STT_NOTYPE;
1093 if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
1094 sym_type = STT_FUNC;
1095 } else {
1096 sym_type = STT_OBJECT;
1098 if (t & (VT_STATIC | VT_INLINE))
1099 sym_bind = STB_LOCAL;
1100 else
1101 sym_bind = STB_GLOBAL;
1102 other = 0;
1104 #ifdef TCC_TARGET_PE
1105 if (sym_type == STT_FUNC && sym->type.ref) {
1106 Sym *ref = sym->type.ref;
1107 if (ref->a.nodecorate) {
1108 can_add_underscore = 0;
1110 if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
1111 sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
1112 name = buf1;
1113 other |= ST_PE_STDCALL;
1114 can_add_underscore = 0;
1117 #endif
1119 if (sym->asm_label) {
1120 name = get_tok_str(sym->asm_label, NULL);
1121 can_add_underscore = 0;
1124 if (tcc_state->leading_underscore && can_add_underscore) {
1125 buf1[0] = '_';
1126 pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
1127 name = buf1;
1130 info = ELFW(ST_INFO)(sym_bind, sym_type);
1131 sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);
1133 if (debug_modes)
1134 tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);
1136 } else {
1137 esym = elfsym(sym);
1138 esym->st_value = value;
1139 esym->st_size = size;
1140 esym->st_shndx = sh_num;
1142 update_storage(sym);
1145 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
1146 addr_t value, unsigned long size)
1148 int sh_num = section ? section->sh_num : SHN_UNDEF;
1149 put_extern_sym2(sym, sh_num, value, size, 1);
1152 /* add a new relocation entry to symbol 'sym' in section 's' */
1153 ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
1154 addr_t addend)
1156 int c = 0;
1158 if (nocode_wanted && s == cur_text_section)
1159 return;
1161 if (sym) {
1162 if (0 == sym->c)
1163 put_extern_sym(sym, NULL, 0, 0);
1164 c = sym->c;
1167 /* now we can add ELF relocation info */
1168 put_elf_reloca(symtab_section, s, offset, type, c, addend);
1171 #if PTR_SIZE == 4
1172 ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
1174 greloca(s, sym, offset, type, 0);
1176 #endif
1178 /* ------------------------------------------------------------------------- */
1179 /* symbol allocator */
1180 static Sym *__sym_malloc(void)
1182 Sym *sym_pool, *sym, *last_sym;
1183 int i;
1185 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
1186 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
1188 last_sym = sym_free_first;
1189 sym = sym_pool;
1190 for(i = 0; i < SYM_POOL_NB; i++) {
1191 sym->next = last_sym;
1192 last_sym = sym;
1193 sym++;
1195 sym_free_first = last_sym;
1196 return last_sym;
1199 static inline Sym *sym_malloc(void)
1201 Sym *sym;
1202 #ifndef SYM_DEBUG
1203 sym = sym_free_first;
1204 if (!sym)
1205 sym = __sym_malloc();
1206 sym_free_first = sym->next;
1207 return sym;
1208 #else
1209 sym = tcc_malloc(sizeof(Sym));
1210 return sym;
1211 #endif
1214 ST_INLN void sym_free(Sym *sym)
1216 #ifndef SYM_DEBUG
1217 sym->next = sym_free_first;
1218 sym_free_first = sym;
1219 #else
1220 tcc_free(sym);
1221 #endif
1224 /* push, without hashing */
1225 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
1227 Sym *s;
1229 s = sym_malloc();
1230 memset(s, 0, sizeof *s);
1231 s->v = v;
1232 s->type.t = t;
1233 s->c = c;
1234 /* add in stack */
1235 s->prev = *ps;
1236 *ps = s;
1237 return s;
1240 /* find a symbol and return its associated structure. 's' is the top
1241 of the symbol stack */
1242 ST_FUNC Sym *sym_find2(Sym *s, int v)
1244 while (s) {
1245 if (s->v == v)
1246 return s;
1247 else if (s->v == -1)
1248 return NULL;
1249 s = s->prev;
1251 return NULL;
1254 /* structure lookup */
1255 ST_INLN Sym *struct_find(int v)
1257 v -= TOK_IDENT;
1258 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1259 return NULL;
1260 return table_ident[v]->sym_struct;
1263 /* find an identifier */
1264 ST_INLN Sym *sym_find(int v)
1266 v -= TOK_IDENT;
1267 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1268 return NULL;
1269 return table_ident[v]->sym_identifier;
1272 static int sym_scope(Sym *s)
1274 if (IS_ENUM_VAL (s->type.t))
1275 return s->type.ref->sym_scope;
1276 else
1277 return s->sym_scope;
1280 /* push a given symbol on the symbol stack */
1281 ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
1283 Sym *s, **ps;
1284 TokenSym *ts;
1286 if (local_stack)
1287 ps = &local_stack;
1288 else
1289 ps = &global_stack;
1290 s = sym_push2(ps, v, type->t, c);
1291 s->type.ref = type->ref;
1292 s->r = r;
1293 /* don't record fields or anonymous symbols */
1294 /* XXX: simplify */
1295 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
1296 /* record symbol in token array */
1297 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
1298 if (v & SYM_STRUCT)
1299 ps = &ts->sym_struct;
1300 else
1301 ps = &ts->sym_identifier;
1302 s->prev_tok = *ps;
1303 *ps = s;
1304 s->sym_scope = local_scope;
1305 if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
1306 tcc_error("redeclaration of '%s'",
1307 get_tok_str(v & ~SYM_STRUCT, NULL));
1309 return s;
1312 /* push a global identifier */
1313 ST_FUNC Sym *global_identifier_push(int v, int t, int c)
1315 Sym *s, **ps;
1316 s = sym_push2(&global_stack, v, t, c);
1317 s->r = VT_CONST | VT_SYM;
1318 /* don't record anonymous symbol */
1319 if (v < SYM_FIRST_ANOM) {
1320 ps = &table_ident[v - TOK_IDENT]->sym_identifier;
1321 /* modify the top most local identifier, so that sym_identifier will
1322 point to 's' when popped; happens when called from inline asm */
1323 while (*ps != NULL && (*ps)->sym_scope)
1324 ps = &(*ps)->prev_tok;
1325 s->prev_tok = *ps;
1326 *ps = s;
1328 return s;
1331 /* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
1332 pop them yet from the list, but do remove them from the token array. */
1333 ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
1335 Sym *s, *ss, **ps;
1336 TokenSym *ts;
1337 int v;
1339 s = *ptop;
1340 while(s != b) {
1341 ss = s->prev;
1342 v = s->v;
1343 /* remove symbol in token array */
1344 /* XXX: simplify */
1345 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
1346 ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
1347 if (v & SYM_STRUCT)
1348 ps = &ts->sym_struct;
1349 else
1350 ps = &ts->sym_identifier;
1351 *ps = s->prev_tok;
1353 if (!keep)
1354 sym_free(s);
1355 s = ss;
1357 if (!keep)
1358 *ptop = b;
1361 /* ------------------------------------------------------------------------- */
static void vcheck_cmp(void)
{
    /* cannot leave the CPU flags set if other instructions are generated.
       Also avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator.

       Don't do this when nocode_wanted.  vtop might come from
       !nocode_wanted regions (see 88_codeopt.c) and transforming it to a
       register without actually generating code is wrong as its value
       might still be used for real.  All values we push under nocode_wanted
       will eventually be popped again, so that the VT_CMP/VT_JMP value will
       be in vtop when code is unsuppressed again. */

    if (vtop->r == VT_CMP && !nocode_wanted)
        gv(RC_INT);
}
1380 static void vsetc(CType *type, int r, CValue *vc)
1382 if (vtop >= vstack + (VSTACK_SIZE - 1))
1383 tcc_error("memory full (vstack)");
1384 vcheck_cmp();
1385 vtop++;
1386 vtop->type = *type;
1387 vtop->r = r;
1388 vtop->r2 = VT_CONST;
1389 vtop->c = *vc;
1390 vtop->sym = NULL;
1393 ST_FUNC void vswap(void)
1395 SValue tmp;
1397 vcheck_cmp();
1398 tmp = vtop[0];
1399 vtop[0] = vtop[-1];
1400 vtop[-1] = tmp;
1403 /* pop stack value */
1404 ST_FUNC void vpop(void)
1406 int v;
1407 v = vtop->r & VT_VALMASK;
1408 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1409 /* for x86, we need to pop the FP stack */
1410 if (v == TREG_ST0) {
1411 o(0xd8dd); /* fstp %st(0) */
1412 } else
1413 #endif
1414 if (v == VT_CMP) {
1415 /* need to put correct jump if && or || without test */
1416 gsym(vtop->jtrue);
1417 gsym(vtop->jfalse);
1419 vtop--;
1422 /* push constant of type "type" with useless value */
1423 static void vpush(CType *type)
1425 vset(type, VT_CONST, 0);
1428 /* push arbitrary 64bit constant */
1429 static void vpush64(int ty, unsigned long long v)
1431 CValue cval;
1432 CType ctype;
1433 ctype.t = ty;
1434 ctype.ref = NULL;
1435 cval.i = v;
1436 vsetc(&ctype, VT_CONST, &cval);
1439 /* push integer constant */
1440 ST_FUNC void vpushi(int v)
1442 vpush64(VT_INT, v);
1445 /* push a pointer sized constant */
1446 static void vpushs(addr_t v)
1448 vpush64(VT_SIZE_T, v);
1451 /* push long long constant */
1452 static inline void vpushll(long long v)
1454 vpush64(VT_LLONG, v);
1457 ST_FUNC void vset(CType *type, int r, int v)
1459 CValue cval;
1460 cval.i = v;
1461 vsetc(type, r, &cval);
1464 static void vseti(int r, int v)
1466 CType type;
1467 type.t = VT_INT;
1468 type.ref = NULL;
1469 vset(&type, r, v);
1472 ST_FUNC void vpushv(SValue *v)
1474 if (vtop >= vstack + (VSTACK_SIZE - 1))
1475 tcc_error("memory full (vstack)");
1476 vtop++;
1477 *vtop = *v;
1480 static void vdup(void)
1482 vpushv(vtop);
/* rotate n first stack elements to the bottom
   I1 ... In -> I2 ... In I1 [top is right]
*/
1488 ST_FUNC void vrotb(int n)
1490 int i;
1491 SValue tmp;
1493 vcheck_cmp();
1494 tmp = vtop[-n + 1];
1495 for(i=-n+1;i!=0;i++)
1496 vtop[i] = vtop[i+1];
1497 vtop[0] = tmp;
/* rotate the n elements before entry e towards the top
   I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
*/
1503 ST_FUNC void vrote(SValue *e, int n)
1505 int i;
1506 SValue tmp;
1508 vcheck_cmp();
1509 tmp = *e;
1510 for(i = 0;i < n - 1; i++)
1511 e[-i] = e[-i - 1];
1512 e[-n + 1] = tmp;
/* rotate n first stack elements to the top
   I1 ... In -> In I1 ... I(n-1) [top is right]
*/
1518 ST_FUNC void vrott(int n)
1520 vrote(vtop, n);
1523 /* ------------------------------------------------------------------------- */
1524 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1526 /* called from generators to set the result from relational ops */
1527 ST_FUNC void vset_VT_CMP(int op)
1529 vtop->r = VT_CMP;
1530 vtop->cmp_op = op;
1531 vtop->jfalse = 0;
1532 vtop->jtrue = 0;
1535 /* called once before asking generators to load VT_CMP to a register */
1536 static void vset_VT_JMP(void)
1538 int op = vtop->cmp_op;
1540 if (vtop->jtrue || vtop->jfalse) {
1541 /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
1542 int inv = op & (op < 2); /* small optimization */
1543 vseti(VT_JMP+inv, gvtst(inv, 0));
1544 } else {
1545 /* otherwise convert flags (rsp. 0/1) to register */
1546 vtop->c.i = op;
1547 if (op < 2) /* doesn't seem to happen */
1548 vtop->r = VT_CONST;
1552 /* Set CPU Flags, doesn't yet jump */
1553 static void gvtst_set(int inv, int t)
1555 int *p;
1557 if (vtop->r != VT_CMP) {
1558 vpushi(0);
1559 gen_op(TOK_NE);
1560 if (vtop->r != VT_CMP) /* must be VT_CONST then */
1561 vset_VT_CMP(vtop->c.i != 0);
1564 p = inv ? &vtop->jfalse : &vtop->jtrue;
1565 *p = gjmp_append(*p, t);
1568 /* Generate value test
1570 * Generate a test for any value (jump, comparison and integers) */
1571 static int gvtst(int inv, int t)
1573 int op, x, u;
1575 gvtst_set(inv, t);
1576 t = vtop->jtrue, u = vtop->jfalse;
1577 if (inv)
1578 x = u, u = t, t = x;
1579 op = vtop->cmp_op;
1581 /* jump to the wanted target */
1582 if (op > 1)
1583 t = gjmp_cond(op ^ inv, t);
1584 else if (op != inv)
1585 t = gjmp(t);
1586 /* resolve complementary jumps to here */
1587 gsym(u);
1589 vtop--;
1590 return t;
1593 /* generate a zero or nozero test */
1594 static void gen_test_zero(int op)
1596 if (vtop->r == VT_CMP) {
1597 int j;
1598 if (op == TOK_EQ) {
1599 j = vtop->jfalse;
1600 vtop->jfalse = vtop->jtrue;
1601 vtop->jtrue = j;
1602 vtop->cmp_op ^= 1;
1604 } else {
1605 vpushi(0);
1606 gen_op(op);
1610 /* ------------------------------------------------------------------------- */
1611 /* push a symbol value of TYPE */
1612 ST_FUNC void vpushsym(CType *type, Sym *sym)
1614 CValue cval;
1615 cval.i = 0;
1616 vsetc(type, VT_CONST | VT_SYM, &cval);
1617 vtop->sym = sym;
1620 /* Return a static symbol pointing to a section */
1621 ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1623 int v;
1624 Sym *sym;
1626 v = anon_sym++;
1627 sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
1628 sym->type.t |= VT_STATIC;
1629 put_extern_sym(sym, sec, offset, size);
1630 return sym;
1633 /* push a reference to a section offset by adding a dummy symbol */
1634 static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
1636 vpushsym(type, get_sym_ref(type, sec, offset, size));
1639 /* define a new external reference to a symbol 'v' of type 'u' */
1640 ST_FUNC Sym *external_global_sym(int v, CType *type)
1642 Sym *s;
1644 s = sym_find(v);
1645 if (!s) {
1646 /* push forward reference */
1647 s = global_identifier_push(v, type->t | VT_EXTERN, 0);
1648 s->type.ref = type->ref;
1649 } else if (IS_ASM_SYM(s)) {
1650 s->type.t = type->t | (s->type.t & VT_EXTERN);
1651 s->type.ref = type->ref;
1652 update_storage(s);
1654 return s;
1657 /* create an external reference with no specific type similar to asm labels.
1658 This avoids type conflicts if the symbol is used from C too */
1659 ST_FUNC Sym *external_helper_sym(int v)
1661 CType ct = { VT_ASM_FUNC, NULL };
1662 return external_global_sym(v, &ct);
1665 /* push a reference to an helper function (such as memmove) */
1666 ST_FUNC void vpush_helper_func(int v)
1668 vpushsym(&func_old_type, external_helper_sym(v));
1671 /* Merge symbol attributes. */
1672 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1674 if (sa1->aligned && !sa->aligned)
1675 sa->aligned = sa1->aligned;
1676 sa->packed |= sa1->packed;
1677 sa->weak |= sa1->weak;
1678 if (sa1->visibility != STV_DEFAULT) {
1679 int vis = sa->visibility;
1680 if (vis == STV_DEFAULT
1681 || vis > sa1->visibility)
1682 vis = sa1->visibility;
1683 sa->visibility = vis;
1685 sa->dllexport |= sa1->dllexport;
1686 sa->nodecorate |= sa1->nodecorate;
1687 sa->dllimport |= sa1->dllimport;
1690 /* Merge function attributes. */
1691 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1693 if (fa1->func_call && !fa->func_call)
1694 fa->func_call = fa1->func_call;
1695 if (fa1->func_type && !fa->func_type)
1696 fa->func_type = fa1->func_type;
1697 if (fa1->func_args && !fa->func_args)
1698 fa->func_args = fa1->func_args;
1699 if (fa1->func_noreturn)
1700 fa->func_noreturn = 1;
1701 if (fa1->func_ctor)
1702 fa->func_ctor = 1;
1703 if (fa1->func_dtor)
1704 fa->func_dtor = 1;
1707 /* Merge attributes. */
1708 static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
1710 merge_symattr(&ad->a, &ad1->a);
1711 merge_funcattr(&ad->f, &ad1->f);
1713 if (ad1->section)
1714 ad->section = ad1->section;
1715 if (ad1->alias_target)
1716 ad->alias_target = ad1->alias_target;
1717 if (ad1->asm_label)
1718 ad->asm_label = ad1->asm_label;
1719 if (ad1->attr_mode)
1720 ad->attr_mode = ad1->attr_mode;
1723 /* Merge some type attributes. */
1724 static void patch_type(Sym *sym, CType *type)
1726 if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
1727 if (!(sym->type.t & VT_EXTERN))
1728 tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
1729 sym->type.t &= ~VT_EXTERN;
1732 if (IS_ASM_SYM(sym)) {
1733 /* stay static if both are static */
1734 sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
1735 sym->type.ref = type->ref;
1738 if (!is_compatible_types(&sym->type, type)) {
1739 tcc_error("incompatible types for redefinition of '%s'",
1740 get_tok_str(sym->v, NULL));
1742 } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
1743 int static_proto = sym->type.t & VT_STATIC;
1744 /* warn if static follows non-static function declaration */
1745 if ((type->t & VT_STATIC) && !static_proto
1746 /* XXX this test for inline shouldn't be here. Until we
1747 implement gnu-inline mode again it silences a warning for
1748 mingw caused by our workarounds. */
1749 && !((type->t | sym->type.t) & VT_INLINE))
1750 tcc_warning("static storage ignored for redefinition of '%s'",
1751 get_tok_str(sym->v, NULL));
1753 /* set 'inline' if both agree or if one has static */
1754 if ((type->t | sym->type.t) & VT_INLINE) {
1755 if (!((type->t ^ sym->type.t) & VT_INLINE)
1756 || ((type->t | sym->type.t) & VT_STATIC))
1757 static_proto |= VT_INLINE;
1760 if (0 == (type->t & VT_EXTERN)) {
1761 struct FuncAttr f = sym->type.ref->f;
1762 /* put complete type, use static from prototype */
1763 sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
1764 sym->type.ref = type->ref;
1765 merge_funcattr(&sym->type.ref->f, &f);
1766 } else {
1767 sym->type.t &= ~VT_INLINE | static_proto;
1770 if (sym->type.ref->f.func_type == FUNC_OLD
1771 && type->ref->f.func_type != FUNC_OLD) {
1772 sym->type.ref = type->ref;
1775 } else {
1776 if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
1777 /* set array size if it was omitted in extern declaration */
1778 sym->type.ref->c = type->ref->c;
1780 if ((type->t ^ sym->type.t) & VT_STATIC)
1781 tcc_warning("storage mismatch for redefinition of '%s'",
1782 get_tok_str(sym->v, NULL));
1786 /* Merge some storage attributes. */
1787 static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
1789 if (type)
1790 patch_type(sym, type);
1792 #ifdef TCC_TARGET_PE
1793 if (sym->a.dllimport != ad->a.dllimport)
1794 tcc_error("incompatible dll linkage for redefinition of '%s'",
1795 get_tok_str(sym->v, NULL));
1796 #endif
1797 merge_symattr(&sym->a, &ad->a);
1798 if (ad->asm_label)
1799 sym->asm_label = ad->asm_label;
1800 update_storage(sym);
1803 /* copy sym to other stack */
1804 static Sym *sym_copy(Sym *s0, Sym **ps)
1806 Sym *s;
1807 s = sym_malloc(), *s = *s0;
1808 s->prev = *ps, *ps = s;
1809 if (s->v < SYM_FIRST_ANOM) {
1810 ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
1811 s->prev_tok = *ps, *ps = s;
1813 return s;
1816 /* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
1817 static void sym_copy_ref(Sym *s, Sym **ps)
1819 int bt = s->type.t & VT_BTYPE;
1820 if (bt == VT_FUNC || bt == VT_PTR || (bt == VT_STRUCT && s->sym_scope)) {
1821 Sym **sp = &s->type.ref;
1822 for (s = *sp, *sp = NULL; s; s = s->next) {
1823 Sym *s2 = sym_copy(s, ps);
1824 sp = &(*sp = s2)->next;
1825 sym_copy_ref(s2, ps);
1830 /* define a new external reference to a symbol 'v' */
1831 static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
1833 Sym *s;
1835 /* look for global symbol */
1836 s = sym_find(v);
1837 while (s && s->sym_scope)
1838 s = s->prev_tok;
1840 if (!s) {
1841 /* push forward reference */
1842 s = global_identifier_push(v, type->t, 0);
1843 s->r |= r;
1844 s->a = ad->a;
1845 s->asm_label = ad->asm_label;
1846 s->type.ref = type->ref;
1847 /* copy type to the global stack */
1848 if (local_stack)
1849 sym_copy_ref(s, &global_stack);
1850 } else {
1851 patch_storage(s, ad, type);
1853 /* push variables on local_stack if any */
1854 if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
1855 s = sym_copy(s, &local_stack);
1856 return s;
1859 /* save registers up to (vtop - n) stack entry */
1860 ST_FUNC void save_regs(int n)
1862 SValue *p, *p1;
1863 for(p = vstack, p1 = vtop - n; p <= p1; p++)
1864 save_reg(p->r);
1867 /* save r to the memory stack, and mark it as being free */
1868 ST_FUNC void save_reg(int r)
1870 save_reg_upstack(r, 0);
1873 /* save r to the memory stack, and mark it as being free,
1874 if seen up to (vtop - n) stack entry */
1875 ST_FUNC void save_reg_upstack(int r, int n)
1877 int l, size, align, bt;
1878 SValue *p, *p1, sv;
1880 if ((r &= VT_VALMASK) >= VT_CONST)
1881 return;
1882 if (nocode_wanted)
1883 return;
1884 l = 0;
1885 for(p = vstack, p1 = vtop - n; p <= p1; p++) {
1886 if ((p->r & VT_VALMASK) == r || p->r2 == r) {
1887 /* must save value on stack if not already done */
1888 if (!l) {
1889 bt = p->type.t & VT_BTYPE;
1890 if (bt == VT_VOID)
1891 continue;
1892 if ((p->r & VT_LVAL) || bt == VT_FUNC)
1893 bt = VT_PTR;
1894 sv.type.t = bt;
1895 size = type_size(&sv.type, &align);
1896 l = get_temp_local_var(size,align);
1897 sv.r = VT_LOCAL | VT_LVAL;
1898 sv.c.i = l;
1899 store(p->r & VT_VALMASK, &sv);
1900 #if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
1901 /* x86 specific: need to pop fp register ST0 if saved */
1902 if (r == TREG_ST0) {
1903 o(0xd8dd); /* fstp %st(0) */
1905 #endif
1906 /* special long long case */
1907 if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
1908 sv.c.i += PTR_SIZE;
1909 store(p->r2, &sv);
1912 /* mark that stack entry as being saved on the stack */
1913 if (p->r & VT_LVAL) {
1914 /* also clear the bounded flag because the
1915 relocation address of the function was stored in
1916 p->c.i */
1917 p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
1918 } else {
1919 p->r = VT_LVAL | VT_LOCAL;
1921 p->sym = NULL;
1922 p->r2 = VT_CONST;
1923 p->c.i = l;
1928 #ifdef TCC_TARGET_ARM
1929 /* find a register of class 'rc2' with at most one reference on stack.
1930 * If none, call get_reg(rc) */
1931 ST_FUNC int get_reg_ex(int rc, int rc2)
1933 int r;
1934 SValue *p;
1936 for(r=0;r<NB_REGS;r++) {
1937 if (reg_classes[r] & rc2) {
1938 int n;
1939 n=0;
1940 for(p = vstack; p <= vtop; p++) {
1941 if ((p->r & VT_VALMASK) == r ||
1942 p->r2 == r)
1943 n++;
1945 if (n <= 1)
1946 return r;
1949 return get_reg(rc);
1951 #endif
1953 /* find a free register of class 'rc'. If none, save one register */
1954 ST_FUNC int get_reg(int rc)
1956 int r;
1957 SValue *p;
1959 /* find a free register */
1960 for(r=0;r<NB_REGS;r++) {
1961 if (reg_classes[r] & rc) {
1962 if (nocode_wanted)
1963 return r;
1964 for(p=vstack;p<=vtop;p++) {
1965 if ((p->r & VT_VALMASK) == r ||
1966 p->r2 == r)
1967 goto notfound;
1969 return r;
1971 notfound: ;
1974 /* no register left : free the first one on the stack (VERY
1975 IMPORTANT to start from the bottom to ensure that we don't
1976 spill registers used in gen_opi()) */
1977 for(p=vstack;p<=vtop;p++) {
1978 /* look at second register (if long long) */
1979 r = p->r2;
1980 if (r < VT_CONST && (reg_classes[r] & rc))
1981 goto save_found;
1982 r = p->r & VT_VALMASK;
1983 if (r < VT_CONST && (reg_classes[r] & rc)) {
1984 save_found:
1985 save_reg(r);
1986 return r;
    /* Should never come here */
    return -1;
}

/* Find a free temporary local variable (return its offset on the stack)
   matching the requested size and alignment.  Reuse is first-fit: a slot is
   considered free when no value on the vstack still refers to its location.
   If none fits, a new temporary stack slot is allocated. */
static int get_temp_local_var(int size, int align)
{
    int i;
    struct temp_local_variable *temp_var;
    int found_var;
    SValue *p;
    int r;
    char free;
    char found;
    found = 0;
    for (i = 0; i < nb_temp_local_vars; i++) {
        temp_var = &arr_temp_local_vars[i];
        if (temp_var->size < size || align != temp_var->align) {
            continue;
        }
        /* check if temp_var is free */
        free = 1;
        for (p = vstack; p <= vtop; p++) {
            r = p->r & VT_VALMASK;
            if (r == VT_LOCAL || r == VT_LLOCAL) {
                if (p->c.i == temp_var->location) {
                    free = 0;
                    break;
                }
            }
        }
        if (free) {
            found_var = temp_var->location;
            found = 1;
            break;
        }
    }
    if (!found) {
        loc = (loc - size) & -align;
        if (nb_temp_local_vars < MAX_TEMP_LOCAL_VARIABLE_NUMBER) {
            temp_var = &arr_temp_local_vars[i];
            temp_var->location = loc;
            temp_var->size = size;
            temp_var->align = align;
            nb_temp_local_vars++;
        }
        found_var = loc;
    }
    return found_var;
}

static void clear_temp_local_var_list()
{
    nb_temp_local_vars = 0;
}
2043 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
2044 if needed */
2045 static void move_reg(int r, int s, int t)
2047 SValue sv;
2049 if (r != s) {
2050 save_reg(r);
2051 sv.type.t = t;
2052 sv.type.ref = NULL;
2053 sv.r = s;
2054 sv.c.i = 0;
2055 load(r, &sv);
2059 /* get address of vtop (vtop MUST BE an lvalue) */
2060 ST_FUNC void gaddrof(void)
2062 vtop->r &= ~VT_LVAL;
2063 /* tricky: if saved lvalue, then we can go back to lvalue */
2064 if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
2065 vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
2068 #ifdef CONFIG_TCC_BCHECK
2069 /* generate a bounded pointer addition */
2070 static void gen_bounded_ptr_add(void)
2072 int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
2073 if (save) {
2074 vpushv(&vtop[-1]);
2075 vrott(3);
2077 vpush_helper_func(TOK___bound_ptr_add);
2078 vrott(3);
2079 gfunc_call(2);
2080 vtop -= save;
2081 vpushi(0);
2082 /* returned pointer is in REG_IRET */
2083 vtop->r = REG_IRET | VT_BOUNDED;
2084 if (nocode_wanted)
2085 return;
2086 /* relocation offset of the bounding function call point */
2087 vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
2090 /* patch pointer addition in vtop so that pointer dereferencing is
2091 also tested */
2092 static void gen_bounded_ptr_deref(void)
2094 addr_t func;
2095 int size, align;
2096 ElfW_Rel *rel;
2097 Sym *sym;
2099 if (nocode_wanted)
2100 return;
2102 size = type_size(&vtop->type, &align);
2103 switch(size) {
2104 case 1: func = TOK___bound_ptr_indir1; break;
2105 case 2: func = TOK___bound_ptr_indir2; break;
2106 case 4: func = TOK___bound_ptr_indir4; break;
2107 case 8: func = TOK___bound_ptr_indir8; break;
2108 case 12: func = TOK___bound_ptr_indir12; break;
2109 case 16: func = TOK___bound_ptr_indir16; break;
2110 default:
2111 /* may happen with struct member access */
2112 return;
2114 sym = external_helper_sym(func);
2115 if (!sym->c)
2116 put_extern_sym(sym, NULL, 0, 0);
2117 /* patch relocation */
2118 /* XXX: find a better solution ? */
2119 rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
2120 rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
2123 /* generate lvalue bound code */
2124 static void gbound(void)
2126 CType type1;
2128 vtop->r &= ~VT_MUSTBOUND;
2129 /* if lvalue, then use checking code before dereferencing */
2130 if (vtop->r & VT_LVAL) {
2131 /* if not VT_BOUNDED value, then make one */
2132 if (!(vtop->r & VT_BOUNDED)) {
2133 /* must save the type because we temporarily turn it into a plain pointer to take the address */
2134 type1 = vtop->type;
2135 vtop->type.t = VT_PTR;
2136 gaddrof();
2137 vpushi(0);
2138 gen_bounded_ptr_add();
2139 vtop->r |= VT_LVAL;
2140 vtop->type = type1;
2142 /* then check for dereferencing */
2143 gen_bounded_ptr_deref();
2147 /* we need to call __bound_ptr_add before we start to load function
2148 args into registers */
2149 ST_FUNC void gbound_args(int nb_args)
2151 int i, v;
2152 SValue *sv;
2154 for (i = 1; i <= nb_args; ++i)
2155 if (vtop[1 - i].r & VT_MUSTBOUND) {
2156 vrotb(i);
2157 gbound();
2158 vrott(i);
2161 sv = vtop - nb_args;
2162 if (sv->r & VT_SYM) {
2163 v = sv->sym->v;
2164 if (v == TOK_setjmp
2165 || v == TOK__setjmp
2166 #ifndef TCC_TARGET_PE
2167 || v == TOK_sigsetjmp
2168 || v == TOK___sigsetjmp
2169 #endif
2171 vpush_helper_func(TOK___bound_setjmp);
2172 vpushv(sv + 1);
2173 gfunc_call(1);
2174 func_bound_add_epilog = 1;
2176 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
2177 if (v == TOK_alloca)
2178 func_bound_add_epilog = 1;
2179 #endif
2180 #if TARGETOS_NetBSD
2181 if (v == TOK_longjmp) /* undo rename to __longjmp14 */
2182 sv->sym->asm_label = TOK___bound_longjmp;
2183 #endif
2187 /* Add bounds for local symbols from S to E (via ->prev) */
2188 static void add_local_bounds(Sym *s, Sym *e)
2190 for (; s != e; s = s->prev) {
2191 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
2192 continue;
2193 /* Add arrays/structs/unions because we always take address */
2194 if ((s->type.t & VT_ARRAY)
2195 || (s->type.t & VT_BTYPE) == VT_STRUCT
2196 || s->a.addrtaken) {
2197 /* add local bound info */
2198 int align, size = type_size(&s->type, &align);
2199 addr_t *bounds_ptr = section_ptr_add(lbounds_section,
2200 2 * sizeof(addr_t));
2201 bounds_ptr[0] = s->c;
2202 bounds_ptr[1] = size;
2206 #endif
2208 /* Wrapper around sym_pop, that potentially also registers local bounds. */
2209 static void pop_local_syms(Sym *b, int keep)
2211 #ifdef CONFIG_TCC_BCHECK
2212 if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
2213 add_local_bounds(local_stack, b);
2214 #endif
2215 if (debug_modes)
2216 tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
2217 sym_pop(&local_stack, b, keep);
2220 static void incr_bf_adr(int o)
2222 vtop->type = char_pointer_type;
2223 gaddrof();
2224 vpushs(o);
2225 gen_op('+');
2226 vtop->type.t = VT_BYTE | VT_UNSIGNED;
2227 vtop->r |= VT_LVAL;
2230 /* single-byte load mode for packed or otherwise unaligned bitfields */
2231 static void load_packed_bf(CType *type, int bit_pos, int bit_size)
2233 int n, o, bits;
2234 save_reg_upstack(vtop->r, 1);
2235 vpush64(type->t & VT_BTYPE, 0); // B X
2236 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
2237 do {
2238 vswap(); // X B
2239 incr_bf_adr(o);
2240 vdup(); // X B B
2241 n = 8 - bit_pos;
2242 if (n > bit_size)
2243 n = bit_size;
2244 if (bit_pos)
2245 vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
2246 if (n < 8)
2247 vpushi((1 << n) - 1), gen_op('&');
2248 gen_cast(type);
2249 if (bits)
2250 vpushi(bits), gen_op(TOK_SHL);
2251 vrotb(3); // B Y X
2252 gen_op('|'); // B X
2253 bits += n, bit_size -= n, o = 1;
2254 } while (bit_size);
2255 vswap(), vpop();
2256 if (!(type->t & VT_UNSIGNED)) {
2257 n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
2258 vpushi(n), gen_op(TOK_SHL);
2259 vpushi(n), gen_op(TOK_SAR);
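/* Illustrative example (added, not part of the original source): the
   byte-wise loop above is the kind of access used for an unaligned layout
   such as

       struct __attribute__((packed)) S { char c; int f : 20; };

   where 'f' may straddle byte boundaries at an odd offset: each iteration
   fetches one byte, shifts it into position and ORs it into the result, and
   the final SHL/SAR pair sign-extends the assembled value for signed fields. */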
2263 /* single-byte store mode for packed or otherwise unaligned bitfields */
2264 static void store_packed_bf(int bit_pos, int bit_size)
2266 int bits, n, o, m, c;
2267 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2268 vswap(); // X B
2269 save_reg_upstack(vtop->r, 1);
2270 bits = 0, o = bit_pos >> 3, bit_pos &= 7;
2271 do {
2272 incr_bf_adr(o); // X B
2273 vswap(); //B X
2274 c ? vdup() : gv_dup(); // B V X
2275 vrott(3); // X B V
2276 if (bits)
2277 vpushi(bits), gen_op(TOK_SHR);
2278 if (bit_pos)
2279 vpushi(bit_pos), gen_op(TOK_SHL);
2280 n = 8 - bit_pos;
2281 if (n > bit_size)
2282 n = bit_size;
2283 if (n < 8) {
2284 m = ((1 << n) - 1) << bit_pos;
2285 vpushi(m), gen_op('&'); // X B V1
2286 vpushv(vtop-1); // X B V1 B
2287 vpushi(m & 0x80 ? ~m & 0x7f : ~m);
2288 gen_op('&'); // X B V1 B1
2289 gen_op('|'); // X B V2
2291 vdup(), vtop[-1] = vtop[-2]; // X B B V2
2292 vstore(), vpop(); // X B
2293 bits += n, bit_size -= n, bit_pos = 0, o = 1;
2294 } while (bit_size);
2295 vpop(), vpop();
2298 static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
2300 int t;
2301 if (0 == sv->type.ref)
2302 return 0;
2303 t = sv->type.ref->auxtype;
2304 if (t != -1 && t != VT_STRUCT) {
2305 sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
2306 sv->r |= VT_LVAL;
2308 return t;
2311 /* store vtop in a register belonging to class 'rc'. lvalues are
2312 converted to values. Cannot be used if the value cannot be converted
2313 to a register value (such as structures). */
2314 ST_FUNC int gv(int rc)
2316 int r, r2, r_ok, r2_ok, rc2, bt;
2317 int bit_pos, bit_size, size, align;
2319 /* NOTE: get_reg can modify vstack[] */
2320 if (vtop->type.t & VT_BITFIELD) {
2321 CType type;
2323 bit_pos = BIT_POS(vtop->type.t);
2324 bit_size = BIT_SIZE(vtop->type.t);
2325 /* remove bit field info to avoid loops */
2326 vtop->type.t &= ~VT_STRUCT_MASK;
2328 type.ref = NULL;
2329 type.t = vtop->type.t & VT_UNSIGNED;
2330 if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
2331 type.t |= VT_UNSIGNED;
2333 r = adjust_bf(vtop, bit_pos, bit_size);
2335 if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
2336 type.t |= VT_LLONG;
2337 else
2338 type.t |= VT_INT;
2340 if (r == VT_STRUCT) {
2341 load_packed_bf(&type, bit_pos, bit_size);
2342 } else {
2343 int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
2344 /* cast to int to propagate signedness in following ops */
2345 gen_cast(&type);
2346 /* generate shifts */
2347 vpushi(bits - (bit_pos + bit_size));
2348 gen_op(TOK_SHL);
2349 vpushi(bits - bit_size);
2350 /* NOTE: transformed to SHR if unsigned */
2351 gen_op(TOK_SAR);
2353 r = gv(rc);
2354 } else {
2355 if (is_float(vtop->type.t) &&
2356 (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
2357 /* CPUs usually cannot use float constants, so we store them
2358 generically in data segment */
2359 init_params p = { rodata_section };
2360 unsigned long offset;
2361 size = type_size(&vtop->type, &align);
2362 if (NODATA_WANTED)
2363 size = 0, align = 1;
2364 offset = section_add(p.sec, size, align);
2365 vpush_ref(&vtop->type, p.sec, offset, size);
2366 vswap();
2367 init_putv(&p, &vtop->type, offset);
2368 vtop->r |= VT_LVAL;
2370 #ifdef CONFIG_TCC_BCHECK
2371 if (vtop->r & VT_MUSTBOUND)
2372 gbound();
2373 #endif
2375 bt = vtop->type.t & VT_BTYPE;
2377 #ifdef TCC_TARGET_RISCV64
2378 /* XXX mega hack */
2379 if (bt == VT_LDOUBLE && rc == RC_FLOAT)
2380 rc = RC_INT;
2381 #endif
2382 rc2 = RC2_TYPE(bt, rc);
2384 /* need to reload if:
2385 - constant
2386 - lvalue (need to dereference pointer)
2387 - already a register, but not in the right class */
2388 r = vtop->r & VT_VALMASK;
2389 r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
2390 r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));
2392 if (!r_ok || !r2_ok) {
2393 if (!r_ok)
2394 r = get_reg(rc);
2395 if (rc2) {
2396 int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
2397 int original_type = vtop->type.t;
2399 /* two register type load :
2400 expand to two words temporarily */
2401 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
2402 /* load constant */
2403 unsigned long long ll = vtop->c.i;
2404 vtop->c.i = ll; /* first word */
2405 load(r, vtop);
2406 vtop->r = r; /* save register value */
2407 vpushi(ll >> 32); /* second word */
2408 } else if (vtop->r & VT_LVAL) {
2409 /* We do not want to modify the long long pointer here.
2410 So we save any other instances down the stack */
2411 save_reg_upstack(vtop->r, 1);
2412 /* load from memory */
2413 vtop->type.t = load_type;
2414 load(r, vtop);
2415 vdup();
2416 vtop[-1].r = r; /* save register value */
2417 /* increment pointer to get second word */
2418 vtop->type.t = VT_PTRDIFF_T;
2419 gaddrof();
2420 vpushs(PTR_SIZE);
2421 gen_op('+');
2422 vtop->r |= VT_LVAL;
2423 vtop->type.t = load_type;
2424 } else {
2425 /* move registers */
2426 if (!r_ok)
2427 load(r, vtop);
2428 if (r2_ok && vtop->r2 < VT_CONST)
2429 goto done;
2430 vdup();
2431 vtop[-1].r = r; /* save register value */
2432 vtop->r = vtop[-1].r2;
2434 /* Allocate second register. Here we rely on the fact that
2435 get_reg() tries first to free r2 of an SValue. */
2436 r2 = get_reg(rc2);
2437 load(r2, vtop);
2438 vpop();
2439 /* write second register */
2440 vtop->r2 = r2;
2441 done:
2442 vtop->type.t = original_type;
2443 } else {
2444 if (vtop->r == VT_CMP)
2445 vset_VT_JMP();
2446 /* one register type load */
2447 load(r, vtop);
2450 vtop->r = r;
2451 #ifdef TCC_TARGET_C67
2452 /* uses register pairs for doubles */
2453 if (bt == VT_DOUBLE)
2454 vtop->r2 = r+1;
2455 #endif
2457 return r;
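/* Illustrative note (added, not part of the original source): on a 32-bit
   target (PTR_SIZE == 4) an expression of type long long is materialized by
   gv() as a register pair: the low word ends up in 'r' and the high word in
   a second register recorded in vtop->r2.  For example, loading a 64-bit
   lvalue takes the VT_LVAL branch above: the low word is loaded, the pointer
   is advanced by PTR_SIZE, and the high word is loaded into r2. */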
2460 /* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
2461 ST_FUNC void gv2(int rc1, int rc2)
2463 /* generate the more generic register class first. But VT_JMP or VT_CMP
2464 values must be generated first in all cases to avoid possible
2465 reload errors */
2466 if (vtop->r != VT_CMP && rc1 <= rc2) {
2467 vswap();
2468 gv(rc1);
2469 vswap();
2470 gv(rc2);
2471 /* test if reload is needed for first register */
2472 if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
2473 vswap();
2474 gv(rc1);
2475 vswap();
2477 } else {
2478 gv(rc2);
2479 vswap();
2480 gv(rc1);
2481 vswap();
2482 /* test if reload is needed for first register */
2483 if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
2484 gv(rc2);
2489 #if PTR_SIZE == 4
2490 /* expand 64bit on stack in two ints */
2491 ST_FUNC void lexpand(void)
2493 int u, v;
2494 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
2495 v = vtop->r & (VT_VALMASK | VT_LVAL);
2496 if (v == VT_CONST) {
2497 vdup();
2498 vtop[0].c.i >>= 32;
2499 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
2500 vdup();
2501 vtop[0].c.i += 4;
2502 } else {
2503 gv(RC_INT);
2504 vdup();
2505 vtop[0].r = vtop[-1].r2;
2506 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
2508 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
2510 #endif
2512 #if PTR_SIZE == 4
2513 /* build a long long from two ints */
2514 static void lbuild(int t)
2516 gv2(RC_INT, RC_INT);
2517 vtop[-1].r2 = vtop[0].r;
2518 vtop[-1].type.t = t;
2519 vpop();
2521 #endif
2523 /* convert stack entry to register and duplicate its value in another
2524 register */
2525 static void gv_dup(void)
2527 int t, rc, r;
2529 t = vtop->type.t;
2530 #if PTR_SIZE == 4
2531 if ((t & VT_BTYPE) == VT_LLONG) {
2532 if (t & VT_BITFIELD) {
2533 gv(RC_INT);
2534 t = vtop->type.t;
2536 lexpand();
2537 gv_dup();
2538 vswap();
2539 vrotb(3);
2540 gv_dup();
2541 vrotb(4);
2542 /* stack: H L L1 H1 */
2543 lbuild(t);
2544 vrotb(3);
2545 vrotb(3);
2546 vswap();
2547 lbuild(t);
2548 vswap();
2549 return;
2551 #endif
2552 /* duplicate value */
2553 rc = RC_TYPE(t);
2554 gv(rc);
2555 r = get_reg(rc);
2556 vdup();
2557 load(r, vtop);
2558 vtop->r = r;
2561 #if PTR_SIZE == 4
2562 /* generate CPU independent (unsigned) long long operations */
2563 static void gen_opl(int op)
2565 int t, a, b, op1, c, i;
2566 int func;
2567 unsigned short reg_iret = REG_IRET;
2568 unsigned short reg_lret = REG_IRE2;
2569 SValue tmp;
2571 switch(op) {
2572 case '/':
2573 case TOK_PDIV:
2574 func = TOK___divdi3;
2575 goto gen_func;
2576 case TOK_UDIV:
2577 func = TOK___udivdi3;
2578 goto gen_func;
2579 case '%':
2580 func = TOK___moddi3;
2581 goto gen_mod_func;
2582 case TOK_UMOD:
2583 func = TOK___umoddi3;
2584 gen_mod_func:
2585 #ifdef TCC_ARM_EABI
2586 reg_iret = TREG_R2;
2587 reg_lret = TREG_R3;
2588 #endif
2589 gen_func:
2590 /* call generic long long function */
2591 vpush_helper_func(func);
2592 vrott(3);
2593 gfunc_call(2);
2594 vpushi(0);
2595 vtop->r = reg_iret;
2596 vtop->r2 = reg_lret;
2597 break;
2598 case '^':
2599 case '&':
2600 case '|':
2601 case '*':
2602 case '+':
2603 case '-':
2604 //pv("gen_opl A",0,2);
2605 t = vtop->type.t;
2606 vswap();
2607 lexpand();
2608 vrotb(3);
2609 lexpand();
2610 /* stack: L1 H1 L2 H2 */
2611 tmp = vtop[0];
2612 vtop[0] = vtop[-3];
2613 vtop[-3] = tmp;
2614 tmp = vtop[-2];
2615 vtop[-2] = vtop[-3];
2616 vtop[-3] = tmp;
2617 vswap();
2618 /* stack: H1 H2 L1 L2 */
2619 //pv("gen_opl B",0,4);
2620 if (op == '*') {
2621 vpushv(vtop - 1);
2622 vpushv(vtop - 1);
2623 gen_op(TOK_UMULL);
2624 lexpand();
2625 /* stack: H1 H2 L1 L2 ML MH */
2626 for(i=0;i<4;i++)
2627 vrotb(6);
2628 /* stack: ML MH H1 H2 L1 L2 */
2629 tmp = vtop[0];
2630 vtop[0] = vtop[-2];
2631 vtop[-2] = tmp;
2632 /* stack: ML MH H1 L2 H2 L1 */
2633 gen_op('*');
2634 vrotb(3);
2635 vrotb(3);
2636 gen_op('*');
2637 /* stack: ML MH M1 M2 */
2638 gen_op('+');
2639 gen_op('+');
2640 } else if (op == '+' || op == '-') {
2641 /* XXX: add non carry method too (for MIPS or alpha) */
2642 if (op == '+')
2643 op1 = TOK_ADDC1;
2644 else
2645 op1 = TOK_SUBC1;
2646 gen_op(op1);
2647 /* stack: H1 H2 (L1 op L2) */
2648 vrotb(3);
2649 vrotb(3);
2650 gen_op(op1 + 1); /* TOK_xxxC2 */
2651 } else {
2652 gen_op(op);
2653 /* stack: H1 H2 (L1 op L2) */
2654 vrotb(3);
2655 vrotb(3);
2656 /* stack: (L1 op L2) H1 H2 */
2657 gen_op(op);
2658 /* stack: (L1 op L2) (H1 op H2) */
2660 /* stack: L H */
2661 lbuild(t);
2662 break;
2663 case TOK_SAR:
2664 case TOK_SHR:
2665 case TOK_SHL:
2666 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
2667 t = vtop[-1].type.t;
2668 vswap();
2669 lexpand();
2670 vrotb(3);
2671 /* stack: L H shift */
2672 c = (int)vtop->c.i;
2673 /* constant: simpler */
2674 /* NOTE: all comments are for SHL. the other cases are
2675 done by swapping words */
2676 vpop();
2677 if (op != TOK_SHL)
2678 vswap();
2679 if (c >= 32) {
2680 /* stack: L H */
2681 vpop();
2682 if (c > 32) {
2683 vpushi(c - 32);
2684 gen_op(op);
2686 if (op != TOK_SAR) {
2687 vpushi(0);
2688 } else {
2689 gv_dup();
2690 vpushi(31);
2691 gen_op(TOK_SAR);
2693 vswap();
2694 } else {
2695 vswap();
2696 gv_dup();
2697 /* stack: H L L */
2698 vpushi(c);
2699 gen_op(op);
2700 vswap();
2701 vpushi(32 - c);
2702 if (op == TOK_SHL)
2703 gen_op(TOK_SHR);
2704 else
2705 gen_op(TOK_SHL);
2706 vrotb(3);
2707 /* stack: L L H */
2708 vpushi(c);
2709 if (op == TOK_SHL)
2710 gen_op(TOK_SHL);
2711 else
2712 gen_op(TOK_SHR);
2713 gen_op('|');
2715 if (op != TOK_SHL)
2716 vswap();
2717 lbuild(t);
2718 } else {
2719 /* XXX: should provide a faster fallback on x86 ? */
2720 switch(op) {
2721 case TOK_SAR:
2722 func = TOK___ashrdi3;
2723 goto gen_func;
2724 case TOK_SHR:
2725 func = TOK___lshrdi3;
2726 goto gen_func;
2727 case TOK_SHL:
2728 func = TOK___ashldi3;
2729 goto gen_func;
2732 break;
2733 default:
2734 /* compare operations */
2735 t = vtop->type.t;
2736 vswap();
2737 lexpand();
2738 vrotb(3);
2739 lexpand();
2740 /* stack: L1 H1 L2 H2 */
2741 tmp = vtop[-1];
2742 vtop[-1] = vtop[-2];
2743 vtop[-2] = tmp;
2744 /* stack: L1 L2 H1 H2 */
2745 save_regs(4);
2746 /* compare high */
2747 op1 = op;
2748 /* when values are equal, we need to compare low words. since
2749 the jump is inverted, we invert the test too. */
2750 if (op1 == TOK_LT)
2751 op1 = TOK_LE;
2752 else if (op1 == TOK_GT)
2753 op1 = TOK_GE;
2754 else if (op1 == TOK_ULT)
2755 op1 = TOK_ULE;
2756 else if (op1 == TOK_UGT)
2757 op1 = TOK_UGE;
2758 a = 0;
2759 b = 0;
2760 gen_op(op1);
2761 if (op == TOK_NE) {
2762 b = gvtst(0, 0);
2763 } else {
2764 a = gvtst(1, 0);
2765 if (op != TOK_EQ) {
2766 /* generate non equal test */
2767 vpushi(0);
2768 vset_VT_CMP(TOK_NE);
2769 b = gvtst(0, 0);
2772 /* compare low. Always unsigned */
2773 op1 = op;
2774 if (op1 == TOK_LT)
2775 op1 = TOK_ULT;
2776 else if (op1 == TOK_LE)
2777 op1 = TOK_ULE;
2778 else if (op1 == TOK_GT)
2779 op1 = TOK_UGT;
2780 else if (op1 == TOK_GE)
2781 op1 = TOK_UGE;
2782 gen_op(op1);
2783 #if 0//def TCC_TARGET_I386
2784 if (op == TOK_NE) { gsym(b); break; }
2785 if (op == TOK_EQ) { gsym(a); break; }
2786 #endif
2787 gvtst_set(1, a);
2788 gvtst_set(0, b);
2789 break;
2792 #endif
2794 static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
2796 uint64_t x = (a >> 63 ? -a : a) / (b >> 63 ? -b : b);
2797 return (a ^ b) >> 63 ? -x : x;
2800 static int gen_opic_lt(uint64_t a, uint64_t b)
2802 return (a ^ (uint64_t)1 << 63) < (b ^ (uint64_t)1 << 63);
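/* Worked example (added, not part of the original source): flipping the sign
   bit maps signed order onto unsigned order.  With 8-bit values for brevity,
   -1 (0xFF) and 1 (0x01) become 0x7F and 0x81 after XOR with 0x80, and the
   unsigned comparison 0x7F < 0x81 correctly reports -1 < 1.  gen_opic_lt()
   does the same with bit 63 for 64-bit constants. */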
2805 /* handle integer constant optimizations and various
2806 machine-independent optimizations */
2807 static void gen_opic(int op)
2809 SValue *v1 = vtop - 1;
2810 SValue *v2 = vtop;
2811 int t1 = v1->type.t & VT_BTYPE;
2812 int t2 = v2->type.t & VT_BTYPE;
2813 int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2814 int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
2815 uint64_t l1 = c1 ? v1->c.i : 0;
2816 uint64_t l2 = c2 ? v2->c.i : 0;
2817 int shm = (t1 == VT_LLONG) ? 63 : 31;
2819 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2820 l1 = ((uint32_t)l1 |
2821 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2822 if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
2823 l2 = ((uint32_t)l2 |
2824 (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));
2826 if (c1 && c2) {
2827 switch(op) {
2828 case '+': l1 += l2; break;
2829 case '-': l1 -= l2; break;
2830 case '&': l1 &= l2; break;
2831 case '^': l1 ^= l2; break;
2832 case '|': l1 |= l2; break;
2833 case '*': l1 *= l2; break;
2835 case TOK_PDIV:
2836 case '/':
2837 case '%':
2838 case TOK_UDIV:
2839 case TOK_UMOD:
2840 /* if division by zero, generate explicit division */
2841 if (l2 == 0) {
2842 if (const_wanted && !(nocode_wanted & unevalmask))
2843 tcc_error("division by zero in constant");
2844 goto general_case;
2846 switch(op) {
2847 default: l1 = gen_opic_sdiv(l1, l2); break;
2848 case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
2849 case TOK_UDIV: l1 = l1 / l2; break;
2850 case TOK_UMOD: l1 = l1 % l2; break;
2852 break;
2853 case TOK_SHL: l1 <<= (l2 & shm); break;
2854 case TOK_SHR: l1 >>= (l2 & shm); break;
2855 case TOK_SAR:
2856 l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
2857 break;
2858 /* tests */
2859 case TOK_ULT: l1 = l1 < l2; break;
2860 case TOK_UGE: l1 = l1 >= l2; break;
2861 case TOK_EQ: l1 = l1 == l2; break;
2862 case TOK_NE: l1 = l1 != l2; break;
2863 case TOK_ULE: l1 = l1 <= l2; break;
2864 case TOK_UGT: l1 = l1 > l2; break;
2865 case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
2866 case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
2867 case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
2868 case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
2869 /* logical */
2870 case TOK_LAND: l1 = l1 && l2; break;
2871 case TOK_LOR: l1 = l1 || l2; break;
2872 default:
2873 goto general_case;
2875 if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
2876 l1 = ((uint32_t)l1 |
2877 (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
2878 v1->c.i = l1;
2879 vtop--;
2880 } else {
2881 /* if commutative ops, put c2 as constant */
2882 if (c1 && (op == '+' || op == '&' || op == '^' ||
2883 op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
2884 vswap();
2885 c2 = c1; //c = c1, c1 = c2, c2 = c;
2886 l2 = l1; //l = l1, l1 = l2, l2 = l;
2888 if (!const_wanted &&
2889 c1 && ((l1 == 0 &&
2890 (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
2891 (l1 == -1 && op == TOK_SAR))) {
2892 /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
2893 vtop--;
2894 } else if (!const_wanted &&
2895 c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
2896 (op == '|' &&
2897 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
2898 (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
2899 /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
2900 if (l2 == 1)
2901 vtop->c.i = 0;
2902 vswap();
2903 vtop--;
2904 } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
2905 op == TOK_PDIV) &&
2906 l2 == 1) ||
2907 ((op == '+' || op == '-' || op == '|' || op == '^' ||
2908 op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
2909 l2 == 0) ||
2910 (op == '&' &&
2911 (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
2912 /* filter out NOP operations like x*1, x-0, x&-1... */
2913 vtop--;
2914 } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
2915 /* try to use shifts instead of muls or divs */
2916 if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
2917 int n = -1;
2918 while (l2) {
2919 l2 >>= 1;
2920 n++;
2922 vtop->c.i = n;
2923 if (op == '*')
2924 op = TOK_SHL;
2925 else if (op == TOK_PDIV)
2926 op = TOK_SAR;
2927 else
2928 op = TOK_SHR;
2930 goto general_case;
2931 } else if (c2 && (op == '+' || op == '-') &&
2932 (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
2933 || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
2934 /* symbol + constant case */
2935 if (op == '-')
2936 l2 = -l2;
2937 l2 += vtop[-1].c.i;
2938 /* The backends can't always deal with addends to symbols
2939 larger than +-1<<31. Don't construct such. */
2940 if ((int)l2 != l2)
2941 goto general_case;
2942 vtop--;
2943 vtop->c.i = l2;
2944 } else {
2945 general_case:
2946 /* call low level op generator */
2947 if (t1 == VT_LLONG || t2 == VT_LLONG ||
2948 (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
2949 gen_opl(op);
2950 else
2951 gen_opi(op);
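/* Illustrative examples (added, not part of the original source) of what
   gen_opic() folds at compile time:

       x * 8          ->  x << 3       (power-of-two strength reduction)
       x + 0, x & -1  ->  x            (NOP operations are dropped)
       3 + 4          ->  7            (both operands constant)

   Anything it cannot simplify falls through to gen_opl()/gen_opi() above. */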
2956 #if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
2957 # define gen_negf gen_opf
2958 #elif defined TCC_TARGET_ARM
2959 void gen_negf(int op)
2961 /* arm will detect 0-x and replace by vneg */
2962 vpushi(0), vswap(), gen_op('-');
2964 #else
2965 /* XXX: implement in gen_opf() for other backends too */
2966 void gen_negf(int op)
2968 /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
2969 subtract(-0, x), but with them it's really a sign flip
2970 operation. We implement this with bit manipulation and have
2971 to do some type reinterpretation for this, which TCC can do
2972 only via memory. */
2974 int align, size, bt;
2976 size = type_size(&vtop->type, &align);
2977 bt = vtop->type.t & VT_BTYPE;
2978 save_reg(gv(RC_TYPE(bt)));
2979 vdup();
2980 incr_bf_adr(size - 1);
2981 vdup();
2982 vpushi(0x80); /* flip sign */
2983 gen_op('^');
2984 vstore();
2985 vpop();
2987 #endif
2989 /* generate a floating point operation with constant propagation */
2990 static void gen_opif(int op)
2992 int c1, c2;
2993 SValue *v1, *v2;
2994 #if defined _MSC_VER && defined __x86_64__
2995 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2996 volatile
2997 #endif
2998 long double f1, f2;
3000 v1 = vtop - 1;
3001 v2 = vtop;
3002 if (op == TOK_NEG)
3003 v1 = v2;
3005 /* currently, we cannot do computations with forward symbols */
3006 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3007 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3008 if (c1 && c2) {
3009 if (v1->type.t == VT_FLOAT) {
3010 f1 = v1->c.f;
3011 f2 = v2->c.f;
3012 } else if (v1->type.t == VT_DOUBLE) {
3013 f1 = v1->c.d;
3014 f2 = v2->c.d;
3015 } else {
3016 f1 = v1->c.ld;
3017 f2 = v2->c.ld;
3019 /* NOTE: we only do constant propagation if finite number (not
3020 NaN or infinity) (ANSI spec) */
3021 if (!(ieee_finite(f1) || !ieee_finite(f2)) && !const_wanted)
3022 goto general_case;
3023 switch(op) {
3024 case '+': f1 += f2; break;
3025 case '-': f1 -= f2; break;
3026 case '*': f1 *= f2; break;
3027 case '/':
3028 if (f2 == 0.0) {
3029 union { float f; unsigned u; } x1, x2, y;
3030 /* If not in initializer we need to potentially generate
3031 FP exceptions at runtime, otherwise we want to fold. */
3032 if (!const_wanted)
3033 goto general_case;
3034 /* the run-time result of 0.0/0.0 on x87, also of other compilers
3035 when used to compile the f1 /= f2 below, would be -nan */
3036 x1.f = f1, x2.f = f2;
3037 if (f1 == 0.0)
3038 y.u = 0x7fc00000; /* nan */
3039 else
3040 y.u = 0x7f800000; /* infinity */
3041 y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
3042 f1 = y.f;
3043 break;
3045 f1 /= f2;
3046 break;
3047 case TOK_NEG:
3048 f1 = -f1;
3049 goto unary_result;
3050 /* XXX: also handles tests ? */
3051 default:
3052 goto general_case;
3054 vtop--;
3055 unary_result:
3056 /* XXX: overflow test ? */
3057 if (v1->type.t == VT_FLOAT) {
3058 v1->c.f = f1;
3059 } else if (v1->type.t == VT_DOUBLE) {
3060 v1->c.d = f1;
3061 } else {
3062 v1->c.ld = f1;
3064 } else {
3065 general_case:
3066 if (op == TOK_NEG) {
3067 gen_negf(op);
3068 } else {
3069 gen_opf(op);
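/* Illustrative note (added, not part of the original source): in a constant
   context (const_wanted) gen_opif() folds floating-point operations itself.
   For example a static initializer such as

       static double d = 1.0 / 0.0;   // folds to +infinity at compile time

   goes through the union-based special case above so that the sign, and the
   NaN result of 0.0/0.0, match what the division would produce at run time. */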
3074 /* print a type. If 'varstr' is not NULL, then the variable is also
3075 printed in the type */
3076 /* XXX: union */
3077 /* XXX: add array and function pointers */
3078 static void type_to_str(char *buf, int buf_size,
3079 CType *type, const char *varstr)
3081 int bt, v, t;
3082 Sym *s, *sa;
3083 char buf1[256];
3084 const char *tstr;
3086 t = type->t;
3087 bt = t & VT_BTYPE;
3088 buf[0] = '\0';
3090 if (t & VT_EXTERN)
3091 pstrcat(buf, buf_size, "extern ");
3092 if (t & VT_STATIC)
3093 pstrcat(buf, buf_size, "static ");
3094 if (t & VT_TYPEDEF)
3095 pstrcat(buf, buf_size, "typedef ");
3096 if (t & VT_INLINE)
3097 pstrcat(buf, buf_size, "inline ");
3098 if (bt != VT_PTR) {
3099 if (t & VT_VOLATILE)
3100 pstrcat(buf, buf_size, "volatile ");
3101 if (t & VT_CONSTANT)
3102 pstrcat(buf, buf_size, "const ");
3104 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3105 || ((t & VT_UNSIGNED)
3106 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3107 && !IS_ENUM(t)
3109 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3111 buf_size -= strlen(buf);
3112 buf += strlen(buf);
3114 switch(bt) {
3115 case VT_VOID:
3116 tstr = "void";
3117 goto add_tstr;
3118 case VT_BOOL:
3119 tstr = "_Bool";
3120 goto add_tstr;
3121 case VT_BYTE:
3122 tstr = "char";
3123 goto add_tstr;
3124 case VT_SHORT:
3125 tstr = "short";
3126 goto add_tstr;
3127 case VT_INT:
3128 tstr = "int";
3129 goto maybe_long;
3130 case VT_LLONG:
3131 tstr = "long long";
3132 maybe_long:
3133 if (t & VT_LONG)
3134 tstr = "long";
3135 if (!IS_ENUM(t))
3136 goto add_tstr;
3137 tstr = "enum ";
3138 goto tstruct;
3139 case VT_FLOAT:
3140 tstr = "float";
3141 goto add_tstr;
3142 case VT_DOUBLE:
3143 tstr = "double";
3144 if (!(t & VT_LONG))
3145 goto add_tstr;
3146 case VT_LDOUBLE:
3147 tstr = "long double";
3148 add_tstr:
3149 pstrcat(buf, buf_size, tstr);
3150 break;
3151 case VT_STRUCT:
3152 tstr = "struct ";
3153 if (IS_UNION(t))
3154 tstr = "union ";
3155 tstruct:
3156 pstrcat(buf, buf_size, tstr);
3157 v = type->ref->v & ~SYM_STRUCT;
3158 if (v >= SYM_FIRST_ANOM)
3159 pstrcat(buf, buf_size, "<anonymous>");
3160 else
3161 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3162 break;
3163 case VT_FUNC:
3164 s = type->ref;
3165 buf1[0]=0;
3166 if (varstr && '*' == *varstr) {
3167 pstrcat(buf1, sizeof(buf1), "(");
3168 pstrcat(buf1, sizeof(buf1), varstr);
3169 pstrcat(buf1, sizeof(buf1), ")");
3171 pstrcat(buf1, buf_size, "(");
3172 sa = s->next;
3173 while (sa != NULL) {
3174 char buf2[256];
3175 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3176 pstrcat(buf1, sizeof(buf1), buf2);
3177 sa = sa->next;
3178 if (sa)
3179 pstrcat(buf1, sizeof(buf1), ", ");
3181 if (s->f.func_type == FUNC_ELLIPSIS)
3182 pstrcat(buf1, sizeof(buf1), ", ...");
3183 pstrcat(buf1, sizeof(buf1), ")");
3184 type_to_str(buf, buf_size, &s->type, buf1);
3185 goto no_var;
3186 case VT_PTR:
3187 s = type->ref;
3188 if (t & VT_ARRAY) {
3189 if (varstr && '*' == *varstr)
3190 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3191 else
3192 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3193 type_to_str(buf, buf_size, &s->type, buf1);
3194 goto no_var;
3196 pstrcpy(buf1, sizeof(buf1), "*");
3197 if (t & VT_CONSTANT)
3198 pstrcat(buf1, buf_size, "const ");
3199 if (t & VT_VOLATILE)
3200 pstrcat(buf1, buf_size, "volatile ");
3201 if (varstr)
3202 pstrcat(buf1, sizeof(buf1), varstr);
3203 type_to_str(buf, buf_size, &s->type, buf1);
3204 goto no_var;
3206 if (varstr) {
3207 pstrcat(buf, buf_size, " ");
3208 pstrcat(buf, buf_size, varstr);
3210 no_var: ;
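/* Illustrative examples (added, not part of the original source) of strings
   produced by type_to_str(), as used by the error/warning helpers below:

       unsigned int *       for a pointer to unsigned int
       char [16]            for an array of 16 char (with varstr == NULL)
       int (*)(char *)      for a pointer to a function taking char *

   The exact spelling follows the switch above; these are only meant to show
   the flavour of the output. */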
3213 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
3215 char buf1[256], buf2[256];
3216 type_to_str(buf1, sizeof(buf1), st, NULL);
3217 type_to_str(buf2, sizeof(buf2), dt, NULL);
3218 tcc_error(fmt, buf1, buf2);
3221 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
3223 char buf1[256], buf2[256];
3224 type_to_str(buf1, sizeof(buf1), st, NULL);
3225 type_to_str(buf2, sizeof(buf2), dt, NULL);
3226 tcc_warning(fmt, buf1, buf2);
3229 static int pointed_size(CType *type)
3231 int align;
3232 return type_size(pointed_type(type), &align);
3235 static void vla_runtime_pointed_size(CType *type)
3237 int align;
3238 vla_runtime_type_size(pointed_type(type), &align);
3241 static inline int is_null_pointer(SValue *p)
3243 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
3244 return 0;
3245 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
3246 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
3247 ((p->type.t & VT_BTYPE) == VT_PTR &&
3248 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
3249 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
3250 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
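/* Illustrative note (added, not part of the original source): the checks
   above accept the usual spellings of a null pointer constant, e.g.

       p == 0          // integer constant 0
       p == 0LL        // long long constant 0
       p == (void *)0  // 0 cast to unqualified void *

   A constant like (char *)0 is deliberately not treated as a null pointer
   constant here, since the pointed-to type must be plain void. */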
3254 /* compare function types. OLD functions match any new functions */
3255 static int is_compatible_func(CType *type1, CType *type2)
3257 Sym *s1, *s2;
3259 s1 = type1->ref;
3260 s2 = type2->ref;
3261 if (s1->f.func_call != s2->f.func_call)
3262 return 0;
3263 if (s1->f.func_type != s2->f.func_type
3264 && s1->f.func_type != FUNC_OLD
3265 && s2->f.func_type != FUNC_OLD)
3266 return 0;
3267 for (;;) {
3268 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
3269 return 0;
3270 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
3271 return 1;
3272 s1 = s1->next;
3273 s2 = s2->next;
3274 if (!s1)
3275 return !s2;
3276 if (!s2)
3277 return 0;
3281 /* return true if type1 and type2 are the same. If unqualified is
3282 true, qualifiers on the types are ignored.
3284 static int compare_types(CType *type1, CType *type2, int unqualified)
3286 int bt1, t1, t2;
3288 t1 = type1->t & VT_TYPE;
3289 t2 = type2->t & VT_TYPE;
3290 if (unqualified) {
3291 /* strip qualifiers before comparing */
3292 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3293 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3296 /* Default Vs explicit signedness only matters for char */
3297 if ((t1 & VT_BTYPE) != VT_BYTE) {
3298 t1 &= ~VT_DEFSIGN;
3299 t2 &= ~VT_DEFSIGN;
3301 /* XXX: bitfields ? */
3302 if (t1 != t2)
3303 return 0;
3305 if ((t1 & VT_ARRAY)
3306 && !(type1->ref->c < 0
3307 || type2->ref->c < 0
3308 || type1->ref->c == type2->ref->c))
3309 return 0;
3311 /* test more complicated cases */
3312 bt1 = t1 & VT_BTYPE;
3313 if (bt1 == VT_PTR) {
3314 type1 = pointed_type(type1);
3315 type2 = pointed_type(type2);
3316 return is_compatible_types(type1, type2);
3317 } else if (bt1 == VT_STRUCT) {
3318 return (type1->ref == type2->ref);
3319 } else if (bt1 == VT_FUNC) {
3320 return is_compatible_func(type1, type2);
3321 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
3322 /* If both are enums then they must be the same, if only one is then
3323 t1 and t2 must be equal, which was checked above already. */
3324 return type1->ref == type2->ref;
3325 } else {
3326 return 1;
3330 /* Check if OP1 and OP2 can be "combined" with operation OP, the combined
3331 type is stored in DEST if non-null (except for pointer plus/minus). */
3332 static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
3334 CType *type1 = &op1->type, *type2 = &op2->type, type;
3335 int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
3336 int ret = 1;
3338 type.t = VT_VOID;
3339 type.ref = NULL;
3341 if (bt1 == VT_VOID || bt2 == VT_VOID) {
3342 ret = op == '?' ? 1 : 0;
3343 /* NOTE: as an extension, we accept void on only one side */
3344 type.t = VT_VOID;
3345 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3346 if (op == '+') ; /* Handled in caller */
3347 /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
3348 /* If one is a null ptr constant the result type is the other. */
3349 else if (is_null_pointer (op2)) type = *type1;
3350 else if (is_null_pointer (op1)) type = *type2;
3351 else if (bt1 != bt2) {
3352 /* accept comparison or cond-expr between pointer and integer
3353 with a warning */
3354 if ((op == '?' || TOK_ISCOND(op))
3355 && (is_integer_btype(bt1) || is_integer_btype(bt2)))
3356 tcc_warning("pointer/integer mismatch in %s",
3357 op == '?' ? "conditional expression" : "comparison");
3358 else if (op != '-' || !is_integer_btype(bt2))
3359 ret = 0;
3360 type = *(bt1 == VT_PTR ? type1 : type2);
3361 } else {
3362 CType *pt1 = pointed_type(type1);
3363 CType *pt2 = pointed_type(type2);
3364 int pbt1 = pt1->t & VT_BTYPE;
3365 int pbt2 = pt2->t & VT_BTYPE;
3366 int newquals, copied = 0;
3367 if (pbt1 != VT_VOID && pbt2 != VT_VOID
3368 && !compare_types(pt1, pt2, 1/*unqualif*/)) {
3369 if (op != '?' && !TOK_ISCOND(op))
3370 ret = 0;
3371 else
3372 type_incompatibility_warning(type1, type2,
3373 op == '?'
3374 ? "pointer type mismatch in conditional expression ('%s' and '%s')"
3375 : "pointer type mismatch in comparison('%s' and '%s')");
3377 if (op == '?') {
3378 /* pointers to void get preferred, otherwise the
3379 pointed to types minus qualifs should be compatible */
3380 type = *((pbt1 == VT_VOID) ? type1 : type2);
3381 /* combine qualifs */
3382 newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
3383 if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
3384 & newquals)
3386 /* copy the pointer target symbol */
3387 type.ref = sym_push(SYM_FIELD, &type.ref->type,
3388 0, type.ref->c);
3389 copied = 1;
3390 pointed_type(&type)->t |= newquals;
3392 /* pointers to incomplete arrays get converted to
3393 pointers to completed ones if possible */
3394 if (pt1->t & VT_ARRAY
3395 && pt2->t & VT_ARRAY
3396 && pointed_type(&type)->ref->c < 0
3397 && (pt1->ref->c > 0 || pt2->ref->c > 0))
3399 if (!copied)
3400 type.ref = sym_push(SYM_FIELD, &type.ref->type,
3401 0, type.ref->c);
3402 pointed_type(&type)->ref =
3403 sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
3404 0, pointed_type(&type)->ref->c);
3405 pointed_type(&type)->ref->c =
3406 0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
3410 if (TOK_ISCOND(op))
3411 type.t = VT_SIZE_T;
3412 } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
3413 if (op != '?' || !compare_types(type1, type2, 1))
3414 ret = 0;
3415 type = *type1;
3416 } else if (is_float(bt1) || is_float(bt2)) {
3417 if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
3418 type.t = VT_LDOUBLE;
3419 } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
3420 type.t = VT_DOUBLE;
3421 } else {
3422 type.t = VT_FLOAT;
3424 } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
3425 /* cast to biggest op */
3426 type.t = VT_LLONG | VT_LONG;
3427 if (bt1 == VT_LLONG)
3428 type.t &= t1;
3429 if (bt2 == VT_LLONG)
3430 type.t &= t2;
3431 /* convert to unsigned if it does not fit in a long long */
3432 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
3433 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
3434 type.t |= VT_UNSIGNED;
3435 } else {
3436 /* integer operations */
3437 type.t = VT_INT | (VT_LONG & (t1 | t2));
3438 /* convert to unsigned if it does not fit in an integer */
3439 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
3440 (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
3441 type.t |= VT_UNSIGNED;
3443 if (dest)
3444 *dest = type;
3445 return ret;
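/* Illustrative examples (added, not part of the original source) of the
   combined type computed above:

       int + unsigned int    -> unsigned int
       long long + int       -> long long
       float + double        -> double
       c ? (void *)0 : p     -> type of p   (null pointer constant rule)
*/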
3448 /* generic gen_op: handles types problems */
3449 ST_FUNC void gen_op(int op)
3451 int u, t1, t2, bt1, bt2, t;
3452 CType type1, combtype;
3454 redo:
3455 t1 = vtop[-1].type.t;
3456 t2 = vtop[0].type.t;
3457 bt1 = t1 & VT_BTYPE;
3458 bt2 = t2 & VT_BTYPE;
3460 if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
3461 if (bt2 == VT_FUNC) {
3462 mk_pointer(&vtop->type);
3463 gaddrof();
3465 if (bt1 == VT_FUNC) {
3466 vswap();
3467 mk_pointer(&vtop->type);
3468 gaddrof();
3469 vswap();
3471 goto redo;
3472 } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
3473 tcc_error_noabort("invalid operand types for binary operation");
3474 vpop();
3475 } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
3476 /* at least one operand is a pointer */
3477 /* relational op: must be both pointers */
3478 if (TOK_ISCOND(op))
3479 goto std_op;
3480 /* if both pointers, then it must be the '-' op */
3481 if (bt1 == VT_PTR && bt2 == VT_PTR) {
3482 if (op != '-')
3483 tcc_error("cannot use pointers here");
3484 if (vtop[-1].type.t & VT_VLA) {
3485 vla_runtime_pointed_size(&vtop[-1].type);
3486 } else {
3487 vpushi(pointed_size(&vtop[-1].type));
3489 vrott(3);
3490 gen_opic(op);
3491 vtop->type.t = VT_PTRDIFF_T;
3492 vswap();
3493 gen_op(TOK_PDIV);
3494 } else {
3495 /* exactly one pointer : must be '+' or '-'. */
3496 if (op != '-' && op != '+')
3497 tcc_error("cannot use pointers here");
3498 /* Put pointer as first operand */
3499 if (bt2 == VT_PTR) {
3500 vswap();
3501 t = t1, t1 = t2, t2 = t;
3503 #if PTR_SIZE == 4
3504 if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
3505 /* XXX: truncate here because gen_opl can't handle ptr + long long */
3506 gen_cast_s(VT_INT);
3507 #endif
3508 type1 = vtop[-1].type;
3509 if (vtop[-1].type.t & VT_VLA)
3510 vla_runtime_pointed_size(&vtop[-1].type);
3511 else {
3512 u = pointed_size(&vtop[-1].type);
3513 if (u < 0)
3514 tcc_error("unknown array element size");
3515 #if PTR_SIZE == 8
3516 vpushll(u);
3517 #else
3518 /* XXX: cast to int ? (long long case) */
3519 vpushi(u);
3520 #endif
3522 gen_op('*');
3523 #ifdef CONFIG_TCC_BCHECK
3524 if (tcc_state->do_bounds_check && !const_wanted) {
3525 /* if bounded pointers, we generate special code to
3526 test the bounds */
3527 if (op == '-') {
3528 vpushi(0);
3529 vswap();
3530 gen_op('-');
3532 gen_bounded_ptr_add();
3533 } else
3534 #endif
3536 gen_opic(op);
3538 type1.t &= ~VT_ARRAY;
3539 /* restore the type in case gen_opic() swapped the operands */
3540 vtop->type = type1;
3542 } else {
3543 /* floats can only be used for a few operations */
3544 if (is_float(combtype.t)
3545 && op != '+' && op != '-' && op != '*' && op != '/'
3546 && !TOK_ISCOND(op))
3547 tcc_error("invalid operands for binary operation");
3548 else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
3549 t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
3550 if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
3551 t |= VT_UNSIGNED;
3552 t |= (VT_LONG & t1);
3553 combtype.t = t;
3555 std_op:
3556 t = t2 = combtype.t;
3557 /* XXX: currently, some unsigned operations are explicit, so
3558 we modify them here */
3559 if (t & VT_UNSIGNED) {
3560 if (op == TOK_SAR)
3561 op = TOK_SHR;
3562 else if (op == '/')
3563 op = TOK_UDIV;
3564 else if (op == '%')
3565 op = TOK_UMOD;
3566 else if (op == TOK_LT)
3567 op = TOK_ULT;
3568 else if (op == TOK_GT)
3569 op = TOK_UGT;
3570 else if (op == TOK_LE)
3571 op = TOK_ULE;
3572 else if (op == TOK_GE)
3573 op = TOK_UGE;
3575 vswap();
3576 gen_cast_s(t);
3577 vswap();
3578 /* special case for shifts and long long: we keep the shift as
3579 an integer */
3580 if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
3581 t2 = VT_INT;
3582 gen_cast_s(t2);
3583 if (is_float(t))
3584 gen_opif(op);
3585 else
3586 gen_opic(op);
3587 if (TOK_ISCOND(op)) {
3588 /* relational op: the result is an int */
3589 vtop->type.t = VT_INT;
3590 } else {
3591 vtop->type.t = t;
3594 // Make sure that we have converted to an rvalue:
3595 if (vtop->r & VT_LVAL)
3596 gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
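/* Illustrative note (added, not part of the original source): for pointer
   arithmetic gen_op() scales the integer operand by the element size, so

       int *p;  ... p + n ...

   is compiled roughly as p + n * sizeof(int), and p - q for two int pointers
   becomes (p - q) / sizeof(int) via the TOK_PDIV path above. */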
3599 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3600 #define gen_cvt_itof1 gen_cvt_itof
3601 #else
3602 /* generic itof for unsigned long long case */
3603 static void gen_cvt_itof1(int t)
3605 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
3606 (VT_LLONG | VT_UNSIGNED)) {
3608 if (t == VT_FLOAT)
3609 vpush_helper_func(TOK___floatundisf);
3610 #if LDOUBLE_SIZE != 8
3611 else if (t == VT_LDOUBLE)
3612 vpush_helper_func(TOK___floatundixf);
3613 #endif
3614 else
3615 vpush_helper_func(TOK___floatundidf);
3616 vrott(2);
3617 gfunc_call(1);
3618 vpushi(0);
3619 PUT_R_RET(vtop, t);
3620 } else {
3621 gen_cvt_itof(t);
3624 #endif
3626 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3627 #define gen_cvt_ftoi1 gen_cvt_ftoi
3628 #else
3629 /* generic ftoi for unsigned long long case */
3630 static void gen_cvt_ftoi1(int t)
3632 int st;
3633 if (t == (VT_LLONG | VT_UNSIGNED)) {
3634 /* not handled natively */
3635 st = vtop->type.t & VT_BTYPE;
3636 if (st == VT_FLOAT)
3637 vpush_helper_func(TOK___fixunssfdi);
3638 #if LDOUBLE_SIZE != 8
3639 else if (st == VT_LDOUBLE)
3640 vpush_helper_func(TOK___fixunsxfdi);
3641 #endif
3642 else
3643 vpush_helper_func(TOK___fixunsdfdi);
3644 vrott(2);
3645 gfunc_call(1);
3646 vpushi(0);
3647 PUT_R_RET(vtop, t);
3648 } else {
3649 gen_cvt_ftoi(t);
3652 #endif
3654 /* special delayed cast for char/short */
3655 static void force_charshort_cast(void)
3657 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3658 int dbt = vtop->type.t;
3659 vtop->r &= ~VT_MUSTCAST;
3660 vtop->type.t = sbt;
3661 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3662 vtop->type.t = dbt;
3665 static void gen_cast_s(int t)
3667 CType type;
3668 type.t = t;
3669 type.ref = NULL;
3670 gen_cast(&type);
3673 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3674 static void gen_cast(CType *type)
3676 int sbt, dbt, sf, df, c;
3677 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
3679 /* special delayed cast for char/short */
3680 if (vtop->r & VT_MUSTCAST)
3681 force_charshort_cast();
3683 /* bitfields first get cast to ints */
3684 if (vtop->type.t & VT_BITFIELD)
3685 gv(RC_INT);
3687 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
3688 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
3689 if (sbt == VT_FUNC)
3690 sbt = VT_PTR;
3692 again:
3693 if (sbt != dbt) {
3694 sf = is_float(sbt);
3695 df = is_float(dbt);
3696 dbt_bt = dbt & VT_BTYPE;
3697 sbt_bt = sbt & VT_BTYPE;
3698 if (dbt_bt == VT_VOID)
3699 goto done;
3700 if (sbt_bt == VT_VOID) {
3701 error:
3702 cast_error(&vtop->type, type);
3705 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3706 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
3707 c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
3708 #endif
3709 if (c) {
3710 /* constant case: we can do it now */
3711 /* XXX: in ISOC, cannot do it if error in convert */
3712 if (sbt == VT_FLOAT)
3713 vtop->c.ld = vtop->c.f;
3714 else if (sbt == VT_DOUBLE)
3715 vtop->c.ld = vtop->c.d;
3717 if (df) {
3718 if (sbt_bt == VT_LLONG) {
3719 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
3720 vtop->c.ld = vtop->c.i;
3721 else
3722 vtop->c.ld = -(long double)-vtop->c.i;
3723 } else if(!sf) {
3724 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
3725 vtop->c.ld = (uint32_t)vtop->c.i;
3726 else
3727 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
3730 if (dbt == VT_FLOAT)
3731 vtop->c.f = (float)vtop->c.ld;
3732 else if (dbt == VT_DOUBLE)
3733 vtop->c.d = (double)vtop->c.ld;
3734 } else if (sf && dbt == VT_BOOL) {
3735 vtop->c.i = (vtop->c.ld != 0);
3736 } else {
3737 if(sf)
3738 vtop->c.i = vtop->c.ld;
3739 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
3741 else if (sbt & VT_UNSIGNED)
3742 vtop->c.i = (uint32_t)vtop->c.i;
3743 else
3744 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
3746 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
3748 else if (dbt == VT_BOOL)
3749 vtop->c.i = (vtop->c.i != 0);
3750 else {
3751 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
3752 dbt_bt == VT_SHORT ? 0xffff :
3753 0xffffffff;
3754 vtop->c.i &= m;
3755 if (!(dbt & VT_UNSIGNED))
3756 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
3759 goto done;
3761 } else if (dbt == VT_BOOL
3762 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
3763 == (VT_CONST | VT_SYM)) {
3764 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3765 vtop->r = VT_CONST;
3766 vtop->c.i = 1;
3767 goto done;
3770 /* cannot generate code for global or static initializers */
3771 if (STATIC_DATA_WANTED)
3772 goto done;
3774 /* non constant case: generate code */
3775 if (dbt == VT_BOOL) {
3776 gen_test_zero(TOK_NE);
3777 goto done;
3780 if (sf || df) {
3781 if (sf && df) {
3782 /* convert from fp to fp */
3783 gen_cvt_ftof(dbt);
3784 } else if (df) {
3785 /* convert int to fp */
3786 gen_cvt_itof1(dbt);
3787 } else {
3788 /* convert fp to int */
3789 sbt = dbt;
3790 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
3791 sbt = VT_INT;
3792 gen_cvt_ftoi1(sbt);
3793 goto again; /* may need char/short cast */
3795 goto done;
3798 ds = btype_size(dbt_bt);
3799 ss = btype_size(sbt_bt);
3800 if (ds == 0 || ss == 0)
3801 goto error;
3803 if (IS_ENUM(type->t) && type->ref->c < 0)
3804 tcc_error("cast to incomplete type");
3806 /* same size and no sign conversion needed */
3807 if (ds == ss && ds >= 4)
3808 goto done;
3809 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
3810 tcc_warning("cast between pointer and integer of different size");
3811 if (sbt_bt == VT_PTR) {
3812 /* put integer type to allow logical operations below */
3813 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
3817 /* the processor allows { int a = 0, b = *(char*)&a; }
3818 That means that if we cast to a smaller width, we can just
3819 change the type and still read it later. */
3820 #define ALLOW_SUBTYPE_ACCESS 1
3822 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
3823 /* value still in memory */
3824 if (ds <= ss)
3825 goto done;
3826 /* ss <= 4 here */
3827 if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
3828 gv(RC_INT);
3829 goto done; /* no 64bit involved */
3832 gv(RC_INT);
3834 trunc = 0;
3835 #if PTR_SIZE == 4
3836 if (ds == 8) {
3837 /* generate high word */
3838 if (sbt & VT_UNSIGNED) {
3839 vpushi(0);
3840 gv(RC_INT);
3841 } else {
3842 gv_dup();
3843 vpushi(31);
3844 gen_op(TOK_SAR);
3846 lbuild(dbt);
3847 } else if (ss == 8) {
3848 /* from long long: just take low order word */
3849 lexpand();
3850 vpop();
3852 ss = 4;
3854 #elif PTR_SIZE == 8
3855 if (ds == 8) {
3856 /* need to convert from 32bit to 64bit */
3857 if (sbt & VT_UNSIGNED) {
3858 #if defined(TCC_TARGET_RISCV64)
3859 /* RISC-V keeps 32bit vals in registers sign-extended.
3860 So here we need a zero-extension. */
3861 trunc = 32;
3862 #else
3863 goto done;
3864 #endif
3865 } else {
3866 gen_cvt_sxtw();
3867 goto done;
3869 ss = ds, ds = 4, dbt = sbt;
3870 } else if (ss == 8) {
3871 /* RISC-V keeps 32bit vals in registers sign-extended.
3872 So here we need a sign-extension for signed types and
3873 a zero-extension for unsigned types. */
3874 #if !defined(TCC_TARGET_RISCV64)
3875 trunc = 32; /* zero upper 32 bits for non RISC-V targets */
3876 #endif
3877 } else {
3878 ss = 4;
3880 #endif
3882 if (ds >= ss)
3883 goto done;
3884 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3885 if (ss == 4) {
3886 gen_cvt_csti(dbt);
3887 goto done;
3889 #endif
3890 bits = (ss - ds) * 8;
3891 /* for unsigned, gen_op will convert SAR to SHR */
3892 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
3893 vpushi(bits);
3894 gen_op(TOK_SHL);
3895 vpushi(bits - trunc);
3896 gen_op(TOK_SAR);
3897 vpushi(trunc);
3898 gen_op(TOK_SHR);
3900 done:
3901 vtop->type = *type;
3902 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
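/* Illustrative example (added, not part of the original source): when a
   value is already in a register, a narrowing cast such as

       int x = ...;  signed char c = (signed char)x;

   is produced by the SHL/SAR pair at the end of gen_cast(): with ss == 4 and
   ds == 1 the value is shifted left by 24 bits and arithmetically shifted
   back, which sign-extends the low byte (SHR is used instead for unsigned
   targets, and gen_cvt_csti() short-circuits this on x86/arm64). */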
3905 /* return type size as known at compile time. Put alignment at 'a' */
3906 ST_FUNC int type_size(CType *type, int *a)
3908 Sym *s;
3909 int bt;
3911 bt = type->t & VT_BTYPE;
3912 if (bt == VT_STRUCT) {
3913 /* struct/union */
3914 s = type->ref;
3915 *a = s->r;
3916 return s->c;
3917 } else if (bt == VT_PTR) {
3918 if (type->t & VT_ARRAY) {
3919 int ts;
3921 s = type->ref;
3922 ts = type_size(&s->type, a);
3924 if (ts < 0 && s->c < 0)
3925 ts = -ts;
3927 return ts * s->c;
3928 } else {
3929 *a = PTR_SIZE;
3930 return PTR_SIZE;
3932 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3933 return -1; /* incomplete enum */
3934 } else if (bt == VT_LDOUBLE) {
3935 *a = LDOUBLE_ALIGN;
3936 return LDOUBLE_SIZE;
3937 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
3938 #ifdef TCC_TARGET_I386
3939 #ifdef TCC_TARGET_PE
3940 *a = 8;
3941 #else
3942 *a = 4;
3943 #endif
3944 #elif defined(TCC_TARGET_ARM)
3945 #ifdef TCC_ARM_EABI
3946 *a = 8;
3947 #else
3948 *a = 4;
3949 #endif
3950 #else
3951 *a = 8;
3952 #endif
3953 return 8;
3954 } else if (bt == VT_INT || bt == VT_FLOAT) {
3955 *a = 4;
3956 return 4;
3957 } else if (bt == VT_SHORT) {
3958 *a = 2;
3959 return 2;
3960 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3961 *a = 8;
3962 return 16;
3963 } else {
3964 /* char, void, function, _Bool */
3965 *a = 1;
3966 return 1;
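/* Illustrative examples (added, not part of the original source), assuming a
   typical 64-bit target configuration:

       type_size(int[10], &a)   -> 40, a == 4
       type_size(short, &a)     ->  2, a == 2
       type_size(struct S, &a)  -> S->c (layout size), a == S->r (alignment)

   Incomplete enums and arrays of unknown size report a negative size. */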
3970 /* push the type size as known at runtime on top of the value stack. Put
3971 the alignment at 'a' */
3972 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
3974 if (type->t & VT_VLA) {
3975 type_size(&type->ref->type, a);
3976 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3977 } else {
3978 vpushi(type_size(type, a));
3982 /* return the pointed-to type of 'type' */
3983 static inline CType *pointed_type(CType *type)
3985 return &type->ref->type;
3988 /* modify 'type' so that it becomes a pointer to the original type. */
3989 ST_FUNC void mk_pointer(CType *type)
3991 Sym *s;
3992 s = sym_push(SYM_FIELD, type, 0, -1);
3993 type->t = VT_PTR | (type->t & VT_STORAGE);
3994 type->ref = s;
3997 /* return true if type1 and type2 are exactly the same (including
3998 qualifiers).
4000 static int is_compatible_types(CType *type1, CType *type2)
4002 return compare_types(type1,type2,0);
4005 /* return true if type1 and type2 are the same (ignoring qualifiers).
4007 static int is_compatible_unqualified_types(CType *type1, CType *type2)
4009 return compare_types(type1,type2,1);
4012 static void cast_error(CType *st, CType *dt)
4014 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
4017 /* verify type compatibility to store vtop in 'dt' type */
4018 static void verify_assign_cast(CType *dt)
4020 CType *st, *type1, *type2;
4021 int dbt, sbt, qualwarn, lvl;
4023 st = &vtop->type; /* source type */
4024 dbt = dt->t & VT_BTYPE;
4025 sbt = st->t & VT_BTYPE;
4026 if (dt->t & VT_CONSTANT)
4027 tcc_warning("assignment of read-only location");
4028 switch(dbt) {
4029 case VT_VOID:
4030 if (sbt != dbt)
4031 tcc_error("assignment to void expression");
4032 break;
4033 case VT_PTR:
4034 /* special cases for pointers */
4035 /* '0' can also be a pointer */
4036 if (is_null_pointer(vtop))
4037 break;
4038 /* accept implicit integer to pointer cast with a warning */
4039 if (is_integer_btype(sbt)) {
4040 tcc_warning("assignment makes pointer from integer without a cast");
4041 break;
4043 type1 = pointed_type(dt);
4044 if (sbt == VT_PTR)
4045 type2 = pointed_type(st);
4046 else if (sbt == VT_FUNC)
4047 type2 = st; /* a function is implicitly a function pointer */
4048 else
4049 goto error;
4050 if (is_compatible_types(type1, type2))
4051 break;
4052 for (qualwarn = lvl = 0;; ++lvl) {
4053 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
4054 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
4055 qualwarn = 1;
4056 dbt = type1->t & (VT_BTYPE|VT_LONG);
4057 sbt = type2->t & (VT_BTYPE|VT_LONG);
4058 if (dbt != VT_PTR || sbt != VT_PTR)
4059 break;
4060 type1 = pointed_type(type1);
4061 type2 = pointed_type(type2);
4063 if (!is_compatible_unqualified_types(type1, type2)) {
4064 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
4065 /* void * can match anything */
4066 } else if (dbt == sbt
4067 && is_integer_btype(sbt & VT_BTYPE)
4068 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
4069 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
4070 /* Like GCC, don't warn by default for mere changes
4071 in pointer target signedness. Do warn for different
4072 base types, though, in particular for unsigned enums
4073 and signed int targets. */
4074 } else {
4075 tcc_warning("assignment from incompatible pointer type");
4076 break;
4079 if (qualwarn)
4080 tcc_warning("assignment discards qualifiers from pointer target type");
4081 break;
4082 case VT_BYTE:
4083 case VT_SHORT:
4084 case VT_INT:
4085 case VT_LLONG:
4086 if (sbt == VT_PTR || sbt == VT_FUNC) {
4087 tcc_warning("assignment makes integer from pointer without a cast");
4088 } else if (sbt == VT_STRUCT) {
4089 goto case_VT_STRUCT;
4091 /* XXX: more tests */
4092 break;
4093 case VT_STRUCT:
4094 case_VT_STRUCT:
4095 if (!is_compatible_unqualified_types(dt, st)) {
4096 error:
4097 cast_error(st, dt);
4099 break;
4103 static void gen_assign_cast(CType *dt)
4105 verify_assign_cast(dt);
4106 gen_cast(dt);
4109 /* store vtop in lvalue pushed on stack */
4110 ST_FUNC void vstore(void)
4112 int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;
4114 ft = vtop[-1].type.t;
4115 sbt = vtop->type.t & VT_BTYPE;
4116 dbt = ft & VT_BTYPE;
4118 verify_assign_cast(&vtop[-1].type);
4120 if (sbt == VT_STRUCT) {
4121 /* if structure, only generate pointer */
4122 /* structure assignment : generate memcpy */
4123 /* XXX: optimize if small size */
4124 size = type_size(&vtop->type, &align);
4126 /* destination */
4127 vswap();
4128 #ifdef CONFIG_TCC_BCHECK
4129 if (vtop->r & VT_MUSTBOUND)
4130 gbound(); /* check would be wrong after gaddrof() */
4131 #endif
4132 vtop->type.t = VT_PTR;
4133 gaddrof();
4135 /* address of memcpy() */
4136 #ifdef TCC_ARM_EABI
4137 if(!(align & 7))
4138 vpush_helper_func(TOK_memmove8);
4139 else if(!(align & 3))
4140 vpush_helper_func(TOK_memmove4);
4141 else
4142 #endif
4143 /* Use memmove, rather than memcpy, as dest and src may be the same: */
4144 vpush_helper_func(TOK_memmove);
4146 vswap();
4147 /* source */
4148 vpushv(vtop - 2);
4149 #ifdef CONFIG_TCC_BCHECK
4150 if (vtop->r & VT_MUSTBOUND)
4151 gbound();
4152 #endif
4153 vtop->type.t = VT_PTR;
4154 gaddrof();
4155 /* type size */
4156 vpushi(size);
4157 gfunc_call(3);
4158 /* leave source on stack */
4160 } else if (ft & VT_BITFIELD) {
4161 /* bitfield store handling */
4163 /* save lvalue as expression result (example: s.b = s.a = n;) */
4164 vdup(), vtop[-1] = vtop[-2];
4166 bit_pos = BIT_POS(ft);
4167 bit_size = BIT_SIZE(ft);
4168 /* remove bit field info to avoid loops */
4169 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
4171 if (dbt == VT_BOOL) {
4172 gen_cast(&vtop[-1].type);
4173 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
4175 r = adjust_bf(vtop - 1, bit_pos, bit_size);
4176 if (dbt != VT_BOOL) {
4177 gen_cast(&vtop[-1].type);
4178 dbt = vtop[-1].type.t & VT_BTYPE;
4180 if (r == VT_STRUCT) {
4181 store_packed_bf(bit_pos, bit_size);
4182 } else {
4183 unsigned long long mask = (1ULL << bit_size) - 1;
4184 if (dbt != VT_BOOL) {
4185 /* mask source */
4186 if (dbt == VT_LLONG)
4187 vpushll(mask);
4188 else
4189 vpushi((unsigned)mask);
4190 gen_op('&');
4192 /* shift source */
4193 vpushi(bit_pos);
4194 gen_op(TOK_SHL);
4195 vswap();
4196 /* duplicate destination */
4197 vdup();
4198 vrott(3);
4199 /* load destination, mask and or with source */
4200 if (dbt == VT_LLONG)
4201 vpushll(~(mask << bit_pos));
4202 else
4203 vpushi(~((unsigned)mask << bit_pos));
4204 gen_op('&');
4205 gen_op('|');
4206 /* store result */
4207 vstore();
4208 /* ... and discard */
4209 vpop();
4211 } else if (dbt == VT_VOID) {
4212 --vtop;
4213 } else {
4214 /* optimize char/short casts */
4215 delayed_cast = 0;
4216 if ((dbt == VT_BYTE || dbt == VT_SHORT)
4217 && is_integer_btype(sbt)
4219 if ((vtop->r & VT_MUSTCAST)
4220 && btype_size(dbt) > btype_size(sbt)
4222 force_charshort_cast();
4223 delayed_cast = 1;
4224 } else {
4225 gen_cast(&vtop[-1].type);
4228 #ifdef CONFIG_TCC_BCHECK
4229 /* bound check case */
4230 if (vtop[-1].r & VT_MUSTBOUND) {
4231 vswap();
4232 gbound();
4233 vswap();
4235 #endif
4236 gv(RC_TYPE(dbt)); /* generate value */
4238 if (delayed_cast) {
4239 vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
4240 //tcc_warning("delay cast %x -> %x", sbt, dbt);
4241 vtop->type.t = ft & VT_TYPE;
4244 /* if lvalue was saved on stack, must read it */
4245 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
4246 SValue sv;
4247 r = get_reg(RC_INT);
4248 sv.type.t = VT_PTRDIFF_T;
4249 sv.r = VT_LOCAL | VT_LVAL;
4250 sv.c.i = vtop[-1].c.i;
4251 load(r, &sv);
4252 vtop[-1].r = r | VT_LVAL;
4255 r = vtop->r & VT_VALMASK;
4256 /* two word case handling :
4257 store second register at word + 4 (or +8 for x86-64) */
4258 if (USING_TWO_WORDS(dbt)) {
4259 int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
4260 vtop[-1].type.t = load_type;
4261 store(r, vtop - 1);
4262 vswap();
4263 /* convert to int to increment easily */
4264 vtop->type.t = VT_PTRDIFF_T;
4265 gaddrof();
4266 vpushs(PTR_SIZE);
4267 gen_op('+');
4268 vtop->r |= VT_LVAL;
4269 vswap();
4270 vtop[-1].type.t = load_type;
4271 /* XXX: it works because r2 is spilled last ! */
4272 store(vtop->r2, vtop - 1);
4273 } else {
4274 /* single word */
4275 store(r, vtop - 1);
4277 vswap();
4278 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
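/* Illustrative note (added, not part of the original source): for an
   ordinary (non-packed) bitfield store such as

       struct S { unsigned f : 5; } s;  s.f = v;

   the code above masks the source with (1 << 5) - 1, shifts it to bit_pos,
   clears the destination bits with & ~(mask << bit_pos) and ORs the two
   halves together before storing the word back. */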
4282 /* 'post' selects post- vs. pre- increment/decrement. c is the token ++ or -- */
4283 ST_FUNC void inc(int post, int c)
4285 test_lvalue();
4286 vdup(); /* save lvalue */
4287 if (post) {
4288 gv_dup(); /* duplicate value */
4289 vrotb(3);
4290 vrotb(3);
4292 /* add constant */
4293 vpushi(c - TOK_MID);
4294 gen_op('+');
4295 vstore(); /* store value */
4296 if (post)
4297 vpop(); /* if post op, return saved value */
4300 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
4302 /* read the string */
4303 if (tok != TOK_STR)
4304 expect(msg);
4305 cstr_new(astr);
4306 while (tok == TOK_STR) {
4307 /* XXX: add \0 handling too ? */
4308 cstr_cat(astr, tokc.str.data, -1);
4309 next();
4311 cstr_ccat(astr, '\0');
4314 /* If I is >= 1, returns floor(log2(I)) + 1, i.e. log2(I) + 1 when I is
4315 a power of two. If I is 0, returns 0. */
4316 ST_FUNC int exact_log2p1(int i)
4318 int ret;
4319 if (!i)
4320 return 0;
4321 for (ret = 1; i >= 1 << 8; ret += 8)
4322 i >>= 8;
4323 if (i >= 1 << 4)
4324 ret += 4, i >>= 4;
4325 if (i >= 1 << 2)
4326 ret += 2, i >>= 2;
4327 if (i >= 1 << 1)
4328 ret++;
4329 return ret;
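/* Worked examples for exact_log2p1(), traced from the code above:
 * exact_log2p1(0) == 0, exact_log2p1(1) == 1, exact_log2p1(2) == 2,
 * exact_log2p1(8) == 4, exact_log2p1(16) == 5, exact_log2p1(4096) == 13.
 * The result is what gets stored in a.aligned and is decoded later as
 * 1 << (aligned - 1), as done in parse_attribute() and struct_layout(). */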
4332 /* Parse __attribute__((...)) GNUC extension. */
4333 static void parse_attribute(AttributeDef *ad)
4335 int t, n;
4336 CString astr;
4338 redo:
4339 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
4340 return;
4341 next();
4342 skip('(');
4343 skip('(');
4344 while (tok != ')') {
4345 if (tok < TOK_IDENT)
4346 expect("attribute name");
4347 t = tok;
4348 next();
4349 switch(t) {
4350 case TOK_CLEANUP1:
4351 case TOK_CLEANUP2:
4353 Sym *s;
4355 skip('(');
4356 s = sym_find(tok);
4357 if (!s) {
4358 tcc_warning("implicit declaration of function '%s'",
4359 get_tok_str(tok, &tokc));
4360 s = external_global_sym(tok, &func_old_type);
4361 } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
4362 tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
4363 ad->cleanup_func = s;
4364 next();
4365 skip(')');
4366 break;
4368 case TOK_CONSTRUCTOR1:
4369 case TOK_CONSTRUCTOR2:
4370 ad->f.func_ctor = 1;
4371 break;
4372 case TOK_DESTRUCTOR1:
4373 case TOK_DESTRUCTOR2:
4374 ad->f.func_dtor = 1;
4375 break;
4376 case TOK_ALWAYS_INLINE1:
4377 case TOK_ALWAYS_INLINE2:
4378 ad->f.func_alwinl = 1;
4379 break;
4380 case TOK_SECTION1:
4381 case TOK_SECTION2:
4382 skip('(');
4383 parse_mult_str(&astr, "section name");
4384 ad->section = find_section(tcc_state, (char *)astr.data);
4385 skip(')');
4386 cstr_free(&astr);
4387 break;
4388 case TOK_ALIAS1:
4389 case TOK_ALIAS2:
4390 skip('(');
4391 parse_mult_str(&astr, "alias(\"target\")");
4392 ad->alias_target = /* save string as token, for later */
4393 tok_alloc((char*)astr.data, astr.size-1)->tok;
4394 skip(')');
4395 cstr_free(&astr);
4396 break;
4397 case TOK_VISIBILITY1:
4398 case TOK_VISIBILITY2:
4399 skip('(');
4400 parse_mult_str(&astr,
4401 "visibility(\"default|hidden|internal|protected\")");
4402 if (!strcmp (astr.data, "default"))
4403 ad->a.visibility = STV_DEFAULT;
4404 else if (!strcmp (astr.data, "hidden"))
4405 ad->a.visibility = STV_HIDDEN;
4406 else if (!strcmp (astr.data, "internal"))
4407 ad->a.visibility = STV_INTERNAL;
4408 else if (!strcmp (astr.data, "protected"))
4409 ad->a.visibility = STV_PROTECTED;
4410 else
4411 expect("visibility(\"default|hidden|internal|protected\")");
4412 skip(')');
4413 cstr_free(&astr);
4414 break;
4415 case TOK_ALIGNED1:
4416 case TOK_ALIGNED2:
4417 if (tok == '(') {
4418 next();
4419 n = expr_const();
4420 if (n <= 0 || (n & (n - 1)) != 0)
4421 tcc_error("alignment must be a positive power of two");
4422 skip(')');
4423 } else {
4424 n = MAX_ALIGN;
4426 ad->a.aligned = exact_log2p1(n);
4427 if (n != 1 << (ad->a.aligned - 1))
4428 tcc_error("alignment of %d is larger than implemented", n);
4429 break;
4430 case TOK_PACKED1:
4431 case TOK_PACKED2:
4432 ad->a.packed = 1;
4433 break;
4434 case TOK_WEAK1:
4435 case TOK_WEAK2:
4436 ad->a.weak = 1;
4437 break;
4438 case TOK_UNUSED1:
4439 case TOK_UNUSED2:
4440 /* currently, no need to handle it because tcc does not
4441 track unused objects */
4442 break;
4443 case TOK_NORETURN1:
4444 case TOK_NORETURN2:
4445 ad->f.func_noreturn = 1;
4446 break;
4447 case TOK_CDECL1:
4448 case TOK_CDECL2:
4449 case TOK_CDECL3:
4450 ad->f.func_call = FUNC_CDECL;
4451 break;
4452 case TOK_STDCALL1:
4453 case TOK_STDCALL2:
4454 case TOK_STDCALL3:
4455 ad->f.func_call = FUNC_STDCALL;
4456 break;
4457 #ifdef TCC_TARGET_I386
4458 case TOK_REGPARM1:
4459 case TOK_REGPARM2:
4460 skip('(');
4461 n = expr_const();
4462 if (n > 3)
4463 n = 3;
4464 else if (n < 0)
4465 n = 0;
4466 if (n > 0)
4467 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
4468 skip(')');
4469 break;
4470 case TOK_FASTCALL1:
4471 case TOK_FASTCALL2:
4472 case TOK_FASTCALL3:
4473 ad->f.func_call = FUNC_FASTCALLW;
4474 break;
4475 #endif
4476 case TOK_MODE:
4477 skip('(');
4478 switch(tok) {
4479 case TOK_MODE_DI:
4480 ad->attr_mode = VT_LLONG + 1;
4481 break;
4482 case TOK_MODE_QI:
4483 ad->attr_mode = VT_BYTE + 1;
4484 break;
4485 case TOK_MODE_HI:
4486 ad->attr_mode = VT_SHORT + 1;
4487 break;
4488 case TOK_MODE_SI:
4489 case TOK_MODE_word:
4490 ad->attr_mode = VT_INT + 1;
4491 break;
4492 default:
4493 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
4494 break;
4496 next();
4497 skip(')');
4498 break;
4499 case TOK_DLLEXPORT:
4500 ad->a.dllexport = 1;
4501 break;
4502 case TOK_NODECORATE:
4503 ad->a.nodecorate = 1;
4504 break;
4505 case TOK_DLLIMPORT:
4506 ad->a.dllimport = 1;
4507 break;
4508 default:
4509 if (tcc_state->warn_unsupported)
4510 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
4511 /* skip parameters */
4512 if (tok == '(') {
4513 int parenthesis = 0;
4514 do {
4515 if (tok == '(')
4516 parenthesis++;
4517 else if (tok == ')')
4518 parenthesis--;
4519 next();
4520 } while (parenthesis && tok != -1);
4522 break;
4524 if (tok != ',')
4525 break;
4526 next();
4528 skip(')');
4529 skip(')');
4530 goto redo;
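/* Illustrative example (not part of the tcc sources): declarations that
 * exercise the attribute parser above:
 *
 *     int counter __attribute__((aligned(16), unused));
 *     void die(const char *msg) __attribute__((noreturn, section(".text.cold")));
 *     int old_api(void) __attribute__((alias("new_api"), visibility("hidden")));
 *
 * Unknown attributes fall into the default case and are skipped, with a
 * warning if warn_unsupported is set. */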
4533 static Sym * find_field (CType *type, int v, int *cumofs)
4535 Sym *s = type->ref;
4536 v |= SYM_FIELD;
4537 while ((s = s->next) != NULL) {
4538 if ((s->v & SYM_FIELD) &&
4539 (s->type.t & VT_BTYPE) == VT_STRUCT &&
4540 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
4541 Sym *ret = find_field (&s->type, v, cumofs);
4542 if (ret) {
4543 *cumofs += s->c;
4544 return ret;
4547 if (s->v == v)
4548 break;
4550 return s;
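/* Illustrative example (not part of the tcc sources): find_field() also
 * resolves members of anonymous structs/unions, accumulating their offset
 * in *cumofs:
 *
 *     struct outer {
 *         int a;
 *         struct { int b, c; };
 *     } o;
 *
 * Looking up 'c' recurses into the anonymous member (its v is an anonymous
 * symbol >= SYM_FIRST_ANOM) and then adds that member's own offset s->c to
 * *cumofs, so o.c addresses the right location. */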
4553 static void check_fields (CType *type, int check)
4555 Sym *s = type->ref;
4557 while ((s = s->next) != NULL) {
4558 int v = s->v & ~SYM_FIELD;
4559 if (v < SYM_FIRST_ANOM) {
4560 TokenSym *ts = table_ident[v - TOK_IDENT];
4561 if (check && (ts->tok & SYM_FIELD))
4562 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
4563 ts->tok ^= SYM_FIELD;
4564 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
4565 check_fields (&s->type, check);
4569 static void struct_layout(CType *type, AttributeDef *ad)
4571 int size, align, maxalign, offset, c, bit_pos, bit_size;
4572 int packed, a, bt, prevbt, prev_bit_size;
4573 int pcc = !tcc_state->ms_bitfields;
4574 int pragma_pack = *tcc_state->pack_stack_ptr;
4575 Sym *f;
4577 maxalign = 1;
4578 offset = 0;
4579 c = 0;
4580 bit_pos = 0;
4581 prevbt = VT_STRUCT; /* make it never match */
4582 prev_bit_size = 0;
4584 //#define BF_DEBUG
4586 for (f = type->ref->next; f; f = f->next) {
4587 if (f->type.t & VT_BITFIELD)
4588 bit_size = BIT_SIZE(f->type.t);
4589 else
4590 bit_size = -1;
4591 size = type_size(&f->type, &align);
4592 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
4593 packed = 0;
4595 if (pcc && bit_size == 0) {
4596 /* in pcc mode, packing does not affect zero-width bitfields */
4598 } else {
4599 /* in pcc mode, attribute packed overrides if set. */
4600 if (pcc && (f->a.packed || ad->a.packed))
4601 align = packed = 1;
4603 /* #pragma pack overrides align if it is smaller and always packs bitfields */
4604 if (pragma_pack) {
4605 packed = 1;
4606 if (pragma_pack < align)
4607 align = pragma_pack;
4608 /* in pcc mode pragma pack also overrides individual align */
4609 if (pcc && pragma_pack < a)
4610 a = 0;
4613 /* some individual align was specified */
4614 if (a)
4615 align = a;
4617 if (type->ref->type.t == VT_UNION) {
4618 if (pcc && bit_size >= 0)
4619 size = (bit_size + 7) >> 3;
4620 offset = 0;
4621 if (size > c)
4622 c = size;
4624 } else if (bit_size < 0) {
4625 if (pcc)
4626 c += (bit_pos + 7) >> 3;
4627 c = (c + align - 1) & -align;
4628 offset = c;
4629 if (size > 0)
4630 c += size;
4631 bit_pos = 0;
4632 prevbt = VT_STRUCT;
4633 prev_bit_size = 0;
4635 } else {
4636 /* A bit-field. Layout is more complicated. There are two
4637 options: PCC (GCC) compatible and MS compatible */
4638 if (pcc) {
4639 /* In PCC layout a bit-field is placed adjacent to the
4640 preceding bit-fields, except if:
4641 - it has zero-width
4642 - an individual alignment was given
4643 - it would overflow its base type container and
4644 there is no packing */
4645 if (bit_size == 0) {
4646 new_field:
4647 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
4648 bit_pos = 0;
4649 } else if (f->a.aligned) {
4650 goto new_field;
4651 } else if (!packed) {
4652 int a8 = align * 8;
4653 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
4654 if (ofs > size / align)
4655 goto new_field;
4658 /* in pcc mode, long long bitfields have type int if they fit */
4659 if (size == 8 && bit_size <= 32)
4660 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
4662 while (bit_pos >= align * 8)
4663 c += align, bit_pos -= align * 8;
4664 offset = c;
4666 /* In PCC layout named bit-fields influence the alignment
4667 of the containing struct using the base type's alignment,
4668 except for packed fields (which here have correct align). */
4669 if (f->v & SYM_FIRST_ANOM
4670 // && bit_size // ??? gcc on ARM/rpi does that
4672 align = 1;
4674 } else {
4675 bt = f->type.t & VT_BTYPE;
4676 if ((bit_pos + bit_size > size * 8)
4677 || (bit_size > 0) == (bt != prevbt)
4679 c = (c + align - 1) & -align;
4680 offset = c;
4681 bit_pos = 0;
4682 /* In MS bitfield mode a bit-field run always uses
4683 at least as many bits as the underlying type.
4684 To start a new run it's also required that this
4685 or the last bit-field had non-zero width. */
4686 if (bit_size || prev_bit_size)
4687 c += size;
4689 /* In MS layout the record's alignment is normally
4690 influenced by the field, except for a zero-width
4691 field at the start of a run (but by further zero-width
4692 fields it is again). */
4693 if (bit_size == 0 && prevbt != bt)
4694 align = 1;
4695 prevbt = bt;
4696 prev_bit_size = bit_size;
4699 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4700 | (bit_pos << VT_STRUCT_SHIFT);
4701 bit_pos += bit_size;
4703 if (align > maxalign)
4704 maxalign = align;
4706 #ifdef BF_DEBUG
4707 printf("set field %s offset %-2d size %-2d align %-2d",
4708 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
4709 if (f->type.t & VT_BITFIELD) {
4710 printf(" pos %-2d bits %-2d",
4711 BIT_POS(f->type.t),
4712 BIT_SIZE(f->type.t)
4715 printf("\n");
4716 #endif
4718 f->c = offset;
4719 f->r = 0;
4722 if (pcc)
4723 c += (bit_pos + 7) >> 3;
4725 /* store size and alignment */
4726 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4727 if (a < maxalign)
4728 a = maxalign;
4729 type->ref->r = a;
4730 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4731 /* can happen if individual align for some member was given. In
4732 this case MSVC ignores maxalign when aligning the size */
4733 a = pragma_pack;
4734 if (a < bt)
4735 a = bt;
4737 c = (c + a - 1) & -a;
4738 type->ref->c = c;
4740 #ifdef BF_DEBUG
4741 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4742 #endif
4744 /* check whether we can access bitfields by their type */
4745 for (f = type->ref->next; f; f = f->next) {
4746 int s, px, cx, c0;
4747 CType t;
4749 if (0 == (f->type.t & VT_BITFIELD))
4750 continue;
4751 f->type.ref = f;
4752 f->auxtype = -1;
4753 bit_size = BIT_SIZE(f->type.t);
4754 if (bit_size == 0)
4755 continue;
4756 bit_pos = BIT_POS(f->type.t);
4757 size = type_size(&f->type, &align);
4759 if (bit_pos + bit_size <= size * 8 && f->c + size <= c
4760 #ifdef TCC_TARGET_ARM
4761 && !(f->c & (align - 1))
4762 #endif
4764 continue;
4766 /* try to access the field using a different type */
4767 c0 = -1, s = align = 1;
4768 t.t = VT_BYTE;
4769 for (;;) {
4770 px = f->c * 8 + bit_pos;
4771 cx = (px >> 3) & -align;
4772 px = px - (cx << 3);
4773 if (c0 == cx)
4774 break;
4775 s = (px + bit_size + 7) >> 3;
4776 if (s > 4) {
4777 t.t = VT_LLONG;
4778 } else if (s > 2) {
4779 t.t = VT_INT;
4780 } else if (s > 1) {
4781 t.t = VT_SHORT;
4782 } else {
4783 t.t = VT_BYTE;
4785 s = type_size(&t, &align);
4786 c0 = cx;
4789 if (px + bit_size <= s * 8 && cx + s <= c
4790 #ifdef TCC_TARGET_ARM
4791 && !(cx & (align - 1))
4792 #endif
4794 /* update offset and bit position */
4795 f->c = cx;
4796 bit_pos = px;
4797 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4798 | (bit_pos << VT_STRUCT_SHIFT);
4799 if (s != size)
4800 f->auxtype = t.t;
4801 #ifdef BF_DEBUG
4802 printf("FIX field %s offset %-2d size %-2d align %-2d "
4803 "pos %-2d bits %-2d\n",
4804 get_tok_str(f->v & ~SYM_FIELD, NULL),
4805 cx, s, align, px, bit_size);
4806 #endif
4807 } else {
4808 /* fall back to load/store single-byte wise */
4809 f->auxtype = VT_STRUCT;
4810 #ifdef BF_DEBUG
4811 printf("FIX field %s : load byte-wise\n",
4812 get_tok_str(f->v & ~SYM_FIELD, NULL));
4813 #endif
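/* Illustrative example (not part of the tcc sources): with the default
 * PCC/GCC-compatible rules above, a struct like
 *
 *     struct s { int a : 3; int b : 5; char c; };
 *
 * packs 'a' and 'b' into the first int (bit positions 0 and 3 on a
 * little-endian target), places 'c' in the following byte, and pads the
 * struct to the alignment of int. When ms_bitfields is set (e.g. via
 * -mms-bitfields) the MS rules apply instead: a run of bit-fields occupies
 * at least the size of its underlying type and a change of the base type
 * starts a new run. */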
4818 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4819 static void struct_decl(CType *type, int u)
4821 int v, c, size, align, flexible;
4822 int bit_size, bsize, bt;
4823 Sym *s, *ss, **ps;
4824 AttributeDef ad, ad1;
4825 CType type1, btype;
4827 memset(&ad, 0, sizeof ad);
4828 next();
4829 parse_attribute(&ad);
4830 if (tok != '{') {
4831 v = tok;
4832 next();
4833 /* struct already defined ? return it */
4834 if (v < TOK_IDENT)
4835 expect("struct/union/enum name");
4836 s = struct_find(v);
4837 if (s && (s->sym_scope == local_scope || tok != '{')) {
4838 if (u == s->type.t)
4839 goto do_decl;
4840 if (u == VT_ENUM && IS_ENUM(s->type.t))
4841 goto do_decl;
4842 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4844 } else {
4845 v = anon_sym++;
4847 /* Record the original enum/struct/union token. */
4848 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4849 type1.ref = NULL;
4850 /* we put an undefined size for struct/union */
4851 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4852 s->r = 0; /* default alignment is zero as in gcc */
4853 do_decl:
4854 type->t = s->type.t;
4855 type->ref = s;
4857 if (tok == '{') {
4858 next();
4859 if (s->c != -1)
4860 tcc_error("struct/union/enum already defined");
4861 s->c = -2;
4862 /* cannot be empty */
4863 /* (empty enums are not allowed) */
4864 ps = &s->next;
4865 if (u == VT_ENUM) {
4866 long long ll = 0, pl = 0, nl = 0;
4867 CType t;
4868 t.ref = s;
4869 /* enum symbols have static storage */
4870 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4871 for(;;) {
4872 v = tok;
4873 if (v < TOK_UIDENT)
4874 expect("identifier");
4875 ss = sym_find(v);
4876 if (ss && !local_stack)
4877 tcc_error("redefinition of enumerator '%s'",
4878 get_tok_str(v, NULL));
4879 next();
4880 if (tok == '=') {
4881 next();
4882 ll = expr_const64();
4884 ss = sym_push(v, &t, VT_CONST, 0);
4885 ss->enum_val = ll;
4886 *ps = ss, ps = &ss->next;
4887 if (ll < nl)
4888 nl = ll;
4889 if (ll > pl)
4890 pl = ll;
4891 if (tok != ',')
4892 break;
4893 next();
4894 ll++;
4895 /* NOTE: we accept a trailing comma */
4896 if (tok == '}')
4897 break;
4899 skip('}');
4900 /* set integral type of the enum */
4901 t.t = VT_INT;
4902 if (nl >= 0) {
4903 if (pl != (unsigned)pl)
4904 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4905 t.t |= VT_UNSIGNED;
4906 } else if (pl != (int)pl || nl != (int)nl)
4907 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4908 s->type.t = type->t = t.t | VT_ENUM;
4909 s->c = 0;
4910 /* set type for enum members */
4911 for (ss = s->next; ss; ss = ss->next) {
4912 ll = ss->enum_val;
4913 if (ll == (int)ll) /* default is int if it fits */
4914 continue;
4915 if (t.t & VT_UNSIGNED) {
4916 ss->type.t |= VT_UNSIGNED;
4917 if (ll == (unsigned)ll)
4918 continue;
4920 ss->type.t = (ss->type.t & ~VT_BTYPE)
4921 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4923 } else {
4924 c = 0;
4925 flexible = 0;
4926 while (tok != '}') {
4927 if (!parse_btype(&btype, &ad1)) {
4928 skip(';');
4929 continue;
4931 while (1) {
4932 if (flexible)
4933 tcc_error("flexible array member '%s' not at the end of struct",
4934 get_tok_str(v, NULL));
4935 bit_size = -1;
4936 v = 0;
4937 type1 = btype;
4938 if (tok != ':') {
4939 if (tok != ';')
4940 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4941 if (v == 0) {
4942 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4943 expect("identifier");
4944 else {
4945 int v = btype.ref->v;
4946 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4947 if (tcc_state->ms_extensions == 0)
4948 expect("identifier");
4952 if (type_size(&type1, &align) < 0) {
4953 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4954 flexible = 1;
4955 else
4956 tcc_error("field '%s' has incomplete type",
4957 get_tok_str(v, NULL));
4959 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4960 (type1.t & VT_BTYPE) == VT_VOID ||
4961 (type1.t & VT_STORAGE))
4962 tcc_error("invalid type for '%s'",
4963 get_tok_str(v, NULL));
4965 if (tok == ':') {
4966 next();
4967 bit_size = expr_const();
4968 /* XXX: handle v = 0 case for messages */
4969 if (bit_size < 0)
4970 tcc_error("negative width in bit-field '%s'",
4971 get_tok_str(v, NULL));
4972 if (v && bit_size == 0)
4973 tcc_error("zero width for bit-field '%s'",
4974 get_tok_str(v, NULL));
4975 parse_attribute(&ad1);
4977 size = type_size(&type1, &align);
4978 if (bit_size >= 0) {
4979 bt = type1.t & VT_BTYPE;
4980 if (bt != VT_INT &&
4981 bt != VT_BYTE &&
4982 bt != VT_SHORT &&
4983 bt != VT_BOOL &&
4984 bt != VT_LLONG)
4985 tcc_error("bitfields must have scalar type");
4986 bsize = size * 8;
4987 if (bit_size > bsize) {
4988 tcc_error("width of '%s' exceeds its type",
4989 get_tok_str(v, NULL));
4990 } else if (bit_size == bsize
4991 && !ad.a.packed && !ad1.a.packed) {
4992 /* no need for bit fields */
4994 } else if (bit_size == 64) {
4995 tcc_error("field width 64 not implemented");
4996 } else {
4997 type1.t = (type1.t & ~VT_STRUCT_MASK)
4998 | VT_BITFIELD
4999 | (bit_size << (VT_STRUCT_SHIFT + 6));
5002 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
5003 /* Remember we've seen a real field to check
5004 for placement of flexible array member. */
5005 c = 1;
5007 /* If the member is a struct or a bit-field, force it
5008 into the struct (as an anonymous member). */
5009 if (v == 0 &&
5010 ((type1.t & VT_BTYPE) == VT_STRUCT ||
5011 bit_size >= 0)) {
5012 v = anon_sym++;
5014 if (v) {
5015 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
5016 ss->a = ad1.a;
5017 *ps = ss;
5018 ps = &ss->next;
5020 if (tok == ';' || tok == TOK_EOF)
5021 break;
5022 skip(',');
5024 skip(';');
5026 skip('}');
5027 parse_attribute(&ad);
5028 if (ad.cleanup_func) {
5029 tcc_warning("attribute '__cleanup__' ignored on type");
5031 check_fields(type, 1);
5032 check_fields(type, 0);
5033 struct_layout(type, &ad);
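/* Illustrative example (not part of the tcc sources): the enum branch above
 * picks the integral type of the enum from its value range, e.g.
 *
 *     enum small { A = 1, B };               -- plain int
 *     enum big   { C = 0x80000000 };         -- all values >= 0, so unsigned int
 *     enum huge  { D = -1, E = 1LL << 40 };  -- long long (or 64-bit long)
 *
 * and individual enumerators keep type int when their value fits, as done
 * in the "set type for enum members" loop. */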
5038 static void sym_to_attr(AttributeDef *ad, Sym *s)
5040 merge_symattr(&ad->a, &s->a);
5041 merge_funcattr(&ad->f, &s->f);
5044 /* Add type qualifiers to a type. If the type is an array then the qualifiers
5045 are added to the element type, copied because it could be a typedef. */
5046 static void parse_btype_qualify(CType *type, int qualifiers)
5048 while (type->t & VT_ARRAY) {
5049 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
5050 type = &type->ref->type;
5052 type->t |= qualifiers;
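/* Illustrative example (not part of the tcc sources): qualifying a typedef'd
 * array qualifies the element type, so
 *
 *     typedef int vec4[4];
 *     const vec4 v;
 *
 * gives 'v' the type 'const int [4]'; the element Sym is copied first so
 * the typedef itself is not modified. */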
5055 /* return 0 if no type declaration. otherwise, return the basic type
5056 and skip it. */
5058 static int parse_btype(CType *type, AttributeDef *ad)
5060 int t, u, bt, st, type_found, typespec_found, g, n;
5061 Sym *s;
5062 CType type1;
5064 memset(ad, 0, sizeof(AttributeDef));
5065 type_found = 0;
5066 typespec_found = 0;
5067 t = VT_INT;
5068 bt = st = -1;
5069 type->ref = NULL;
5071 while(1) {
5072 switch(tok) {
5073 case TOK_EXTENSION:
5074 /* currently, we really ignore extension */
5075 next();
5076 continue;
5078 /* basic types */
5079 case TOK_CHAR:
5080 u = VT_BYTE;
5081 basic_type:
5082 next();
5083 basic_type1:
5084 if (u == VT_SHORT || u == VT_LONG) {
5085 if (st != -1 || (bt != -1 && bt != VT_INT))
5086 tmbt: tcc_error("too many basic types");
5087 st = u;
5088 } else {
5089 if (bt != -1 || (st != -1 && u != VT_INT))
5090 goto tmbt;
5091 bt = u;
5093 if (u != VT_INT)
5094 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
5095 typespec_found = 1;
5096 break;
5097 case TOK_VOID:
5098 u = VT_VOID;
5099 goto basic_type;
5100 case TOK_SHORT:
5101 u = VT_SHORT;
5102 goto basic_type;
5103 case TOK_INT:
5104 u = VT_INT;
5105 goto basic_type;
5106 case TOK_ALIGNAS:
5107 { int n;
5108 AttributeDef ad1;
5109 next();
5110 skip('(');
5111 memset(&ad1, 0, sizeof(AttributeDef));
5112 if (parse_btype(&type1, &ad1)) {
5113 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
5114 if (ad1.a.aligned)
5115 n = 1 << (ad1.a.aligned - 1);
5116 else
5117 type_size(&type1, &n);
5118 } else {
5119 n = expr_const();
5120 if (n <= 0 || (n & (n - 1)) != 0)
5121 tcc_error("alignment must be a positive power of two");
5123 skip(')');
5124 ad->a.aligned = exact_log2p1(n);
5126 continue;
5127 case TOK_LONG:
5128 if ((t & VT_BTYPE) == VT_DOUBLE) {
5129 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
5130 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
5131 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
5132 } else {
5133 u = VT_LONG;
5134 goto basic_type;
5136 next();
5137 break;
5138 #ifdef TCC_TARGET_ARM64
5139 case TOK_UINT128:
5140 /* GCC's __uint128_t appears in some Linux header files. Make it a
5141 synonym for long double to get the size and alignment right. */
5142 u = VT_LDOUBLE;
5143 goto basic_type;
5144 #endif
5145 case TOK_BOOL:
5146 u = VT_BOOL;
5147 goto basic_type;
5148 case TOK_FLOAT:
5149 u = VT_FLOAT;
5150 goto basic_type;
5151 case TOK_DOUBLE:
5152 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
5153 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
5154 } else {
5155 u = VT_DOUBLE;
5156 goto basic_type;
5158 next();
5159 break;
5160 case TOK_ENUM:
5161 struct_decl(&type1, VT_ENUM);
5162 basic_type2:
5163 u = type1.t;
5164 type->ref = type1.ref;
5165 goto basic_type1;
5166 case TOK_STRUCT:
5167 struct_decl(&type1, VT_STRUCT);
5168 goto basic_type2;
5169 case TOK_UNION:
5170 struct_decl(&type1, VT_UNION);
5171 goto basic_type2;
5173 /* type modifiers */
5174 case TOK__Atomic:
5175 next();
5176 type->t = t;
5177 parse_btype_qualify(type, VT_ATOMIC);
5178 t = type->t;
5179 if (tok == '(') {
5180 parse_expr_type(&type1);
5181 /* remove all storage modifiers except typedef */
5182 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
5183 if (type1.ref)
5184 sym_to_attr(ad, type1.ref);
5185 goto basic_type2;
5187 break;
5188 case TOK_CONST1:
5189 case TOK_CONST2:
5190 case TOK_CONST3:
5191 type->t = t;
5192 parse_btype_qualify(type, VT_CONSTANT);
5193 t = type->t;
5194 next();
5195 break;
5196 case TOK_VOLATILE1:
5197 case TOK_VOLATILE2:
5198 case TOK_VOLATILE3:
5199 type->t = t;
5200 parse_btype_qualify(type, VT_VOLATILE);
5201 t = type->t;
5202 next();
5203 break;
5204 case TOK_SIGNED1:
5205 case TOK_SIGNED2:
5206 case TOK_SIGNED3:
5207 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
5208 tcc_error("signed and unsigned modifier");
5209 t |= VT_DEFSIGN;
5210 next();
5211 typespec_found = 1;
5212 break;
5213 case TOK_REGISTER:
5214 case TOK_AUTO:
5215 case TOK_RESTRICT1:
5216 case TOK_RESTRICT2:
5217 case TOK_RESTRICT3:
5218 next();
5219 break;
5220 case TOK_UNSIGNED:
5221 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
5222 tcc_error("signed and unsigned modifier");
5223 t |= VT_DEFSIGN | VT_UNSIGNED;
5224 next();
5225 typespec_found = 1;
5226 break;
5228 /* storage */
5229 case TOK_EXTERN:
5230 g = VT_EXTERN;
5231 goto storage;
5232 case TOK_STATIC:
5233 g = VT_STATIC;
5234 goto storage;
5235 case TOK_TYPEDEF:
5236 g = VT_TYPEDEF;
5237 goto storage;
5238 storage:
5239 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
5240 tcc_error("multiple storage classes");
5241 t |= g;
5242 next();
5243 break;
5244 case TOK_INLINE1:
5245 case TOK_INLINE2:
5246 case TOK_INLINE3:
5247 t |= VT_INLINE;
5248 next();
5249 break;
5250 case TOK_NORETURN3:
5251 next();
5252 ad->f.func_noreturn = 1;
5253 break;
5254 /* GNUC attribute */
5255 case TOK_ATTRIBUTE1:
5256 case TOK_ATTRIBUTE2:
5257 parse_attribute(ad);
5258 if (ad->attr_mode) {
5259 u = ad->attr_mode -1;
5260 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
5262 continue;
5263 /* GNUC typeof */
5264 case TOK_TYPEOF1:
5265 case TOK_TYPEOF2:
5266 case TOK_TYPEOF3:
5267 next();
5268 parse_expr_type(&type1);
5269 /* remove all storage modifiers except typedef */
5270 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
5271 if (type1.ref)
5272 sym_to_attr(ad, type1.ref);
5273 goto basic_type2;
5274 default:
5275 if (typespec_found)
5276 goto the_end;
5277 s = sym_find(tok);
5278 if (!s || !(s->type.t & VT_TYPEDEF))
5279 goto the_end;
5281 n = tok, next();
5282 if (tok == ':' && !in_generic) {
5283 /* ignore if it's a label */
5284 unget_tok(n);
5285 goto the_end;
5288 t &= ~(VT_BTYPE|VT_LONG);
5289 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
5290 type->t = (s->type.t & ~VT_TYPEDEF) | u;
5291 type->ref = s->type.ref;
5292 if (t)
5293 parse_btype_qualify(type, t);
5294 t = type->t;
5295 /* get attributes from typedef */
5296 sym_to_attr(ad, s);
5297 typespec_found = 1;
5298 st = bt = -2;
5299 break;
5301 type_found = 1;
5303 the_end:
5304 if (tcc_state->char_is_unsigned) {
5305 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
5306 t |= VT_UNSIGNED;
5308 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
5309 bt = t & (VT_BTYPE|VT_LONG);
5310 if (bt == VT_LONG)
5311 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
5312 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
5313 if (bt == VT_LDOUBLE)
5314 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
5315 #endif
5316 type->t = t;
5317 return type_found;
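/* Illustrative examples (not part of the tcc sources) of how the specifier
 * combinations above resolve:
 *
 *     long long x;       -- a second 'long' turns VT_LONG into VT_LLONG
 *     long double d;     -- 'long' next to 'double' gives VT_LDOUBLE
 *     unsigned long u;   -- VT_LONG is resolved at 'the_end' to int or
 *                           long long depending on LONG_SIZE
 *     char c;            -- becomes unsigned when char_is_unsigned is set
 */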
5320 /* convert a function parameter type (array to pointer and function to
5321 function pointer) */
5322 static inline void convert_parameter_type(CType *pt)
5324 /* remove const and volatile qualifiers (XXX: const could be used
5325 to indicate a const function parameter) */
5326 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
5327 /* array must be transformed to pointer according to ANSI C */
5328 pt->t &= ~VT_ARRAY;
5329 if ((pt->t & VT_BTYPE) == VT_FUNC) {
5330 mk_pointer(pt);
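/* Illustrative example (not part of the tcc sources): the parameter
 * adjustment required by the C standard, so a prototype written as
 *
 *     void sort(int a[10], int cmp(int, int));
 *
 * is treated as
 *
 *     void sort(int *a, int (*cmp)(int, int));
 *
 * with top-level const/volatile on the parameters dropped for type
 * compatibility purposes. */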
5334 ST_FUNC void parse_asm_str(CString *astr)
5336 skip('(');
5337 parse_mult_str(astr, "string constant");
5340 /* Parse an asm label and return the token */
5341 static int asm_label_instr(void)
5343 int v;
5344 CString astr;
5346 next();
5347 parse_asm_str(&astr);
5348 skip(')');
5349 #ifdef ASM_DEBUG
5350 printf("asm_alias: \"%s\"\n", (char *)astr.data);
5351 #endif
5352 v = tok_alloc(astr.data, astr.size - 1)->tok;
5353 cstr_free(&astr);
5354 return v;
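/* Illustrative example (not part of the tcc sources): an asm label renames
 * a symbol at the assembly level, e.g.
 *
 *     int counter asm("hw_counter");
 *     void reset(void) asm("hw_reset");
 *
 * asm_label_instr() returns the token allocated for the quoted name so the
 * declaration code can use it instead of the C identifier. */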
5357 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
5359 int n, l, t1, arg_size, align, unused_align;
5360 Sym **plast, *s, *first;
5361 AttributeDef ad1;
5362 CType pt;
5364 if (tok == '(') {
5365 /* function type, or recursive declarator (return if so) */
5366 next();
5367 if (td && !(td & TYPE_ABSTRACT))
5368 return 0;
5369 if (tok == ')')
5370 l = 0;
5371 else if (parse_btype(&pt, &ad1))
5372 l = FUNC_NEW;
5373 else if (td) {
5374 merge_attr (ad, &ad1);
5375 return 0;
5376 } else
5377 l = FUNC_OLD;
5378 first = NULL;
5379 plast = &first;
5380 arg_size = 0;
5381 if (l) {
5382 for(;;) {
5383 /* read param name and compute offset */
5384 if (l != FUNC_OLD) {
5385 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
5386 break;
5387 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
5388 if ((pt.t & VT_BTYPE) == VT_VOID)
5389 tcc_error("parameter declared as void");
5390 } else {
5391 n = tok;
5392 if (n < TOK_UIDENT)
5393 expect("identifier");
5394 pt.t = VT_VOID; /* invalid type */
5395 pt.ref = NULL;
5396 next();
5398 convert_parameter_type(&pt);
5399 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
5400 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
5401 *plast = s;
5402 plast = &s->next;
5403 if (tok == ')')
5404 break;
5405 skip(',');
5406 if (l == FUNC_NEW && tok == TOK_DOTS) {
5407 l = FUNC_ELLIPSIS;
5408 next();
5409 break;
5411 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
5412 tcc_error("invalid type");
5414 } else
5415 /* if no parameters, then old type prototype */
5416 l = FUNC_OLD;
5417 skip(')');
5418 /* NOTE: const is ignored in returned type as it has a special
5419 meaning in gcc / C++ */
5420 type->t &= ~VT_CONSTANT;
5421 /* some ancient pre-K&R C allows a function to return an array
5422 and the array brackets to be put after the arguments, such
5423 that "int c()[]" means something like "int[] c()" */
5424 if (tok == '[') {
5425 next();
5426 skip(']'); /* only handle simple "[]" */
5427 mk_pointer(type);
5429 /* we push an anonymous symbol which will contain the function prototype */
5430 ad->f.func_args = arg_size;
5431 ad->f.func_type = l;
5432 s = sym_push(SYM_FIELD, type, 0, 0);
5433 s->a = ad->a;
5434 s->f = ad->f;
5435 s->next = first;
5436 type->t = VT_FUNC;
5437 type->ref = s;
5438 } else if (tok == '[') {
5439 int saved_nocode_wanted = nocode_wanted;
5440 /* array definition */
5441 next();
5442 while (1) {
5443 /* XXX The optional type-quals and static should only be accepted
5444 in parameter decls. The '*' as well, and then even only
5445 in prototypes (not function defs). */
5446 switch (tok) {
5447 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
5448 case TOK_CONST1:
5449 case TOK_VOLATILE1:
5450 case TOK_STATIC:
5451 case '*':
5452 next();
5453 continue;
5454 default:
5455 break;
5457 break;
5459 n = -1;
5460 t1 = 0;
5461 if (tok != ']') {
5462 if (!local_stack || (storage & VT_STATIC))
5463 vpushi(expr_const());
5464 else {
5465 /* The length of a VLA (which can only happen with local_stack && !VT_STATIC)
5466 must always be evaluated, even under nocode_wanted,
5467 so that its size slot is initialized (e.g. under sizeof
5468 or typeof). */
5469 nocode_wanted = 0;
5470 gexpr();
5472 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5473 n = vtop->c.i;
5474 if (n < 0)
5475 tcc_error("invalid array size");
5476 } else {
5477 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5478 tcc_error("size of variable length array should be an integer");
5479 n = 0;
5480 t1 = VT_VLA;
5483 skip(']');
5484 /* parse next post type */
5485 post_type(type, ad, storage, 0);
5487 if ((type->t & VT_BTYPE) == VT_FUNC)
5488 tcc_error("declaration of an array of functions");
5489 if ((type->t & VT_BTYPE) == VT_VOID
5490 || type_size(type, &unused_align) < 0)
5491 tcc_error("declaration of an array of incomplete type elements");
5493 t1 |= type->t & VT_VLA;
5495 if (t1 & VT_VLA) {
5496 if (n < 0)
5497 tcc_error("need explicit inner array size in VLAs");
5498 loc -= type_size(&int_type, &align);
5499 loc &= -align;
5500 n = loc;
5502 vla_runtime_type_size(type, &align);
5503 gen_op('*');
5504 vset(&int_type, VT_LOCAL|VT_LVAL, n);
5505 vswap();
5506 vstore();
5508 if (n != -1)
5509 vpop();
5510 nocode_wanted = saved_nocode_wanted;
5512 /* we push an anonymous symbol which will contain the array
5513 element type */
5514 s = sym_push(SYM_FIELD, type, 0, n);
5515 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
5516 type->ref = s;
5518 return 1;
5521 /* Parse a type declarator (except basic type), and return the type
5522 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5523 expected. 'type' should contain the basic type. 'ad' is the
5524 attribute definition of the basic type. It can be modified by
5525 type_decl(). If this (possibly abstract) declarator is a pointer chain
5526 it returns the innermost pointed to type (equals *type, but is a different
5527 pointer), otherwise returns type itself, that's used for recursive calls. */
5528 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
5530 CType *post, *ret;
5531 int qualifiers, storage;
5533 /* recursive type, remove storage bits first, apply them later again */
5534 storage = type->t & VT_STORAGE;
5535 type->t &= ~VT_STORAGE;
5536 post = ret = type;
5538 while (tok == '*') {
5539 qualifiers = 0;
5540 redo:
5541 next();
5542 switch(tok) {
5543 case TOK__Atomic:
5544 qualifiers |= VT_ATOMIC;
5545 goto redo;
5546 case TOK_CONST1:
5547 case TOK_CONST2:
5548 case TOK_CONST3:
5549 qualifiers |= VT_CONSTANT;
5550 goto redo;
5551 case TOK_VOLATILE1:
5552 case TOK_VOLATILE2:
5553 case TOK_VOLATILE3:
5554 qualifiers |= VT_VOLATILE;
5555 goto redo;
5556 case TOK_RESTRICT1:
5557 case TOK_RESTRICT2:
5558 case TOK_RESTRICT3:
5559 goto redo;
5560 /* XXX: clarify attribute handling */
5561 case TOK_ATTRIBUTE1:
5562 case TOK_ATTRIBUTE2:
5563 parse_attribute(ad);
5564 break;
5566 mk_pointer(type);
5567 type->t |= qualifiers;
5568 if (ret == type)
5569 /* innermost pointed to type is the one for the first derivation */
5570 ret = pointed_type(type);
5573 if (tok == '(') {
5574 /* This is possibly a parameter type list for abstract declarators
5575 ('int ()'), use post_type for testing this. */
5576 if (!post_type(type, ad, 0, td)) {
5577 /* It's not, so it's a nested declarator, and the post operations
5578 apply to the innermost pointed to type (if any). */
5579 /* XXX: this is not correct to modify 'ad' at this point, but
5580 the syntax is not clear */
5581 parse_attribute(ad);
5582 post = type_decl(type, ad, v, td);
5583 skip(')');
5584 } else
5585 goto abstract;
5586 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5587 /* type identifier */
5588 *v = tok;
5589 next();
5590 } else {
5591 abstract:
5592 if (!(td & TYPE_ABSTRACT))
5593 expect("identifier");
5594 *v = 0;
5596 post_type(post, ad, storage, 0);
5597 parse_attribute(ad);
5598 type->t |= storage;
5599 return ret;
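/* Illustrative example (not part of the tcc sources): for a declarator like
 *
 *     const char * volatile *p;
 *
 * the '*' loop above builds the pointer chain and attaches the qualifiers
 * written after each '*' to that pointer level, while 'ret' ends up
 * pointing at the innermost pointed-to type (here 'const char'), which is
 * what callers get back for recursive declarators. */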
5602 /* indirection with full error checking and bound check */
5603 ST_FUNC void indir(void)
5605 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
5606 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5607 return;
5608 expect("pointer");
5610 if (vtop->r & VT_LVAL)
5611 gv(RC_INT);
5612 vtop->type = *pointed_type(&vtop->type);
5613 /* Arrays and functions are never lvalues */
5614 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5615 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5616 vtop->r |= VT_LVAL;
5617 /* if bound checking, the referenced pointer must be checked */
5618 #ifdef CONFIG_TCC_BCHECK
5619 if (tcc_state->do_bounds_check)
5620 vtop->r |= VT_MUSTBOUND;
5621 #endif
5625 /* pass a parameter to a function and do type checking and casting */
5626 static void gfunc_param_typed(Sym *func, Sym *arg)
5628 int func_type;
5629 CType type;
5631 func_type = func->f.func_type;
5632 if (func_type == FUNC_OLD ||
5633 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5634 /* default casting : only need to convert float to double */
5635 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5636 gen_cast_s(VT_DOUBLE);
5637 } else if (vtop->type.t & VT_BITFIELD) {
5638 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5639 type.ref = vtop->type.ref;
5640 gen_cast(&type);
5641 } else if (vtop->r & VT_MUSTCAST) {
5642 force_charshort_cast();
5644 } else if (arg == NULL) {
5645 tcc_error("too many arguments to function");
5646 } else {
5647 type = arg->type;
5648 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5649 gen_assign_cast(&type);
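/* Illustrative example (not part of the tcc sources): for arguments matching
 * the '...' part of a variadic prototype, or any argument of an old-style
 * unprototyped function, only the default promotion is applied, e.g.
 *
 *     printf("%f", 1.0f);    -- the float argument is converted to double
 *
 * while arguments with a declared parameter are cast to that parameter's
 * type via gen_assign_cast(). */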
5653 /* parse an expression and return its type without any side effect. */
5654 static void expr_type(CType *type, void (*expr_fn)(void))
5656 nocode_wanted++;
5657 expr_fn();
5658 *type = vtop->type;
5659 vpop();
5660 nocode_wanted--;
5663 /* parse an expression of the form '(type)' or '(expr)' and return its
5664 type */
5665 static void parse_expr_type(CType *type)
5667 int n;
5668 AttributeDef ad;
5670 skip('(');
5671 if (parse_btype(type, &ad)) {
5672 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5673 } else {
5674 expr_type(type, gexpr);
5676 skip(')');
5679 static void parse_type(CType *type)
5681 AttributeDef ad;
5682 int n;
5684 if (!parse_btype(type, &ad)) {
5685 expect("type");
5687 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5690 static void parse_builtin_params(int nc, const char *args)
5692 char c, sep = '(';
5693 CType type;
5694 if (nc)
5695 nocode_wanted++;
5696 next();
5697 if (*args == 0)
5698 skip(sep);
5699 while ((c = *args++)) {
5700 skip(sep);
5701 sep = ',';
5702 if (c == 't') {
5703 parse_type(&type);
5704 vpush(&type);
5705 continue;
5707 expr_eq();
5708 type.ref = NULL;
5709 type.t = 0;
5710 switch (c) {
5711 case 'e':
5712 continue;
5713 case 'V':
5714 type.t = VT_CONSTANT;
5715 case 'v':
5716 type.t |= VT_VOID;
5717 mk_pointer (&type);
5718 break;
5719 case 'S':
5720 type.t = VT_CONSTANT;
5721 case 's':
5722 type.t |= char_type.t;
5723 mk_pointer (&type);
5724 break;
5725 case 'i':
5726 type.t = VT_INT;
5727 break;
5728 case 'l':
5729 type.t = VT_SIZE_T;
5730 break;
5731 default:
5732 break;
5734 gen_assign_cast(&type);
5736 skip(')');
5737 if (nc)
5738 nocode_wanted--;
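/* The template string decodes as follows (taken from the switch above):
 * 'e' = plain expression, 't' = type name (a value of that type is pushed),
 * 'v'/'V' = cast to void* / const void*, 's'/'S' = cast to char* / const char*,
 * 'i' = cast to int, 'l' = cast to size_t.
 * Illustrative call, mirroring the existing users below:
 *
 *     parse_builtin_params(0, "ee");   -- e.g. __builtin_expect(expr, expected)
 */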
5741 static void parse_atomic(int atok)
5743 int size, align, arg;
5744 CType *atom, *atom_ptr, ct = {0};
5745 char buf[40];
5746 static const char *const templates[] = {
5748 /* Each entry is a function template.
5749 * The template represents argument types and return type.
5751 * ? void (return-only)
5752 * b bool
5753 * a atomic
5754 * A read-only atomic
5755 * p pointer to memory
5756 * v value
5757 * m memory model */
5760 /* keep in order of appearance in tcctok.h: */
5761 /* __atomic_store */ "avm.?",
5762 /* __atomic_load */ "Am.v",
5763 /* __atomic_exchange */ "avm.v",
5764 /* __atomic_compare_exchange */ "apvbmm.b",
5765 /* __atomic_fetch_add */ "avm.v",
5766 /* __atomic_fetch_sub */ "avm.v",
5767 /* __atomic_fetch_or */ "avm.v",
5768 /* __atomic_fetch_xor */ "avm.v",
5769 /* __atomic_fetch_and */ "avm.v"
5771 const char *template = templates[(atok - TOK___atomic_store)];
5773 atom = atom_ptr = NULL;
5774 size = 0; /* pacify compiler */
5775 next();
5776 skip('(');
5777 for (arg = 0;;) {
5778 expr_eq();
5779 switch (template[arg]) {
5780 case 'a':
5781 case 'A':
5782 atom_ptr = &vtop->type;
5783 if ((atom_ptr->t & VT_BTYPE) != VT_PTR)
5784 expect("pointer");
5785 atom = pointed_type(atom_ptr);
5786 size = type_size(atom, &align);
5787 if (size > 8
5788 || (size & (size - 1))
5789 || (atok > TOK___atomic_compare_exchange
5790 && (0 == btype_size(atom->t & VT_BTYPE)
5791 || (atom->t & VT_BTYPE) == VT_PTR)))
5792 expect("integral or integer-sized pointer target type");
5793 /* GCC does not care either: */
5794 /* if (!(atom->t & VT_ATOMIC))
5795 tcc_warning("pointer target declaration is missing '_Atomic'"); */
5796 break;
5798 case 'p':
5799 if ((vtop->type.t & VT_BTYPE) != VT_PTR
5800 || type_size(pointed_type(&vtop->type), &align) != size)
5801 tcc_error("pointer target type mismatch in argument %d", arg + 1);
5802 gen_assign_cast(atom_ptr);
5803 break;
5804 case 'v':
5805 gen_assign_cast(atom);
5806 break;
5807 case 'm':
5808 gen_assign_cast(&int_type);
5809 break;
5810 case 'b':
5811 ct.t = VT_BOOL;
5812 gen_assign_cast(&ct);
5813 break;
5815 if ('.' == template[++arg])
5816 break;
5817 skip(',');
5819 skip(')');
5821 ct.t = VT_VOID;
5822 switch (template[arg + 1]) {
5823 case 'b':
5824 ct.t = VT_BOOL;
5825 break;
5826 case 'v':
5827 ct = *atom;
5828 break;
5831 sprintf(buf, "%s_%d", get_tok_str(atok, 0), size);
5832 vpush_helper_func(tok_alloc_const(buf));
5833 vrott(arg + 1);
5834 gfunc_call(arg);
5836 vpush(&ct);
5837 PUT_R_RET(vtop, ct.t);
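/* Illustrative example (not part of the tcc sources): for
 *
 *     int x;
 *     ... __atomic_fetch_add(&x, 1, memorder);   -- memorder is an int model
 *
 * the template "avm.v" checks that &x points to an integer-sized object
 * (size 4 here), casts 1 to that type and memorder to int, and routes the
 * call to the runtime helper named "__atomic_fetch_add_4" (built by the
 * "%s_%d" sprintf above), whose result is pushed with the atom's type. */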
5840 ST_FUNC void unary(void)
5842 int n, t, align, size, r, sizeof_caller;
5843 CType type;
5844 Sym *s;
5845 AttributeDef ad;
5847 /* generate line number info */
5848 if (debug_modes)
5849 tcc_debug_line(tcc_state), tcc_tcov_check_line (1);
5851 sizeof_caller = in_sizeof;
5852 in_sizeof = 0;
5853 type.ref = NULL;
5854 /* XXX: GCC 2.95.3 does not generate a table although it should be
5855 better here */
5856 tok_next:
5857 switch(tok) {
5858 case TOK_EXTENSION:
5859 next();
5860 goto tok_next;
5861 case TOK_LCHAR:
5862 #ifdef TCC_TARGET_PE
5863 t = VT_SHORT|VT_UNSIGNED;
5864 goto push_tokc;
5865 #endif
5866 case TOK_CINT:
5867 case TOK_CCHAR:
5868 t = VT_INT;
5869 push_tokc:
5870 type.t = t;
5871 vsetc(&type, VT_CONST, &tokc);
5872 next();
5873 break;
5874 case TOK_CUINT:
5875 t = VT_INT | VT_UNSIGNED;
5876 goto push_tokc;
5877 case TOK_CLLONG:
5878 t = VT_LLONG;
5879 goto push_tokc;
5880 case TOK_CULLONG:
5881 t = VT_LLONG | VT_UNSIGNED;
5882 goto push_tokc;
5883 case TOK_CFLOAT:
5884 t = VT_FLOAT;
5885 goto push_tokc;
5886 case TOK_CDOUBLE:
5887 t = VT_DOUBLE;
5888 goto push_tokc;
5889 case TOK_CLDOUBLE:
5890 t = VT_LDOUBLE;
5891 goto push_tokc;
5892 case TOK_CLONG:
5893 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5894 goto push_tokc;
5895 case TOK_CULONG:
5896 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5897 goto push_tokc;
5898 case TOK___FUNCTION__:
5899 if (!gnu_ext)
5900 goto tok_identifier;
5901 /* fall thru */
5902 case TOK___FUNC__:
5904 Section *sec;
5905 void *ptr;
5906 int len;
5907 /* special function name identifier */
5908 len = strlen(funcname) + 1;
5909 /* generate char[len] type */
5910 type.t = VT_BYTE;
5911 if (tcc_state->warn_write_strings)
5912 type.t |= VT_CONSTANT;
5913 mk_pointer(&type);
5914 type.t |= VT_ARRAY;
5915 type.ref->c = len;
5916 sec = rodata_section;
5917 vpush_ref(&type, sec, sec->data_offset, len);
5918 if (!NODATA_WANTED) {
5919 ptr = section_ptr_add(sec, len);
5920 memcpy(ptr, funcname, len);
5922 next();
5924 break;
5925 case TOK_LSTR:
5926 #ifdef TCC_TARGET_PE
5927 t = VT_SHORT | VT_UNSIGNED;
5928 #else
5929 t = VT_INT;
5930 #endif
5931 goto str_init;
5932 case TOK_STR:
5933 /* string parsing */
5934 t = VT_BYTE;
5935 if (tcc_state->char_is_unsigned)
5936 t = VT_BYTE | VT_UNSIGNED;
5937 str_init:
5938 if (tcc_state->warn_write_strings)
5939 t |= VT_CONSTANT;
5940 type.t = t;
5941 mk_pointer(&type);
5942 type.t |= VT_ARRAY;
5943 memset(&ad, 0, sizeof(AttributeDef));
5944 ad.section = rodata_section;
5945 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5946 break;
5947 case '(':
5948 next();
5949 /* cast ? */
5950 if (parse_btype(&type, &ad)) {
5951 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5952 skip(')');
5953 /* check ISOC99 compound literal */
5954 if (tok == '{') {
5955 /* data is allocated locally by default */
5956 if (global_expr)
5957 r = VT_CONST;
5958 else
5959 r = VT_LOCAL;
5960 /* all except arrays are lvalues */
5961 if (!(type.t & VT_ARRAY))
5962 r |= VT_LVAL;
5963 memset(&ad, 0, sizeof(AttributeDef));
5964 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5965 } else {
5966 if (sizeof_caller) {
5967 vpush(&type);
5968 return;
5970 unary();
5971 gen_cast(&type);
5973 } else if (tok == '{') {
5974 int saved_nocode_wanted = nocode_wanted;
5975 if (const_wanted && !(nocode_wanted & unevalmask))
5976 expect("constant");
5977 if (0 == local_scope)
5978 tcc_error("statement expression outside of function");
5979 /* save all registers */
5980 save_regs(0);
5981 /* statement expression : we do not accept break/continue
5982 inside as GCC does. We do retain the nocode_wanted state,
5983 as statement expressions can't ever be entered from the
5984 outside, so any reactivation of code emission (from labels
5985 or loop heads) can be disabled again after the end of it. */
5986 block(1);
5987 nocode_wanted = saved_nocode_wanted;
5988 skip(')');
5989 } else {
5990 gexpr();
5991 skip(')');
5993 break;
5994 case '*':
5995 next();
5996 unary();
5997 indir();
5998 break;
5999 case '&':
6000 next();
6001 unary();
6002 /* function names must be treated as function pointers,
6003 except for unary '&' and sizeof. Since we consider that
6004 functions are not lvalues, we only have to handle it
6005 there and in function calls. */
6006 /* arrays can also be used although they are not lvalues */
6007 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
6008 !(vtop->type.t & VT_ARRAY))
6009 test_lvalue();
6010 if (vtop->sym)
6011 vtop->sym->a.addrtaken = 1;
6012 mk_pointer(&vtop->type);
6013 gaddrof();
6014 break;
6015 case '!':
6016 next();
6017 unary();
6018 gen_test_zero(TOK_EQ);
6019 break;
6020 case '~':
6021 next();
6022 unary();
6023 vpushi(-1);
6024 gen_op('^');
6025 break;
6026 case '+':
6027 next();
6028 unary();
6029 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
6030 tcc_error("pointer not accepted for unary plus");
6031 /* In order to force a cast, we add zero, except for floating point
6032 where we really need a no-op (otherwise -0.0 would be transformed
6033 into +0.0). */
6034 if (!is_float(vtop->type.t)) {
6035 vpushi(0);
6036 gen_op('+');
6038 break;
6039 case TOK_SIZEOF:
6040 case TOK_ALIGNOF1:
6041 case TOK_ALIGNOF2:
6042 case TOK_ALIGNOF3:
6043 t = tok;
6044 next();
6045 in_sizeof++;
6046 expr_type(&type, unary); /* unary() resets in_sizeof to 0 */
6047 s = NULL;
6048 if (vtop[1].r & VT_SYM)
6049 s = vtop[1].sym; /* hack: accessing previous vtop */
6050 size = type_size(&type, &align);
6051 if (s && s->a.aligned)
6052 align = 1 << (s->a.aligned - 1);
6053 if (t == TOK_SIZEOF) {
6054 if (!(type.t & VT_VLA)) {
6055 if (size < 0)
6056 tcc_error("sizeof applied to an incomplete type");
6057 vpushs(size);
6058 } else {
6059 vla_runtime_type_size(&type, &align);
6061 } else {
6062 vpushs(align);
6064 vtop->type.t |= VT_UNSIGNED;
6065 break;
6067 case TOK_builtin_expect:
6068 /* __builtin_expect is a no-op for now */
6069 parse_builtin_params(0, "ee");
6070 vpop();
6071 break;
6072 case TOK_builtin_types_compatible_p:
6073 parse_builtin_params(0, "tt");
6074 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
6075 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
6076 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
6077 vtop -= 2;
6078 vpushi(n);
6079 break;
6080 case TOK_builtin_choose_expr:
6082 int64_t c;
6083 next();
6084 skip('(');
6085 c = expr_const64();
6086 skip(',');
6087 if (!c) {
6088 nocode_wanted++;
6090 expr_eq();
6091 if (!c) {
6092 vpop();
6093 nocode_wanted--;
6095 skip(',');
6096 if (c) {
6097 nocode_wanted++;
6099 expr_eq();
6100 if (c) {
6101 vpop();
6102 nocode_wanted--;
6104 skip(')');
6106 break;
6107 case TOK_builtin_constant_p:
6108 parse_builtin_params(1, "e");
6109 n = (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6110 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
6111 vtop--;
6112 vpushi(n);
6113 break;
6114 case TOK_builtin_frame_address:
6115 case TOK_builtin_return_address:
6117 int tok1 = tok;
6118 int level;
6119 next();
6120 skip('(');
6121 if (tok != TOK_CINT) {
6122 tcc_error("%s only takes positive integers",
6123 tok1 == TOK_builtin_return_address ?
6124 "__builtin_return_address" :
6125 "__builtin_frame_address");
6127 level = (uint32_t)tokc.i;
6128 next();
6129 skip(')');
6130 type.t = VT_VOID;
6131 mk_pointer(&type);
6132 vset(&type, VT_LOCAL, 0); /* local frame */
6133 while (level--) {
6134 #ifdef TCC_TARGET_RISCV64
6135 vpushi(2*PTR_SIZE);
6136 gen_op('-');
6137 #endif
6138 mk_pointer(&vtop->type);
6139 indir(); /* -> parent frame */
6141 if (tok1 == TOK_builtin_return_address) {
6142 // assume return address is just above frame pointer on stack
6143 #ifdef TCC_TARGET_ARM
6144 vpushi(2*PTR_SIZE);
6145 gen_op('+');
6146 #elif defined TCC_TARGET_RISCV64
6147 vpushi(PTR_SIZE);
6148 gen_op('-');
6149 #else
6150 vpushi(PTR_SIZE);
6151 gen_op('+');
6152 #endif
6153 mk_pointer(&vtop->type);
6154 indir();
6157 break;
6158 #ifdef TCC_TARGET_RISCV64
6159 case TOK_builtin_va_start:
6160 parse_builtin_params(0, "ee");
6161 r = vtop->r & VT_VALMASK;
6162 if (r == VT_LLOCAL)
6163 r = VT_LOCAL;
6164 if (r != VT_LOCAL)
6165 tcc_error("__builtin_va_start expects a local variable");
6166 gen_va_start();
6167 vstore();
6168 break;
6169 #endif
6170 #ifdef TCC_TARGET_X86_64
6171 #ifdef TCC_TARGET_PE
6172 case TOK_builtin_va_start:
6173 parse_builtin_params(0, "ee");
6174 r = vtop->r & VT_VALMASK;
6175 if (r == VT_LLOCAL)
6176 r = VT_LOCAL;
6177 if (r != VT_LOCAL)
6178 tcc_error("__builtin_va_start expects a local variable");
6179 vtop->r = r;
6180 vtop->type = char_pointer_type;
6181 vtop->c.i += 8;
6182 vstore();
6183 break;
6184 #else
6185 case TOK_builtin_va_arg_types:
6186 parse_builtin_params(0, "t");
6187 vpushi(classify_x86_64_va_arg(&vtop->type));
6188 vswap();
6189 vpop();
6190 break;
6191 #endif
6192 #endif
6194 #ifdef TCC_TARGET_ARM64
6195 case TOK_builtin_va_start: {
6196 parse_builtin_params(0, "ee");
6197 //xx check types
6198 gen_va_start();
6199 vpushi(0);
6200 vtop->type.t = VT_VOID;
6201 break;
6203 case TOK_builtin_va_arg: {
6204 parse_builtin_params(0, "et");
6205 type = vtop->type;
6206 vpop();
6207 //xx check types
6208 gen_va_arg(&type);
6209 vtop->type = type;
6210 break;
6212 case TOK___arm64_clear_cache: {
6213 parse_builtin_params(0, "ee");
6214 gen_clear_cache();
6215 vpushi(0);
6216 vtop->type.t = VT_VOID;
6217 break;
6219 #endif
6221 /* atomic operations */
6222 case TOK___atomic_store:
6223 case TOK___atomic_load:
6224 case TOK___atomic_exchange:
6225 case TOK___atomic_compare_exchange:
6226 case TOK___atomic_fetch_add:
6227 case TOK___atomic_fetch_sub:
6228 case TOK___atomic_fetch_or:
6229 case TOK___atomic_fetch_xor:
6230 case TOK___atomic_fetch_and:
6231 parse_atomic(tok);
6232 break;
6234 /* pre operations */
6235 case TOK_INC:
6236 case TOK_DEC:
6237 t = tok;
6238 next();
6239 unary();
6240 inc(0, t);
6241 break;
6242 case '-':
6243 next();
6244 unary();
6245 if (is_float(vtop->type.t)) {
6246 gen_opif(TOK_NEG);
6247 } else {
6248 vpushi(0);
6249 vswap();
6250 gen_op('-');
6252 break;
6253 case TOK_LAND:
6254 if (!gnu_ext)
6255 goto tok_identifier;
6256 next();
6257 /* allow taking the address of a label */
6258 if (tok < TOK_UIDENT)
6259 expect("label identifier");
6260 s = label_find(tok);
6261 if (!s) {
6262 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6263 } else {
6264 if (s->r == LABEL_DECLARED)
6265 s->r = LABEL_FORWARD;
6267 if (!s->type.t) {
6268 s->type.t = VT_VOID;
6269 mk_pointer(&s->type);
6270 s->type.t |= VT_STATIC;
6272 vpushsym(&s->type, s);
6273 next();
6274 break;
6276 case TOK_GENERIC:
6278 CType controlling_type;
6279 int has_default = 0;
6280 int has_match = 0;
6281 int learn = 0;
6282 TokenString *str = NULL;
6283 int saved_const_wanted = const_wanted;
6285 next();
6286 skip('(');
6287 const_wanted = 0;
6288 expr_type(&controlling_type, expr_eq);
6289 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
6290 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
6291 mk_pointer(&controlling_type);
6292 const_wanted = saved_const_wanted;
6293 for (;;) {
6294 learn = 0;
6295 skip(',');
6296 if (tok == TOK_DEFAULT) {
6297 if (has_default)
6298 tcc_error("too many 'default'");
6299 has_default = 1;
6300 if (!has_match)
6301 learn = 1;
6302 next();
6303 } else {
6304 AttributeDef ad_tmp;
6305 int itmp;
6306 CType cur_type;
6308 in_generic++;
6309 parse_btype(&cur_type, &ad_tmp);
6310 in_generic--;
6312 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
6313 if (compare_types(&controlling_type, &cur_type, 0)) {
6314 if (has_match) {
6315 tcc_error("type match twice");
6317 has_match = 1;
6318 learn = 1;
6321 skip(':');
6322 if (learn) {
6323 if (str)
6324 tok_str_free(str);
6325 skip_or_save_block(&str);
6326 } else {
6327 skip_or_save_block(NULL);
6329 if (tok == ')')
6330 break;
6332 if (!str) {
6333 char buf[60];
6334 type_to_str(buf, sizeof buf, &controlling_type, NULL);
6335 tcc_error("type '%s' does not match any association", buf);
6337 begin_macro(str, 1);
6338 next();
6339 expr_eq();
6340 if (tok != TOK_EOF)
6341 expect(",");
6342 end_macro();
6343 next();
6344 break;
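/* Illustrative example (not part of the tcc sources): the _Generic code
 * above handles selections such as
 *
 *     _Generic(x, int: 1, const char *: 2, default: 0)
 *
 * by comparing the qualifier-stripped type of 'x' against each association,
 * saving only the chosen branch's tokens with skip_or_save_block() and
 * replaying them as a macro afterwards. */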
6346 // special qNaN, sNaN and infinity values
6347 case TOK___NAN__:
6348 n = 0x7fc00000;
6349 special_math_val:
6350 vpushi(n);
6351 vtop->type.t = VT_FLOAT;
6352 next();
6353 break;
6354 case TOK___SNAN__:
6355 n = 0x7f800001;
6356 goto special_math_val;
6357 case TOK___INF__:
6358 n = 0x7f800000;
6359 goto special_math_val;
6361 default:
6362 tok_identifier:
6363 t = tok;
6364 next();
6365 if (t < TOK_UIDENT)
6366 expect("identifier");
6367 s = sym_find(t);
6368 if (!s || IS_ASM_SYM(s)) {
6369 const char *name = get_tok_str(t, NULL);
6370 if (tok != '(')
6371 tcc_error("'%s' undeclared", name);
6372 /* for simple function calls, we tolerate an undeclared
6373 external reference to an int() function */
6374 if (tcc_state->warn_implicit_function_declaration
6375 #ifdef TCC_TARGET_PE
6376 /* people must be warned about using undeclared WINAPI functions
6377 (which usually start with an uppercase letter) */
6378 || (name[0] >= 'A' && name[0] <= 'Z')
6379 #endif
6381 tcc_warning("implicit declaration of function '%s'", name);
6382 s = external_global_sym(t, &func_old_type);
6385 r = s->r;
6386 /* A symbol that has a register is a local register variable,
6387 which starts out as a VT_LOCAL value. */
6388 if ((r & VT_VALMASK) < VT_CONST)
6389 r = (r & ~VT_VALMASK) | VT_LOCAL;
6391 vset(&s->type, r, s->c);
6392 /* Point to s as backpointer (even without r&VT_SYM).
6393 Will be used by at least the x86 inline asm parser for
6394 regvars. */
6395 vtop->sym = s;
6397 if (r & VT_SYM) {
6398 vtop->c.i = 0;
6399 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
6400 vtop->c.i = s->enum_val;
6402 break;
6405 /* post operations */
6406 while (1) {
6407 if (tok == TOK_INC || tok == TOK_DEC) {
6408 inc(1, tok);
6409 next();
6410 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
6411 int qualifiers, cumofs = 0;
6412 /* field */
6413 if (tok == TOK_ARROW)
6414 indir();
6415 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
6416 test_lvalue();
6417 gaddrof();
6418 /* expect pointer on structure */
6419 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
6420 expect("struct or union");
6421 if (tok == TOK_CDOUBLE)
6422 expect("field name");
6423 next();
6424 if (tok == TOK_CINT || tok == TOK_CUINT)
6425 expect("field name");
6426 s = find_field(&vtop->type, tok, &cumofs);
6427 if (!s)
6428 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
6429 /* add field offset to pointer */
6430 vtop->type = char_pointer_type; /* change type to 'char *' */
6431 vpushi(cumofs + s->c);
6432 gen_op('+');
6433 /* change type to field type, and set to lvalue */
6434 vtop->type = s->type;
6435 vtop->type.t |= qualifiers;
6436 /* an array is never an lvalue */
6437 if (!(vtop->type.t & VT_ARRAY)) {
6438 vtop->r |= VT_LVAL;
6439 #ifdef CONFIG_TCC_BCHECK
6440 /* if bound checking, the referenced pointer must be checked */
6441 if (tcc_state->do_bounds_check)
6442 vtop->r |= VT_MUSTBOUND;
6443 #endif
6445 next();
6446 } else if (tok == '[') {
6447 next();
6448 gexpr();
6449 gen_op('+');
6450 indir();
6451 skip(']');
6452 } else if (tok == '(') {
6453 SValue ret;
6454 Sym *sa;
6455 int nb_args, ret_nregs, ret_align, regsize, variadic;
6457 /* function call */
6458 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
6459 /* pointer test (no array accepted) */
6460 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
6461 vtop->type = *pointed_type(&vtop->type);
6462 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
6463 goto error_func;
6464 } else {
6465 error_func:
6466 expect("function pointer");
6468 } else {
6469 vtop->r &= ~VT_LVAL; /* no lvalue */
6471 /* get return type */
6472 s = vtop->type.ref;
6473 next();
6474 sa = s->next; /* first parameter */
6475 nb_args = regsize = 0;
6476 ret.r2 = VT_CONST;
6477 /* compute first implicit argument if a structure is returned */
6478 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
6479 variadic = (s->f.func_type == FUNC_ELLIPSIS);
6480 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
6481 &ret_align, &regsize);
6482 if (ret_nregs <= 0) {
6483 /* get some space for the returned structure */
6484 size = type_size(&s->type, &align);
6485 #ifdef TCC_TARGET_ARM64
6486 /* On arm64, a small struct is returned in registers.
6487 It is much easier to write it to memory if we know
6488 that we are allowed to write some extra bytes, so
6489 round the allocated space up to a power of 2: */
6490 if (size < 16)
6491 while (size & (size - 1))
6492 size = (size | (size - 1)) + 1;
6493 #endif
6494 loc = (loc - size) & -align;
6495 ret.type = s->type;
6496 ret.r = VT_LOCAL | VT_LVAL;
6497 /* pass it as 'int' to avoid structure arg passing
6498 problems */
6499 vseti(VT_LOCAL, loc);
6500 #ifdef CONFIG_TCC_BCHECK
6501 if (tcc_state->do_bounds_check)
6502 --loc;
6503 #endif
6504 ret.c = vtop->c;
6505 if (ret_nregs < 0)
6506 vtop--;
6507 else
6508 nb_args++;
6510 } else {
6511 ret_nregs = 1;
6512 ret.type = s->type;
6515 if (ret_nregs > 0) {
6516 /* return in register */
6517 ret.c.i = 0;
6518 PUT_R_RET(&ret, ret.type.t);
6520 if (tok != ')') {
6521 for(;;) {
6522 expr_eq();
6523 gfunc_param_typed(s, sa);
6524 nb_args++;
6525 if (sa)
6526 sa = sa->next;
6527 if (tok == ')')
6528 break;
6529 skip(',');
6532 if (sa)
6533 tcc_error("too few arguments to function");
6534 skip(')');
6535 gfunc_call(nb_args);
6537 if (ret_nregs < 0) {
6538 vsetc(&ret.type, ret.r, &ret.c);
6539 #ifdef TCC_TARGET_RISCV64
6540 arch_transfer_ret_regs(1);
6541 #endif
6542 } else {
6543 /* return value */
6544 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
6545 vsetc(&ret.type, r, &ret.c);
6546 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
6549 /* handle packed struct return */
6550 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6551 int addr, offset;
6553 size = type_size(&s->type, &align);
6554 /* We're writing whole regs often, make sure there's enough
6555 space. Assume register size is power of 2. */
6556 if (regsize > align)
6557 align = regsize;
6558 loc = (loc - size) & -align;
6559 addr = loc;
6560 offset = 0;
6561 for (;;) {
6562 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6563 vswap();
6564 vstore();
6565 vtop--;
6566 if (--ret_nregs == 0)
6567 break;
6568 offset += regsize;
6570 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
6573 /* Promote char/short return values. This matters only
6574 for calling functions that were not compiled by TCC and
6575 only on some architectures. For those where it doesn't
6576 matter we expect things to be already promoted to int,
6577 but not larger. */
6578 t = s->type.t & VT_BTYPE;
6579 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6580 #ifdef PROMOTE_RET
6581 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6582 #else
6583 vtop->type.t = VT_INT;
6584 #endif
6587 if (s->f.func_noreturn) {
6588 if (debug_modes)
6589 tcc_tcov_block_end (tcov_data.line);
6590 CODE_OFF();
6592 } else {
6593 break;
6598 #ifndef precedence_parser /* original top-down parser */
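/* A sketch of how this branch groups operators: each function below parses
   one precedence tier and gets its operands from the next, tighter tier.
   For example, in
       a + b * c
   expr_sum() reads both of its operands via expr_prod(), so "b * c" is
   grouped and generated first, then gen_op('+') is emitted. */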
6600 static void expr_prod(void)
6602 int t;
6604 unary();
6605 while ((t = tok) == '*' || t == '/' || t == '%') {
6606 next();
6607 unary();
6608 gen_op(t);
6612 static void expr_sum(void)
6614 int t;
6616 expr_prod();
6617 while ((t = tok) == '+' || t == '-') {
6618 next();
6619 expr_prod();
6620 gen_op(t);
6624 static void expr_shift(void)
6626 int t;
6628 expr_sum();
6629 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6630 next();
6631 expr_sum();
6632 gen_op(t);
6636 static void expr_cmp(void)
6638 int t;
6640 expr_shift();
6641 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6642 t == TOK_ULT || t == TOK_UGE) {
6643 next();
6644 expr_shift();
6645 gen_op(t);
6649 static void expr_cmpeq(void)
6651 int t;
6653 expr_cmp();
6654 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6655 next();
6656 expr_cmp();
6657 gen_op(t);
6661 static void expr_and(void)
6663 expr_cmpeq();
6664 while (tok == '&') {
6665 next();
6666 expr_cmpeq();
6667 gen_op('&');
6671 static void expr_xor(void)
6673 expr_and();
6674 while (tok == '^') {
6675 next();
6676 expr_and();
6677 gen_op('^');
6681 static void expr_or(void)
6683 expr_xor();
6684 while (tok == '|') {
6685 next();
6686 expr_xor();
6687 gen_op('|');
6691 static void expr_landor(int op);
6693 static void expr_land(void)
6695 expr_or();
6696 if (tok == TOK_LAND)
6697 expr_landor(tok);
6700 static void expr_lor(void)
6702 expr_land();
6703 if (tok == TOK_LOR)
6704 expr_landor(tok);
6707 # define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
6708 #else /* defined precedence_parser */
6709 # define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
6710 # define expr_lor() unary(), expr_infix(1)
6712 static int precedence(int tok)
6714 switch (tok) {
6715 case TOK_LOR: return 1;
6716 case TOK_LAND: return 2;
6717 case '|': return 3;
6718 case '^': return 4;
6719 case '&': return 5;
6720 case TOK_EQ: case TOK_NE: return 6;
6721 relat: case TOK_ULT: case TOK_UGE: return 7;
6722 case TOK_SHL: case TOK_SAR: return 8;
6723 case '+': case '-': return 9;
6724 case '*': case '/': case '%': return 10;
6725 default:
6726 if (tok >= TOK_ULE && tok <= TOK_GT)
6727 goto relat;
6728 return 0;
6731 static unsigned char prec[256];
6732 static void init_prec(void)
6734 int i;
6735 for (i = 0; i < 256; i++)
6736 prec[i] = precedence(i);
6738 #define precedence(i) ((unsigned)i < 256 ? prec[i] : 0)
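/* Rough sketch of the precedence-climbing loop below (assuming, as the
   prec[] cache above does, that operator token values stay below 256):
   expr_infix(p) keeps consuming operators whose level is >= p, parses the
   right operand with unary(), recurses with p2 + 1 when an even tighter
   operator follows, and only then emits gen_op().  E.g. in
       a + b * c
   the '*' (level 10) is folded inside the recursive call made while
   handling '+' (level 9). */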
6740 static void expr_landor(int op);
6742 static void expr_infix(int p)
6744 int t = tok, p2;
6745 while ((p2 = precedence(t)) >= p) {
6746 if (t == TOK_LOR || t == TOK_LAND) {
6747 expr_landor(t);
6748 } else {
6749 next();
6750 unary();
6751 if (precedence(tok) > p2)
6752 expr_infix(p2 + 1);
6753 gen_op(t);
6755 t = tok;
6758 #endif
6760 /* Assuming vtop is a value used in a conditional context
6761 (i.e. compared with zero), return 0 if it's false, 1 if
6762 true and -1 if it can't be statically determined. */
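/* E.g. the controlling expression of "if (1)" gives 1 here and "if (0)"
   gives 0, while anything depending on run-time values gives -1; callers
   such as expr_landor() and expr_cond() use the 0/1 answers to suppress
   code generation for the branch that can never run (via nocode_wanted). */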
6763 static int condition_3way(void)
6765 int c = -1;
6766 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6767 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6768 vdup();
6769 gen_cast_s(VT_BOOL);
6770 c = vtop->c.i;
6771 vpop();
6773 return c;
6776 static void expr_landor(int op)
6778 int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
6779 for(;;) {
6780 c = f ? i : condition_3way();
6781 if (c < 0)
6782 save_regs(1), cc = 0;
6783 else if (c != i)
6784 nocode_wanted++, f = 1;
6785 if (tok != op)
6786 break;
6787 if (c < 0)
6788 t = gvtst(i, t);
6789 else
6790 vpop();
6791 next();
6792 expr_landor_next(op);
6794 if (cc || f) {
6795 vpop();
6796 vpushi(i ^ f);
6797 gsym(t);
6798 nocode_wanted -= f;
6799 } else {
6800 gvtst_set(i, t);
6804 static int is_cond_bool(SValue *sv)
6806 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6807 && (sv->type.t & VT_BTYPE) == VT_INT)
6808 return (unsigned)sv->c.i < 2;
6809 if (sv->r == VT_CMP)
6810 return 1;
6811 return 0;
6814 static void expr_cond(void)
6816 int tt, u, r1, r2, rc, t1, t2, islv, c, g;
6817 SValue sv;
6818 CType type;
6819 int ncw_prev;
6821 expr_lor();
6822 if (tok == '?') {
6823 next();
6824 c = condition_3way();
6825 g = (tok == ':' && gnu_ext);
6826 tt = 0;
6827 if (!g) {
6828 if (c < 0) {
6829 save_regs(1);
6830 tt = gvtst(1, 0);
6831 } else {
6832 vpop();
6834 } else if (c < 0) {
6835 /* needed to avoid having different registers saved in
6836 each branch */
6837 save_regs(1);
6838 gv_dup();
6839 tt = gvtst(0, 0);
6842 ncw_prev = nocode_wanted;
6843 if (c == 0)
6844 nocode_wanted++;
6845 if (!g)
6846 gexpr();
6848 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6849 mk_pointer(&vtop->type);
6850 sv = *vtop; /* save value to handle it later */
6851 vtop--; /* no vpop so that FP stack is not flushed */
6853 if (g) {
6854 u = tt;
6855 } else if (c < 0) {
6856 u = gjmp(0);
6857 gsym(tt);
6858 } else
6859 u = 0;
6861 nocode_wanted = ncw_prev;
6862 if (c == 1)
6863 nocode_wanted++;
6864 skip(':');
6865 expr_cond();
6867 if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
6868 /* optimize "if (f ? a > b : c || d) ..." for example, where normally
6869 "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
6870 this code jumps directly to the if's then/else branches. */
6871 t1 = gvtst(0, 0);
6872 t2 = gjmp(0);
6873 gsym(u);
6874 vpushv(&sv);
6875 /* combine jump targets of 2nd op with VT_CMP of 1st op */
6876 gvtst_set(0, t1);
6877 gvtst_set(1, t2);
6878 nocode_wanted = ncw_prev;
6879 // tcc_warning("two conditions expr_cond");
6880 return;
6883 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
6884 mk_pointer(&vtop->type);
6886 /* cast operands to correct type according to ISOC rules */
6887 if (!combine_types(&type, &sv, vtop, '?'))
6888 type_incompatibility_error(&sv.type, &vtop->type,
6889 "type mismatch in conditional expression (have '%s' and '%s')");
6890 /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
6891 that `(expr ? a : b).mem` does not error with "lvalue expected" */
6892 islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);
6894 /* now we convert second operand */
6895 if (c != 1) {
6896 gen_cast(&type);
6897 if (islv) {
6898 mk_pointer(&vtop->type);
6899 gaddrof();
6900 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6901 gaddrof();
6904 rc = RC_TYPE(type.t);
6905 /* for long longs, we use fixed registers to avoid having
6906 to handle a complicated move */
6907 if (USING_TWO_WORDS(type.t))
6908 rc = RC_RET(type.t);
6910 tt = r2 = 0;
6911 if (c < 0) {
6912 r2 = gv(rc);
6913 tt = gjmp(0);
6915 gsym(u);
6916 nocode_wanted = ncw_prev;
6918 /* this is horrible, but we must also convert first
6919 operand */
6920 if (c != 0) {
6921 *vtop = sv;
6922 gen_cast(&type);
6923 if (islv) {
6924 mk_pointer(&vtop->type);
6925 gaddrof();
6926 } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
6927 gaddrof();
6930 if (c < 0) {
6931 r1 = gv(rc);
6932 move_reg(r2, r1, islv ? VT_PTR : type.t);
6933 vtop->r = r2;
6934 gsym(tt);
6937 if (islv)
6938 indir();
6942 static void expr_eq(void)
6944 int t;
6946 expr_cond();
6947 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6948 test_lvalue();
6949 next();
6950 if (t == '=') {
6951 expr_eq();
6952 } else {
6953 vdup();
6954 expr_eq();
6955 gen_op(TOK_ASSIGN_OP(t));
6957 vstore();
6961 ST_FUNC void gexpr(void)
6963 while (1) {
6964 expr_eq();
6965 if (tok != ',')
6966 break;
6967 vpop();
6968 next();
6972 /* parse a constant expression and return value in vtop. */
6973 static void expr_const1(void)
6975 const_wanted++;
6976 nocode_wanted += unevalmask + 1;
6977 expr_cond();
6978 nocode_wanted -= unevalmask + 1;
6979 const_wanted--;
6982 /* parse an integer constant and return its value. */
6983 static inline int64_t expr_const64(void)
6985 int64_t c;
6986 expr_const1();
6987 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
6988 expect("constant expression");
6989 c = vtop->c.i;
6990 vpop();
6991 return c;
6994 /* parse an integer constant and return its value.
6995 Complain if it doesn't fit 32bit (signed or unsigned). */
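/* Within this file expr_const() is used e.g. for designator indices
   ("[2] = 1" in an initializer) and for the _Static_assert condition;
   a value that does not fit into 32 bits, such as 0x1ffffffffLL, is
   rejected with "constant exceeds 32 bit". */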
6996 ST_FUNC int expr_const(void)
6998 int c;
6999 int64_t wc = expr_const64();
7000 c = wc;
7001 if (c != wc && (unsigned)c != wc)
7002 tcc_error("constant exceeds 32 bit");
7003 return c;
7006 /* ------------------------------------------------------------------------- */
7007 /* return from function */
7009 #ifndef TCC_TARGET_ARM64
7010 static void gfunc_return(CType *func_type)
7012 if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
7013 CType type, ret_type;
7014 int ret_align, ret_nregs, regsize;
7015 ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
7016 &ret_align, &regsize);
7017 if (ret_nregs < 0) {
7018 #ifdef TCC_TARGET_RISCV64
7019 arch_transfer_ret_regs(0);
7020 #endif
7021 } else if (0 == ret_nregs) {
7022 /* if returning structure, must copy it to implicit
7023 first pointer arg location */
7024 type = *func_type;
7025 mk_pointer(&type);
7026 vset(&type, VT_LOCAL | VT_LVAL, func_vc);
7027 indir();
7028 vswap();
7029 /* copy structure value to pointer */
7030 vstore();
7031 } else {
7032 /* returning structure packed into registers */
7033 int size, addr, align, rc;
7034 size = type_size(func_type,&align);
7035 if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
7036 (vtop->c.i & (ret_align-1)))
7037 && (align & (ret_align-1))) {
7038 loc = (loc - size) & -ret_align;
7039 addr = loc;
7040 type = *func_type;
7041 vset(&type, VT_LOCAL | VT_LVAL, addr);
7042 vswap();
7043 vstore();
7044 vpop();
7045 vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
7047 vtop->type = ret_type;
7048 rc = RC_RET(ret_type.t);
7049 if (ret_nregs == 1)
7050 gv(rc);
7051 else {
7052 for (;;) {
7053 vdup();
7054 gv(rc);
7055 vpop();
7056 if (--ret_nregs == 0)
7057 break;
7058 /* We assume that when a structure is returned in multiple
7059 registers, their classes are consecutive values of the
7060 sequence s(n) = 2^n */
7061 rc <<= 1;
7062 vtop->c.i += regsize;
7066 } else {
7067 gv(RC_RET(func_type->t));
7069 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
7071 #endif
7073 static void check_func_return(void)
7075 if ((func_vt.t & VT_BTYPE) == VT_VOID)
7076 return;
7077 if (!strcmp (funcname, "main")
7078 && (func_vt.t & VT_BTYPE) == VT_INT) {
7079 /* main returns 0 by default */
7080 vpushi(0);
7081 gen_assign_cast(&func_vt);
7082 gfunc_return(&func_vt);
7083 } else {
7084 tcc_warning("function might return no value: '%s'", funcname);
7088 /* ------------------------------------------------------------------------- */
7089 /* switch/case */
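/* How the helpers below cooperate, roughly: each 'case' (or GNU range
   'case 1 ... 5:') is recorded in cur_switch->p, the array is sorted with
   case_cmpi/case_cmpu, overlapping entries are reported as duplicate case
   values, and gcase() emits a binary search over the sorted ranges,
   switching to a linear sequence of compares once 8 or fewer entries
   remain.  E.g.:
       switch (x) { case 1: ...; case 5 ... 9: ...; default: ...; }
*/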
7091 static int case_cmpi(const void *pa, const void *pb)
7093 int64_t a = (*(struct case_t**) pa)->v1;
7094 int64_t b = (*(struct case_t**) pb)->v1;
7095 return a < b ? -1 : a > b;
7098 static int case_cmpu(const void *pa, const void *pb)
7100 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
7101 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
7102 return a < b ? -1 : a > b;
7105 static void gtst_addr(int t, int a)
7107 gsym_addr(gvtst(0, t), a);
7110 static void gcase(struct case_t **base, int len, int *bsym)
7112 struct case_t *p;
7113 int e;
7114 int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
7115 while (len > 8) {
7116 /* binary search */
7117 p = base[len/2];
7118 vdup();
7119 if (ll)
7120 vpushll(p->v2);
7121 else
7122 vpushi(p->v2);
7123 gen_op(TOK_LE);
7124 e = gvtst(1, 0);
7125 vdup();
7126 if (ll)
7127 vpushll(p->v1);
7128 else
7129 vpushi(p->v1);
7130 gen_op(TOK_GE);
7131 gtst_addr(0, p->sym); /* v1 <= x <= v2 */
7132 /* x < v1 */
7133 gcase(base, len/2, bsym);
7134 /* x > v2 */
7135 gsym(e);
7136 e = len/2 + 1;
7137 base += e; len -= e;
7139 /* linear scan */
7140 while (len--) {
7141 p = *base++;
7142 vdup();
7143 if (ll)
7144 vpushll(p->v2);
7145 else
7146 vpushi(p->v2);
7147 if (p->v1 == p->v2) {
7148 gen_op(TOK_EQ);
7149 gtst_addr(0, p->sym);
7150 } else {
7151 gen_op(TOK_LE);
7152 e = gvtst(1, 0);
7153 vdup();
7154 if (ll)
7155 vpushll(p->v1);
7156 else
7157 vpushi(p->v1);
7158 gen_op(TOK_GE);
7159 gtst_addr(0, p->sym);
7160 gsym(e);
7163 *bsym = gjmp(*bsym);
7166 /* ------------------------------------------------------------------------- */
7167 /* __attribute__((cleanup(fn))) */
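/* Example of the source construct handled here; the cleanup function
   receives a pointer to the variable, which matches the mk_pointer()/
   gaddrof() sequence in try_call_scope_cleanup():

       void unlock(int *p);
       void f(void)
       {
           __attribute__((cleanup(unlock))) int x = 0;
           ...
       }

   unlock(&x) runs when the scope of x is left, including via
   break/continue/goto and return. */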
7169 static void try_call_scope_cleanup(Sym *stop)
7171 Sym *cls = cur_scope->cl.s;
7173 for (; cls != stop; cls = cls->ncl) {
7174 Sym *fs = cls->next;
7175 Sym *vs = cls->prev_tok;
7177 vpushsym(&fs->type, fs);
7178 vset(&vs->type, vs->r, vs->c);
7179 vtop->sym = vs;
7180 mk_pointer(&vtop->type);
7181 gaddrof();
7182 gfunc_call(1);
7186 static void try_call_cleanup_goto(Sym *cleanupstate)
7188 Sym *oc, *cc;
7189 int ocd, ccd;
7191 if (!cur_scope->cl.s)
7192 return;
7194 /* search the NCA (nearest common ancestor) of both cleanup chains, given parents and initial depth */
7195 ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
7196 for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
7198 for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
7200 for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
7203 try_call_scope_cleanup(cc);
7206 /* call 'func' for each __attribute__((cleanup(func))) */
7207 static void block_cleanup(struct scope *o)
7209 int jmp = 0;
7210 Sym *g, **pg;
7211 for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
7212 if (g->prev_tok->r & LABEL_FORWARD) {
7213 Sym *pcl = g->next;
7214 if (!jmp)
7215 jmp = gjmp(0);
7216 gsym(pcl->jnext);
7217 try_call_scope_cleanup(o->cl.s);
7218 pcl->jnext = gjmp(0);
7219 if (!o->cl.n)
7220 goto remove_pending;
7221 g->c = o->cl.n;
7222 pg = &g->prev;
7223 } else {
7224 remove_pending:
7225 *pg = g->prev;
7226 sym_free(g);
7229 gsym(jmp);
7230 try_call_scope_cleanup(o->cl.s);
7233 /* ------------------------------------------------------------------------- */
7234 /* VLA */
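/* Example of why the saved stack pointer matters: leaving a scope that
   allocated a variable length array (also via break/continue/goto) must
   give the stack space back, which vla_leave()/vla_restore() do by
   reloading the pointer saved in vla.locorig.  E.g. the 'break' below
   restores sp before leaving the loop:

       void f(int n)
       {
           while (n--) {
               char buf[n + 1];
               if (!n)
                   break;
           }
       }
*/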
7236 static void vla_restore(int loc)
7238 if (loc)
7239 gen_vla_sp_restore(loc);
7242 static void vla_leave(struct scope *o)
7244 struct scope *c = cur_scope, *v = NULL;
7245 for (; c != o && c; c = c->prev)
7246 if (c->vla.num)
7247 v = c;
7248 if (v)
7249 vla_restore(v->vla.locorig);
7252 /* ------------------------------------------------------------------------- */
7253 /* local scopes */
7255 void new_scope(struct scope *o)
7257 /* copy and link previous scope */
7258 *o = *cur_scope;
7259 o->prev = cur_scope;
7260 cur_scope = o;
7261 cur_scope->vla.num = 0;
7263 /* record local declaration stack position */
7264 o->lstk = local_stack;
7265 o->llstk = local_label_stack;
7266 ++local_scope;
7268 if (debug_modes)
7269 tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
7272 void prev_scope(struct scope *o, int is_expr)
7274 vla_leave(o->prev);
7276 if (o->cl.s != o->prev->cl.s)
7277 block_cleanup(o->prev);
7279 /* pop locally defined labels */
7280 label_pop(&local_label_stack, o->llstk, is_expr);
7282 /* In the is_expr case (a statement expression is finished here),
7283 vtop might refer to symbols on the local_stack. Either via the
7284 type or via vtop->sym. We can't pop those nor any that in turn
7285 might be referred to. To make it easier we don't roll back
7286 any symbols in that case; some upper level call to block() will
7287 do that. We do have to remove such symbols from the lookup
7288 tables, though. sym_pop will do that. */
7290 /* pop locally defined symbols */
7291 pop_local_syms(o->lstk, is_expr);
7292 cur_scope = o->prev;
7293 --local_scope;
7295 if (debug_modes)
7296 tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
7299 /* leave a scope via break/continue(/goto) */
7300 void leave_scope(struct scope *o)
7302 if (!o)
7303 return;
7304 try_call_scope_cleanup(o->cl.s);
7305 vla_leave(o);
7308 /* ------------------------------------------------------------------------- */
7309 /* call block from 'for do while' loops */
7311 static void lblock(int *bsym, int *csym)
7313 struct scope *lo = loop_scope, *co = cur_scope;
7314 int *b = co->bsym, *c = co->csym;
7315 if (csym) {
7316 co->csym = csym;
7317 loop_scope = co;
7319 co->bsym = bsym;
7320 block(0);
7321 co->bsym = b;
7322 if (csym) {
7323 co->csym = c;
7324 loop_scope = lo;
7328 static void block(int is_expr)
7330 int a, b, c, d, e, t;
7331 struct scope o;
7332 Sym *s;
7334 if (is_expr) {
7335 /* default return value is (void) */
7336 vpushi(0);
7337 vtop->type.t = VT_VOID;
7340 again:
7341 t = tok;
7342 /* If the token carries a value, next() might destroy it. Only with
7343 invalid code such as f(){"123"4;} */
7344 if (TOK_HAS_VALUE(t))
7345 goto expr;
7346 next();
7348 if (debug_modes)
7349 tcc_tcov_check_line (0), tcc_tcov_block_begin ();
7351 if (t == TOK_IF) {
7352 skip('(');
7353 gexpr();
7354 skip(')');
7355 a = gvtst(1, 0);
7356 block(0);
7357 if (tok == TOK_ELSE) {
7358 d = gjmp(0);
7359 gsym(a);
7360 next();
7361 block(0);
7362 gsym(d); /* patch else jmp */
7363 } else {
7364 gsym(a);
7367 } else if (t == TOK_WHILE) {
7368 d = gind();
7369 skip('(');
7370 gexpr();
7371 skip(')');
7372 a = gvtst(1, 0);
7373 b = 0;
7374 lblock(&a, &b);
7375 gjmp_addr(d);
7376 gsym_addr(b, d);
7377 gsym(a);
7379 } else if (t == '{') {
7380 new_scope(&o);
7382 /* handle local labels declarations */
7383 while (tok == TOK_LABEL) {
7384 do {
7385 next();
7386 if (tok < TOK_UIDENT)
7387 expect("label identifier");
7388 label_push(&local_label_stack, tok, LABEL_DECLARED);
7389 next();
7390 } while (tok == ',');
7391 skip(';');
7394 while (tok != '}') {
7395 decl(VT_LOCAL);
7396 if (tok != '}') {
7397 if (is_expr)
7398 vpop();
7399 block(is_expr);
7403 prev_scope(&o, is_expr);
7404 if (local_scope)
7405 next();
7406 else if (!nocode_wanted)
7407 check_func_return();
7409 } else if (t == TOK_RETURN) {
7410 b = (func_vt.t & VT_BTYPE) != VT_VOID;
7411 if (tok != ';') {
7412 gexpr();
7413 if (b) {
7414 gen_assign_cast(&func_vt);
7415 } else {
7416 if (vtop->type.t != VT_VOID)
7417 tcc_warning("void function returns a value");
7418 vtop--;
7420 } else if (b) {
7421 tcc_warning("'return' with no value");
7422 b = 0;
7424 leave_scope(root_scope);
7425 if (b)
7426 gfunc_return(&func_vt);
7427 skip(';');
7428 /* jump unless last stmt in top-level block */
7429 if (tok != '}' || local_scope != 1)
7430 rsym = gjmp(rsym);
7431 if (debug_modes)
7432 tcc_tcov_block_end (tcov_data.line);
7433 CODE_OFF();
7435 } else if (t == TOK_BREAK) {
7436 /* compute jump */
7437 if (!cur_scope->bsym)
7438 tcc_error("cannot break");
7439 if (cur_switch && cur_scope->bsym == cur_switch->bsym)
7440 leave_scope(cur_switch->scope);
7441 else
7442 leave_scope(loop_scope);
7443 *cur_scope->bsym = gjmp(*cur_scope->bsym);
7444 skip(';');
7446 } else if (t == TOK_CONTINUE) {
7447 /* compute jump */
7448 if (!cur_scope->csym)
7449 tcc_error("cannot continue");
7450 leave_scope(loop_scope);
7451 *cur_scope->csym = gjmp(*cur_scope->csym);
7452 skip(';');
7454 } else if (t == TOK_FOR) {
7455 new_scope(&o);
7457 skip('(');
7458 if (tok != ';') {
7459 /* c99 for-loop init decl? */
7460 if (!decl0(VT_LOCAL, 1, NULL)) {
7461 /* no, regular for-loop init expr */
7462 gexpr();
7463 vpop();
7466 skip(';');
7467 a = b = 0;
7468 c = d = gind();
7469 if (tok != ';') {
7470 gexpr();
7471 a = gvtst(1, 0);
7473 skip(';');
7474 if (tok != ')') {
7475 e = gjmp(0);
7476 d = gind();
7477 gexpr();
7478 vpop();
7479 gjmp_addr(c);
7480 gsym(e);
7482 skip(')');
7483 lblock(&a, &b);
7484 gjmp_addr(d);
7485 gsym_addr(b, d);
7486 gsym(a);
7487 prev_scope(&o, 0);
7489 } else if (t == TOK_DO) {
7490 a = b = 0;
7491 d = gind();
7492 lblock(&a, &b);
7493 gsym(b);
7494 skip(TOK_WHILE);
7495 skip('(');
7496 gexpr();
7497 skip(')');
7498 skip(';');
7499 c = gvtst(0, 0);
7500 gsym_addr(c, d);
7501 gsym(a);
7503 } else if (t == TOK_SWITCH) {
7504 struct switch_t *sw;
7506 sw = tcc_mallocz(sizeof *sw);
7507 sw->bsym = &a;
7508 sw->scope = cur_scope;
7509 sw->prev = cur_switch;
7510 cur_switch = sw;
7512 skip('(');
7513 gexpr();
7514 skip(')');
7515 sw->sv = *vtop--; /* save switch value */
7517 a = 0;
7518 b = gjmp(0); /* jump to first case */
7519 lblock(&a, NULL);
7520 a = gjmp(a); /* add implicit break */
7521 /* case lookup */
7522 gsym(b);
7524 if (sw->sv.type.t & VT_UNSIGNED)
7525 qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
7526 else
7527 qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
7529 for (b = 1; b < sw->n; b++)
7530 if (sw->sv.type.t & VT_UNSIGNED
7531 ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
7532 : sw->p[b - 1]->v2 >= sw->p[b]->v1)
7533 tcc_error("duplicate case value");
7535 vpushv(&sw->sv);
7536 gv(RC_INT);
7537 d = 0, gcase(sw->p, sw->n, &d);
7538 vpop();
7539 if (sw->def_sym)
7540 gsym_addr(d, sw->def_sym);
7541 else
7542 gsym(d);
7543 /* break label */
7544 gsym(a);
7546 dynarray_reset(&sw->p, &sw->n);
7547 cur_switch = sw->prev;
7548 tcc_free(sw);
7550 } else if (t == TOK_CASE) {
7551 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
7552 if (!cur_switch)
7553 expect("switch");
7554 cr->v1 = cr->v2 = expr_const64();
7555 if (gnu_ext && tok == TOK_DOTS) {
7556 next();
7557 cr->v2 = expr_const64();
7558 if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
7559 || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
7560 tcc_warning("empty case range");
7562 tcov_data.ind = 0;
7563 cr->sym = gind();
7564 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
7565 skip(':');
7566 is_expr = 0;
7567 goto block_after_label;
7569 } else if (t == TOK_DEFAULT) {
7570 if (!cur_switch)
7571 expect("switch");
7572 if (cur_switch->def_sym)
7573 tcc_error("too many 'default'");
7574 tcov_data.ind = 0;
7575 cur_switch->def_sym = gind();
7576 skip(':');
7577 is_expr = 0;
7578 goto block_after_label;
7580 } else if (t == TOK_GOTO) {
7581 if (cur_scope->vla.num)
7582 vla_restore(cur_scope->vla.locorig);
7583 if (tok == '*' && gnu_ext) {
7584 /* computed goto */
7585 next();
7586 gexpr();
7587 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
7588 expect("pointer");
7589 ggoto();
7591 } else if (tok >= TOK_UIDENT) {
7592 s = label_find(tok);
7593 /* put forward definition if needed */
7594 if (!s)
7595 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
7596 else if (s->r == LABEL_DECLARED)
7597 s->r = LABEL_FORWARD;
7599 if (s->r & LABEL_FORWARD) {
7600 /* start new goto chain for cleanups, linked via label->next */
7601 if (cur_scope->cl.s && !nocode_wanted) {
7602 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
7603 pending_gotos->prev_tok = s;
7604 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
7605 pending_gotos->next = s;
7607 s->jnext = gjmp(s->jnext);
7608 } else {
7609 try_call_cleanup_goto(s->cleanupstate);
7610 gjmp_addr(s->jnext);
7612 next();
7614 } else {
7615 expect("label identifier");
7617 skip(';');
7619 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
7620 asm_instr();
7622 } else {
7623 if (tok == ':' && t >= TOK_UIDENT) {
7624 /* label case */
7625 next();
7626 s = label_find(t);
7627 if (s) {
7628 if (s->r == LABEL_DEFINED)
7629 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
7630 s->r = LABEL_DEFINED;
7631 if (s->next) {
7632 Sym *pcl; /* pending cleanup goto */
7633 for (pcl = s->next; pcl; pcl = pcl->prev)
7634 gsym(pcl->jnext);
7635 sym_pop(&s->next, NULL, 0);
7636 } else
7637 gsym(s->jnext);
7638 } else {
7639 s = label_push(&global_label_stack, t, LABEL_DEFINED);
7641 s->jnext = gind();
7642 s->cleanupstate = cur_scope->cl.s;
7644 block_after_label:
7645 vla_restore(cur_scope->vla.loc);
7646 /* we accept this, but it is a mistake */
7647 if (tok == '}') {
7648 tcc_warning("deprecated use of label at end of compound statement");
7649 } else {
7650 goto again;
7653 } else {
7654 /* expression case */
7655 if (t != ';') {
7656 unget_tok(t);
7657 expr:
7658 if (is_expr) {
7659 vpop();
7660 gexpr();
7661 } else {
7662 gexpr();
7663 vpop();
7665 skip(';');
7670 if (debug_modes)
7671 tcc_tcov_check_line (0), tcc_tcov_block_end (0);
7674 /* This skips over a stream of tokens containing balanced {} and ()
7675 pairs, stopping at an outer ',' ';' ')' or '}' (or at the matching '}'
7676 if we started with a '{'). If STR is non-NULL, the skipped tokens are
7677 allocated and stored in *STR. This doesn't check if () and {} are nested
7678 correctly, i.e. "({)}" is accepted. */
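/* E.g. when called just after the '=' in
       int x = f(1, 2), y;
   the tokens "f ( 1 , 2 )" are saved (the ',' inside the parentheses is
   consumed at level 1) and the scan stops at the outer ','. */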
7679 static void skip_or_save_block(TokenString **str)
7681 int braces = tok == '{';
7682 int level = 0;
7683 if (str)
7684 *str = tok_str_alloc();
7686 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
7687 int t;
7688 if (tok == TOK_EOF) {
7689 if (str || level > 0)
7690 tcc_error("unexpected end of file");
7691 else
7692 break;
7694 if (str)
7695 tok_str_add_tok(*str);
7696 t = tok;
7697 next();
7698 if (t == '{' || t == '(') {
7699 level++;
7700 } else if (t == '}' || t == ')') {
7701 level--;
7702 if (level == 0 && braces && t == '}')
7703 break;
7706 if (str) {
7707 tok_str_add(*str, -1);
7708 tok_str_add(*str, 0);
7712 #define EXPR_CONST 1
7713 #define EXPR_ANY 2
7715 static void parse_init_elem(int expr_type)
7717 int saved_global_expr;
7718 switch(expr_type) {
7719 case EXPR_CONST:
7720 /* compound literals must be allocated globally in this case */
7721 saved_global_expr = global_expr;
7722 global_expr = 1;
7723 expr_const1();
7724 global_expr = saved_global_expr;
7725 /* NOTE: symbols are accepted, as well as lvalues of anonymous symbols
7726 (compound literals). */
7727 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
7728 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
7729 || vtop->sym->v < SYM_FIRST_ANOM))
7730 #ifdef TCC_TARGET_PE
7731 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
7732 #endif
7734 tcc_error("initializer element is not constant");
7735 break;
7736 case EXPR_ANY:
7737 expr_eq();
7738 break;
7742 #if 1
7743 static void init_assert(init_params *p, int offset)
7745 if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
7746 : !nocode_wanted && offset > p->local_offset)
7747 tcc_internal_error("initializer overflow");
7749 #else
7750 #define init_assert(sec, offset)
7751 #endif
7753 /* put zeros for variable based init */
7754 static void init_putz(init_params *p, unsigned long c, int size)
7756 init_assert(p, c + size);
7757 if (p->sec) {
7758 /* nothing to do because globals are already set to zero */
7759 } else {
7760 vpush_helper_func(TOK_memset);
7761 vseti(VT_LOCAL, c);
7762 #ifdef TCC_TARGET_ARM
7763 vpushs(size);
7764 vpushi(0);
7765 #else
7766 vpushi(0);
7767 vpushs(size);
7768 #endif
7769 gfunc_call(3);
7773 #define DIF_FIRST 1
7774 #define DIF_SIZE_ONLY 2
7775 #define DIF_HAVE_ELEM 4
7776 #define DIF_CLEAR 8
7778 /* delete relocations for the specified range c ... c + size. Unfortunately,
7779 in very special cases, relocations may appear out of order */
7780 static void decl_design_delrels(Section *sec, int c, int size)
7782 ElfW_Rel *rel, *rel2, *rel_end;
7783 if (!sec || !sec->reloc)
7784 return;
7785 rel = rel2 = (ElfW_Rel*)sec->reloc->data;
7786 rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
7787 while (rel < rel_end) {
7788 if (rel->r_offset >= c && rel->r_offset < c + size) {
7789 sec->reloc->data_offset -= sizeof *rel;
7790 } else {
7791 if (rel2 != rel)
7792 memcpy(rel2, rel, sizeof *rel);
7793 ++rel2;
7795 ++rel;
7799 static void decl_design_flex(init_params *p, Sym *ref, int index)
7801 if (ref == p->flex_array_ref) {
7802 if (index >= ref->c)
7803 ref->c = index + 1;
7804 } else if (ref->c < 0)
7805 tcc_error("flexible array has zero size in this context");
7808 /* t is the array or struct type. c is the array or struct
7809 address. cur_field is the pointer to the current
7810 field, for arrays the 'c' member contains the current start
7811 index. 'flags' is as in decl_initializer.
7812 'al' contains the already initialized length of the
7813 current container (starting at c). This returns the new length of that. */
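/* Examples of the designators accepted below; the "[lo ... hi]" range and
   the old "field:" form are GNU extensions:

       struct P { int x, y; };
       struct P p = { .y = 2, .x = 1 };
       int a[10]  = { [2] = 1, [5 ... 7] = 3 };
*/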
7814 static int decl_designator(init_params *p, CType *type, unsigned long c,
7815 Sym **cur_field, int flags, int al)
7817 Sym *s, *f;
7818 int index, index_last, align, l, nb_elems, elem_size;
7819 unsigned long corig = c;
7821 elem_size = 0;
7822 nb_elems = 1;
7824 if (flags & DIF_HAVE_ELEM)
7825 goto no_designator;
7827 if (gnu_ext && tok >= TOK_UIDENT) {
7828 l = tok, next();
7829 if (tok == ':')
7830 goto struct_field;
7831 unget_tok(l);
7834 /* NOTE: we only support ranges for last designator */
7835 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
7836 if (tok == '[') {
7837 if (!(type->t & VT_ARRAY))
7838 expect("array type");
7839 next();
7840 index = index_last = expr_const();
7841 if (tok == TOK_DOTS && gnu_ext) {
7842 next();
7843 index_last = expr_const();
7845 skip(']');
7846 s = type->ref;
7847 decl_design_flex(p, s, index_last);
7848 if (index < 0 || index_last >= s->c || index_last < index)
7849 tcc_error("index exceeds array bounds or range is empty");
7850 if (cur_field)
7851 (*cur_field)->c = index_last;
7852 type = pointed_type(type);
7853 elem_size = type_size(type, &align);
7854 c += index * elem_size;
7855 nb_elems = index_last - index + 1;
7856 } else {
7857 int cumofs;
7858 next();
7859 l = tok;
7860 struct_field:
7861 next();
7862 if ((type->t & VT_BTYPE) != VT_STRUCT)
7863 expect("struct/union type");
7864 cumofs = 0;
7865 f = find_field(type, l, &cumofs);
7866 if (!f)
7867 expect("field");
7868 if (cur_field)
7869 *cur_field = f;
7870 type = &f->type;
7871 c += cumofs + f->c;
7873 cur_field = NULL;
7875 if (!cur_field) {
7876 if (tok == '=') {
7877 next();
7878 } else if (!gnu_ext) {
7879 expect("=");
7881 } else {
7882 no_designator:
7883 if (type->t & VT_ARRAY) {
7884 index = (*cur_field)->c;
7885 s = type->ref;
7886 decl_design_flex(p, s, index);
7887 if (index >= s->c)
7888 tcc_error("too many initializers");
7889 type = pointed_type(type);
7890 elem_size = type_size(type, &align);
7891 c += index * elem_size;
7892 } else {
7893 f = *cur_field;
7894 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
7895 *cur_field = f = f->next;
7896 if (!f)
7897 tcc_error("too many initializers");
7898 type = &f->type;
7899 c += f->c;
7903 if (!elem_size) /* for structs */
7904 elem_size = type_size(type, &align);
7906 /* Using designators, the same element can be initialized more
7907 than once. In that case we need to delete possibly already
7908 existing relocations. */
7909 if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
7910 decl_design_delrels(p->sec, c, elem_size * nb_elems);
7911 flags &= ~DIF_CLEAR; /* mark stack dirty too */
7914 decl_initializer(p, type, c, flags & ~DIF_FIRST);
7916 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
7917 Sym aref = {0};
7918 CType t1;
7919 int i;
7920 if (p->sec || (type->t & VT_ARRAY)) {
7921 /* make init_putv/vstore believe it were a struct */
7922 aref.c = elem_size;
7923 t1.t = VT_STRUCT, t1.ref = &aref;
7924 type = &t1;
7926 if (p->sec)
7927 vpush_ref(type, p->sec, c, elem_size);
7928 else
7929 vset(type, VT_LOCAL|VT_LVAL, c);
7930 for (i = 1; i < nb_elems; i++) {
7931 vdup();
7932 init_putv(p, type, c + elem_size * i);
7934 vpop();
7937 c += nb_elems * elem_size;
7938 if (c - corig > al)
7939 al = c - corig;
7940 return al;
7943 /* store a value or an expression directly in global data or in local array */
7944 static void init_putv(init_params *p, CType *type, unsigned long c)
7946 int bt;
7947 void *ptr;
7948 CType dtype;
7949 int size, align;
7950 Section *sec = p->sec;
7951 uint64_t val;
7953 dtype = *type;
7954 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7956 size = type_size(type, &align);
7957 if (type->t & VT_BITFIELD)
7958 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7959 init_assert(p, c + size);
7961 if (sec) {
7962 /* XXX: not portable */
7963 /* XXX: generate error if incorrect relocation */
7964 gen_assign_cast(&dtype);
7965 bt = type->t & VT_BTYPE;
7967 if ((vtop->r & VT_SYM)
7968 && bt != VT_PTR
7969 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7970 || (type->t & VT_BITFIELD))
7971 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
7973 tcc_error("initializer element is not computable at load time");
7975 if (NODATA_WANTED) {
7976 vtop--;
7977 return;
7980 ptr = sec->data + c;
7981 val = vtop->c.i;
7983 /* XXX: make code faster ? */
7984 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
7985 vtop->sym->v >= SYM_FIRST_ANOM &&
7986 /* XXX This rejects compound literals like
7987 '(void *){ptr}'. The problem is that '&sym' is
7988 represented the same way, which would be ruled out
7989 by the SYM_FIRST_ANOM check above, but also '"string"'
7990 in 'char *p = "string"' is represented the same
7991 with the type being VT_PTR and the symbol being an
7992 anonymous one. That is, there's no difference in vtop
7993 between '(void *){x}' and '&(void *){x}'. Ignore
7994 pointer typed entities here. Hopefully no real code
7995 will ever use compound literals with scalar type. */
7996 (vtop->type.t & VT_BTYPE) != VT_PTR) {
7997 /* These come from compound literals, memcpy stuff over. */
7998 Section *ssec;
7999 ElfSym *esym;
8000 ElfW_Rel *rel;
8001 esym = elfsym(vtop->sym);
8002 ssec = tcc_state->sections[esym->st_shndx];
8003 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
8004 if (ssec->reloc) {
8005 /* We need to copy over all memory contents, and that
8006 includes relocations. Use the fact that relocs are
8007 created in order, so look from the end of relocs
8008 until we hit one before the copied region. */
8009 unsigned long relofs = ssec->reloc->data_offset;
8010 while (relofs >= sizeof(*rel)) {
8011 relofs -= sizeof(*rel);
8012 rel = (ElfW_Rel*)(ssec->reloc->data + relofs);
8013 if (rel->r_offset >= esym->st_value + size)
8014 continue;
8015 if (rel->r_offset < esym->st_value)
8016 break;
8017 put_elf_reloca(symtab_section, sec,
8018 c + rel->r_offset - esym->st_value,
8019 ELFW(R_TYPE)(rel->r_info),
8020 ELFW(R_SYM)(rel->r_info),
8021 #if PTR_SIZE == 8
8022 rel->r_addend
8023 #else
8025 #endif
8029 } else {
8030 if (type->t & VT_BITFIELD) {
8031 int bit_pos, bit_size, bits, n;
8032 unsigned char *p, v, m;
8033 bit_pos = BIT_POS(vtop->type.t);
8034 bit_size = BIT_SIZE(vtop->type.t);
8035 p = (unsigned char*)ptr + (bit_pos >> 3);
8036 bit_pos &= 7, bits = 0;
8037 while (bit_size) {
8038 n = 8 - bit_pos;
8039 if (n > bit_size)
8040 n = bit_size;
8041 v = val >> bits << bit_pos;
8042 m = ((1 << n) - 1) << bit_pos;
8043 *p = (*p & ~m) | (v & m);
8044 bits += n, bit_size -= n, bit_pos = 0, ++p;
8046 } else
8047 switch(bt) {
8048 case VT_BOOL:
8049 *(char *)ptr = val != 0;
8050 break;
8051 case VT_BYTE:
8052 *(char *)ptr = val;
8053 break;
8054 case VT_SHORT:
8055 write16le(ptr, val);
8056 break;
8057 case VT_FLOAT:
8058 write32le(ptr, val);
8059 break;
8060 case VT_DOUBLE:
8061 write64le(ptr, val);
8062 break;
8063 case VT_LDOUBLE:
8064 #if defined TCC_IS_NATIVE_387
8065 /* Host and target platform may be different but both have x87.
8066 On windows, tcc does not use VT_LDOUBLE, except when it is a
8067 cross compiler. In this case a mingw gcc as host compiler
8068 comes here with 10-byte long doubles, while msvc or tcc won't.
8069 tcc itself can still translate by asm.
8070 In any case we avoid possibly random bytes 11 and 12.
8072 if (sizeof (long double) >= 10)
8073 memcpy(ptr, &vtop->c.ld, 10);
8074 #ifdef __TINYC__
8075 else if (sizeof (long double) == sizeof (double))
8076 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
8077 #endif
8078 else if (vtop->c.ld == 0.0)
8080 else
8081 #endif
8082 /* For other platforms it should work natively, but may not work
8083 for cross compilers */
8084 if (sizeof(long double) == LDOUBLE_SIZE)
8085 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
8086 else if (sizeof(double) == LDOUBLE_SIZE)
8087 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
8088 #ifndef TCC_CROSS_TEST
8089 else
8090 tcc_error("can't cross compile long double constants");
8091 #endif
8092 break;
8094 #if PTR_SIZE == 8
8095 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
8096 case VT_LLONG:
8097 case VT_PTR:
8098 if (vtop->r & VT_SYM)
8099 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
8100 else
8101 write64le(ptr, val);
8102 break;
8103 case VT_INT:
8104 write32le(ptr, val);
8105 break;
8106 #else
8107 case VT_LLONG:
8108 write64le(ptr, val);
8109 break;
8110 case VT_PTR:
8111 case VT_INT:
8112 if (vtop->r & VT_SYM)
8113 greloc(sec, vtop->sym, c, R_DATA_PTR);
8114 write32le(ptr, val);
8115 break;
8116 #endif
8117 default:
8118 //tcc_internal_error("unexpected type");
8119 break;
8122 vtop--;
8123 } else {
8124 vset(&dtype, VT_LOCAL|VT_LVAL, c);
8125 vswap();
8126 vstore();
8127 vpop();
8131 /* 't' contains the type and storage info. 'c' is the offset of the
8132 object in section 'sec'. If 'sec' is NULL, it means stack based
8133 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
8134 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
8135 size only evaluation is wanted (only for arrays). */
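/* Forms this function accepts, for example:

       char s[4] = "hi";        string literals take the special array path
       int  m[2] = { 1, 2 };    brace-enclosed list
       int  k    = 3;           plain assignment-expression

   With DIF_SIZE_ONLY the expressions are only skipped or recorded so that
   an unknown array size can be computed first (see decl_initializer_alloc
   below). */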
8136 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
8138 int len, n, no_oblock, i;
8139 int size1, align1;
8140 Sym *s, *f;
8141 Sym indexsym;
8142 CType *t1;
8144 /* generate line number info */
8145 if (debug_modes && !p->sec)
8146 tcc_debug_line(tcc_state), tcc_tcov_check_line (1);
8148 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
8149 /* In case of strings we have special handling for arrays, so
8150 don't consume them as initializer value (which would commit them
8151 to some anonymous symbol). */
8152 tok != TOK_LSTR && tok != TOK_STR &&
8153 !(flags & DIF_SIZE_ONLY)) {
8154 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
8155 flags |= DIF_HAVE_ELEM;
8158 if ((flags & DIF_HAVE_ELEM) &&
8159 !(type->t & VT_ARRAY) &&
8160 /* Use i_c_parameter_t to strip toplevel qualifiers.
8161 The source type might have VT_CONSTANT set, which is
8162 of course assignable to non-const elements. */
8163 is_compatible_unqualified_types(type, &vtop->type)) {
8164 goto init_putv;
8166 } else if (type->t & VT_ARRAY) {
8167 no_oblock = 1;
8168 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
8169 tok == '{') {
8170 skip('{');
8171 no_oblock = 0;
8174 s = type->ref;
8175 n = s->c;
8176 t1 = pointed_type(type);
8177 size1 = type_size(t1, &align1);
8179 /* only parse strings here if the type is correct (otherwise handle
8180 them as ((w)char *) expressions) */
8181 if ((tok == TOK_LSTR &&
8182 #ifdef TCC_TARGET_PE
8183 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
8184 #else
8185 (t1->t & VT_BTYPE) == VT_INT
8186 #endif
8187 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
8188 len = 0;
8189 cstr_reset(&initstr);
8190 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
8191 tcc_error("unhandled string literal merging");
8192 while (tok == TOK_STR || tok == TOK_LSTR) {
8193 if (initstr.size)
8194 initstr.size -= size1;
8195 if (tok == TOK_STR)
8196 len += tokc.str.size;
8197 else
8198 len += tokc.str.size / sizeof(nwchar_t);
8199 len--;
8200 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
8201 next();
8203 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
8204 && tok != TOK_EOF) {
8205 /* Not a lone literal but part of a bigger expression. */
8206 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
8207 tokc.str.size = initstr.size;
8208 tokc.str.data = initstr.data;
8209 goto do_init_array;
8212 decl_design_flex(p, s, len);
8213 if (!(flags & DIF_SIZE_ONLY)) {
8214 int nb = n;
8215 if (len < nb)
8216 nb = len;
8217 if (len > nb)
8218 tcc_warning("initializer-string for array is too long");
8219 /* in order to go faster for the common case (char
8220 string in a global variable), we handle it
8221 specifically */
8222 if (p->sec && size1 == 1) {
8223 init_assert(p, c + nb);
8224 if (!NODATA_WANTED)
8225 memcpy(p->sec->data + c, initstr.data, nb);
8226 } else {
8227 for(i=0;i<n;i++) {
8228 if (i >= nb) {
8229 /* only add trailing zero if enough storage (no
8230 warning in this case since it is standard) */
8231 if (flags & DIF_CLEAR)
8232 break;
8233 if (n - i >= 4) {
8234 init_putz(p, c + i * size1, (n - i) * size1);
8235 break;
8237 ch = 0;
8238 } else if (size1 == 1)
8239 ch = ((unsigned char *)initstr.data)[i];
8240 else
8241 ch = ((nwchar_t *)initstr.data)[i];
8242 vpushi(ch);
8243 init_putv(p, t1, c + i * size1);
8247 } else {
8249 do_init_array:
8250 indexsym.c = 0;
8251 f = &indexsym;
8253 do_init_list:
8254 /* zero memory once in advance */
8255 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
8256 init_putz(p, c, n*size1);
8257 flags |= DIF_CLEAR;
8260 len = 0;
8261 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
8262 len = decl_designator(p, type, c, &f, flags, len);
8263 flags &= ~DIF_HAVE_ELEM;
8264 if (type->t & VT_ARRAY) {
8265 ++indexsym.c;
8266 /* special test for multi dimensional arrays (may not
8267 be strictly correct if designators are used at the
8268 same time) */
8269 if (no_oblock && len >= n*size1)
8270 break;
8271 } else {
8272 if (s->type.t == VT_UNION)
8273 f = NULL;
8274 else
8275 f = f->next;
8276 if (no_oblock && f == NULL)
8277 break;
8280 if (tok == '}')
8281 break;
8282 skip(',');
8285 if (!no_oblock)
8286 skip('}');
8287 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
8288 no_oblock = 1;
8289 if ((flags & DIF_FIRST) || tok == '{') {
8290 skip('{');
8291 no_oblock = 0;
8293 s = type->ref;
8294 f = s->next;
8295 n = s->c;
8296 size1 = 1;
8297 goto do_init_list;
8298 } else if (tok == '{') {
8299 if (flags & DIF_HAVE_ELEM)
8300 skip(';');
8301 next();
8302 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
8303 skip('}');
8304 } else if ((flags & DIF_SIZE_ONLY)) {
8305 /* If we supported only ISO C we wouldn't have to accept calling
8306 this on anything than an array if DIF_SIZE_ONLY (and even then
8307 only on the outermost level, so no recursion would be needed),
8308 because initializing a flex array member isn't supported.
8309 But GNU C supports it, so we need to recurse even into
8310 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
8311 /* just skip expression */
8312 skip_or_save_block(NULL);
8313 } else {
8314 if (!(flags & DIF_HAVE_ELEM)) {
8315 /* This should happen only when we haven't parsed
8316 the init element above for fear of committing a
8317 string constant to memory too early. */
8318 if (tok != TOK_STR && tok != TOK_LSTR)
8319 expect("string constant");
8320 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
8322 init_putv:
8323 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
8324 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
8325 && vtop->c.i == 0
8326 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
8328 vpop();
8329 else
8330 init_putv(p, type, c);
8334 /* parse an initializer for type 't' if 'has_init' is non zero, and
8335 allocate space in local or global data space ('r' is either
8336 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
8337 variable 'v' of scope 'scope' is declared before initializers
8338 are parsed. If 'v' is zero, then a reference to the new object
8339 is put in the value stack. If 'has_init' is 2, a special parsing
8340 is done to handle string constants. */
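/* For example:

       char s[] = "ab" "cd";      has_init == 2: only string tokens are read
       int  a[] = { 1, 2, 3 };    size unknown: the tokens are saved, a
                                  DIF_SIZE_ONLY pass computes the size, then
                                  the initializer is parsed a second time
*/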
8341 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
8342 int has_init, int v, int scope)
8344 int size, align, addr;
8345 TokenString *init_str = NULL;
8347 Section *sec;
8348 Sym *flexible_array;
8349 Sym *sym;
8350 int saved_nocode_wanted = nocode_wanted;
8351 #ifdef CONFIG_TCC_BCHECK
8352 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
8353 #endif
8354 init_params p = {0};
8356 /* Always allocate static or global variables */
8357 if (v && (r & VT_VALMASK) == VT_CONST)
8358 nocode_wanted |= 0x80000000;
8360 flexible_array = NULL;
8361 size = type_size(type, &align);
8363 /* exactly one flexible array may be initialized, either the
8364 toplevel array or the last member of the toplevel struct */
8366 if (size < 0) {
8367 /* If the base type itself was an array type of unspecified size
8368 (like in 'typedef int arr[]; arr x = {1};') then we will
8369 overwrite the unknown size with the real one for this decl.
8370 We need to unshare the ref symbol holding that size. */
8371 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
8372 p.flex_array_ref = type->ref;
8374 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
8375 Sym *field = type->ref->next;
8376 if (field) {
8377 while (field->next)
8378 field = field->next;
8379 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
8380 flexible_array = field;
8381 p.flex_array_ref = field->type.ref;
8382 size = -1;
8387 if (size < 0) {
8388 /* If unknown size, do a dry-run 1st pass */
8389 if (!has_init)
8390 tcc_error("unknown type size");
8391 if (has_init == 2) {
8392 /* only get strings */
8393 init_str = tok_str_alloc();
8394 while (tok == TOK_STR || tok == TOK_LSTR) {
8395 tok_str_add_tok(init_str);
8396 next();
8398 tok_str_add(init_str, -1);
8399 tok_str_add(init_str, 0);
8400 } else
8401 skip_or_save_block(&init_str);
8402 unget_tok(0);
8404 /* compute size */
8405 begin_macro(init_str, 1);
8406 next();
8407 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
8408 /* prepare second initializer parsing */
8409 macro_ptr = init_str->str;
8410 next();
8412 /* if still unknown size, error */
8413 size = type_size(type, &align);
8414 if (size < 0)
8415 tcc_error("unknown type size");
8417 /* If there's a flex member and it was used in the initializer
8418 adjust size. */
8419 if (flexible_array && flexible_array->type.ref->c > 0)
8420 size += flexible_array->type.ref->c
8421 * pointed_size(&flexible_array->type);
8424 /* take into account specified alignment if bigger */
8425 if (ad->a.aligned) {
8426 int speca = 1 << (ad->a.aligned - 1);
8427 if (speca > align)
8428 align = speca;
8429 } else if (ad->a.packed) {
8430 align = 1;
8433 if (!v && NODATA_WANTED)
8434 size = 0, align = 1;
8436 if ((r & VT_VALMASK) == VT_LOCAL) {
8437 sec = NULL;
8438 #ifdef CONFIG_TCC_BCHECK
8439 if (bcheck && v) {
8440 /* add padding between stack variables for bound checking */
8441 loc -= align;
8443 #endif
8444 loc = (loc - size) & -align;
8445 addr = loc;
8446 p.local_offset = addr + size;
8447 #ifdef CONFIG_TCC_BCHECK
8448 if (bcheck && v) {
8449 /* add padding between stack variables for bound checking */
8450 loc -= align;
8452 #endif
8453 if (v) {
8454 /* local variable */
8455 #ifdef CONFIG_TCC_ASM
8456 if (ad->asm_label) {
8457 int reg = asm_parse_regvar(ad->asm_label);
8458 if (reg >= 0)
8459 r = (r & ~VT_VALMASK) | reg;
8461 #endif
8462 sym = sym_push(v, type, r, addr);
8463 if (ad->cleanup_func) {
8464 Sym *cls = sym_push2(&all_cleanups,
8465 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
8466 cls->prev_tok = sym;
8467 cls->next = ad->cleanup_func;
8468 cls->ncl = cur_scope->cl.s;
8469 cur_scope->cl.s = cls;
8472 sym->a = ad->a;
8473 } else {
8474 /* push local reference */
8475 vset(type, r, addr);
8477 } else {
8478 sym = NULL;
8479 if (v && scope == VT_CONST) {
8480 /* see if the symbol was already defined */
8481 sym = sym_find(v);
8482 if (sym) {
8483 if (p.flex_array_ref && (sym->type.t & type->t & VT_ARRAY)
8484 && sym->type.ref->c > type->ref->c) {
8485 /* flex array was already declared with explicit size
8486 extern int arr[10];
8487 int arr[] = { 1,2,3 }; */
8488 type->ref->c = sym->type.ref->c;
8489 size = type_size(type, &align);
8491 patch_storage(sym, ad, type);
8492 /* we accept several definitions of the same global variable. */
8493 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8494 goto no_alloc;
8498 /* allocate symbol in corresponding section */
8499 sec = ad->section;
8500 if (!sec) {
8501 CType *tp = type;
8502 while ((tp->t & (VT_BTYPE|VT_ARRAY)) == (VT_PTR|VT_ARRAY))
8503 tp = &tp->ref->type;
8504 if (tp->t & VT_CONSTANT) {
8505 sec = rodata_section;
8506 } else if (has_init) {
8507 sec = data_section;
8508 /*if (tcc_state->g_debug & 4)
8509 tcc_warning("rw data: %s", get_tok_str(v, 0));*/
8510 } else if (tcc_state->nocommon)
8511 sec = bss_section;
8514 if (sec) {
8515 addr = section_add(sec, size, align);
8516 #ifdef CONFIG_TCC_BCHECK
8517 /* add padding if bound check */
8518 if (bcheck)
8519 section_add(sec, 1, 1);
8520 #endif
8521 } else {
8522 addr = align; /* SHN_COMMON is special, symbol value is align */
8523 sec = common_section;
8526 if (v) {
8527 if (!sym) {
8528 sym = sym_push(v, type, r | VT_SYM, 0);
8529 patch_storage(sym, ad, NULL);
8531 /* update symbol definition */
8532 put_extern_sym(sym, sec, addr, size);
8533 } else {
8534 /* push global reference */
8535 vpush_ref(type, sec, addr, size);
8536 sym = vtop->sym;
8537 vtop->r |= r;
8540 #ifdef CONFIG_TCC_BCHECK
8541 /* handle bounds now because the symbol must be defined
8542 before the relocation can refer to it */
8543 if (bcheck) {
8544 addr_t *bounds_ptr;
8546 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8547 /* then add global bound info */
8548 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8549 bounds_ptr[0] = 0; /* relocated */
8550 bounds_ptr[1] = size;
8552 #endif
8555 if (type->t & VT_VLA) {
8556 int a;
8558 if (NODATA_WANTED)
8559 goto no_alloc;
8561 /* save before-VLA stack pointer if needed */
8562 if (cur_scope->vla.num == 0) {
8563 if (cur_scope->prev && cur_scope->prev->vla.num) {
8564 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
8565 } else {
8566 gen_vla_sp_save(loc -= PTR_SIZE);
8567 cur_scope->vla.locorig = loc;
8571 vla_runtime_type_size(type, &a);
8572 gen_vla_alloc(type, a);
8573 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8574 /* on _WIN64, because of the function args scratch area, the
8575 result of alloca differs from RSP and is returned in RAX. */
8576 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8577 #endif
8578 gen_vla_sp_save(addr);
8579 cur_scope->vla.loc = addr;
8580 cur_scope->vla.num++;
8581 } else if (has_init) {
8582 p.sec = sec;
8583 decl_initializer(&p, type, addr, DIF_FIRST);
8584 /* patch flexible array member size back to -1, */
8585 /* for possible subsequent similar declarations */
8586 if (flexible_array)
8587 flexible_array->type.ref->c = -1;
8590 no_alloc:
8591 /* restore parse state if needed */
8592 if (init_str) {
8593 end_macro();
8594 next();
8597 nocode_wanted = saved_nocode_wanted;
8600 /* parse a function defined by symbol 'sym' and generate its code in
8601 'cur_text_section' */
8602 static void gen_function(Sym *sym)
8604 struct scope f = { 0 };
8605 cur_scope = root_scope = &f;
8606 nocode_wanted = 0;
8607 ind = cur_text_section->data_offset;
8608 if (sym->a.aligned) {
8609 size_t newoff = section_add(cur_text_section, 0,
8610 1 << (sym->a.aligned - 1));
8611 gen_fill_nops(newoff - ind);
8613 /* NOTE: we patch the symbol size later */
8614 put_extern_sym(sym, cur_text_section, ind, 0);
8615 if (sym->type.ref->f.func_ctor)
8616 add_array (tcc_state, ".init_array", sym->c);
8617 if (sym->type.ref->f.func_dtor)
8618 add_array (tcc_state, ".fini_array", sym->c);
8620 funcname = get_tok_str(sym->v, NULL);
8621 func_ind = ind;
8622 func_vt = sym->type.ref->type;
8623 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8625 /* put debug symbol */
8626 tcc_debug_funcstart(tcc_state, sym);
8627 /* push a dummy symbol to enable local sym storage */
8628 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8629 local_scope = 1; /* for function parameters */
8630 gfunc_prolog(sym);
8631 local_scope = 0;
8632 rsym = 0;
8633 clear_temp_local_var_list();
8634 block(0);
8635 gsym(rsym);
8636 nocode_wanted = 0;
8637 /* reset local stack */
8638 pop_local_syms(NULL, 0);
8639 gfunc_epilog();
8640 cur_text_section->data_offset = ind;
8641 local_scope = 0;
8642 label_pop(&global_label_stack, NULL, 0);
8643 sym_pop(&all_cleanups, NULL, 0);
8644 /* patch symbol size */
8645 elfsym(sym)->st_size = ind - func_ind;
8646 /* end of function */
8647 tcc_debug_funcend(tcc_state, ind - func_ind);
8648 /* It's better to crash than to generate wrong code */
8649 cur_text_section = NULL;
8650 funcname = ""; /* for safety */
8651 func_vt.t = VT_VOID; /* for safety */
8652 func_var = 0; /* for safety */
8653 ind = 0; /* for safety */
8654 nocode_wanted = 0x80000000;
8655 check_vstack();
8656 /* do this after funcend debug info */
8657 next();
8660 static void gen_inline_functions(TCCState *s)
8662 Sym *sym;
8663 int inline_generated, i;
8664 struct InlineFunc *fn;
8666 tcc_open_bf(s, ":inline:", 0);
8667 /* iterate while inline functions are referenced */
8668 do {
8669 inline_generated = 0;
8670 for (i = 0; i < s->nb_inline_fns; ++i) {
8671 fn = s->inline_fns[i];
8672 sym = fn->sym;
8673 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8674 /* the function was used or forced (and then not internal):
8675 generate its code and convert it to a normal function */
8676 fn->sym = NULL;
8677 tcc_debug_putfile(s, fn->filename);
8678 begin_macro(fn->func_str, 1);
8679 next();
8680 cur_text_section = text_section;
8681 gen_function(sym);
8682 end_macro();
8684 inline_generated = 1;
8687 } while (inline_generated);
8688 tcc_close();
8691 static void free_inline_functions(TCCState *s)
8693 int i;
8694 /* free tokens of unused inline functions */
8695 for (i = 0; i < s->nb_inline_fns; ++i) {
8696 struct InlineFunc *fn = s->inline_fns[i];
8697 if (fn->sym)
8698 tok_str_free(fn->func_str);
8700 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8703 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8704 if parsing old style parameter decl list (and FUNC_SYM is set then) */
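/* The VT_CMP mode corresponds to an old style (K&R) definition such as

       int f(a, b)
           int  a;
           char b;
       { return a + b; }

   where the declaration list after the parameter names is parsed with
   decl0(VT_CMP, 0, sym), see below. */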
8705 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
8707 int v, has_init, r, oldint;
8708 CType type, btype;
8709 Sym *sym;
8710 AttributeDef ad, adbase;
8712 while (1) {
8713 if (tok == TOK_STATIC_ASSERT) {
8714 CString error_str;
8715 int c;
8717 next();
8718 skip('(');
8719 c = expr_const();
8721 if (tok == ')') {
8722 if (!c)
8723 tcc_error("_Static_assert fail");
8724 next();
8725 goto static_assert_out;
8728 skip(',');
8729 parse_mult_str(&error_str, "string constant");
8730 if (c == 0)
8731 tcc_error("%s", (char *)error_str.data);
8732 cstr_free(&error_str);
8733 skip(')');
8734 static_assert_out:
8735 skip(';');
8736 continue;
8739 oldint = 0;
8740 if (!parse_btype(&btype, &adbase)) {
8741 if (is_for_loop_init)
8742 return 0;
8743 /* skip redundant ';' if not in old parameter decl scope */
8744 if (tok == ';' && l != VT_CMP) {
8745 next();
8746 continue;
8748 if (l != VT_CONST)
8749 break;
8750 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8751 /* global asm block */
8752 asm_global_instr();
8753 continue;
8755 if (tok >= TOK_UIDENT) {
8756 /* special test for old K&R protos without explicit int
8757 type. Only accepted when defining global data */
8758 btype.t = VT_INT;
8759 oldint = 1;
8760 } else {
8761 if (tok != TOK_EOF)
8762 expect("declaration");
8763 break;
8767 if (tok == ';') {
8768 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
                v = btype.ref->v;
                if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
                    tcc_warning("unnamed struct/union that defines no instances");
                next();
                continue;
            }
            if (IS_ENUM(btype.t)) {
                next();
                continue;
            }
        }

        while (1) { /* iterate thru each declaration */
            type = btype;
            ad = adbase;
            type_decl(&type, &ad, &v, TYPE_DIRECT);
#if 0
            {
                char buf[500];
                type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
                printf("type = '%s'\n", buf);
            }
#endif
            if ((type.t & VT_BTYPE) == VT_FUNC) {
                if ((type.t & VT_STATIC) && (l == VT_LOCAL))
                    tcc_error("function without file scope cannot be static");
                /* if old style function prototype, we accept a
                   declaration list */
                sym = type.ref;
                if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
                    decl0(VT_CMP, 0, sym);
#ifdef TCC_TARGET_MACHO
                if (sym->f.func_alwinl
                    && ((type.t & (VT_EXTERN | VT_INLINE))
                        == (VT_EXTERN | VT_INLINE))) {
                    /* always_inline functions must be handled as if they
                       don't generate multiple global defs, even if extern
                       inline, i.e. GNU inline semantics for those. Rewrite
                       them into static inline. */
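                    /* e.g. (illustrative, Mach-O targets only):
                           extern inline __attribute__((always_inline))
                           int f(void) { return 0; }
                       is treated as if it had been declared static inline */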
                    type.t &= ~VT_EXTERN;
                    type.t |= VT_STATIC;
                }
#endif
                /* always compile 'extern inline' */
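                /* i.e. an "extern inline" definition such as (illustrative)
                       extern inline int twice(int x) { return x + x; }
                   loses VT_INLINE, so its body is compiled unconditionally
                   instead of being deferred like a static inline function */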
                if (type.t & VT_EXTERN)
                    type.t &= ~VT_INLINE;

            } else if (oldint) {
                tcc_warning("type defaults to int");
            }

            if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
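                /* GNU asm-label extension, e.g. (illustrative):
                       extern int myvar __asm__("real_symbol_name");
                   the string sets the assembler-level name of the symbol */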
                ad.asm_label = asm_label_instr();
                /* parse one last attribute list, after asm label */
                parse_attribute(&ad);
#if 0
                /* gcc does not allow __asm__("label") with function definition,
                   but why not ... */
                if (tok == '{')
                    expect(";");
#endif
            }

#ifdef TCC_TARGET_PE
            if (ad.a.dllimport || ad.a.dllexport) {
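                /* e.g. (illustrative):
                       __declspec(dllimport) int shared_var;
                       int __attribute__((dllexport)) api_func(void);
                   dll linkage cannot be combined with static, and the
                   attribute is ignored (with a warning) on typedefs */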
                if (type.t & VT_STATIC)
                    tcc_error("cannot have dll linkage with static");
                if (type.t & VT_TYPEDEF) {
                    tcc_warning("'%s' attribute ignored for typedef",
                        ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
                        (ad.a.dllexport = 0, "dllexport"));
                } else if (ad.a.dllimport) {
                    if ((type.t & VT_BTYPE) == VT_FUNC)
                        ad.a.dllimport = 0;
                    else
                        type.t |= VT_EXTERN;
                }
            }
#endif
            if (tok == '{') {
                if (l != VT_CONST)
                    tcc_error("cannot use local functions");
                if ((type.t & VT_BTYPE) != VT_FUNC)
                    expect("function definition");

                /* reject abstract declarators in function definition;
                   make old style params without decl have int type */
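                /* e.g. in "int f(a, b) int a; { ... }" parameter b got no
                   declaration in the list, so it is given type int below
                   (illustrative example) */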
                sym = type.ref;
                while ((sym = sym->next) != NULL) {
                    if (!(sym->v & ~SYM_FIELD))
                        expect("identifier");
                    if (sym->type.t == VT_VOID)
                        sym->type = int_type;
                }

                /* apply post-declaration attributes */
                merge_funcattr(&type.ref->f, &ad.f);

                /* put function symbol */
                type.t &= ~VT_EXTERN;
                sym = external_sym(v, &type, 0, &ad);

                /* static inline functions are just recorded as a kind
                   of macro. Their code will be emitted at the end of
                   the compilation unit only if they are used */
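                /* Illustrative: for
                       static inline int twice(int x) { return x + x; }
                   the body tokens are saved by skip_or_save_block() and real
                   code is only emitted later by gen_inline_functions(), and
                   only if 'twice' is actually referenced */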
                if (sym->type.t & VT_INLINE) {
                    struct InlineFunc *fn;
                    fn = tcc_malloc(sizeof *fn + strlen(file->filename));
                    strcpy(fn->filename, file->filename);
                    fn->sym = sym;
                    skip_or_save_block(&fn->func_str);
                    dynarray_add(&tcc_state->inline_fns,
                                 &tcc_state->nb_inline_fns, fn);
                } else {
                    /* compute text section */
                    cur_text_section = ad.section;
                    if (!cur_text_section)
                        cur_text_section = text_section;
                    gen_function(sym);
                }
                break;
            } else {
                if (l == VT_CMP) {
                    /* find parameter in function parameter list */
                    for (sym = func_sym->next; sym; sym = sym->next)
                        if ((sym->v & ~SYM_FIELD) == v)
                            goto found;
                    tcc_error("declaration for parameter '%s' but no such parameter",
                              get_tok_str(v, NULL));
                found:
                    if (type.t & VT_STORAGE) /* 'register' is okay */
                        tcc_error("storage class specified for '%s'",
                                  get_tok_str(v, NULL));
                    if (sym->type.t != VT_VOID)
                        tcc_error("redefinition of parameter '%s'",
                                  get_tok_str(v, NULL));
                    convert_parameter_type(&type);
                    sym->type = type;
                } else if (type.t & VT_TYPEDEF) {
                    /* save typedefed type */
                    /* XXX: test storage specifiers ? */
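                    /* e.g. repeating "typedef unsigned long ulong_t;" with the
                       same type in the same scope is accepted; redefining it
                       as "typedef int ulong_t;" hits the error below
                       (illustrative example) */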
                    sym = sym_find(v);
                    if (sym && sym->sym_scope == local_scope) {
                        if (!is_compatible_types(&sym->type, &type)
                            || !(sym->type.t & VT_TYPEDEF))
                            tcc_error("incompatible redefinition of '%s'",
                                get_tok_str(v, NULL));
                        sym->type = type;
                    } else {
                        sym = sym_push(v, &type, 0, 0);
                    }
                    sym->a = ad.a;
                    sym->f = ad.f;
                    if (debug_modes)
                        tcc_debug_typedef (tcc_state, sym);
                } else if ((type.t & VT_BTYPE) == VT_VOID
                           && !(type.t & VT_EXTERN)) {
                    tcc_error("declaration of void object");
                } else {
                    r = 0;
                    if ((type.t & VT_BTYPE) == VT_FUNC) {
                        /* external function definition */
                        /* specific case for func_call attribute */
                        type.ref->f = ad.f;
                    } else if (!(type.t & VT_ARRAY)) {
                        /* not lvalue if array */
                        r |= VT_LVAL;
                    }
                    has_init = (tok == '=');
                    if (has_init && (type.t & VT_VLA))
                        tcc_error("variable length array cannot be initialized");
                    if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
                        || (type.t & VT_BTYPE) == VT_FUNC
                        /* as with GCC, uninitialized global arrays with no size
                           are considered extern: */
                        || ((type.t & VT_ARRAY) && !has_init
                            && l == VT_CONST && type.ref->c < 0)
                        ) {
                        /* external variable or function */
                        type.t |= VT_EXTERN;
                        sym = external_sym(v, &type, r, &ad);
                        if (ad.alias_target) {
                            /* Aliases need to be emitted when their target
                               symbol is emitted, even if perhaps unreferenced.
                               We only support the case where the base is
                               already defined; otherwise we would need to
                               defer emitting the aliases until the end of
                               the compile unit. */
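                            /* e.g. (illustrative):
                                   int real_impl(int x) { return x; }
                                   int wrapper(int x) __attribute__((alias("real_impl")));
                               real_impl must already be defined at this point,
                               otherwise the forward-alias error below is raised */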
                            Sym *alias_target = sym_find(ad.alias_target);
                            ElfSym *esym = elfsym(alias_target);
                            if (!esym)
                                tcc_error("unsupported forward __alias__ attribute");
                            put_extern_sym2(sym, esym->st_shndx,
                                            esym->st_value, esym->st_size, 1);
                        }
                    } else {
                        if (type.t & VT_STATIC)
                            r |= VT_CONST;
                        else
                            r |= l;
                        if (has_init)
                            next();
                        else if (l == VT_CONST)
                            /* uninitialized global variables may be overridden */
                            type.t |= VT_EXTERN;
                        decl_initializer_alloc(&type, &ad, r, has_init, v, l);
                    }
                }
                if (tok != ',') {
                    if (is_for_loop_init)
                        return 1;
                    skip(';');
                    break;
                }
                next();
            }
        }
    }
    return 0;
}

static void decl(int l)
{
    decl0(l, 0, NULL);
}

/* ------------------------------------------------------------------------- */
#undef gjmp_addr
#undef gjmp
/* ------------------------------------------------------------------------- */