Long double Constant problem
[tinycc.git] / tccgen.c
bloba4d3b39e3a908fb9541d59fde9b878cda6568ce2
1 /*
2 * TCC - Tiny C Compiler
3 *
4 * Copyright (c) 2001-2004 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 #define USING_GLOBALS
22 #include "tcc.h"
24 /********************************************************/
25 /* global variables */
27 /* loc : local variable index
28 ind : output code index
29 rsym: return symbol
30 anon_sym: anonymous symbol index
32 ST_DATA int rsym, anon_sym, ind, loc;
34 ST_DATA Sym *global_stack;
35 ST_DATA Sym *local_stack;
36 ST_DATA Sym *define_stack;
37 ST_DATA Sym *global_label_stack;
38 ST_DATA Sym *local_label_stack;
40 static Sym *sym_free_first;
41 static void **sym_pools;
42 static int nb_sym_pools;
44 static Sym *all_cleanups, *pending_gotos;
45 static int local_scope;
46 static int in_sizeof;
47 static int in_generic;
48 static int section_sym;
49 ST_DATA char debug_modes;
51 ST_DATA SValue *vtop;
52 static SValue _vstack[1 + VSTACK_SIZE];
53 #define vstack (_vstack + 1)
55 ST_DATA int const_wanted; /* true if constant wanted */
56 ST_DATA int nocode_wanted; /* no code generation wanted */
57 #define unevalmask 0xffff /* unevaluated subexpression */
58 #define NODATA_WANTED (nocode_wanted > 0) /* no static data output wanted either */
59 #define STATIC_DATA_WANTED (nocode_wanted & 0xC0000000) /* only static data output */
61 /* Automagical code suppression ----> */
62 #define CODE_OFF() (nocode_wanted |= 0x20000000)
63 #define CODE_ON() (nocode_wanted &= ~0x20000000)
65 static void tcc_tcov_block_begin(void);
67 /* Clear 'nocode_wanted' at label if it was used */
68 ST_FUNC void gsym(int t) { if (t) { gsym_addr(t, ind); CODE_ON(); }}
69 static int gind(void) { int t = ind; CODE_ON(); if (debug_modes) tcc_tcov_block_begin(); return t; }
71 /* Set 'nocode_wanted' after unconditional jumps */
72 static void gjmp_addr_acs(int t) { gjmp_addr(t); CODE_OFF(); }
73 static int gjmp_acs(int t) { t = gjmp(t); CODE_OFF(); return t; }
75 /* These are #undef'd at the end of this file */
76 #define gjmp_addr gjmp_addr_acs
77 #define gjmp gjmp_acs
78 /* <---- */
80 ST_DATA int global_expr; /* true if compound literals must be allocated globally (used during initializers parsing */
81 ST_DATA CType func_vt; /* current function return type (used by return instruction) */
82 ST_DATA int func_var; /* true if current function is variadic (used by return instruction) */
83 ST_DATA int func_vc;
84 static int last_line_num, new_file, func_ind; /* debug info control */
85 ST_DATA const char *funcname;
86 ST_DATA CType int_type, func_old_type, char_type, char_pointer_type;
87 static CString initstr;
89 #if PTR_SIZE == 4
90 #define VT_SIZE_T (VT_INT | VT_UNSIGNED)
91 #define VT_PTRDIFF_T VT_INT
92 #elif LONG_SIZE == 4
93 #define VT_SIZE_T (VT_LLONG | VT_UNSIGNED)
94 #define VT_PTRDIFF_T VT_LLONG
95 #else
96 #define VT_SIZE_T (VT_LONG | VT_LLONG | VT_UNSIGNED)
97 #define VT_PTRDIFF_T (VT_LONG | VT_LLONG)
98 #endif
100 ST_DATA struct switch_t {
101 struct case_t {
102 int64_t v1, v2;
103 int sym;
104 } **p; int n; /* list of case ranges */
105 int def_sym; /* default symbol */
106 int *bsym;
107 struct scope *scope;
108 struct switch_t *prev;
109 SValue sv;
110 } *cur_switch; /* current switch */
112 #define MAX_TEMP_LOCAL_VARIABLE_NUMBER 8
113 /*list of temporary local variables on the stack in current function. */
114 ST_DATA struct temp_local_variable {
115 int location; //offset on stack. Svalue.c.i
116 short size;
117 short align;
118 } arr_temp_local_vars[MAX_TEMP_LOCAL_VARIABLE_NUMBER];
119 short nb_temp_local_vars;
121 static struct scope {
122 struct scope *prev;
123 struct { int loc, locorig, num; } vla;
124 struct { Sym *s; int n; } cl;
125 int *bsym, *csym;
126 Sym *lstk, *llstk;
127 } *cur_scope, *loop_scope, *root_scope;
129 typedef struct {
130 Section *sec;
131 int local_offset;
132 Sym *flex_array_ref;
133 } init_params;
135 /********************************************************/
136 /* stab debug support */
138 static const struct {
139 int type;
140 const char *name;
141 } default_debug[] = {
142 { VT_INT, "int:t1=r1;-2147483648;2147483647;" },
143 { VT_BYTE, "char:t2=r2;0;127;" },
144 #if LONG_SIZE == 4
145 { VT_LONG | VT_INT, "long int:t3=r3;-2147483648;2147483647;" },
146 #else
147 { VT_LLONG | VT_LONG, "long int:t3=r3;-9223372036854775808;9223372036854775807;" },
148 #endif
149 { VT_INT | VT_UNSIGNED, "unsigned int:t4=r4;0;037777777777;" },
150 #if LONG_SIZE == 4
151 { VT_LONG | VT_INT | VT_UNSIGNED, "long unsigned int:t5=r5;0;037777777777;" },
152 #else
153 /* use octal instead of -1 so size_t works (-gstabs+ in gcc) */
154 { VT_LLONG | VT_LONG | VT_UNSIGNED, "long unsigned int:t5=r5;0;01777777777777777777777;" },
155 #endif
156 { VT_QLONG, "__int128:t6=r6;0;-1;" },
157 { VT_QLONG | VT_UNSIGNED, "__int128 unsigned:t7=r7;0;-1;" },
158 { VT_LLONG, "long long int:t8=r8;-9223372036854775808;9223372036854775807;" },
159 { VT_LLONG | VT_UNSIGNED, "long long unsigned int:t9=r9;0;01777777777777777777777;" },
160 { VT_SHORT, "short int:t10=r10;-32768;32767;" },
161 { VT_SHORT | VT_UNSIGNED, "short unsigned int:t11=r11;0;65535;" },
162 { VT_BYTE | VT_DEFSIGN, "signed char:t12=r12;-128;127;" },
163 { VT_BYTE | VT_DEFSIGN | VT_UNSIGNED, "unsigned char:t13=r13;0;255;" },
164 { VT_FLOAT, "float:t14=r1;4;0;" },
165 { VT_DOUBLE, "double:t15=r1;8;0;" },
166 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
167 { VT_DOUBLE | VT_LONG, "long double:t16=r1;8;0;" },
168 #else
169 { VT_LDOUBLE, "long double:t16=r1;16;0;" },
170 #endif
171 { -1, "_Float32:t17=r1;4;0;" },
172 { -1, "_Float64:t18=r1;8;0;" },
173 { -1, "_Float128:t19=r1;16;0;" },
174 { -1, "_Float32x:t20=r1;8;0;" },
175 { -1, "_Float64x:t21=r1;16;0;" },
176 { -1, "_Decimal32:t22=r1;4;0;" },
177 { -1, "_Decimal64:t23=r1;8;0;" },
178 { -1, "_Decimal128:t24=r1;16;0;" },
179 /* if default char is unsigned */
180 { VT_BYTE | VT_UNSIGNED, "unsigned char:t25=r25;0;255;" },
181 /* boolean type */
182 { VT_BOOL, "bool:t26=r26;0;255;" },
183 { VT_VOID, "void:t27=27" },
186 static int debug_next_type;
188 static struct debug_hash {
189 int debug_type;
190 Sym *type;
191 } *debug_hash;
193 static int n_debug_hash;
195 static struct debug_info {
196 int start;
197 int end;
198 int n_sym;
199 struct debug_sym {
200 int type;
201 unsigned long value;
202 char *str;
203 Section *sec;
204 int sym_index;
205 } *sym;
206 struct debug_info *child, *next, *last, *parent;
207 } *debug_info, *debug_info_root;
209 static struct {
210 unsigned long offset;
211 unsigned long last_file_name;
212 unsigned long last_func_name;
213 int ind;
214 int line;
215 } tcov_data;
217 /********************************************************/
218 #if 1
219 #define precedence_parser
220 static void init_prec(void);
221 #endif
222 /********************************************************/
223 #ifndef CONFIG_TCC_ASM
224 ST_FUNC void asm_instr(void)
226 tcc_error("inline asm() not supported");
228 ST_FUNC void asm_global_instr(void)
230 tcc_error("inline asm() not supported");
232 #endif
234 /* ------------------------------------------------------------------------- */
235 static void gen_cast(CType *type);
236 static void gen_cast_s(int t);
237 static inline CType *pointed_type(CType *type);
238 static int is_compatible_types(CType *type1, CType *type2);
239 static int parse_btype(CType *type, AttributeDef *ad);
240 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td);
241 static void parse_expr_type(CType *type);
242 static void init_putv(init_params *p, CType *type, unsigned long c);
243 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags);
244 static void block(int is_expr);
245 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r, int has_init, int v, int scope);
246 static void decl(int l);
247 static int decl0(int l, int is_for_loop_init, Sym *);
248 static void expr_eq(void);
249 static void vla_runtime_type_size(CType *type, int *a);
250 static int is_compatible_unqualified_types(CType *type1, CType *type2);
251 static inline int64_t expr_const64(void);
252 static void vpush64(int ty, unsigned long long v);
253 static void vpush(CType *type);
254 static int gvtst(int inv, int t);
255 static void gen_inline_functions(TCCState *s);
256 static void free_inline_functions(TCCState *s);
257 static void skip_or_save_block(TokenString **str);
258 static void gv_dup(void);
259 static int get_temp_local_var(int size,int align);
260 static void clear_temp_local_var_list();
261 static void cast_error(CType *st, CType *dt);
263 ST_INLN int is_float(int t)
265 int bt = t & VT_BTYPE;
266 return bt == VT_LDOUBLE
267 || bt == VT_DOUBLE
268 || bt == VT_FLOAT
269 || bt == VT_QFLOAT;
272 static inline int is_integer_btype(int bt)
274 return bt == VT_BYTE
275 || bt == VT_BOOL
276 || bt == VT_SHORT
277 || bt == VT_INT
278 || bt == VT_LLONG;
281 static int btype_size(int bt)
283 return bt == VT_BYTE || bt == VT_BOOL ? 1 :
284 bt == VT_SHORT ? 2 :
285 bt == VT_INT ? 4 :
286 bt == VT_LLONG ? 8 :
287 bt == VT_PTR ? PTR_SIZE : 0;
290 /* returns function return register from type */
291 static int R_RET(int t)
293 if (!is_float(t))
294 return REG_IRET;
295 #ifdef TCC_TARGET_X86_64
296 if ((t & VT_BTYPE) == VT_LDOUBLE)
297 return TREG_ST0;
298 #elif defined TCC_TARGET_RISCV64
299 if ((t & VT_BTYPE) == VT_LDOUBLE)
300 return REG_IRET;
301 #endif
302 return REG_FRET;
305 /* returns 2nd function return register, if any */
306 static int R2_RET(int t)
308 t &= VT_BTYPE;
309 #if PTR_SIZE == 4
310 if (t == VT_LLONG)
311 return REG_IRE2;
312 #elif defined TCC_TARGET_X86_64
313 if (t == VT_QLONG)
314 return REG_IRE2;
315 if (t == VT_QFLOAT)
316 return REG_FRE2;
317 #elif defined TCC_TARGET_RISCV64
318 if (t == VT_LDOUBLE)
319 return REG_IRE2;
320 #endif
321 return VT_CONST;
324 /* returns true for two-word types */
325 #define USING_TWO_WORDS(t) (R2_RET(t) != VT_CONST)
327 /* put function return registers to stack value */
328 static void PUT_R_RET(SValue *sv, int t)
330 sv->r = R_RET(t), sv->r2 = R2_RET(t);
333 /* returns function return register class for type t */
334 static int RC_RET(int t)
336 return reg_classes[R_RET(t)] & ~(RC_FLOAT | RC_INT);
339 /* returns generic register class for type t */
340 static int RC_TYPE(int t)
342 if (!is_float(t))
343 return RC_INT;
344 #ifdef TCC_TARGET_X86_64
345 if ((t & VT_BTYPE) == VT_LDOUBLE)
346 return RC_ST0;
347 if ((t & VT_BTYPE) == VT_QFLOAT)
348 return RC_FRET;
349 #elif defined TCC_TARGET_RISCV64
350 if ((t & VT_BTYPE) == VT_LDOUBLE)
351 return RC_INT;
352 #endif
353 return RC_FLOAT;
356 /* returns 2nd register class corresponding to t and rc */
357 static int RC2_TYPE(int t, int rc)
359 if (!USING_TWO_WORDS(t))
360 return 0;
361 #ifdef RC_IRE2
362 if (rc == RC_IRET)
363 return RC_IRE2;
364 #endif
365 #ifdef RC_FRE2
366 if (rc == RC_FRET)
367 return RC_FRE2;
368 #endif
369 if (rc & RC_FLOAT)
370 return RC_FLOAT;
371 return RC_INT;
374 /* we use our own 'finite' function to avoid potential problems with
375 non standard math libs */
376 /* XXX: endianness dependent */
377 ST_FUNC int ieee_finite(double d)
379 int p[4];
380 memcpy(p, &d, sizeof(double));
381 return ((unsigned)((p[1] | 0x800fffff) + 1)) >> 31;
384 /* compiling intel long double natively */
385 #if (defined __i386__ || defined __x86_64__) \
386 && (defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64)
387 # define TCC_IS_NATIVE_387
388 #endif
390 ST_FUNC void test_lvalue(void)
392 if (!(vtop->r & VT_LVAL))
393 expect("lvalue");
396 ST_FUNC void check_vstack(void)
398 if (vtop != vstack - 1)
399 tcc_error("internal compiler error: vstack leak (%d)",
400 (int)(vtop - vstack + 1));
/* ------------------------------------------------------------------------- */
/* vstack debugging aid */

#if 0
void pv (const char *lbl, int a, int b)
{
    int i;
    for (i = a; i < a + b; ++i) {
        SValue *p = &vtop[-i];
        printf("%s vtop[-%d] : type.t:%04x r:%04x r2:%04x c.i:%d\n",
            lbl, i, p->type.t, p->r, p->r2, (int)p->c.i);
    }
}
#endif
418 /* ------------------------------------------------------------------------- */
419 /* start of translation unit info */
420 ST_FUNC void tcc_debug_start(TCCState *s1)
422 if (s1->do_debug) {
423 int i;
424 char buf[512];
426 /* file info: full path + filename */
427 section_sym = put_elf_sym(symtab_section, 0, 0,
428 ELFW(ST_INFO)(STB_LOCAL, STT_SECTION), 0,
429 text_section->sh_num, NULL);
430 getcwd(buf, sizeof(buf));
431 #ifdef _WIN32
432 normalize_slashes(buf);
433 #endif
434 pstrcat(buf, sizeof(buf), "/");
435 put_stabs_r(s1, buf, N_SO, 0, 0,
436 text_section->data_offset, text_section, section_sym);
437 put_stabs_r(s1, file->prev ? file->prev->filename : file->filename,
438 N_SO, 0, 0,
439 text_section->data_offset, text_section, section_sym);
440 for (i = 0; i < sizeof (default_debug) / sizeof (default_debug[0]); i++)
441 put_stabs(s1, default_debug[i].name, N_LSYM, 0, 0, 0);
443 new_file = last_line_num = 0;
444 func_ind = -1;
445 debug_next_type = sizeof(default_debug) / sizeof(default_debug[0]);
446 debug_hash = NULL;
447 n_debug_hash = 0;
449 /* we're currently 'including' the <command line> */
450 tcc_debug_bincl(s1);
453 /* an elf symbol of type STT_FILE must be put so that STB_LOCAL
454 symbols can be safely used */
455 put_elf_sym(symtab_section, 0, 0,
456 ELFW(ST_INFO)(STB_LOCAL, STT_FILE), 0,
457 SHN_ABS, file->filename);
460 /* put end of translation unit info */
461 ST_FUNC void tcc_debug_end(TCCState *s1)
463 if (!s1->do_debug)
464 return;
465 put_stabs_r(s1, NULL, N_SO, 0, 0,
466 text_section->data_offset, text_section, section_sym);
467 tcc_free(debug_hash);
470 static BufferedFile* put_new_file(TCCState *s1)
472 BufferedFile *f = file;
473 /* use upper file if from inline ":asm:" */
474 if (f->filename[0] == ':')
475 f = f->prev;
476 if (f && new_file) {
477 put_stabs_r(s1, f->filename, N_SOL, 0, 0, ind, text_section, section_sym);
478 new_file = last_line_num = 0;
480 return f;
483 /* put alternative filename */
484 ST_FUNC void tcc_debug_putfile(TCCState *s1, const char *filename)
486 if (0 == strcmp(file->filename, filename))
487 return;
488 pstrcpy(file->filename, sizeof(file->filename), filename);
489 new_file = 1;
492 /* begin of #include */
493 ST_FUNC void tcc_debug_bincl(TCCState *s1)
495 if (!s1->do_debug)
496 return;
497 put_stabs(s1, file->filename, N_BINCL, 0, 0, 0);
498 new_file = 1;
501 /* end of #include */
502 ST_FUNC void tcc_debug_eincl(TCCState *s1)
504 if (!s1->do_debug)
505 return;
506 put_stabn(s1, N_EINCL, 0, 0, 0);
507 new_file = 1;
510 /* generate line number info */
511 static void tcc_debug_line(TCCState *s1)
513 BufferedFile *f;
514 if (!s1->do_debug
515 || cur_text_section != text_section
516 || !(f = put_new_file(s1))
517 || last_line_num == f->line_num)
518 return;
519 if (func_ind != -1) {
520 put_stabn(s1, N_SLINE, 0, f->line_num, ind - func_ind);
521 } else {
522 /* from tcc_assemble */
523 put_stabs_r(s1, NULL, N_SLINE, 0, f->line_num, ind, text_section, section_sym);
525 last_line_num = f->line_num;
528 static void tcc_debug_stabs (TCCState *s1, const char *str, int type, unsigned long value,
529 Section *sec, int sym_index)
531 struct debug_sym *s;
533 if (debug_info) {
534 debug_info->sym =
535 (struct debug_sym *)tcc_realloc (debug_info->sym,
536 sizeof(struct debug_sym) *
537 (debug_info->n_sym + 1));
538 s = debug_info->sym + debug_info->n_sym++;
539 s->type = type;
540 s->value = value;
541 s->str = tcc_strdup(str);
542 s->sec = sec;
543 s->sym_index = sym_index;
545 else if (sec)
546 put_stabs_r (s1, str, type, 0, 0, value, sec, sym_index);
547 else
548 put_stabs (s1, str, type, 0, 0, value);
551 static void tcc_debug_stabn(TCCState *s1, int type, int value)
553 if (!s1->do_debug)
554 return;
555 if (type == N_LBRAC) {
556 struct debug_info *info =
557 (struct debug_info *) tcc_mallocz(sizeof (*info));
559 info->start = value;
560 info->parent = debug_info;
561 if (debug_info) {
562 if (debug_info->child) {
563 if (debug_info->child->last)
564 debug_info->child->last->next = info;
565 else
566 debug_info->child->next = info;
567 debug_info->child->last = info;
569 else
570 debug_info->child = info;
572 else
573 debug_info_root = info;
574 debug_info = info;
576 else {
577 debug_info->end = value;
578 debug_info = debug_info->parent;
582 static void tcc_get_debug_info(TCCState *s1, Sym *s, CString *result)
584 int type;
585 int n = 0;
586 int debug_type = -1;
587 Sym *t = s;
588 CString str;
590 for (;;) {
591 type = t->type.t & ~(VT_STORAGE | VT_CONSTANT | VT_VOLATILE);
592 if ((type & VT_BTYPE) != VT_BYTE)
593 type &= ~VT_DEFSIGN;
594 if (type == VT_PTR || type == (VT_PTR | VT_ARRAY))
595 n++, t = t->type.ref;
596 else
597 break;
599 if ((type & VT_BTYPE) == VT_STRUCT) {
600 int i;
602 t = t->type.ref;
603 for (i = 0; i < n_debug_hash; i++) {
604 if (t == debug_hash[i].type) {
605 debug_type = debug_hash[i].debug_type;
606 break;
609 if (debug_type == -1) {
610 debug_type = ++debug_next_type;
611 debug_hash = (struct debug_hash *)
612 tcc_realloc (debug_hash,
613 (n_debug_hash + 1) * sizeof(*debug_hash));
614 debug_hash[n_debug_hash].debug_type = debug_type;
615 debug_hash[n_debug_hash++].type = t;
616 cstr_new (&str);
617 cstr_printf (&str, "%s:T%d=%c%d",
618 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
619 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
620 debug_type,
621 IS_UNION (t->type.t) ? 'u' : 's',
622 t->c);
623 while (t->next) {
624 int pos, size, align;
626 t = t->next;
627 cstr_printf (&str, "%s:",
628 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
629 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
630 tcc_get_debug_info (s1, t, &str);
631 if (t->type.t & VT_BITFIELD) {
632 pos = t->c * 8 + BIT_POS(t->type.t);
633 size = BIT_SIZE(t->type.t);
635 else {
636 pos = t->c * 8;
637 size = type_size(&t->type, &align) * 8;
639 cstr_printf (&str, ",%d,%d;", pos, size);
641 cstr_printf (&str, ";");
642 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
643 cstr_free (&str);
646 else if (IS_ENUM(type)) {
647 Sym *e = t = t->type.ref;
649 debug_type = ++debug_next_type;
650 cstr_new (&str);
651 cstr_printf (&str, "%s:T%d=e",
652 (t->v & ~SYM_STRUCT) >= SYM_FIRST_ANOM
653 ? "" : get_tok_str(t->v & ~SYM_STRUCT, NULL),
654 debug_type);
655 while (t->next) {
656 t = t->next;
657 cstr_printf (&str, "%s:",
658 (t->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
659 ? "" : get_tok_str(t->v & ~SYM_FIELD, NULL));
660 cstr_printf (&str, e->type.t & VT_UNSIGNED ? "%u," : "%d,",
661 (int)t->enum_val);
663 cstr_printf (&str, ";");
664 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
665 cstr_free (&str);
667 else if ((type & VT_BTYPE) != VT_FUNC) {
668 type &= ~VT_STRUCT_MASK;
669 for (debug_type = 1;
670 debug_type <= sizeof(default_debug) / sizeof(default_debug[0]);
671 debug_type++)
672 if (default_debug[debug_type - 1].type == type)
673 break;
674 if (debug_type > sizeof(default_debug) / sizeof(default_debug[0]))
675 return;
677 if (n > 0)
678 cstr_printf (result, "%d=", ++debug_next_type);
679 t = s;
680 for (;;) {
681 type = t->type.t & ~(VT_STORAGE | VT_CONSTANT | VT_VOLATILE);
682 if ((type & VT_BTYPE) != VT_BYTE)
683 type &= ~VT_DEFSIGN;
684 if (type == VT_PTR)
685 cstr_printf (result, "%d=*", ++debug_next_type);
686 else if (type == (VT_PTR | VT_ARRAY))
687 cstr_printf (result, "%d=ar1;0;%d;",
688 ++debug_next_type, t->type.ref->c - 1);
689 else if (type == VT_FUNC) {
690 cstr_printf (result, "%d=f", ++debug_next_type);
691 tcc_get_debug_info (s1, t->type.ref, result);
692 return;
694 else
695 break;
696 t = t->type.ref;
698 cstr_printf (result, "%d", debug_type);
701 static void tcc_debug_finish (TCCState *s1, struct debug_info *cur)
703 while (cur) {
704 int i;
705 struct debug_info *next = cur->next;
707 for (i = 0; i < cur->n_sym; i++) {
708 struct debug_sym *s = &cur->sym[i];
710 if (s->sec)
711 put_stabs_r(s1, s->str, s->type, 0, 0, s->value,
712 s->sec, s->sym_index);
713 else
714 put_stabs(s1, s->str, s->type, 0, 0, s->value);
715 tcc_free (s->str);
717 tcc_free (cur->sym);
718 put_stabn(s1, N_LBRAC, 0, 0, cur->start);
719 tcc_debug_finish (s1, cur->child);
720 put_stabn(s1, N_RBRAC, 0, 0, cur->end);
721 tcc_free (cur);
722 cur = next;
726 static void tcc_add_debug_info(TCCState *s1, int param, Sym *s, Sym *e)
728 CString debug_str;
729 if (!s1->do_debug)
730 return;
731 cstr_new (&debug_str);
732 for (; s != e; s = s->prev) {
733 if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
734 continue;
735 cstr_reset (&debug_str);
736 cstr_printf (&debug_str, "%s:%s", get_tok_str(s->v, NULL), param ? "p" : "");
737 tcc_get_debug_info(s1, s, &debug_str);
738 tcc_debug_stabs(s1, debug_str.data, param ? N_PSYM : N_LSYM, s->c, NULL, 0);
740 cstr_free (&debug_str);
743 /* put function symbol */
744 static void tcc_debug_funcstart(TCCState *s1, Sym *sym)
746 CString debug_str;
747 BufferedFile *f;
748 if (!s1->do_debug)
749 return;
750 debug_info_root = NULL;
751 debug_info = NULL;
752 tcc_debug_stabn(s1, N_LBRAC, ind - func_ind);
753 if (!(f = put_new_file(s1)))
754 return;
755 cstr_new (&debug_str);
756 cstr_printf(&debug_str, "%s:%c", funcname, sym->type.t & VT_STATIC ? 'f' : 'F');
757 tcc_get_debug_info(s1, sym->type.ref, &debug_str);
758 put_stabs_r(s1, debug_str.data, N_FUN, 0, f->line_num, 0, cur_text_section, sym->c);
759 cstr_free (&debug_str);
761 tcc_debug_line(s1);
764 /* put function size */
765 static void tcc_debug_funcend(TCCState *s1, int size)
767 if (!s1->do_debug)
768 return;
769 tcc_debug_stabn(s1, N_RBRAC, size);
770 tcc_debug_finish (s1, debug_info_root);
774 static void tcc_debug_extern_sym(TCCState *s1, Sym *sym, int sh_num, int sym_bind, int sym_type)
776 Section *s;
777 CString str;
779 if (!s1->do_debug)
780 return;
781 if (sym_type == STT_FUNC || sym->v >= SYM_FIRST_ANOM)
782 return;
783 s = s1->sections[sh_num];
785 cstr_new (&str);
786 cstr_printf (&str, "%s:%c",
787 get_tok_str(sym->v, NULL),
788 sym_bind == STB_GLOBAL ? 'G' : local_scope ? 'V' : 'S'
790 tcc_get_debug_info(s1, sym, &str);
791 if (sym_bind == STB_GLOBAL)
792 tcc_debug_stabs(s1, str.data, N_GSYM, 0, NULL, 0);
793 else
794 tcc_debug_stabs(s1, str.data,
795 (sym->type.t & VT_STATIC) && data_section == s
796 ? N_STSYM : N_LCSYM, 0, s, sym->c);
797 cstr_free (&str);
800 static void tcc_debug_typedef(TCCState *s1, Sym *sym)
802 CString str;
804 if (!s1->do_debug)
805 return;
806 cstr_new (&str);
807 cstr_printf (&str, "%s:t",
808 (sym->v & ~SYM_FIELD) >= SYM_FIRST_ANOM
809 ? "" : get_tok_str(sym->v & ~SYM_FIELD, NULL));
810 tcc_get_debug_info(s1, sym, &str);
811 tcc_debug_stabs(s1, str.data, N_LSYM, 0, NULL, 0);
812 cstr_free (&str);
815 /* ------------------------------------------------------------------------- */
816 /* for section layout see lib/tcov.c */
818 static void tcc_tcov_block_end(int line);
820 static void tcc_tcov_block_begin(void)
822 SValue sv;
823 void *ptr;
824 unsigned long last_offset = tcov_data.offset;
826 tcc_tcov_block_end (0);
827 if (tcc_state->test_coverage == 0 || nocode_wanted)
828 return;
830 if (tcov_data.last_file_name == 0 ||
831 strcmp ((const char *)(tcov_section->data + tcov_data.last_file_name),
832 file->true_filename) != 0) {
833 char wd[1024];
834 CString cstr;
836 if (tcov_data.last_func_name)
837 section_ptr_add(tcov_section, 1);
838 if (tcov_data.last_file_name)
839 section_ptr_add(tcov_section, 1);
840 tcov_data.last_func_name = 0;
841 cstr_new (&cstr);
842 if (file->true_filename[0] == '/') {
843 tcov_data.last_file_name = tcov_section->data_offset;
844 cstr_printf (&cstr, "%s", file->true_filename);
846 else {
847 getcwd (wd, sizeof(wd));
848 tcov_data.last_file_name = tcov_section->data_offset + strlen(wd) + 1;
849 cstr_printf (&cstr, "%s/%s", wd, file->true_filename);
851 ptr = section_ptr_add(tcov_section, cstr.size + 1);
852 strncpy((char *)ptr, cstr.data, cstr.size);
853 #ifdef _WIN32
854 normalize_slashes((char *)ptr);
855 #endif
856 cstr_free (&cstr);
858 if (tcov_data.last_func_name == 0 ||
859 strcmp ((const char *)(tcov_section->data + tcov_data.last_func_name),
860 funcname) != 0) {
861 size_t len;
863 if (tcov_data.last_func_name)
864 section_ptr_add(tcov_section, 1);
865 tcov_data.last_func_name = tcov_section->data_offset;
866 len = strlen (funcname);
867 ptr = section_ptr_add(tcov_section, len + 1);
868 strncpy((char *)ptr, funcname, len);
869 section_ptr_add(tcov_section, -tcov_section->data_offset & 7);
870 ptr = section_ptr_add(tcov_section, 8);
871 write64le (ptr, file->line_num);
873 if (ind == tcov_data.ind && tcov_data.line == file->line_num)
874 tcov_data.offset = last_offset;
875 else {
876 Sym label = {0};
877 label.type.t = VT_LLONG | VT_STATIC;
879 ptr = section_ptr_add(tcov_section, 16);
880 tcov_data.line = file->line_num;
881 write64le (ptr, (tcov_data.line << 8) | 0xff);
882 put_extern_sym(&label, tcov_section,
883 ((unsigned char *)ptr - tcov_section->data) + 8, 0);
884 sv.type = label.type;
885 sv.r = VT_SYM | VT_LVAL | VT_CONST;
886 sv.r2 = VT_CONST;
887 sv.c.i = 0;
888 sv.sym = &label;
889 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || \
890 defined TCC_TARGET_ARM || defined TCC_TARGET_ARM64 || \
891 defined TCC_TARGET_RISCV64
892 gen_increment_tcov (&sv);
893 #else
894 vpushv(&sv);
895 inc(0, TOK_INC);
896 vpop();
897 #endif
898 tcov_data.offset = (unsigned char *)ptr - tcov_section->data;
899 tcov_data.ind = ind;
903 static void tcc_tcov_block_end(int line)
905 if (tcc_state->test_coverage == 0)
906 return;
907 if (tcov_data.offset) {
908 void *ptr = tcov_section->data + tcov_data.offset;
909 unsigned long long nline = line ? line : file->line_num;
911 write64le (ptr, (read64le (ptr) & 0xfffffffffull) | (nline << 36));
912 tcov_data.offset = 0;
916 static void tcc_tcov_check_line(int start)
918 if (tcc_state->test_coverage == 0)
919 return;
920 if (tcov_data.line != file->line_num) {
921 if ((tcov_data.line + 1) != file->line_num) {
922 tcc_tcov_block_end (tcov_data.line);
923 if (start)
924 tcc_tcov_block_begin ();
926 else
927 tcov_data.line = file->line_num;
931 static void tcc_tcov_start(void)
933 if (tcc_state->test_coverage == 0)
934 return;
935 memset (&tcov_data, 0, sizeof (tcov_data));
936 if (tcov_section == NULL) {
937 tcov_section = new_section(tcc_state, ".tcov", SHT_PROGBITS,
938 SHF_ALLOC | SHF_WRITE);
939 section_ptr_add(tcov_section, 4); // pointer to executable name
943 static void tcc_tcov_end(void)
945 if (tcc_state->test_coverage == 0)
946 return;
947 if (tcov_data.last_func_name)
948 section_ptr_add(tcov_section, 1);
949 if (tcov_data.last_file_name)
950 section_ptr_add(tcov_section, 1);
953 /* ------------------------------------------------------------------------- */
954 /* initialize vstack and types. This must be done also for tcc -E */
955 ST_FUNC void tccgen_init(TCCState *s1)
957 vtop = vstack - 1;
958 memset(vtop, 0, sizeof *vtop);
960 /* define some often used types */
961 int_type.t = VT_INT;
963 char_type.t = VT_BYTE;
964 if (s1->char_is_unsigned)
965 char_type.t |= VT_UNSIGNED;
966 char_pointer_type = char_type;
967 mk_pointer(&char_pointer_type);
969 func_old_type.t = VT_FUNC;
970 func_old_type.ref = sym_push(SYM_FIELD, &int_type, 0, 0);
971 func_old_type.ref->f.func_call = FUNC_CDECL;
972 func_old_type.ref->f.func_type = FUNC_OLD;
973 #ifdef precedence_parser
974 init_prec();
975 #endif
976 cstr_new(&initstr);
979 ST_FUNC int tccgen_compile(TCCState *s1)
981 cur_text_section = NULL;
982 funcname = "";
983 anon_sym = SYM_FIRST_ANOM;
984 section_sym = 0;
985 const_wanted = 0;
986 nocode_wanted = 0x80000000;
987 local_scope = 0;
988 debug_modes = s1->do_debug | s1->test_coverage << 1;
990 tcc_debug_start(s1);
991 tcc_tcov_start ();
992 #ifdef TCC_TARGET_ARM
993 arm_init(s1);
994 #endif
995 #ifdef INC_DEBUG
996 printf("%s: **** new file\n", file->filename);
997 #endif
998 parse_flags = PARSE_FLAG_PREPROCESS | PARSE_FLAG_TOK_NUM | PARSE_FLAG_TOK_STR;
999 next();
1000 decl(VT_CONST);
1001 gen_inline_functions(s1);
1002 check_vstack();
1003 /* end of translation unit info */
1004 tcc_debug_end(s1);
1005 tcc_tcov_end ();
1006 return 0;
1009 ST_FUNC void tccgen_finish(TCCState *s1)
1011 cstr_free(&initstr);
1012 free_inline_functions(s1);
1013 sym_pop(&global_stack, NULL, 0);
1014 sym_pop(&local_stack, NULL, 0);
1015 /* free preprocessor macros */
1016 free_defines(NULL);
1017 /* free sym_pools */
1018 dynarray_reset(&sym_pools, &nb_sym_pools);
1019 sym_free_first = NULL;
1022 /* ------------------------------------------------------------------------- */
1023 ST_FUNC ElfSym *elfsym(Sym *s)
1025 if (!s || !s->c)
1026 return NULL;
1027 return &((ElfSym *)symtab_section->data)[s->c];
1030 /* apply storage attributes to Elf symbol */
1031 ST_FUNC void update_storage(Sym *sym)
1033 ElfSym *esym;
1034 int sym_bind, old_sym_bind;
1036 esym = elfsym(sym);
1037 if (!esym)
1038 return;
1040 if (sym->a.visibility)
1041 esym->st_other = (esym->st_other & ~ELFW(ST_VISIBILITY)(-1))
1042 | sym->a.visibility;
1044 if (sym->type.t & (VT_STATIC | VT_INLINE))
1045 sym_bind = STB_LOCAL;
1046 else if (sym->a.weak)
1047 sym_bind = STB_WEAK;
1048 else
1049 sym_bind = STB_GLOBAL;
1050 old_sym_bind = ELFW(ST_BIND)(esym->st_info);
1051 if (sym_bind != old_sym_bind) {
1052 esym->st_info = ELFW(ST_INFO)(sym_bind, ELFW(ST_TYPE)(esym->st_info));
1055 #ifdef TCC_TARGET_PE
1056 if (sym->a.dllimport)
1057 esym->st_other |= ST_PE_IMPORT;
1058 if (sym->a.dllexport)
1059 esym->st_other |= ST_PE_EXPORT;
1060 #endif
1062 #if 0
1063 printf("storage %s: bind=%c vis=%d exp=%d imp=%d\n",
1064 get_tok_str(sym->v, NULL),
1065 sym_bind == STB_WEAK ? 'w' : sym_bind == STB_LOCAL ? 'l' : 'g',
1066 sym->a.visibility,
1067 sym->a.dllexport,
1068 sym->a.dllimport
1070 #endif
1073 /* ------------------------------------------------------------------------- */
1074 /* update sym->c so that it points to an external symbol in section
1075 'section' with value 'value' */
ST_FUNC void put_extern_sym2(Sym *sym, int sh_num,
                            addr_t value, unsigned long size,
                            int can_add_underscore)
{
    int sym_type, sym_bind, info, other, t;
    ElfSym *esym;
    const char *name;
    char buf1[256];

    if (!sym->c) {
        /* first time: create the ELF symbol table entry */
        name = get_tok_str(sym->v, NULL);
        t = sym->type.t;
        /* map the C type to an ELF symbol type */
        if ((t & VT_BTYPE) == VT_FUNC) {
            sym_type = STT_FUNC;
        } else if ((t & VT_BTYPE) == VT_VOID) {
            sym_type = STT_NOTYPE;
            /* asm-declared labels with a "function" marker are functions */
            if ((t & (VT_BTYPE|VT_ASM_FUNC)) == VT_ASM_FUNC)
                sym_type = STT_FUNC;
        } else {
            sym_type = STT_OBJECT;
        }
        /* static and inline symbols get local binding */
        if (t & (VT_STATIC | VT_INLINE))
            sym_bind = STB_LOCAL;
        else
            sym_bind = STB_GLOBAL;
        other = 0;

#ifdef TCC_TARGET_PE
        if (sym_type == STT_FUNC && sym->type.ref) {
            Sym *ref = sym->type.ref;
            if (ref->a.nodecorate) {
                /* __attribute__((nodecorate)): keep the plain name */
                can_add_underscore = 0;
            }
            if (ref->f.func_call == FUNC_STDCALL && can_add_underscore) {
                /* stdcall decoration: _name@<bytes of arguments> */
                sprintf(buf1, "_%s@%d", name, ref->f.func_args * PTR_SIZE);
                name = buf1;
                other |= ST_PE_STDCALL;
                can_add_underscore = 0;
            }
        }
#endif
        if (sym->asm_label) {
            /* an explicit asm name overrides everything, undecorated */
            name = get_tok_str(sym->asm_label, NULL);
            can_add_underscore = 0;
        }
        if (tcc_state->leading_underscore && can_add_underscore) {
            buf1[0] = '_';
            pstrcpy(buf1 + 1, sizeof(buf1) - 1, name);
            name = buf1;
        }
        info = ELFW(ST_INFO)(sym_bind, sym_type);
        sym->c = put_elf_sym(symtab_section, value, size, info, other, sh_num, name);

        if (debug_modes)
            tcc_debug_extern_sym(tcc_state, sym, sh_num, sym_bind, sym_type);

    } else {
        /* already created: just refresh location/size */
        esym = elfsym(sym);
        esym->st_value = value;
        esym->st_size = size;
        esym->st_shndx = sh_num;
    }
    update_storage(sym);
}
1145 ST_FUNC void put_extern_sym(Sym *sym, Section *section,
1146 addr_t value, unsigned long size)
1148 int sh_num = section ? section->sh_num : SHN_UNDEF;
1149 put_extern_sym2(sym, sh_num, value, size, 1);
/* add a new relocation entry to symbol 'sym' in section 's' */
ST_FUNC void greloca(Section *s, Sym *sym, unsigned long offset, int type,
                     addr_t addend)
{
    int c = 0;

    /* suppressed code needs no relocations against the text section */
    if (nocode_wanted && s == cur_text_section)
        return;

    if (sym) {
        /* ensure the symbol exists in the ELF symbol table first */
        if (0 == sym->c)
            put_extern_sym(sym, NULL, 0, 0);
        c = sym->c;
    }

    /* now we can add ELF relocation info */
    put_elf_reloca(symtab_section, s, offset, type, c, addend);
}

#if PTR_SIZE == 4
/* 32-bit convenience wrapper: relocation without addend */
ST_FUNC void greloc(Section *s, Sym *sym, unsigned long offset, int type)
{
    greloca(s, sym, offset, type, 0);
}
#endif
1178 /* ------------------------------------------------------------------------- */
1179 /* symbol allocator */
1180 static Sym *__sym_malloc(void)
1182 Sym *sym_pool, *sym, *last_sym;
1183 int i;
1185 sym_pool = tcc_malloc(SYM_POOL_NB * sizeof(Sym));
1186 dynarray_add(&sym_pools, &nb_sym_pools, sym_pool);
1188 last_sym = sym_free_first;
1189 sym = sym_pool;
1190 for(i = 0; i < SYM_POOL_NB; i++) {
1191 sym->next = last_sym;
1192 last_sym = sym;
1193 sym++;
1195 sym_free_first = last_sym;
1196 return last_sym;
/* Allocate one Sym, from the pool free list normally, or directly from
   the heap when SYM_DEBUG is defined (helps valgrind-style tools). */
static inline Sym *sym_malloc(void)
{
    Sym *sym;
#ifndef SYM_DEBUG
    sym = sym_free_first;
    if (!sym)
        sym = __sym_malloc();
    sym_free_first = sym->next;
    return sym;
#else
    sym = tcc_malloc(sizeof(Sym));
    return sym;
#endif
}

/* Return a Sym to the free list (or to the heap under SYM_DEBUG). */
ST_INLN void sym_free(Sym *sym)
{
#ifndef SYM_DEBUG
    sym->next = sym_free_first;
    sym_free_first = sym;
#else
    tcc_free(sym);
#endif
}
1224 /* push, without hashing */
1225 ST_FUNC Sym *sym_push2(Sym **ps, int v, int t, int c)
1227 Sym *s;
1229 s = sym_malloc();
1230 memset(s, 0, sizeof *s);
1231 s->v = v;
1232 s->type.t = t;
1233 s->c = c;
1234 /* add in stack */
1235 s->prev = *ps;
1236 *ps = s;
1237 return s;
1240 /* find a symbol and return its associated structure. 's' is the top
1241 of the symbol stack */
1242 ST_FUNC Sym *sym_find2(Sym *s, int v)
1244 while (s) {
1245 if (s->v == v)
1246 return s;
1247 else if (s->v == -1)
1248 return NULL;
1249 s = s->prev;
1251 return NULL;
1254 /* structure lookup */
1255 ST_INLN Sym *struct_find(int v)
1257 v -= TOK_IDENT;
1258 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1259 return NULL;
1260 return table_ident[v]->sym_struct;
1263 /* find an identifier */
1264 ST_INLN Sym *sym_find(int v)
1266 v -= TOK_IDENT;
1267 if ((unsigned)v >= (unsigned)(tok_ident - TOK_IDENT))
1268 return NULL;
1269 return table_ident[v]->sym_identifier;
1272 static int sym_scope(Sym *s)
1274 if (IS_ENUM_VAL (s->type.t))
1275 return s->type.ref->sym_scope;
1276 else
1277 return s->sym_scope;
/* push a given symbol on the symbol stack */
ST_FUNC Sym *sym_push(int v, CType *type, int r, int c)
{
    Sym *s, **ps;
    TokenSym *ts;

    /* inside a function body symbols go on the local stack */
    if (local_stack)
        ps = &local_stack;
    else
        ps = &global_stack;
    s = sym_push2(ps, v, type->t, c);
    s->type.ref = type->ref;
    s->r = r;
    /* don't record fields or anonymous symbols */
    /* XXX: simplify */
    if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
        /* record symbol in token array */
        ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
        if (v & SYM_STRUCT)
            ps = &ts->sym_struct;
        else
            ps = &ts->sym_identifier;
        /* chain on the token so later lookups see the newest binding */
        s->prev_tok = *ps;
        *ps = s;
        s->sym_scope = local_scope;
        /* two bindings for the same token in the same scope is an error */
        if (s->prev_tok && sym_scope(s->prev_tok) == s->sym_scope)
            tcc_error("redeclaration of '%s'",
                get_tok_str(v & ~SYM_STRUCT, NULL));
    }
    return s;
}
/* push a global identifier */
ST_FUNC Sym *global_identifier_push(int v, int t, int c)
{
    Sym *s, **ps;
    s = sym_push2(&global_stack, v, t, c);
    s->r = VT_CONST | VT_SYM;
    /* don't record anonymous symbol */
    if (v < SYM_FIRST_ANOM) {
        ps = &table_ident[v - TOK_IDENT]->sym_identifier;
        /* modify the top most local identifier, so that sym_identifier will
           point to 's' when popped; happens when called from inline asm */
        while (*ps != NULL && (*ps)->sym_scope)
            ps = &(*ps)->prev_tok;
        /* splice the global binding underneath any local ones */
        s->prev_tok = *ps;
        *ps = s;
    }
    return s;
}
/* pop symbols until top reaches 'b'. If KEEP is non-zero don't really
   pop them yet from the list, but do remove them from the token array. */
ST_FUNC void sym_pop(Sym **ptop, Sym *b, int keep)
{
    Sym *s, *ss, **ps;
    TokenSym *ts;
    int v;

    s = *ptop;
    while(s != b) {
        ss = s->prev;
        v = s->v;
        /* remove symbol in token array */
        /* XXX: simplify */
        if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
            ts = table_ident[(v & ~SYM_STRUCT) - TOK_IDENT];
            if (v & SYM_STRUCT)
                ps = &ts->sym_struct;
            else
                ps = &ts->sym_identifier;
            /* unhide the previous binding of this token */
            *ps = s->prev_tok;
        }
        if (!keep)
            sym_free(s);
        s = ss;
    }
    /* when keeping, the stack itself is left intact for later reuse */
    if (!keep)
        *ptop = b;
}
1361 /* ------------------------------------------------------------------------- */
/* Materialize a pending comparison result before the value stack is
   rearranged, so CPU flags / VT_JMP never end up below the top. */
static void vcheck_cmp(void)
{
    /* cannot let cpu flags if other instruction are generated. Also
       avoid leaving VT_JMP anywhere except on the top of the stack
       because it would complicate the code generator.

       Don't do this when nocode_wanted.  vtop might come from
       !nocode_wanted regions (see 88_codeopt.c) and transforming
       it to a register without actually generating code is wrong
       as their value might still be used for real.  All values
       we push under nocode_wanted will eventually be popped
       again, so that the VT_CMP/VT_JMP value will be in vtop
       when code is unsuppressed again. */

    if (vtop->r == VT_CMP && !nocode_wanted)
        gv(RC_INT);
}
1380 static void vsetc(CType *type, int r, CValue *vc)
1382 if (vtop >= vstack + (VSTACK_SIZE - 1))
1383 tcc_error("memory full (vstack)");
1384 vcheck_cmp();
1385 vtop++;
1386 vtop->type = *type;
1387 vtop->r = r;
1388 vtop->r2 = VT_CONST;
1389 vtop->c = *vc;
1390 vtop->sym = NULL;
1393 ST_FUNC void vswap(void)
1395 SValue tmp;
1397 vcheck_cmp();
1398 tmp = vtop[0];
1399 vtop[0] = vtop[-1];
1400 vtop[-1] = tmp;
/* pop stack value */
ST_FUNC void vpop(void)
{
    int v;
    v = vtop->r & VT_VALMASK;
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
    /* for x86, we need to pop the FP stack */
    if (v == TREG_ST0) {
        o(0xd8dd); /* fstp %st(0) */
    } else
#endif
    if (v == VT_CMP) {
        /* need to put correct jump if && or || without test */
        gsym(vtop->jtrue);
        gsym(vtop->jfalse);
    }
    vtop--;
}
/* push constant of type "type" with useless value */
static void vpush(CType *type)
{
    vset(type, VT_CONST, 0);
}

/* push arbitrary 64bit constant */
static void vpush64(int ty, unsigned long long v)
{
    CValue cval;
    CType ctype;
    ctype.t = ty;
    ctype.ref = NULL;
    cval.i = v;
    vsetc(&ctype, VT_CONST, &cval);
}

/* push integer constant */
ST_FUNC void vpushi(int v)
{
    vpush64(VT_INT, v);
}

/* push a pointer sized constant */
static void vpushs(addr_t v)
{
    vpush64(VT_SIZE_T, v);
}

/* push long long constant */
static inline void vpushll(long long v)
{
    vpush64(VT_LLONG, v);
}

/* push a value of type 'type', location kind 'r', integer constant 'v' */
ST_FUNC void vset(CType *type, int r, int v)
{
    CValue cval;
    cval.i = v;
    vsetc(type, r, &cval);
}

/* like vset() with a plain int type */
static void vseti(int r, int v)
{
    CType type;
    type.t = VT_INT;
    type.ref = NULL;
    vset(&type, r, v);
}

/* push a copy of SValue 'v' (raw copy, no VT_CMP normalization) */
ST_FUNC void vpushv(SValue *v)
{
    if (vtop >= vstack + (VSTACK_SIZE - 1))
        tcc_error("memory full (vstack)");
    vtop++;
    *vtop = *v;
}

/* duplicate the top of the value stack */
static void vdup(void)
{
    vpushv(vtop);
}
/* rotate n first stack elements to the bottom
   I1 ... In -> I2 ... In I1 [top is right]
*/
ST_FUNC void vrotb(int n)
{
    int i;
    SValue tmp;

    vcheck_cmp();
    /* save what will move to the bottom, shift the rest down by one */
    tmp = vtop[-n + 1];
    for(i=-n+1;i!=0;i++)
        vtop[i] = vtop[i+1];
    vtop[0] = tmp;
}

/* rotate the n elements before entry e towards the top
   I1 ... In ... -> In I1 ... I(n-1) ... [top is right]
*/
ST_FUNC void vrote(SValue *e, int n)
{
    int i;
    SValue tmp;

    vcheck_cmp();
    /* save the entry at 'e', shift the n-1 below it up by one */
    tmp = *e;
    for(i = 0;i < n - 1; i++)
        e[-i] = e[-i - 1];
    e[-n + 1] = tmp;
}

/* rotate n first stack elements to the top
   I1 ... In -> In I1 ... I(n-1) [top is right]
*/
ST_FUNC void vrott(int n)
{
    vrote(vtop, n);
}
1523 /* ------------------------------------------------------------------------- */
1524 /* vtop->r = VT_CMP means CPU-flags have been set from comparison or test. */
1526 /* called from generators to set the result from relational ops */
1527 ST_FUNC void vset_VT_CMP(int op)
1529 vtop->r = VT_CMP;
1530 vtop->cmp_op = op;
1531 vtop->jfalse = 0;
1532 vtop->jtrue = 0;
/* called once before asking generators to load VT_CMP to a register */
static void vset_VT_JMP(void)
{
    int op = vtop->cmp_op;

    if (vtop->jtrue || vtop->jfalse) {
        /* we need to jump to 'mov $0,%R' or 'mov $1,%R' */
        /* op==0/1 means a constant result: pick the matching jump chain */
        int inv = op & (op < 2); /* small optimization */
        vseti(VT_JMP+inv, gvtst(inv, 0));
    } else {
        /* otherwise convert flags (rsp. 0/1) to register */
        vtop->c.i = op;
        if (op < 2) /* doesn't seem to happen */
            vtop->r = VT_CONST;
    }
}

/* Set CPU Flags, doesn't yet jump */
static void gvtst_set(int inv, int t)
{
    int *p;

    if (vtop->r != VT_CMP) {
        /* force a flags-producing comparison against zero */
        vpushi(0);
        gen_op(TOK_NE);
        if (vtop->r != VT_CMP) /* must be VT_CONST then */
            vset_VT_CMP(vtop->c.i != 0);
    }

    /* append 't' to the appropriate pending-jump chain */
    p = inv ? &vtop->jfalse : &vtop->jtrue;
    *p = gjmp_append(*p, t);
}
/* Generate value test
 *
 * Generate a test for any value (jump, comparison and integers) */
static int gvtst(int inv, int t)
{
    int op, x, u;

    gvtst_set(inv, t);
    t = vtop->jtrue, u = vtop->jfalse;
    /* 'inv' swaps the roles of the true/false jump chains */
    if (inv)
        x = u, u = t, t = x;
    op = vtop->cmp_op;

    /* jump to the wanted target */
    if (op > 1)
        t = gjmp_cond(op ^ inv, t);
    else if (op != inv)
        t = gjmp(t);
    /* resolve complementary jumps to here */
    gsym(u);

    vtop--;
    return t;
}

/* generate a zero or nozero test */
static void gen_test_zero(int op)
{
    if (vtop->r == VT_CMP) {
        /* flags already set: just invert the pending comparison for == */
        int j;
        if (op == TOK_EQ) {
            j = vtop->jfalse;
            vtop->jfalse = vtop->jtrue;
            vtop->jtrue = j;
            vtop->cmp_op ^= 1;
        }
    } else {
        vpushi(0);
        gen_op(op);
    }
}
1610 /* ------------------------------------------------------------------------- */
1611 /* push a symbol value of TYPE */
1612 ST_FUNC void vpushsym(CType *type, Sym *sym)
1614 CValue cval;
1615 cval.i = 0;
1616 vsetc(type, VT_CONST | VT_SYM, &cval);
1617 vtop->sym = sym;
/* Return a static symbol pointing to a section */
ST_FUNC Sym *get_sym_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    int v;
    Sym *sym;

    /* anonymous symbol: never visible to C code */
    v = anon_sym++;
    sym = sym_push(v, type, VT_CONST | VT_SYM, 0);
    sym->type.t |= VT_STATIC;
    put_extern_sym(sym, sec, offset, size);
    return sym;
}

/* push a reference to a section offset by adding a dummy symbol */
static void vpush_ref(CType *type, Section *sec, unsigned long offset, unsigned long size)
{
    vpushsym(type, get_sym_ref(type, sec, offset, size));
}
/* define a new external reference to a symbol 'v' of type 'u' */
ST_FUNC Sym *external_global_sym(int v, CType *type)
{
    Sym *s;

    s = sym_find(v);
    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t | VT_EXTERN, 0);
        s->type.ref = type->ref;
    } else if (IS_ASM_SYM(s)) {
        /* give the untyped asm symbol a real C type now */
        s->type.t = type->t | (s->type.t & VT_EXTERN);
        s->type.ref = type->ref;
        update_storage(s);
    }
    return s;
}

/* create an external reference with no specific type similar to asm labels.
   This avoids type conflicts if the symbol is used from C too */
ST_FUNC Sym *external_helper_sym(int v)
{
    CType ct = { VT_ASM_FUNC, NULL };
    return external_global_sym(v, &ct);
}

/* push a reference to an helper function (such as memmove) */
ST_FUNC void vpush_helper_func(int v)
{
    vpushsym(&func_old_type, external_helper_sym(v));
}
1671 /* Merge symbol attributes. */
1672 static void merge_symattr(struct SymAttr *sa, struct SymAttr *sa1)
1674 if (sa1->aligned && !sa->aligned)
1675 sa->aligned = sa1->aligned;
1676 sa->packed |= sa1->packed;
1677 sa->weak |= sa1->weak;
1678 if (sa1->visibility != STV_DEFAULT) {
1679 int vis = sa->visibility;
1680 if (vis == STV_DEFAULT
1681 || vis > sa1->visibility)
1682 vis = sa1->visibility;
1683 sa->visibility = vis;
1685 sa->dllexport |= sa1->dllexport;
1686 sa->nodecorate |= sa1->nodecorate;
1687 sa->dllimport |= sa1->dllimport;
1690 /* Merge function attributes. */
1691 static void merge_funcattr(struct FuncAttr *fa, struct FuncAttr *fa1)
1693 if (fa1->func_call && !fa->func_call)
1694 fa->func_call = fa1->func_call;
1695 if (fa1->func_type && !fa->func_type)
1696 fa->func_type = fa1->func_type;
1697 if (fa1->func_args && !fa->func_args)
1698 fa->func_args = fa1->func_args;
1699 if (fa1->func_noreturn)
1700 fa->func_noreturn = 1;
1701 if (fa1->func_ctor)
1702 fa->func_ctor = 1;
1703 if (fa1->func_dtor)
1704 fa->func_dtor = 1;
/* Merge attributes. */
static void merge_attr(AttributeDef *ad, AttributeDef *ad1)
{
    merge_symattr(&ad->a, &ad1->a);
    merge_funcattr(&ad->f, &ad1->f);

    /* for these single-valued fields the newer declaration wins */
    if (ad1->section)
        ad->section = ad1->section;
    if (ad1->alias_target)
        ad->alias_target = ad1->alias_target;
    if (ad1->asm_label)
        ad->asm_label = ad1->asm_label;
    if (ad1->attr_mode)
        ad->attr_mode = ad1->attr_mode;
}
/* Merge some type attributes. */
static void patch_type(Sym *sym, CType *type)
{
    /* a non-extern redeclaration turns an extern symbol into a definition;
       two definitions are an error */
    if (!(type->t & VT_EXTERN) || IS_ENUM_VAL(sym->type.t)) {
        if (!(sym->type.t & VT_EXTERN))
            tcc_error("redefinition of '%s'", get_tok_str(sym->v, NULL));
        sym->type.t &= ~VT_EXTERN;
    }

    if (IS_ASM_SYM(sym)) {
        /* stay static if both are static */
        sym->type.t = type->t & (sym->type.t | ~VT_STATIC);
        sym->type.ref = type->ref;
    }

    if (!is_compatible_types(&sym->type, type)) {
        tcc_error("incompatible types for redefinition of '%s'",
                  get_tok_str(sym->v, NULL));

    } else if ((sym->type.t & VT_BTYPE) == VT_FUNC) {
        int static_proto = sym->type.t & VT_STATIC;
        /* warn if static follows non-static function declaration */
        if ((type->t & VT_STATIC) && !static_proto
            /* XXX this test for inline shouldn't be here.  Until we
               implement gnu-inline mode again it silences a warning for
               mingw caused by our workarounds.  */
            && !((type->t | sym->type.t) & VT_INLINE))
            tcc_warning("static storage ignored for redefinition of '%s'",
                get_tok_str(sym->v, NULL));

        /* set 'inline' if both agree or if one has static */
        if ((type->t | sym->type.t) & VT_INLINE) {
            if (!((type->t ^ sym->type.t) & VT_INLINE)
             || ((type->t | sym->type.t) & VT_STATIC))
                static_proto |= VT_INLINE;
        }

        if (0 == (type->t & VT_EXTERN)) {
            struct FuncAttr f = sym->type.ref->f;
            /* put complete type, use static from prototype */
            sym->type.t = (type->t & ~(VT_STATIC|VT_INLINE)) | static_proto;
            sym->type.ref = type->ref;
            merge_funcattr(&sym->type.ref->f, &f);
        } else {
            sym->type.t &= ~VT_INLINE | static_proto;
        }

        /* replace an old-style (unprototyped) declaration by a real one */
        if (sym->type.ref->f.func_type == FUNC_OLD
            && type->ref->f.func_type != FUNC_OLD) {
            sym->type.ref = type->ref;
        }

    } else {
        if ((sym->type.t & VT_ARRAY) && type->ref->c >= 0) {
            /* set array size if it was omitted in extern declaration */
            sym->type.ref->c = type->ref->c;
        }
        if ((type->t ^ sym->type.t) & VT_STATIC)
            tcc_warning("storage mismatch for redefinition of '%s'",
                get_tok_str(sym->v, NULL));
    }
}
/* Merge some storage attributes. */
static void patch_storage(Sym *sym, AttributeDef *ad, CType *type)
{
    if (type)
        patch_type(sym, type);

#ifdef TCC_TARGET_PE
    if (sym->a.dllimport != ad->a.dllimport)
        tcc_error("incompatible dll linkage for redefinition of '%s'",
            get_tok_str(sym->v, NULL));
#endif
    merge_symattr(&sym->a, &ad->a);
    if (ad->asm_label)
        sym->asm_label = ad->asm_label;
    update_storage(sym);
}
/* copy sym to other stack */
static Sym *sym_copy(Sym *s0, Sym **ps)
{
    Sym *s;
    s = sym_malloc(), *s = *s0;
    s->prev = *ps, *ps = s;
    /* also register the copy as the token's current binding */
    if (s->v < SYM_FIRST_ANOM) {
        ps = &table_ident[s->v - TOK_IDENT]->sym_identifier;
        s->prev_tok = *ps, *ps = s;
    }
    return s;
}

/* copy s->type.ref to stack 'ps' for VT_FUNC and VT_PTR */
static void sym_copy_ref(Sym *s, Sym **ps)
{
    int bt = s->type.t & VT_BTYPE;
    if (bt == VT_FUNC || bt == VT_PTR) {
        Sym **sp = &s->type.ref;
        /* deep-copy the whole ->next chain, relinking as we go */
        for (s = *sp, *sp = NULL; s; s = s->next) {
            Sym *s2 = sym_copy(s, ps);
            sp = &(*sp = s2)->next;
            sym_copy_ref(s2, ps);
        }
    }
}
/* define a new external reference to a symbol 'v' */
static Sym *external_sym(int v, CType *type, int r, AttributeDef *ad)
{
    Sym *s;

    /* look for global symbol */
    s = sym_find(v);
    while (s && s->sym_scope)
        s = s->prev_tok;

    if (!s) {
        /* push forward reference */
        s = global_identifier_push(v, type->t, 0);
        s->r |= r;
        s->a = ad->a;
        s->asm_label = ad->asm_label;
        s->type.ref = type->ref;
        /* copy type to the global stack */
        if (local_stack)
            sym_copy_ref(s, &global_stack);
    } else {
        patch_storage(s, ad, type);
    }
    /* push variables on local_stack if any */
    if (local_stack && (s->type.t & VT_BTYPE) != VT_FUNC)
        s = sym_copy(s, &local_stack);
    return s;
}
/* save registers up to (vtop - n) stack entry */
ST_FUNC void save_regs(int n)
{
    SValue *p, *p1;
    for(p = vstack, p1 = vtop - n; p <= p1; p++)
        save_reg(p->r);
}

/* save r to the memory stack, and mark it as being free */
ST_FUNC void save_reg(int r)
{
    save_reg_upstack(r, 0);
}
/* save r to the memory stack, and mark it as being free,
   if seen up to (vtop - n) stack entry */
ST_FUNC void save_reg_upstack(int r, int n)
{
    int l, size, align, bt;
    SValue *p, *p1, sv;

    /* nothing to do for non-register kinds */
    if ((r &= VT_VALMASK) >= VT_CONST)
        return;
    if (nocode_wanted)
        return;
    l = 0;
    for(p = vstack, p1 = vtop - n; p <= p1; p++) {
        if ((p->r & VT_VALMASK) == r || p->r2 == r) {
            /* must save value on stack if not already done */
            if (!l) {
                bt = p->type.t & VT_BTYPE;
                if (bt == VT_VOID)
                    continue;
                /* lvalues and functions hold addresses, store as pointer */
                if ((p->r & VT_LVAL) || bt == VT_FUNC)
                    bt = VT_PTR;
                sv.type.t = bt;
                size = type_size(&sv.type, &align);
                l = get_temp_local_var(size,align);
                sv.r = VT_LOCAL | VT_LVAL;
                sv.c.i = l;
                store(p->r & VT_VALMASK, &sv);
#if defined(TCC_TARGET_I386) || defined(TCC_TARGET_X86_64)
                /* x86 specific: need to pop fp register ST0 if saved */
                if (r == TREG_ST0) {
                    o(0xd8dd); /* fstp %st(0) */
                }
#endif
                /* special long long case */
                if (p->r2 < VT_CONST && USING_TWO_WORDS(bt)) {
                    sv.c.i += PTR_SIZE;
                    store(p->r2, &sv);
                }
            }
            /* mark that stack entry as being saved on the stack */
            if (p->r & VT_LVAL) {
                /* also clear the bounded flag because the
                   relocation address of the function was stored in
                   p->c.i */
                p->r = (p->r & ~(VT_VALMASK | VT_BOUNDED)) | VT_LLOCAL;
            } else {
                p->r = VT_LVAL | VT_LOCAL;
            }
            p->r2 = VT_CONST;
            p->c.i = l;
        }
    }
}
#ifdef TCC_TARGET_ARM
/* find a register of class 'rc2' with at most one reference on stack.
 * If none, call get_reg(rc) */
ST_FUNC int get_reg_ex(int rc, int rc2)
{
    int r;
    SValue *p;

    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc2) {
            int n;
            n=0;
            /* count how many stack entries reference this register */
            for(p = vstack; p <= vtop; p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    n++;
            }
            if (n <= 1)
                return r;
        }
    }
    return get_reg(rc);
}
#endif

/* find a free register of class 'rc'. If none, save one register */
ST_FUNC int get_reg(int rc)
{
    int r;
    SValue *p;

    /* find a free register */
    for(r=0;r<NB_REGS;r++) {
        if (reg_classes[r] & rc) {
            /* no code is emitted, so any register may be claimed */
            if (nocode_wanted)
                return r;
            for(p=vstack;p<=vtop;p++) {
                if ((p->r & VT_VALMASK) == r ||
                    p->r2 == r)
                    goto notfound;
            }
            return r;
        }
    notfound: ;
    }

    /* no register left : free the first one on the stack (VERY
       IMPORTANT to start from the bottom to ensure that we don't
       spill registers used in gen_opi()) */
    for(p=vstack;p<=vtop;p++) {
        /* look at second register (if long long) */
        r = p->r2;
        if (r < VT_CONST && (reg_classes[r] & rc))
            goto save_found;
        r = p->r & VT_VALMASK;
        if (r < VT_CONST && (reg_classes[r] & rc)) {
        save_found:
            save_reg(r);
            return r;
        }
    }
    /* Should never comes here */
    return -1;
}
1992 /* find a free temporary local variable (return the offset on stack) match the size and align. If none, add new temporary stack variable*/
1993 static int get_temp_local_var(int size,int align){
1994 int i;
1995 struct temp_local_variable *temp_var;
1996 int found_var;
1997 SValue *p;
1998 int r;
1999 char free;
2000 char found;
2001 found=0;
2002 for(i=0;i<nb_temp_local_vars;i++){
2003 temp_var=&arr_temp_local_vars[i];
2004 if(temp_var->size<size||align!=temp_var->align){
2005 continue;
2007 /*check if temp_var is free*/
2008 free=1;
2009 for(p=vstack;p<=vtop;p++) {
2010 r=p->r&VT_VALMASK;
2011 if(r==VT_LOCAL||r==VT_LLOCAL){
2012 if(p->c.i==temp_var->location){
2013 free=0;
2014 break;
2018 if(free){
2019 found_var=temp_var->location;
2020 found=1;
2021 break;
2024 if(!found){
2025 loc = (loc - size) & -align;
2026 if(nb_temp_local_vars<MAX_TEMP_LOCAL_VARIABLE_NUMBER){
2027 temp_var=&arr_temp_local_vars[i];
2028 temp_var->location=loc;
2029 temp_var->size=size;
2030 temp_var->align=align;
2031 nb_temp_local_vars++;
2033 found_var=loc;
2035 return found_var;
2038 static void clear_temp_local_var_list(){
2039 nb_temp_local_vars=0;
2042 /* move register 's' (of type 't') to 'r', and flush previous value of r to memory
2043 if needed */
2044 static void move_reg(int r, int s, int t)
2046 SValue sv;
2048 if (r != s) {
2049 save_reg(r);
2050 sv.type.t = t;
2051 sv.type.ref = NULL;
2052 sv.r = s;
2053 sv.c.i = 0;
2054 load(r, &sv);
/* get address of vtop (vtop MUST BE an lvalue) */
ST_FUNC void gaddrof(void)
{
    vtop->r &= ~VT_LVAL;
    /* tricky: if saved lvalue, then we can go back to lvalue */
    if ((vtop->r & VT_VALMASK) == VT_LLOCAL)
        vtop->r = (vtop->r & ~VT_VALMASK) | VT_LOCAL | VT_LVAL;
}
#ifdef CONFIG_TCC_BCHECK
/* generate a bounded pointer addition */
static void gen_bounded_ptr_add(void)
{
    /* keep a copy of a local base pointer so it survives the call */
    int save = (vtop[-1].r & VT_VALMASK) == VT_LOCAL;
    if (save) {
        vpushv(&vtop[-1]);
        vrott(3);
    }
    vpush_helper_func(TOK___bound_ptr_add);
    vrott(3);
    gfunc_call(2);
    vtop -= save;
    vpushi(0);
    /* returned pointer is in REG_IRET */
    vtop->r = REG_IRET | VT_BOUNDED;
    if (nocode_wanted)
        return;
    /* relocation offset of the bounding function call point */
    vtop->c.i = (cur_text_section->reloc->data_offset - sizeof(ElfW_Rel));
}

/* patch pointer addition in vtop so that pointer dereferencing is
   also tested */
static void gen_bounded_ptr_deref(void)
{
    addr_t func;
    int size, align;
    ElfW_Rel *rel;
    Sym *sym;

    if (nocode_wanted)
        return;

    /* pick the checking helper matching the access size */
    size = type_size(&vtop->type, &align);
    switch(size) {
    case  1: func = TOK___bound_ptr_indir1; break;
    case  2: func = TOK___bound_ptr_indir2; break;
    case  4: func = TOK___bound_ptr_indir4; break;
    case  8: func = TOK___bound_ptr_indir8; break;
    case 12: func = TOK___bound_ptr_indir12; break;
    case 16: func = TOK___bound_ptr_indir16; break;
    default:
        /* may happen with struct member access */
        return;
    }
    sym = external_helper_sym(func);
    if (!sym->c)
        put_extern_sym(sym, NULL, 0, 0);
    /* patch relocation */
    /* XXX: find a better solution ? */
    rel = (ElfW_Rel *)(cur_text_section->reloc->data + vtop->c.i);
    rel->r_info = ELFW(R_INFO)(sym->c, ELFW(R_TYPE)(rel->r_info));
}
/* generate lvalue bound code */
static void gbound(void)
{
    CType type1;

    vtop->r &= ~VT_MUSTBOUND;
    /* if lvalue, then use checking code before dereferencing */
    if (vtop->r & VT_LVAL) {
        /* if not VT_BOUNDED value, then make one */
        if (!(vtop->r & VT_BOUNDED)) {
            /* must save type because we must set it to int to get pointer */
            type1 = vtop->type;
            vtop->type.t = VT_PTR;
            gaddrof();
            vpushi(0);
            gen_bounded_ptr_add();
            vtop->r |= VT_LVAL;
            vtop->type = type1;
        }
        /* then check for dereferencing */
        gen_bounded_ptr_deref();
    }
}

/* we need to call __bound_ptr_add before we start to load function
   args into registers */
ST_FUNC void gbound_args(int nb_args)
{
    int i, v;
    SValue *sv;

    /* bound-check each pending argument in place */
    for (i = 1; i <= nb_args; ++i)
        if (vtop[1 - i].r & VT_MUSTBOUND) {
            vrotb(i);
            gbound();
            vrott(i);
        }

    /* sv is the called function's symbol value */
    sv = vtop - nb_args;
    if (sv->r & VT_SYM) {
        v = sv->sym->v;
        /* setjmp-family calls need runtime registration with the
           bounds checker */
        if (v == TOK_setjmp
          || v == TOK__setjmp
#ifndef TCC_TARGET_PE
          || v == TOK_sigsetjmp
          || v == TOK___sigsetjmp
#endif
            ) {
            vpush_helper_func(TOK___bound_setjmp);
            vpushv(sv + 1);
            gfunc_call(1);
            func_bound_add_epilog = 1;
        }
#if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64
        if (v == TOK_alloca)
            func_bound_add_epilog = 1;
#endif
#if TARGETOS_NetBSD
        if (v == TOK_longjmp) /* undo rename to __longjmp14 */
            sv->sym->asm_label = TOK___bound_longjmp;
#endif
    }
}
/* Add bounds for local symbols from S to E (via ->prev) */
static void add_local_bounds(Sym *s, Sym *e)
{
    for (; s != e; s = s->prev) {
        /* only named locals living on the stack are candidates */
        if (!s->v || (s->r & VT_VALMASK) != VT_LOCAL)
            continue;
        /* Add arrays/structs/unions because we always take address */
        if ((s->type.t & VT_ARRAY)
            || (s->type.t & VT_BTYPE) == VT_STRUCT
            || s->a.addrtaken) {
            /* add local bound info */
            int align, size = type_size(&s->type, &align);
            addr_t *bounds_ptr = section_ptr_add(lbounds_section,
                                                 2 * sizeof(addr_t));
            bounds_ptr[0] = s->c;
            bounds_ptr[1] = size;
        }
    }
}
#endif

/* Wrapper around sym_pop, that potentially also registers local bounds.  */
static void pop_local_syms(Sym *b, int keep)
{
#ifdef CONFIG_TCC_BCHECK
    if (tcc_state->do_bounds_check && !keep && (local_scope || !func_var))
        add_local_bounds(local_stack, b);
#endif
    if (debug_modes)
        tcc_add_debug_info (tcc_state, !local_scope, local_stack, b);
    sym_pop(&local_stack, b, keep);
}
/* Advance the byte pointer on vtop by 'o' and make it an unsigned
   char lvalue; used to walk a packed bitfield byte by byte. */
static void incr_bf_adr(int o)
{
    vtop->type = char_pointer_type;
    gaddrof();
    vpushs(o);
    gen_op('+');
    vtop->type.t = VT_BYTE | VT_UNSIGNED;
    vtop->r |= VT_LVAL;
}

/* single-byte load mode for packed or otherwise unaligned bitfields */
static void load_packed_bf(CType *type, int bit_pos, int bit_size)
{
    int n, o, bits;
    save_reg_upstack(vtop->r, 1);
    vpush64(type->t & VT_BTYPE, 0); // B X
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        vswap(); // X B
        incr_bf_adr(o);
        vdup(); // X B B
        /* n = number of bits taken from the current byte */
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHR), bit_pos = 0; // X B Y
        if (n < 8)
            vpushi((1 << n) - 1), gen_op('&');
        gen_cast(type);
        if (bits)
            vpushi(bits), gen_op(TOK_SHL);
        vrotb(3); // B Y X
        gen_op('|'); // B X
        bits += n, bit_size -= n, o = 1;
    } while (bit_size);
    vswap(), vpop();
    if (!(type->t & VT_UNSIGNED)) {
        /* sign-extend by shifting the field up to the MSB and back */
        n = ((type->t & VT_BTYPE) == VT_LLONG ? 64 : 32) - bits;
        vpushi(n), gen_op(TOK_SHL);
        vpushi(n), gen_op(TOK_SAR);
    }
}

/* single-byte store mode for packed or otherwise unaligned bitfields */
static void store_packed_bf(int bit_pos, int bit_size)
{
    int bits, n, o, m, c;
    /* c: value to store is a plain constant */
    c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    vswap(); // X B
    save_reg_upstack(vtop->r, 1);
    bits = 0, o = bit_pos >> 3, bit_pos &= 7;
    do {
        incr_bf_adr(o); // X B
        vswap(); //B X
        c ? vdup() : gv_dup(); // B V X
        vrott(3); // X B V
        if (bits)
            vpushi(bits), gen_op(TOK_SHR);
        if (bit_pos)
            vpushi(bit_pos), gen_op(TOK_SHL);
        /* n = number of bits written to the current byte */
        n = 8 - bit_pos;
        if (n > bit_size)
            n = bit_size;
        if (n < 8) {
            /* partial byte: merge new bits with the byte's other bits */
            m = ((1 << n) - 1) << bit_pos;
            vpushi(m), gen_op('&'); // X B V1
            vpushv(vtop-1); // X B V1 B
            vpushi(m & 0x80 ? ~m & 0x7f : ~m);
            gen_op('&'); // X B V1 B1
            gen_op('|'); // X B V2
        }
        vdup(), vtop[-1] = vtop[-2]; // X B B V2
        vstore(), vpop(); // X B
        bits += n, bit_size -= n, bit_pos = 0, o = 1;
    } while (bit_size);
    vpop(), vpop();
}

/* Adjust an SValue's type for bitfield access via its ->auxtype;
   returns the auxtype (VT_STRUCT selects the packed byte-wise path). */
static int adjust_bf(SValue *sv, int bit_pos, int bit_size)
{
    int t;
    if (0 == sv->type.ref)
        return 0;
    t = sv->type.ref->auxtype;
    if (t != -1 && t != VT_STRUCT) {
        sv->type.t = (sv->type.t & ~(VT_BTYPE | VT_LONG)) | t;
        sv->r |= VT_LVAL;
    }
    return t;
}
/* store vtop a register belonging to class 'rc'. lvalues are
   converted to values. Cannot be used if cannot be converted to
   register value (such as structures). */
ST_FUNC int gv(int rc)
{
    int r, r2, r_ok, r2_ok, rc2, bt;
    int bit_pos, bit_size, size, align;

    /* NOTE: get_reg can modify vstack[] */
    if (vtop->type.t & VT_BITFIELD) {
        CType type;

        bit_pos = BIT_POS(vtop->type.t);
        bit_size = BIT_SIZE(vtop->type.t);
        /* remove bit field info to avoid loops */
        vtop->type.t &= ~VT_STRUCT_MASK;

        type.ref = NULL;
        type.t = vtop->type.t & VT_UNSIGNED;
        if ((vtop->type.t & VT_BTYPE) == VT_BOOL)
            type.t |= VT_UNSIGNED;

        r = adjust_bf(vtop, bit_pos, bit_size);

        if ((vtop->type.t & VT_BTYPE) == VT_LLONG)
            type.t |= VT_LLONG;
        else
            type.t |= VT_INT;

        if (r == VT_STRUCT) {
            /* unaligned/packed bitfield: byte-by-byte extraction */
            load_packed_bf(&type, bit_pos, bit_size);
        } else {
            int bits = (type.t & VT_BTYPE) == VT_LLONG ? 64 : 32;
            /* cast to int to propagate signedness in following ops */
            gen_cast(&type);
            /* generate shifts */
            vpushi(bits - (bit_pos + bit_size));
            gen_op(TOK_SHL);
            vpushi(bits - bit_size);
            /* NOTE: transformed to SHR if unsigned */
            gen_op(TOK_SAR);
        }
        r = gv(rc);
    } else {
        if (is_float(vtop->type.t) &&
            (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
            /* CPUs usually cannot use float constants, so we store them
               generically in data segment */
            init_params p = { data_section };
            unsigned long offset;
            size = type_size(&vtop->type, &align);
            if (NODATA_WANTED)
                size = 0, align = 1;
            offset = section_add(p.sec, size, align);
            vpush_ref(&vtop->type, p.sec, offset, size);
            vswap();
            init_putv(&p, &vtop->type, offset);
            vtop->r |= VT_LVAL;
        }
#ifdef CONFIG_TCC_BCHECK
        if (vtop->r & VT_MUSTBOUND)
            gbound();
#endif

        bt = vtop->type.t & VT_BTYPE;

#ifdef TCC_TARGET_RISCV64
        /* XXX mega hack */
        if (bt == VT_LDOUBLE && rc == RC_FLOAT)
            rc = RC_INT;
#endif
        /* rc2 is the class for a second register (two-word values) */
        rc2 = RC2_TYPE(bt, rc);

        /* need to reload if:
           - constant
           - lvalue (need to dereference pointer)
           - already a register, but not in the right class */
        r = vtop->r & VT_VALMASK;
        r_ok = !(vtop->r & VT_LVAL) && (r < VT_CONST) && (reg_classes[r] & rc);
        r2_ok = !rc2 || ((vtop->r2 < VT_CONST) && (reg_classes[vtop->r2] & rc2));

        if (!r_ok || !r2_ok) {
            if (!r_ok)
                r = get_reg(rc);
            if (rc2) {
                int load_type = (bt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
                int original_type = vtop->type.t;

                /* two register type load :
                   expand to two words temporarily */
                if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST) {
                    /* load constant */
                    unsigned long long ll = vtop->c.i;
                    vtop->c.i = ll; /* first word */
                    load(r, vtop);
                    vtop->r = r; /* save register value */
                    vpushi(ll >> 32); /* second word */
                } else if (vtop->r & VT_LVAL) {
                    /* We do not want to modify the long long pointer here.
                       So we save any other instances down the stack */
                    save_reg_upstack(vtop->r, 1);
                    /* load from memory */
                    vtop->type.t = load_type;
                    load(r, vtop);
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    /* increment pointer to get second word */
                    vtop->type.t = VT_PTRDIFF_T;
                    gaddrof();
                    vpushs(PTR_SIZE);
                    gen_op('+');
                    vtop->r |= VT_LVAL;
                    vtop->type.t = load_type;
                } else {
                    /* move registers */
                    if (!r_ok)
                        load(r, vtop);
                    if (r2_ok && vtop->r2 < VT_CONST)
                        goto done;
                    vdup();
                    vtop[-1].r = r; /* save register value */
                    vtop->r = vtop[-1].r2;
                }
                /* Allocate second register. Here we rely on the fact that
                   get_reg() tries first to free r2 of an SValue. */
                r2 = get_reg(rc2);
                load(r2, vtop);
                vpop();
                /* write second register */
                vtop->r2 = r2;
            done:
                vtop->type.t = original_type;
            } else {
                if (vtop->r == VT_CMP)
                    vset_VT_JMP();
                /* one register type load */
                load(r, vtop);
            }
        }
        vtop->r = r;
#ifdef TCC_TARGET_C67
        /* uses register pairs for doubles */
        if (bt == VT_DOUBLE)
            vtop->r2 = r+1;
#endif
    }
    return r;
}
/* generate vtop[-1] and vtop[0] in resp. classes rc1 and rc2 */
ST_FUNC void gv2(int rc1, int rc2)
{
    /* generate more generic register first. But VT_JMP or VT_CMP
       values must be generated first in all cases to avoid possible
       reload errors */
    if (vtop->r != VT_CMP && rc1 <= rc2) {
        /* bottom-of-pair operand first */
        vswap();
        gv(rc1);
        vswap();
        gv(rc2);
        /* test if reload is needed for first register */
        /* generating the second value may have spilled the first:
           (r & VT_VALMASK) >= VT_CONST means it no longer lives in a register */
        if ((vtop[-1].r & VT_VALMASK) >= VT_CONST) {
            vswap();
            gv(rc1);
            vswap();
        }
    } else {
        /* top operand first */
        gv(rc2);
        vswap();
        gv(rc1);
        vswap();
        /* test if reload is needed for first register */
        if ((vtop[0].r & VT_VALMASK) >= VT_CONST) {
            gv(rc2);
        }
    }
}
2488 #if PTR_SIZE == 4
2489 /* expand 64bit on stack in two ints */
2490 ST_FUNC void lexpand(void)
2492 int u, v;
2493 u = vtop->type.t & (VT_DEFSIGN | VT_UNSIGNED);
2494 v = vtop->r & (VT_VALMASK | VT_LVAL);
2495 if (v == VT_CONST) {
2496 vdup();
2497 vtop[0].c.i >>= 32;
2498 } else if (v == (VT_LVAL|VT_CONST) || v == (VT_LVAL|VT_LOCAL)) {
2499 vdup();
2500 vtop[0].c.i += 4;
2501 } else {
2502 gv(RC_INT);
2503 vdup();
2504 vtop[0].r = vtop[-1].r2;
2505 vtop[0].r2 = vtop[-1].r2 = VT_CONST;
2507 vtop[0].type.t = vtop[-1].type.t = VT_INT | u;
2509 #endif
2511 #if PTR_SIZE == 4
/* build a long long from two ints */
static void lbuild(int t)
{
    /* force both halves into integer registers */
    gv2(RC_INT, RC_INT);
    /* low word keeps its register, the high word's register becomes r2 */
    vtop[-1].r2 = vtop[0].r;
    vtop[-1].type.t = t;
    /* drop the now-merged high-word entry */
    vpop();
}
2520 #endif
/* convert stack entry to register and duplicate its value in another
   register */
static void gv_dup(void)
{
    int t, rc, r;

    t = vtop->type.t;
#if PTR_SIZE == 4
    if ((t & VT_BTYPE) == VT_LLONG) {
        /* on 32-bit targets a long long occupies two 32-bit halves:
           split it, duplicate each half, then rebuild two full values */
        if (t & VT_BITFIELD) {
            gv(RC_INT);
            t = vtop->type.t;
        }
        lexpand();
        gv_dup();
        vswap();
        vrotb(3);
        gv_dup();
        vrotb(4);
        /* stack: H L L1 H1 */
        lbuild(t);
        vrotb(3);
        vrotb(3);
        vswap();
        lbuild(t);
        vswap();
        return;
    }
#endif
    /* duplicate value */
    rc = RC_TYPE(t);
    gv(rc);
    r = get_reg(rc);
    vdup();
    load(r, vtop);
    vtop->r = r;
}
2560 #if PTR_SIZE == 4
/* generate CPU independent (unsigned) long long operations */
/* Implements 64-bit ops on 32-bit targets by operating on (low, high)
   word pairs on the value stack, or by calling libgcc-style helper
   functions for division/modulo and variable shifts. */
static void gen_opl(int op)
{
    int t, a, b, op1, c, i;
    int func;
    unsigned short reg_iret = REG_IRET;
    unsigned short reg_lret = REG_IRE2;
    SValue tmp;

    switch(op) {
    case '/':
    case TOK_PDIV:
        func = TOK___divdi3;
        goto gen_func;
    case TOK_UDIV:
        func = TOK___udivdi3;
        goto gen_func;
    case '%':
        func = TOK___moddi3;
        goto gen_mod_func;
    case TOK_UMOD:
        func = TOK___umoddi3;
    gen_mod_func:
#ifdef TCC_ARM_EABI
        /* EABI aeabi_ldivmod-style helpers return the remainder in r2/r3 */
        reg_iret = TREG_R2;
        reg_lret = TREG_R3;
#endif
    gen_func:
        /* call generic long long function */
        vpush_helper_func(func);
        vrott(3);
        gfunc_call(2);
        vpushi(0);
        vtop->r = reg_iret;
        vtop->r2 = reg_lret;
        break;
    case '^':
    case '&':
    case '|':
    case '*':
    case '+':
    case '-':
        //pv("gen_opl A",0,2);
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[0];
        vtop[0] = vtop[-3];
        vtop[-3] = tmp;
        tmp = vtop[-2];
        vtop[-2] = vtop[-3];
        vtop[-3] = tmp;
        vswap();
        /* stack: H1 H2 L1 L2 */
        //pv("gen_opl B",0,4);
        if (op == '*') {
            /* 64x64 multiply from 32-bit parts:
               result = (L1*L2) + ((L1*H2 + H1*L2) << 32) */
            vpushv(vtop - 1);
            vpushv(vtop - 1);
            gen_op(TOK_UMULL);
            lexpand();
            /* stack: H1 H2 L1 L2 ML MH */
            for(i=0;i<4;i++)
                vrotb(6);
            /* stack: ML MH H1 H2 L1 L2 */
            tmp = vtop[0];
            vtop[0] = vtop[-2];
            vtop[-2] = tmp;
            /* stack: ML MH H1 L2 H2 L1 */
            gen_op('*');
            vrotb(3);
            vrotb(3);
            gen_op('*');
            /* stack: ML MH M1 M2 */
            gen_op('+');
            gen_op('+');
        } else if (op == '+' || op == '-') {
            /* XXX: add non carry method too (for MIPS or alpha) */
            if (op == '+')
                op1 = TOK_ADDC1;
            else
                op1 = TOK_SUBC1;
            gen_op(op1);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            gen_op(op1 + 1); /* TOK_xxxC2 */
        } else {
            /* bitwise ops act on the two halves independently */
            gen_op(op);
            /* stack: H1 H2 (L1 op L2) */
            vrotb(3);
            vrotb(3);
            /* stack: (L1 op L2) H1 H2 */
            gen_op(op);
            /* stack: (L1 op L2) (H1 op H2) */
        }
        /* stack: L H */
        lbuild(t);
        break;
    case TOK_SAR:
    case TOK_SHR:
    case TOK_SHL:
        if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
            t = vtop[-1].type.t;
            vswap();
            lexpand();
            vrotb(3);
            /* stack: L H shift */
            c = (int)vtop->c.i;
            /* constant: simpler */
            /* NOTE: all comments are for SHL. the other cases are
               done by swapping words */
            vpop();
            if (op != TOK_SHL)
                vswap();
            if (c >= 32) {
                /* stack: L H */
                vpop();
                if (c > 32) {
                    vpushi(c - 32);
                    gen_op(op);
                }
                if (op != TOK_SAR) {
                    vpushi(0);
                } else {
                    /* arithmetic shift: high word is the sign extension */
                    gv_dup();
                    vpushi(31);
                    gen_op(TOK_SAR);
                }
                vswap();
            } else {
                vswap();
                gv_dup();
                /* stack: H L L */
                vpushi(c);
                gen_op(op);
                vswap();
                vpushi(32 - c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHR);
                else
                    gen_op(TOK_SHL);
                vrotb(3);
                /* stack: L L H */
                vpushi(c);
                if (op == TOK_SHL)
                    gen_op(TOK_SHL);
                else
                    gen_op(TOK_SHR);
                gen_op('|');
            }
            if (op != TOK_SHL)
                vswap();
            lbuild(t);
        } else {
            /* XXX: should provide a faster fallback on x86 ? */
            switch(op) {
            case TOK_SAR:
                func = TOK___ashrdi3;
                goto gen_func;
            case TOK_SHR:
                func = TOK___lshrdi3;
                goto gen_func;
            case TOK_SHL:
                func = TOK___ashldi3;
                goto gen_func;
            }
        }
        break;
    default:
        /* compare operations */
        t = vtop->type.t;
        vswap();
        lexpand();
        vrotb(3);
        lexpand();
        /* stack: L1 H1 L2 H2 */
        tmp = vtop[-1];
        vtop[-1] = vtop[-2];
        vtop[-2] = tmp;
        /* stack: L1 L2 H1 H2 */
        save_regs(4);
        /* compare high */
        op1 = op;
        /* when values are equal, we need to compare low words. since
           the jump is inverted, we invert the test too. */
        if (op1 == TOK_LT)
            op1 = TOK_LE;
        else if (op1 == TOK_GT)
            op1 = TOK_GE;
        else if (op1 == TOK_ULT)
            op1 = TOK_ULE;
        else if (op1 == TOK_UGT)
            op1 = TOK_UGE;
        a = 0;
        b = 0;
        gen_op(op1);
        if (op == TOK_NE) {
            b = gvtst(0, 0);
        } else {
            a = gvtst(1, 0);
            if (op != TOK_EQ) {
                /* generate non equal test */
                vpushi(0);
                vset_VT_CMP(TOK_NE);
                b = gvtst(0, 0);
            }
        }
        /* compare low. Always unsigned */
        op1 = op;
        if (op1 == TOK_LT)
            op1 = TOK_ULT;
        else if (op1 == TOK_LE)
            op1 = TOK_ULE;
        else if (op1 == TOK_GT)
            op1 = TOK_UGT;
        else if (op1 == TOK_GE)
            op1 = TOK_UGE;
        gen_op(op1);
#if 0//def TCC_TARGET_I386
        if (op == TOK_NE) { gsym(b); break; }
        if (op == TOK_EQ) { gsym(a); break; }
#endif
        gvtst_set(1, a);
        gvtst_set(0, b);
        break;
    }
}
2791 #endif
/* signed 64-bit division emulated on the raw two's-complement bit
   patterns (truncating toward zero, as C requires) */
static uint64_t gen_opic_sdiv(uint64_t a, uint64_t b)
{
    uint64_t ua = (a >> 63) ? -a : a;   /* |a| */
    uint64_t ub = (b >> 63) ? -b : b;   /* |b| */
    uint64_t q = ua / ub;
    /* result is negative iff the operand signs differ */
    return ((a ^ b) >> 63) ? -q : q;
}
/* signed 64-bit "less than" on raw bit patterns: flipping the sign
   bit maps signed order onto unsigned order */
static int gen_opic_lt(uint64_t a, uint64_t b)
{
    const uint64_t sign_bit = (uint64_t)1 << 63;
    return (a ^ sign_bit) < (b ^ sign_bit);
}
/* handle integer constant optimizations and various machine
   independent opt */
static void gen_opic(int op)
{
    SValue *v1 = vtop - 1;
    SValue *v2 = vtop;
    int t1 = v1->type.t & VT_BTYPE;
    int t2 = v2->type.t & VT_BTYPE;
    /* an operand is foldable only if it is a plain constant:
       no lvalue, no relocation symbol */
    int c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    int c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
    uint64_t l1 = c1 ? v1->c.i : 0;
    uint64_t l2 = c2 ? v2->c.i : 0;
    int shm = (t1 == VT_LLONG) ? 63 : 31;  /* shift-count mask for the type width */

    /* normalize 32-bit operands to 64 bits (sign- or zero-extend) */
    if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
        l1 = ((uint32_t)l1 |
              (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
    if (t2 != VT_LLONG && (PTR_SIZE != 8 || t2 != VT_PTR))
        l2 = ((uint32_t)l2 |
              (v2->type.t & VT_UNSIGNED ? 0 : -(l2 & 0x80000000)));

    if (c1 && c2) {
        /* both operands are constant: fold at compile time */
        switch(op) {
        case '+': l1 += l2; break;
        case '-': l1 -= l2; break;
        case '&': l1 &= l2; break;
        case '^': l1 ^= l2; break;
        case '|': l1 |= l2; break;
        case '*': l1 *= l2; break;

        case TOK_PDIV:
        case '/':
        case '%':
        case TOK_UDIV:
        case TOK_UMOD:
            /* if division by zero, generate explicit division */
            if (l2 == 0) {
                if (const_wanted && !(nocode_wanted & unevalmask))
                    tcc_error("division by zero in constant");
                goto general_case;
            }
            switch(op) {
            default: l1 = gen_opic_sdiv(l1, l2); break;
            case '%': l1 = l1 - l2 * gen_opic_sdiv(l1, l2); break;
            case TOK_UDIV: l1 = l1 / l2; break;
            case TOK_UMOD: l1 = l1 % l2; break;
            }
            break;
        case TOK_SHL: l1 <<= (l2 & shm); break;
        case TOK_SHR: l1 >>= (l2 & shm); break;
        case TOK_SAR:
            /* arithmetic shift right without relying on implementation-
               defined signed >> behavior */
            l1 = (l1 >> 63) ? ~(~l1 >> (l2 & shm)) : l1 >> (l2 & shm);
            break;
        /* tests */
        case TOK_ULT: l1 = l1 < l2; break;
        case TOK_UGE: l1 = l1 >= l2; break;
        case TOK_EQ: l1 = l1 == l2; break;
        case TOK_NE: l1 = l1 != l2; break;
        case TOK_ULE: l1 = l1 <= l2; break;
        case TOK_UGT: l1 = l1 > l2; break;
        case TOK_LT: l1 = gen_opic_lt(l1, l2); break;
        case TOK_GE: l1 = !gen_opic_lt(l1, l2); break;
        case TOK_LE: l1 = !gen_opic_lt(l2, l1); break;
        case TOK_GT: l1 = gen_opic_lt(l2, l1); break;
        /* logical */
        case TOK_LAND: l1 = l1 && l2; break;
        case TOK_LOR: l1 = l1 || l2; break;
        default:
            goto general_case;
        }
        /* re-normalize the folded result to the operand's width */
        if (t1 != VT_LLONG && (PTR_SIZE != 8 || t1 != VT_PTR))
            l1 = ((uint32_t)l1 |
                  (v1->type.t & VT_UNSIGNED ? 0 : -(l1 & 0x80000000)));
        v1->c.i = l1;
        vtop--;
    } else {
        /* if commutative ops, put c2 as constant */
        if (c1 && (op == '+' || op == '&' || op == '^' ||
                   op == '|' || op == '*' || op == TOK_EQ || op == TOK_NE)) {
            vswap();
            c2 = c1; //c = c1, c1 = c2, c2 = c;
            l2 = l1; //l = l1, l1 = l2, l2 = l;
        }
        if (!const_wanted &&
            c1 && ((l1 == 0 &&
                    (op == TOK_SHL || op == TOK_SHR || op == TOK_SAR)) ||
                   (l1 == -1 && op == TOK_SAR))) {
            /* treat (0 << x), (0 >> x) and (-1 >> x) as constant */
            vtop--;
        } else if (!const_wanted &&
                   c2 && ((l2 == 0 && (op == '&' || op == '*')) ||
                          (op == '|' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))) ||
                          (l2 == 1 && (op == '%' || op == TOK_UMOD)))) {
            /* treat (x & 0), (x * 0), (x | -1) and (x % 1) as constant */
            if (l2 == 1)
                vtop->c.i = 0;
            vswap();
            vtop--;
        } else if (c2 && (((op == '*' || op == '/' || op == TOK_UDIV ||
                            op == TOK_PDIV) &&
                           l2 == 1) ||
                          ((op == '+' || op == '-' || op == '|' || op == '^' ||
                            op == TOK_SHL || op == TOK_SHR || op == TOK_SAR) &&
                           l2 == 0) ||
                          (op == '&' &&
                           (l2 == -1 || (l2 == 0xFFFFFFFF && t2 != VT_LLONG))))) {
            /* filter out NOP operations like x*1, x-0, x&-1... */
            vtop--;
        } else if (c2 && (op == '*' || op == TOK_PDIV || op == TOK_UDIV)) {
            /* try to use shifts instead of muls or divs */
            if (l2 > 0 && (l2 & (l2 - 1)) == 0) {
                /* power of two: n = log2(l2) */
                int n = -1;
                while (l2) {
                    l2 >>= 1;
                    n++;
                }
                vtop->c.i = n;
                if (op == '*')
                    op = TOK_SHL;
                else if (op == TOK_PDIV)
                    op = TOK_SAR;
                else
                    op = TOK_SHR;
            }
            goto general_case;
        } else if (c2 && (op == '+' || op == '-') &&
                   (((vtop[-1].r & (VT_VALMASK | VT_LVAL | VT_SYM)) == (VT_CONST | VT_SYM))
                    || (vtop[-1].r & (VT_VALMASK | VT_LVAL)) == VT_LOCAL)) {
            /* symbol + constant case */
            if (op == '-')
                l2 = -l2;
            l2 += vtop[-1].c.i;
            /* The backends can't always deal with addends to symbols
               larger than +-1<<31. Don't construct such. */
            if ((int)l2 != l2)
                goto general_case;
            vtop--;
            vtop->c.i = l2;
        } else {
        general_case:
            /* call low level op generator */
            if (t1 == VT_LLONG || t2 == VT_LLONG ||
                (PTR_SIZE == 8 && (t1 == VT_PTR || t2 == VT_PTR)))
                gen_opl(op);
            else
                gen_opi(op);
        }
    }
}
/* Floating-point negation, per target. */
#if defined TCC_TARGET_X86_64 || defined TCC_TARGET_I386
/* NOTE(review): on x86 targets negation is delegated to gen_opf()
   via this alias — presumably the backend handles TOK_NEG itself;
   confirm in the i386/x86_64 gen_opf implementations */
# define gen_negf gen_opf
#elif defined TCC_TARGET_ARM
void gen_negf(int op)
{
    /* arm will detect 0-x and replace by vneg */
    vpushi(0), vswap(), gen_op('-');
}
#else
/* XXX: implement in gen_opf() for other backends too */
void gen_negf(int op)
{
    /* In IEEE negate(x) isn't subtract(0,x). Without NaNs it's
       subtract(-0, x), but with them it's really a sign flip
       operation. We implement this with bit manipulation and have
       to do some type reinterpretation for this, which TCC can do
       only via memory. */

    int align, size, bt;

    size = type_size(&vtop->type, &align);
    bt = vtop->type.t & VT_BTYPE;
    save_reg(gv(RC_TYPE(bt)));
    vdup();
    /* step to the last byte of the value — presumably the byte holding
       the sign bit on little-endian targets; see incr_bf_adr() */
    incr_bf_adr(size - 1);
    vdup();
    vpushi(0x80); /* flip sign */
    gen_op('^');
    vstore();
    vpop();
}
#endif
2988 /* generate a floating point operation with constant propagation */
2989 static void gen_opif(int op)
2991 int c1, c2;
2992 SValue *v1, *v2;
2993 #if defined _MSC_VER && defined __x86_64__
2994 /* avoid bad optimization with f1 -= f2 for f1:-0.0, f2:0.0 */
2995 volatile
2996 #endif
2997 long double f1, f2;
2999 v1 = vtop - 1;
3000 v2 = vtop;
3001 if (op == TOK_NEG)
3002 v1 = v2;
3004 /* currently, we cannot do computations with forward symbols */
3005 c1 = (v1->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3006 c2 = (v2->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3007 if (c1 && c2) {
3008 if (v1->type.t == VT_FLOAT) {
3009 f1 = v1->c.f;
3010 f2 = v2->c.f;
3011 } else if (v1->type.t == VT_DOUBLE) {
3012 f1 = v1->c.d;
3013 f2 = v2->c.d;
3014 } else {
3015 f1 = v1->c.ld;
3016 f2 = v2->c.ld;
3018 /* NOTE: we only do constant propagation if finite number (not
3019 NaN or infinity) (ANSI spec) */
3020 if (!(ieee_finite(f1) || !ieee_finite(f2)) && !const_wanted)
3021 goto general_case;
3022 switch(op) {
3023 case '+': f1 += f2; break;
3024 case '-': f1 -= f2; break;
3025 case '*': f1 *= f2; break;
3026 case '/':
3027 if (f2 == (f1-f1) ) {
3028 union { float f; unsigned u; } x1, x2, y;
3029 /* If not in initializer we need to potentially generate
3030 FP exceptions at runtime, otherwise we want to fold. */
3031 if (!const_wanted)
3032 goto general_case;
3033 /* the run-time result of 0.0/0.0 on x87, also of other compilers
3034 when used to compile the f1 /= f2 below, would be -nan */
3035 x1.f = f1, x2.f = f2;
3036 if (f1 == (f1-f1) )
3037 y.u = 0x7fc00000; /* nan */
3038 else
3039 y.u = 0x7f800000; /* infinity */
3040 y.u |= (x1.u ^ x2.u) & 0x80000000; /* set sign */
3041 f1 = y.f;
3042 break;
3044 f1 /= f2;
3045 break;
3046 case TOK_NEG:
3047 f1 = -f1;
3048 goto unary_result;
3049 /* XXX: also handles tests ? */
3050 default:
3051 goto general_case;
3053 vtop--;
3054 unary_result:
3055 /* XXX: overflow test ? */
3056 if (v1->type.t == VT_FLOAT) {
3057 v1->c.f = f1;
3058 } else if (v1->type.t == VT_DOUBLE) {
3059 v1->c.d = f1;
3060 } else {
3061 v1->c.ld = f1;
3063 } else {
3064 general_case:
3065 if (op == TOK_NEG) {
3066 gen_negf(op);
3067 } else {
3068 gen_opf(op);
3073 /* print a type. If 'varstr' is not NULL, then the variable is also
3074 printed in the type */
3075 /* XXX: union */
3076 /* XXX: add array and function pointers */
3077 static void type_to_str(char *buf, int buf_size,
3078 CType *type, const char *varstr)
3080 int bt, v, t;
3081 Sym *s, *sa;
3082 char buf1[256];
3083 const char *tstr;
3085 t = type->t;
3086 bt = t & VT_BTYPE;
3087 buf[0] = '\0';
3089 if (t & VT_EXTERN)
3090 pstrcat(buf, buf_size, "extern ");
3091 if (t & VT_STATIC)
3092 pstrcat(buf, buf_size, "static ");
3093 if (t & VT_TYPEDEF)
3094 pstrcat(buf, buf_size, "typedef ");
3095 if (t & VT_INLINE)
3096 pstrcat(buf, buf_size, "inline ");
3097 if (t & VT_VOLATILE)
3098 pstrcat(buf, buf_size, "volatile ");
3099 if (t & VT_CONSTANT)
3100 pstrcat(buf, buf_size, "const ");
3102 if (((t & VT_DEFSIGN) && bt == VT_BYTE)
3103 || ((t & VT_UNSIGNED)
3104 && (bt == VT_SHORT || bt == VT_INT || bt == VT_LLONG)
3105 && !IS_ENUM(t)
3107 pstrcat(buf, buf_size, (t & VT_UNSIGNED) ? "unsigned " : "signed ");
3109 buf_size -= strlen(buf);
3110 buf += strlen(buf);
3112 switch(bt) {
3113 case VT_VOID:
3114 tstr = "void";
3115 goto add_tstr;
3116 case VT_BOOL:
3117 tstr = "_Bool";
3118 goto add_tstr;
3119 case VT_BYTE:
3120 tstr = "char";
3121 goto add_tstr;
3122 case VT_SHORT:
3123 tstr = "short";
3124 goto add_tstr;
3125 case VT_INT:
3126 tstr = "int";
3127 goto maybe_long;
3128 case VT_LLONG:
3129 tstr = "long long";
3130 maybe_long:
3131 if (t & VT_LONG)
3132 tstr = "long";
3133 if (!IS_ENUM(t))
3134 goto add_tstr;
3135 tstr = "enum ";
3136 goto tstruct;
3137 case VT_FLOAT:
3138 tstr = "float";
3139 goto add_tstr;
3140 case VT_DOUBLE:
3141 tstr = "double";
3142 if (!(t & VT_LONG))
3143 goto add_tstr;
3144 case VT_LDOUBLE:
3145 tstr = "long double";
3146 add_tstr:
3147 pstrcat(buf, buf_size, tstr);
3148 break;
3149 case VT_STRUCT:
3150 tstr = "struct ";
3151 if (IS_UNION(t))
3152 tstr = "union ";
3153 tstruct:
3154 pstrcat(buf, buf_size, tstr);
3155 v = type->ref->v & ~SYM_STRUCT;
3156 if (v >= SYM_FIRST_ANOM)
3157 pstrcat(buf, buf_size, "<anonymous>");
3158 else
3159 pstrcat(buf, buf_size, get_tok_str(v, NULL));
3160 break;
3161 case VT_FUNC:
3162 s = type->ref;
3163 buf1[0]=0;
3164 if (varstr && '*' == *varstr) {
3165 pstrcat(buf1, sizeof(buf1), "(");
3166 pstrcat(buf1, sizeof(buf1), varstr);
3167 pstrcat(buf1, sizeof(buf1), ")");
3169 pstrcat(buf1, buf_size, "(");
3170 sa = s->next;
3171 while (sa != NULL) {
3172 char buf2[256];
3173 type_to_str(buf2, sizeof(buf2), &sa->type, NULL);
3174 pstrcat(buf1, sizeof(buf1), buf2);
3175 sa = sa->next;
3176 if (sa)
3177 pstrcat(buf1, sizeof(buf1), ", ");
3179 if (s->f.func_type == FUNC_ELLIPSIS)
3180 pstrcat(buf1, sizeof(buf1), ", ...");
3181 pstrcat(buf1, sizeof(buf1), ")");
3182 type_to_str(buf, buf_size, &s->type, buf1);
3183 goto no_var;
3184 case VT_PTR:
3185 s = type->ref;
3186 if (t & VT_ARRAY) {
3187 if (varstr && '*' == *varstr)
3188 snprintf(buf1, sizeof(buf1), "(%s)[%d]", varstr, s->c);
3189 else
3190 snprintf(buf1, sizeof(buf1), "%s[%d]", varstr ? varstr : "", s->c);
3191 type_to_str(buf, buf_size, &s->type, buf1);
3192 goto no_var;
3194 pstrcpy(buf1, sizeof(buf1), "*");
3195 if (t & VT_CONSTANT)
3196 pstrcat(buf1, buf_size, "const ");
3197 if (t & VT_VOLATILE)
3198 pstrcat(buf1, buf_size, "volatile ");
3199 if (varstr)
3200 pstrcat(buf1, sizeof(buf1), varstr);
3201 type_to_str(buf, buf_size, &s->type, buf1);
3202 goto no_var;
3204 if (varstr) {
3205 pstrcat(buf, buf_size, " ");
3206 pstrcat(buf, buf_size, varstr);
3208 no_var: ;
3211 static void type_incompatibility_error(CType* st, CType* dt, const char* fmt)
3213 char buf1[256], buf2[256];
3214 type_to_str(buf1, sizeof(buf1), st, NULL);
3215 type_to_str(buf2, sizeof(buf2), dt, NULL);
3216 tcc_error(fmt, buf1, buf2);
3219 static void type_incompatibility_warning(CType* st, CType* dt, const char* fmt)
3221 char buf1[256], buf2[256];
3222 type_to_str(buf1, sizeof(buf1), st, NULL);
3223 type_to_str(buf2, sizeof(buf2), dt, NULL);
3224 tcc_warning(fmt, buf1, buf2);
3227 static int pointed_size(CType *type)
3229 int align;
3230 return type_size(pointed_type(type), &align);
3233 static void vla_runtime_pointed_size(CType *type)
3235 int align;
3236 vla_runtime_type_size(pointed_type(type), &align);
3239 static inline int is_null_pointer(SValue *p)
3241 if ((p->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
3242 return 0;
3243 return ((p->type.t & VT_BTYPE) == VT_INT && (uint32_t)p->c.i == 0) ||
3244 ((p->type.t & VT_BTYPE) == VT_LLONG && p->c.i == 0) ||
3245 ((p->type.t & VT_BTYPE) == VT_PTR &&
3246 (PTR_SIZE == 4 ? (uint32_t)p->c.i == 0 : p->c.i == 0) &&
3247 ((pointed_type(&p->type)->t & VT_BTYPE) == VT_VOID) &&
3248 0 == (pointed_type(&p->type)->t & (VT_CONSTANT | VT_VOLATILE))
3252 /* compare function types. OLD functions match any new functions */
3253 static int is_compatible_func(CType *type1, CType *type2)
3255 Sym *s1, *s2;
3257 s1 = type1->ref;
3258 s2 = type2->ref;
3259 if (s1->f.func_call != s2->f.func_call)
3260 return 0;
3261 if (s1->f.func_type != s2->f.func_type
3262 && s1->f.func_type != FUNC_OLD
3263 && s2->f.func_type != FUNC_OLD)
3264 return 0;
3265 for (;;) {
3266 if (!is_compatible_unqualified_types(&s1->type, &s2->type))
3267 return 0;
3268 if (s1->f.func_type == FUNC_OLD || s2->f.func_type == FUNC_OLD )
3269 return 1;
3270 s1 = s1->next;
3271 s2 = s2->next;
3272 if (!s1)
3273 return !s2;
3274 if (!s2)
3275 return 0;
3279 /* return true if type1 and type2 are the same. If unqualified is
3280 true, qualifiers on the types are ignored.
3282 static int compare_types(CType *type1, CType *type2, int unqualified)
3284 int bt1, t1, t2;
3286 t1 = type1->t & VT_TYPE;
3287 t2 = type2->t & VT_TYPE;
3288 if (unqualified) {
3289 /* strip qualifiers before comparing */
3290 t1 &= ~(VT_CONSTANT | VT_VOLATILE);
3291 t2 &= ~(VT_CONSTANT | VT_VOLATILE);
3294 /* Default Vs explicit signedness only matters for char */
3295 if ((t1 & VT_BTYPE) != VT_BYTE) {
3296 t1 &= ~VT_DEFSIGN;
3297 t2 &= ~VT_DEFSIGN;
3299 /* XXX: bitfields ? */
3300 if (t1 != t2)
3301 return 0;
3303 if ((t1 & VT_ARRAY)
3304 && !(type1->ref->c < 0
3305 || type2->ref->c < 0
3306 || type1->ref->c == type2->ref->c))
3307 return 0;
3309 /* test more complicated cases */
3310 bt1 = t1 & VT_BTYPE;
3311 if (bt1 == VT_PTR) {
3312 type1 = pointed_type(type1);
3313 type2 = pointed_type(type2);
3314 return is_compatible_types(type1, type2);
3315 } else if (bt1 == VT_STRUCT) {
3316 return (type1->ref == type2->ref);
3317 } else if (bt1 == VT_FUNC) {
3318 return is_compatible_func(type1, type2);
3319 } else if (IS_ENUM(type1->t) && IS_ENUM(type2->t)) {
3320 /* If both are enums then they must be the same, if only one is then
3321 t1 and t2 must be equal, which was checked above already. */
3322 return type1->ref == type2->ref;
3323 } else {
3324 return 1;
/* Check if OP1 and OP2 can be "combined" with operation OP, the combined
   type is stored in DEST if non-null (except for pointer plus/minus) . */
/* Returns 1 on success, 0 when the operand types are incompatible for OP. */
static int combine_types(CType *dest, SValue *op1, SValue *op2, int op)
{
    CType *type1 = &op1->type, *type2 = &op2->type, type;
    int t1 = type1->t, t2 = type2->t, bt1 = t1 & VT_BTYPE, bt2 = t2 & VT_BTYPE;
    int ret = 1;

    type.t = VT_VOID;
    type.ref = NULL;

    if (bt1 == VT_VOID || bt2 == VT_VOID) {
        ret = op == '?' ? 1 : 0;
        /* NOTE: as an extension, we accept void on only one side */
        type.t = VT_VOID;
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        if (op == '+') ; /* Handled in caller */
        /* http://port70.net/~nsz/c/c99/n1256.html#6.5.15p6 */
        /* If one is a null ptr constant the result type is the other. */
        else if (is_null_pointer (op2)) type = *type1;
        else if (is_null_pointer (op1)) type = *type2;
        else if (bt1 != bt2) {
            /* accept comparison or cond-expr between pointer and integer
               with a warning */
            if ((op == '?' || TOK_ISCOND(op))
                && (is_integer_btype(bt1) || is_integer_btype(bt2)))
                tcc_warning("pointer/integer mismatch in %s",
                            op == '?' ? "conditional expression" : "comparison");
            else if (op != '-' || !is_integer_btype(bt2))
                ret = 0;
            type = *(bt1 == VT_PTR ? type1 : type2);
        } else {
            /* both pointers: check pointed-to type compatibility */
            CType *pt1 = pointed_type(type1);
            CType *pt2 = pointed_type(type2);
            int pbt1 = pt1->t & VT_BTYPE;
            int pbt2 = pt2->t & VT_BTYPE;
            int newquals, copied = 0;
            if (pbt1 != VT_VOID && pbt2 != VT_VOID
                && !compare_types(pt1, pt2, 1/*unqualif*/)) {
                if (op != '?' && !TOK_ISCOND(op))
                    ret = 0;
                else
                    type_incompatibility_warning(type1, type2,
                        op == '?'
                         ? "pointer type mismatch in conditional expression ('%s' and '%s')"
                         : "pointer type mismatch in comparison('%s' and '%s')");
            }
            if (op == '?') {
                /* pointers to void get preferred, otherwise the
                   pointed to types minus qualifs should be compatible */
                type = *((pbt1 == VT_VOID) ? type1 : type2);
                /* combine qualifs */
                newquals = ((pt1->t | pt2->t) & (VT_CONSTANT | VT_VOLATILE));
                if ((~pointed_type(&type)->t & (VT_CONSTANT | VT_VOLATILE))
                    & newquals)
                {
                    /* copy the pointer target symbol */
                    type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                        0, type.ref->c);
                    copied = 1;
                    pointed_type(&type)->t |= newquals;
                }
                /* pointers to incomplete arrays get converted to
                   pointers to completed ones if possible */
                if (pt1->t & VT_ARRAY
                    && pt2->t & VT_ARRAY
                    && pointed_type(&type)->ref->c < 0
                    && (pt1->ref->c > 0 || pt2->ref->c > 0))
                {
                    if (!copied)
                        type.ref = sym_push(SYM_FIELD, &type.ref->type,
                                            0, type.ref->c);
                    pointed_type(&type)->ref =
                        sym_push(SYM_FIELD, &pointed_type(&type)->ref->type,
                                 0, pointed_type(&type)->ref->c);
                    pointed_type(&type)->ref->c =
                        0 < pt1->ref->c ? pt1->ref->c : pt2->ref->c;
                }
            }
            if (TOK_ISCOND(op))
                type.t = VT_SIZE_T;
        }
    } else if (bt1 == VT_STRUCT || bt2 == VT_STRUCT) {
        if (op != '?' || !compare_types(type1, type2, 1))
            ret = 0;
        type = *type1;
    } else if (is_float(bt1) || is_float(bt2)) {
        /* usual arithmetic conversion: widest FP type wins */
        if (bt1 == VT_LDOUBLE || bt2 == VT_LDOUBLE) {
            type.t = VT_LDOUBLE;
        } else if (bt1 == VT_DOUBLE || bt2 == VT_DOUBLE) {
            type.t = VT_DOUBLE;
        } else {
            type.t = VT_FLOAT;
        }
    } else if (bt1 == VT_LLONG || bt2 == VT_LLONG) {
        /* cast to biggest op */
        type.t = VT_LLONG | VT_LONG;
        if (bt1 == VT_LLONG)
            type.t &= t1;
        if (bt2 == VT_LLONG)
            type.t &= t2;
        /* convert to unsigned if it does not fit in a long long */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_LLONG | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    } else {
        /* integer operations */
        type.t = VT_INT | (VT_LONG & (t1 | t2));
        /* convert to unsigned if it does not fit in an integer */
        if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED) ||
            (t2 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (VT_INT | VT_UNSIGNED))
            type.t |= VT_UNSIGNED;
    }
    if (dest)
        *dest = type;
    return ret;
}
/* generic gen_op: handles types problems */
/* Top-level binary operation dispatcher: normalizes operand types
   (function -> pointer decay, pointer arithmetic scaling, usual
   arithmetic conversions) and then hands off to gen_opif/gen_opic. */
ST_FUNC void gen_op(int op)
{
    int u, t1, t2, bt1, bt2, t;
    CType type1, combtype;

redo:
    t1 = vtop[-1].type.t;
    t2 = vtop[0].type.t;
    bt1 = t1 & VT_BTYPE;
    bt2 = t2 & VT_BTYPE;

    if (bt1 == VT_FUNC || bt2 == VT_FUNC) {
        /* decay function designators to pointers, then retry */
        if (bt2 == VT_FUNC) {
            mk_pointer(&vtop->type);
            gaddrof();
        }
        if (bt1 == VT_FUNC) {
            vswap();
            mk_pointer(&vtop->type);
            gaddrof();
            vswap();
        }
        goto redo;
    } else if (!combine_types(&combtype, vtop - 1, vtop, op)) {
        tcc_error_noabort("invalid operand types for binary operation");
        vpop();
    } else if (bt1 == VT_PTR || bt2 == VT_PTR) {
        /* at least one operand is a pointer */
        /* relational op: must be both pointers */
        if (TOK_ISCOND(op))
            goto std_op;
        /* if both pointers, then it must be the '-' op */
        if (bt1 == VT_PTR && bt2 == VT_PTR) {
            if (op != '-')
                tcc_error("cannot use pointers here");
            /* ptr - ptr: byte difference divided by element size */
            if (vtop[-1].type.t & VT_VLA) {
                vla_runtime_pointed_size(&vtop[-1].type);
            } else {
                vpushi(pointed_size(&vtop[-1].type));
            }
            vrott(3);
            gen_opic(op);
            vtop->type.t = VT_PTRDIFF_T;
            vswap();
            gen_op(TOK_PDIV);
        } else {
            /* exactly one pointer : must be '+' or '-'. */
            if (op != '-' && op != '+')
                tcc_error("cannot use pointers here");
            /* Put pointer as first operand */
            if (bt2 == VT_PTR) {
                vswap();
                t = t1, t1 = t2, t2 = t;
            }
#if PTR_SIZE == 4
            if ((vtop[0].type.t & VT_BTYPE) == VT_LLONG)
                /* XXX: truncate here because gen_opl can't handle ptr + long long */
                gen_cast_s(VT_INT);
#endif
            type1 = vtop[-1].type;
            /* scale the integer operand by the element size */
            if (vtop[-1].type.t & VT_VLA)
                vla_runtime_pointed_size(&vtop[-1].type);
            else {
                u = pointed_size(&vtop[-1].type);
                if (u < 0)
                    tcc_error("unknown array element size");
#if PTR_SIZE == 8
                vpushll(u);
#else
                /* XXX: cast to int ? (long long case) */
                vpushi(u);
#endif
            }
            gen_op('*');
#ifdef CONFIG_TCC_BCHECK
            if (tcc_state->do_bounds_check && !const_wanted) {
                /* if bounded pointers, we generate a special code to
                   test bounds */
                if (op == '-') {
                    vpushi(0);
                    vswap();
                    gen_op('-');
                }
                gen_bounded_ptr_add();
            } else
#endif
            {
                gen_opic(op);
            }
            type1.t &= ~VT_ARRAY;
            /* put again type if gen_opic() swaped operands */
            vtop->type = type1;
        }
    } else {
        /* floats can only be used for a few operations */
        if (is_float(combtype.t)
            && op != '+' && op != '-' && op != '*' && op != '/'
            && !TOK_ISCOND(op))
            tcc_error("invalid operands for binary operation");
        else if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL) {
            /* shifts promote only the left operand, keeping its signedness */
            t = bt1 == VT_LLONG ? VT_LLONG : VT_INT;
            if ((t1 & (VT_BTYPE | VT_UNSIGNED | VT_BITFIELD)) == (t | VT_UNSIGNED))
                t |= VT_UNSIGNED;
            t |= (VT_LONG & t1);
            combtype.t = t;
        }
    std_op:
        t = t2 = combtype.t;
        /* XXX: currently, some unsigned operations are explicit, so
           we modify them here */
        if (t & VT_UNSIGNED) {
            if (op == TOK_SAR)
                op = TOK_SHR;
            else if (op == '/')
                op = TOK_UDIV;
            else if (op == '%')
                op = TOK_UMOD;
            else if (op == TOK_LT)
                op = TOK_ULT;
            else if (op == TOK_GT)
                op = TOK_UGT;
            else if (op == TOK_LE)
                op = TOK_ULE;
            else if (op == TOK_GE)
                op = TOK_UGE;
        }
        vswap();
        gen_cast_s(t);
        vswap();
        /* special case for shifts and long long: we keep the shift as
           an integer */
        if (op == TOK_SHR || op == TOK_SAR || op == TOK_SHL)
            t2 = VT_INT;
        gen_cast_s(t2);
        if (is_float(t))
            gen_opif(op);
        else
            gen_opic(op);
        if (TOK_ISCOND(op)) {
            /* relational op: the result is an int */
            vtop->type.t = VT_INT;
        } else {
            vtop->type.t = t;
        }
    }
    // Make sure that we have converted to an rvalue:
    if (vtop->r & VT_LVAL)
        gv(is_float(vtop->type.t & VT_BTYPE) ? RC_FLOAT : RC_INT);
}
3597 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64 || defined TCC_TARGET_ARM
3598 #define gen_cvt_itof1 gen_cvt_itof
3599 #else
3600 /* generic itof for unsigned long long case */
3601 static void gen_cvt_itof1(int t)
3603 if ((vtop->type.t & (VT_BTYPE | VT_UNSIGNED)) ==
3604 (VT_LLONG | VT_UNSIGNED)) {
3606 if (t == VT_FLOAT)
3607 vpush_helper_func(TOK___floatundisf);
3608 #if LDOUBLE_SIZE != 8
3609 else if (t == VT_LDOUBLE)
3610 vpush_helper_func(TOK___floatundixf);
3611 #endif
3612 else
3613 vpush_helper_func(TOK___floatundidf);
3614 vrott(2);
3615 gfunc_call(1);
3616 vpushi(0);
3617 PUT_R_RET(vtop, t);
3618 } else {
3619 gen_cvt_itof(t);
3622 #endif
3624 #if defined TCC_TARGET_ARM64 || defined TCC_TARGET_RISCV64
3625 #define gen_cvt_ftoi1 gen_cvt_ftoi
3626 #else
3627 /* generic ftoi for unsigned long long case */
3628 static void gen_cvt_ftoi1(int t)
3630 int st;
3631 if (t == (VT_LLONG | VT_UNSIGNED)) {
3632 /* not handled natively */
3633 st = vtop->type.t & VT_BTYPE;
3634 if (st == VT_FLOAT)
3635 vpush_helper_func(TOK___fixunssfdi);
3636 #if LDOUBLE_SIZE != 8
3637 else if (st == VT_LDOUBLE)
3638 vpush_helper_func(TOK___fixunsxfdi);
3639 #endif
3640 else
3641 vpush_helper_func(TOK___fixunsdfdi);
3642 vrott(2);
3643 gfunc_call(1);
3644 vpushi(0);
3645 PUT_R_RET(vtop, t);
3646 } else {
3647 gen_cvt_ftoi(t);
3650 #endif
3652 /* special delayed cast for char/short */
3653 static void force_charshort_cast(void)
3655 int sbt = BFGET(vtop->r, VT_MUSTCAST) == 2 ? VT_LLONG : VT_INT;
3656 int dbt = vtop->type.t;
3657 vtop->r &= ~VT_MUSTCAST;
3658 vtop->type.t = sbt;
3659 gen_cast_s(dbt == VT_BOOL ? VT_BYTE|VT_UNSIGNED : dbt);
3660 vtop->type.t = dbt;
3663 static void gen_cast_s(int t)
3665 CType type;
3666 type.t = t;
3667 type.ref = NULL;
3668 gen_cast(&type);
3671 /* cast 'vtop' to 'type'. Casting to bitfields is forbidden. */
3672 static void gen_cast(CType *type)
3674 int sbt, dbt, sf, df, c;
3675 int dbt_bt, sbt_bt, ds, ss, bits, trunc;
3677 /* special delayed cast for char/short */
3678 if (vtop->r & VT_MUSTCAST)
3679 force_charshort_cast();
3681 /* bitfields first get cast to ints */
3682 if (vtop->type.t & VT_BITFIELD)
3683 gv(RC_INT);
3685 dbt = type->t & (VT_BTYPE | VT_UNSIGNED);
3686 sbt = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
/* a function value converts as a pointer to function */
3687 if (sbt == VT_FUNC)
3688 sbt = VT_PTR;
3690 again:
3691 if (sbt != dbt) {
3692 sf = is_float(sbt);
3693 df = is_float(dbt);
3694 dbt_bt = dbt & VT_BTYPE;
3695 sbt_bt = sbt & VT_BTYPE;
/* c != 0 means the operand is a plain compile-time constant */
3697 c = (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST;
3698 #if !defined TCC_IS_NATIVE && !defined TCC_IS_NATIVE_387
/* NOTE(review): when cross-compiling, do not constant-fold casts to
   long double (unless no code is wanted anyway) — presumably because
   the host's long double may differ from the target's; confirm */
3699 c &= (dbt != VT_LDOUBLE) | !!nocode_wanted;
3700 #endif
3701 if (c) {
3702 /* constant case: we can do it now */
3703 /* XXX: in ISOC, cannot do it if error in convert */
3704 if (sbt == VT_FLOAT)
3705 vtop->c.ld = vtop->c.f;
3706 else if (sbt == VT_DOUBLE)
3707 vtop->c.ld = vtop->c.d;
3709 if (df) {
3710 if (sbt_bt == VT_LLONG) {
3711 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 63))
3712 vtop->c.ld = vtop->c.i;
3713 else
/* value is negative when read as signed 64-bit */
3714 vtop->c.ld = -(long double)-vtop->c.i;
3715 } else if(!sf) {
3716 if ((sbt & VT_UNSIGNED) || !(vtop->c.i >> 31))
3717 vtop->c.ld = (uint32_t)vtop->c.i;
3718 else
3719 vtop->c.ld = -(long double)-(uint32_t)vtop->c.i;
3722 if (dbt == VT_FLOAT)
3723 vtop->c.f = (float)vtop->c.ld;
3724 else if (dbt == VT_DOUBLE)
3725 vtop->c.d = (double)vtop->c.ld;
3726 } else if (sf && dbt == VT_BOOL) {
3727 vtop->c.i = (vtop->c.ld != 0);
3728 } else {
3729 if(sf)
3730 vtop->c.i = vtop->c.ld;
3731 else if (sbt_bt == VT_LLONG || (PTR_SIZE == 8 && sbt == VT_PTR))
3733 else if (sbt & VT_UNSIGNED)
3734 vtop->c.i = (uint32_t)vtop->c.i;
3735 else
/* sign-extend a 32-bit signed source into the 64-bit constant */
3736 vtop->c.i = ((uint32_t)vtop->c.i | -(vtop->c.i & 0x80000000));
3738 if (dbt_bt == VT_LLONG || (PTR_SIZE == 8 && dbt == VT_PTR))
3740 else if (dbt == VT_BOOL)
3741 vtop->c.i = (vtop->c.i != 0);
3742 else {
/* narrow to the destination width, then sign-extend if needed */
3743 uint32_t m = dbt_bt == VT_BYTE ? 0xff :
3744 dbt_bt == VT_SHORT ? 0xffff :
3745 0xffffffff;
3746 vtop->c.i &= m;
3747 if (!(dbt & VT_UNSIGNED))
3748 vtop->c.i |= -(vtop->c.i & ((m >> 1) + 1));
3751 goto done;
3753 } else if (dbt == VT_BOOL
3754 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM))
3755 == (VT_CONST | VT_SYM)) {
3756 /* addresses are considered non-zero (see tcctest.c:sinit23) */
3757 vtop->r = VT_CONST;
3758 vtop->c.i = 1;
3759 goto done;
3762 /* cannot generate code for global or static initializers */
3763 if (STATIC_DATA_WANTED)
3764 goto done;
3766 /* non constant case: generate code */
3767 if (dbt == VT_BOOL) {
3768 gen_test_zero(TOK_NE);
3769 goto done;
3772 if (sf || df) {
3773 if (sf && df) {
3774 /* convert from fp to fp */
3775 gen_cvt_ftof(dbt);
3776 } else if (df) {
3777 /* convert int to fp */
3778 gen_cvt_itof1(dbt);
3779 } else {
3780 /* convert fp to int */
3781 sbt = dbt;
3782 if (dbt_bt != VT_LLONG && dbt_bt != VT_INT)
3783 sbt = VT_INT;
3784 gen_cvt_ftoi1(sbt);
3785 goto again; /* may need char/short cast */
3787 goto done;
3790 ds = btype_size(dbt_bt);
3791 ss = btype_size(sbt_bt);
3792 if (ds == 0 || ss == 0) {
3793 if (dbt_bt == VT_VOID)
3794 goto done;
3795 cast_error(&vtop->type, type);
3797 if (IS_ENUM(type->t) && type->ref->c < 0)
3798 tcc_error("cast to incomplete type");
3800 /* same size and no sign conversion needed */
3801 if (ds == ss && ds >= 4)
3802 goto done;
3803 if (dbt_bt == VT_PTR || sbt_bt == VT_PTR) {
3804 tcc_warning("cast between pointer and integer of different size");
3805 if (sbt_bt == VT_PTR) {
3806 /* put integer type to allow logical operations below */
3807 vtop->type.t = (PTR_SIZE == 8 ? VT_LLONG : VT_INT);
3811 /* processor allows { int a = 0, b = *(char*)&a; }
3812 That means that if we cast to less width, we can just
3813 change the type and read it still later. */
3814 #define ALLOW_SUBTYPE_ACCESS 1
3816 if (ALLOW_SUBTYPE_ACCESS && (vtop->r & VT_LVAL)) {
3817 /* value still in memory */
3818 if (ds <= ss)
3819 goto done;
3820 /* ss <= 4 here */
3821 if (ds <= 4 && !(dbt == (VT_SHORT | VT_UNSIGNED) && sbt == VT_BYTE)) {
3822 gv(RC_INT);
3823 goto done; /* no 64bit envolved */
3826 gv(RC_INT);
3828 trunc = 0;
3829 #if PTR_SIZE == 4
3830 if (ds == 8) {
3831 /* generate high word */
3832 if (sbt & VT_UNSIGNED) {
3833 vpushi(0);
3834 gv(RC_INT);
3835 } else {
3836 gv_dup();
3837 vpushi(31);
3838 gen_op(TOK_SAR);
3840 lbuild(dbt);
3841 } else if (ss == 8) {
3842 /* from long long: just take low order word */
3843 lexpand();
3844 vpop();
3846 ss = 4;
3848 #elif PTR_SIZE == 8
3849 if (ds == 8) {
3850 /* need to convert from 32bit to 64bit */
3851 if (sbt & VT_UNSIGNED) {
3852 #if defined(TCC_TARGET_RISCV64)
3853 /* RISC-V keeps 32bit vals in registers sign-extended.
3854 So here we need a zero-extension. */
3855 trunc = 32;
3856 #else
3857 goto done;
3858 #endif
3859 } else {
3860 gen_cvt_sxtw();
3861 goto done;
3863 ss = ds, ds = 4, dbt = sbt;
3864 } else if (ss == 8) {
3865 /* RISC-V keeps 32bit vals in registers sign-extended.
3866 So here we need a sign-extension for signed types and
3867 zero-extension. for unsigned types. */
3868 #if !defined(TCC_TARGET_RISCV64)
3869 trunc = 32; /* zero upper 32 bits for non RISC-V targets */
3870 #endif
3871 } else {
3872 ss = 4;
3874 #endif
3876 if (ds >= ss)
3877 goto done;
3878 #if defined TCC_TARGET_I386 || defined TCC_TARGET_X86_64 || defined TCC_TARGET_ARM64
3879 if (ss == 4) {
3880 gen_cvt_csti(dbt);
3881 goto done;
3883 #endif
/* generic narrowing: shift left then shift back right */
3884 bits = (ss - ds) * 8;
3885 /* for unsigned, gen_op will convert SAR to SHR */
3886 vtop->type.t = (ss == 8 ? VT_LLONG : VT_INT) | (dbt & VT_UNSIGNED);
3887 vpushi(bits);
3888 gen_op(TOK_SHL);
3889 vpushi(bits - trunc);
3890 gen_op(TOK_SAR);
3891 vpushi(trunc);
3892 gen_op(TOK_SHR);
3894 done:
3895 vtop->type = *type;
3896 vtop->type.t &= ~ ( VT_CONSTANT | VT_VOLATILE | VT_ARRAY );
3899 /* return type size as known at compile time. Put alignment at 'a' */
3900 ST_FUNC int type_size(CType *type, int *a)
3902 Sym *s;
3903 int bt;
3905 bt = type->t & VT_BTYPE;
3906 if (bt == VT_STRUCT) {
3907 /* struct/union */
3908 s = type->ref;
/* size (->c) and alignment (->r) were filled in by struct_layout() */
3909 *a = s->r;
3910 return s->c;
3911 } else if (bt == VT_PTR) {
3912 if (type->t & VT_ARRAY) {
3913 int ts;
3915 s = type->ref;
3916 ts = type_size(&s->type, a);
3918 if (ts < 0 && s->c < 0)
3919 ts = -ts;
3921 return ts * s->c;
3922 } else {
3923 *a = PTR_SIZE;
3924 return PTR_SIZE;
3926 } else if (IS_ENUM(type->t) && type->ref->c < 0) {
3927 return -1; /* incomplete enum */
3928 } else if (bt == VT_LDOUBLE) {
3929 *a = LDOUBLE_ALIGN;
3930 return LDOUBLE_SIZE;
3931 } else if (bt == VT_DOUBLE || bt == VT_LLONG) {
/* 8-byte scalars: alignment is ABI-dependent */
3932 #ifdef TCC_TARGET_I386
3933 #ifdef TCC_TARGET_PE
3934 *a = 8;
3935 #else
3936 *a = 4;
3937 #endif
3938 #elif defined(TCC_TARGET_ARM)
3939 #ifdef TCC_ARM_EABI
3940 *a = 8;
3941 #else
3942 *a = 4;
3943 #endif
3944 #else
3945 *a = 8;
3946 #endif
3947 return 8;
3948 } else if (bt == VT_INT || bt == VT_FLOAT) {
3949 *a = 4;
3950 return 4;
3951 } else if (bt == VT_SHORT) {
3952 *a = 2;
3953 return 2;
3954 } else if (bt == VT_QLONG || bt == VT_QFLOAT) {
3955 *a = 8;
3956 return 16;
3957 } else {
3958 /* char, void, function, _Bool */
3959 *a = 1;
3960 return 1;
3964 /* push type size as known at runtime time on top of value stack. Put
3965 alignment at 'a' */
3966 ST_FUNC void vla_runtime_type_size(CType *type, int *a)
3968 if (type->t & VT_VLA) {
3969 type_size(&type->ref->type, a);
3970 vset(&int_type, VT_LOCAL|VT_LVAL, type->ref->c);
3971 } else {
3972 vpushi(type_size(type, a));
3976 /* return the pointed type of t */
3977 static inline CType *pointed_type(CType *type)
3979 return &type->ref->type;
3982 /* modify type so that its it is a pointer to type. */
3983 ST_FUNC void mk_pointer(CType *type)
3985 Sym *s;
3986 s = sym_push(SYM_FIELD, type, 0, -1);
3987 type->t = VT_PTR | (type->t & VT_STORAGE);
3988 type->ref = s;
3991 /* return true if type1 and type2 are exactly the same (including
3992 qualifiers).
3994 static int is_compatible_types(CType *type1, CType *type2)
3996 return compare_types(type1,type2,0);
3999 /* return true if type1 and type2 are the same (ignoring qualifiers).
4001 static int is_compatible_unqualified_types(CType *type1, CType *type2)
4003 return compare_types(type1,type2,1);
4006 static void cast_error(CType *st, CType *dt)
4008 type_incompatibility_error(st, dt, "cannot convert '%s' to '%s'");
4011 /* verify type compatibility to store vtop in 'dt' type */
4012 static void verify_assign_cast(CType *dt)
4014 CType *st, *type1, *type2;
4015 int dbt, sbt, qualwarn, lvl;
4017 st = &vtop->type; /* source type */
4018 dbt = dt->t & VT_BTYPE;
4019 sbt = st->t & VT_BTYPE;
4020 if (dt->t & VT_CONSTANT)
4021 tcc_warning("assignment of read-only location");
4022 switch(dbt) {
4023 case VT_VOID:
4024 if (sbt != dbt)
4025 tcc_error("assignment to void expression");
4026 break;
4027 case VT_PTR:
4028 /* special cases for pointers */
4029 /* '0' can also be a pointer */
4030 if (is_null_pointer(vtop))
4031 break;
4032 /* accept implicit pointer to integer cast with warning */
4033 if (is_integer_btype(sbt)) {
4034 tcc_warning("assignment makes pointer from integer without a cast");
4035 break;
4037 type1 = pointed_type(dt);
4038 if (sbt == VT_PTR)
4039 type2 = pointed_type(st);
4040 else if (sbt == VT_FUNC)
4041 type2 = st; /* a function is implicitly a function pointer */
4042 else
4043 goto error;
4044 if (is_compatible_types(type1, type2))
4045 break;
/* walk down multi-level pointers, comparing target types and
   noting qualifiers dropped at any level */
4046 for (qualwarn = lvl = 0;; ++lvl) {
4047 if (((type2->t & VT_CONSTANT) && !(type1->t & VT_CONSTANT)) ||
4048 ((type2->t & VT_VOLATILE) && !(type1->t & VT_VOLATILE)))
4049 qualwarn = 1;
4050 dbt = type1->t & (VT_BTYPE|VT_LONG);
4051 sbt = type2->t & (VT_BTYPE|VT_LONG);
4052 if (dbt != VT_PTR || sbt != VT_PTR)
4053 break;
4054 type1 = pointed_type(type1);
4055 type2 = pointed_type(type2);
4057 if (!is_compatible_unqualified_types(type1, type2)) {
4058 if ((dbt == VT_VOID || sbt == VT_VOID) && lvl == 0) {
4059 /* void * can match anything */
4060 } else if (dbt == sbt
4061 && is_integer_btype(sbt & VT_BTYPE)
4062 && IS_ENUM(type1->t) + IS_ENUM(type2->t)
4063 + !!((type1->t ^ type2->t) & VT_UNSIGNED) < 2) {
4064 /* Like GCC don't warn by default for merely changes
4065 in pointer target signedness. Do warn for different
4066 base types, though, in particular for unsigned enums
4067 and signed int targets. */
4068 } else {
4069 tcc_warning("assignment from incompatible pointer type");
4070 break;
4073 if (qualwarn)
4074 tcc_warning("assignment discards qualifiers from pointer target type");
4075 break;
4076 case VT_BYTE:
4077 case VT_SHORT:
4078 case VT_INT:
4079 case VT_LLONG:
4080 if (sbt == VT_PTR || sbt == VT_FUNC) {
4081 tcc_warning("assignment makes integer from pointer without a cast");
4082 } else if (sbt == VT_STRUCT) {
/* struct assigned to integer: reported via the struct case below */
4083 goto case_VT_STRUCT;
4085 /* XXX: more tests */
4086 break;
4087 case VT_STRUCT:
4088 case_VT_STRUCT:
4089 if (!is_compatible_unqualified_types(dt, st)) {
4090 error:
4091 cast_error(st, dt);
4093 break;
4097 static void gen_assign_cast(CType *dt)
4099 verify_assign_cast(dt);
4100 gen_cast(dt);
4103 /* store vtop in lvalue pushed on stack */
4104 ST_FUNC void vstore(void)
4106 int sbt, dbt, ft, r, size, align, bit_size, bit_pos, delayed_cast;
4108 ft = vtop[-1].type.t;
4109 sbt = vtop->type.t & VT_BTYPE;
4110 dbt = ft & VT_BTYPE;
4112 verify_assign_cast(&vtop[-1].type);
4114 if (sbt == VT_STRUCT) {
4115 /* if structure, only generate pointer */
4116 /* structure assignment : generate memcpy */
4117 /* XXX: optimize if small size */
4118 size = type_size(&vtop->type, &align);
4120 /* destination */
4121 vswap();
4122 #ifdef CONFIG_TCC_BCHECK
4123 if (vtop->r & VT_MUSTBOUND)
4124 gbound(); /* check would be wrong after gaddrof() */
4125 #endif
4126 vtop->type.t = VT_PTR;
4127 gaddrof();
4129 /* address of memcpy() */
4130 #ifdef TCC_ARM_EABI
4131 if(!(align & 7))
4132 vpush_helper_func(TOK_memmove8);
4133 else if(!(align & 3))
4134 vpush_helper_func(TOK_memmove4);
4135 else
4136 #endif
4137 /* Use memmove, rather than memcpy, as dest and src may be same: */
4138 vpush_helper_func(TOK_memmove);
4140 vswap();
4141 /* source */
4142 vpushv(vtop - 2);
4143 #ifdef CONFIG_TCC_BCHECK
4144 if (vtop->r & VT_MUSTBOUND)
4145 gbound();
4146 #endif
4147 vtop->type.t = VT_PTR;
4148 gaddrof();
4149 /* type size */
4150 vpushi(size);
4151 gfunc_call(3);
4152 /* leave source on stack */
4154 } else if (ft & VT_BITFIELD) {
4155 /* bitfield store handling */
4157 /* save lvalue as expression result (example: s.b = s.a = n;) */
4158 vdup(), vtop[-1] = vtop[-2];
4160 bit_pos = BIT_POS(ft);
4161 bit_size = BIT_SIZE(ft);
4162 /* remove bit field info to avoid loops */
4163 vtop[-1].type.t = ft & ~VT_STRUCT_MASK;
4165 if (dbt == VT_BOOL) {
4166 gen_cast(&vtop[-1].type);
4167 vtop[-1].type.t = (vtop[-1].type.t & ~VT_BTYPE) | (VT_BYTE | VT_UNSIGNED);
4169 r = adjust_bf(vtop - 1, bit_pos, bit_size);
4170 if (dbt != VT_BOOL) {
4171 gen_cast(&vtop[-1].type);
4172 dbt = vtop[-1].type.t & VT_BTYPE;
4174 if (r == VT_STRUCT) {
/* field cannot be accessed through an integer type: byte-wise store */
4175 store_packed_bf(bit_pos, bit_size);
4176 } else {
4177 unsigned long long mask = (1ULL << bit_size) - 1;
4178 if (dbt != VT_BOOL) {
4179 /* mask source */
4180 if (dbt == VT_LLONG)
4181 vpushll(mask);
4182 else
4183 vpushi((unsigned)mask);
4184 gen_op('&');
4186 /* shift source */
4187 vpushi(bit_pos);
4188 gen_op(TOK_SHL);
4189 vswap();
4190 /* duplicate destination */
4191 vdup();
4192 vrott(3);
4193 /* load destination, mask and or with source */
4194 if (dbt == VT_LLONG)
4195 vpushll(~(mask << bit_pos));
4196 else
4197 vpushi(~((unsigned)mask << bit_pos));
4198 gen_op('&');
4199 gen_op('|');
4200 /* store result */
4201 vstore();
4202 /* ... and discard */
4203 vpop();
4205 } else if (dbt == VT_VOID) {
4206 --vtop;
4207 } else {
4208 /* optimize char/short casts */
4209 delayed_cast = 0;
4210 if ((dbt == VT_BYTE || dbt == VT_SHORT)
4211 && is_integer_btype(sbt)
4213 if ((vtop->r & VT_MUSTCAST)
4214 && btype_size(dbt) > btype_size(sbt)
4216 force_charshort_cast();
4217 delayed_cast = 1;
4218 } else {
4219 gen_cast(&vtop[-1].type);
4222 #ifdef CONFIG_TCC_BCHECK
4223 /* bound check case */
4224 if (vtop[-1].r & VT_MUSTBOUND) {
4225 vswap();
4226 gbound();
4227 vswap();
4229 #endif
4230 gv(RC_TYPE(dbt)); /* generate value */
4232 if (delayed_cast) {
4233 vtop->r |= BFVAL(VT_MUSTCAST, (sbt == VT_LLONG) + 1);
4234 //tcc_warning("deley cast %x -> %x", sbt, dbt);
4235 vtop->type.t = ft & VT_TYPE;
4238 /* if lvalue was saved on stack, must read it */
4239 if ((vtop[-1].r & VT_VALMASK) == VT_LLOCAL) {
4240 SValue sv;
4241 r = get_reg(RC_INT);
4242 sv.type.t = VT_PTRDIFF_T;
4243 sv.r = VT_LOCAL | VT_LVAL;
4244 sv.c.i = vtop[-1].c.i;
4245 load(r, &sv);
4246 vtop[-1].r = r | VT_LVAL;
4249 r = vtop->r & VT_VALMASK;
4250 /* two word case handling :
4251 store second register at word + 4 (or +8 for x86-64) */
4252 if (USING_TWO_WORDS(dbt)) {
4253 int load_type = (dbt == VT_QFLOAT) ? VT_DOUBLE : VT_PTRDIFF_T;
4254 vtop[-1].type.t = load_type;
4255 store(r, vtop - 1);
4256 vswap();
4257 /* convert to int to increment easily */
4258 vtop->type.t = VT_PTRDIFF_T;
4259 gaddrof();
4260 vpushs(PTR_SIZE);
4261 gen_op('+');
4262 vtop->r |= VT_LVAL;
4263 vswap();
4264 vtop[-1].type.t = load_type;
4265 /* XXX: it works because r2 is spilled last ! */
4266 store(vtop->r2, vtop - 1);
4267 } else {
4268 /* single word */
4269 store(r, vtop - 1);
4271 vswap();
4272 vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
4276 /* post defines POST/PRE add. c is the token ++ or -- */
4277 ST_FUNC void inc(int post, int c)
4279 test_lvalue();
4280 vdup(); /* save lvalue */
4281 if (post) {
4282 gv_dup(); /* duplicate value */
4283 vrotb(3);
4284 vrotb(3);
4286 /* add constant */
4287 vpushi(c - TOK_MID);
4288 gen_op('+');
4289 vstore(); /* store value */
4290 if (post)
4291 vpop(); /* if post op, return saved value */
4294 ST_FUNC void parse_mult_str (CString *astr, const char *msg)
4296 /* read the string */
4297 if (tok != TOK_STR)
4298 expect(msg);
4299 cstr_new(astr);
4300 while (tok == TOK_STR) {
4301 /* XXX: add \0 handling too ? */
4302 cstr_cat(astr, tokc.str.data, -1);
4303 next();
4305 cstr_ccat(astr, '\0');
4308 /* If I is >= 1 and a power of two, returns log2(i)+1.
4309 If I is 0 returns 0. */
4310 ST_FUNC int exact_log2p1(int i)
4312 int ret;
4313 if (!i)
4314 return 0;
4315 for (ret = 1; i >= 1 << 8; ret += 8)
4316 i >>= 8;
4317 if (i >= 1 << 4)
4318 ret += 4, i >>= 4;
4319 if (i >= 1 << 2)
4320 ret += 2, i >>= 2;
4321 if (i >= 1 << 1)
4322 ret++;
4323 return ret;
4326 /* Parse __attribute__((...)) GNUC extension. */
4327 static void parse_attribute(AttributeDef *ad)
4329 int t, n;
4330 CString astr;
4332 redo:
4333 if (tok != TOK_ATTRIBUTE1 && tok != TOK_ATTRIBUTE2)
4334 return;
4335 next();
4336 skip('(');
4337 skip('(');
4338 while (tok != ')') {
4339 if (tok < TOK_IDENT)
4340 expect("attribute name");
4341 t = tok;
4342 next();
/* dispatch on the attribute keyword just consumed */
4343 switch(t) {
4344 case TOK_CLEANUP1:
4345 case TOK_CLEANUP2:
4347 Sym *s;
4349 skip('(');
4350 s = sym_find(tok);
4351 if (!s) {
4352 tcc_warning("implicit declaration of function '%s'",
4353 get_tok_str(tok, &tokc));
4354 s = external_global_sym(tok, &func_old_type);
4355 } else if ((s->type.t & VT_BTYPE) != VT_FUNC)
4356 tcc_error("'%s' is not declared as function", get_tok_str(tok, &tokc));
4357 ad->cleanup_func = s;
4358 next();
4359 skip(')');
4360 break;
4362 case TOK_CONSTRUCTOR1:
4363 case TOK_CONSTRUCTOR2:
4364 ad->f.func_ctor = 1;
4365 break;
4366 case TOK_DESTRUCTOR1:
4367 case TOK_DESTRUCTOR2:
4368 ad->f.func_dtor = 1;
4369 break;
4370 case TOK_ALWAYS_INLINE1:
4371 case TOK_ALWAYS_INLINE2:
4372 ad->f.func_alwinl = 1;
4373 break;
4374 case TOK_SECTION1:
4375 case TOK_SECTION2:
4376 skip('(');
4377 parse_mult_str(&astr, "section name");
4378 ad->section = find_section(tcc_state, (char *)astr.data);
4379 skip(')');
4380 cstr_free(&astr);
4381 break;
4382 case TOK_ALIAS1:
4383 case TOK_ALIAS2:
4384 skip('(');
4385 parse_mult_str(&astr, "alias(\"target\")");
4386 ad->alias_target = /* save string as token, for later */
4387 tok_alloc((char*)astr.data, astr.size-1)->tok;
4388 skip(')');
4389 cstr_free(&astr);
4390 break;
4391 case TOK_VISIBILITY1:
4392 case TOK_VISIBILITY2:
4393 skip('(');
4394 parse_mult_str(&astr,
4395 "visibility(\"default|hidden|internal|protected\")");
4396 if (!strcmp (astr.data, "default"))
4397 ad->a.visibility = STV_DEFAULT;
4398 else if (!strcmp (astr.data, "hidden"))
4399 ad->a.visibility = STV_HIDDEN;
4400 else if (!strcmp (astr.data, "internal"))
4401 ad->a.visibility = STV_INTERNAL;
4402 else if (!strcmp (astr.data, "protected"))
4403 ad->a.visibility = STV_PROTECTED;
4404 else
4405 expect("visibility(\"default|hidden|internal|protected\")");
4406 skip(')');
4407 cstr_free(&astr);
4408 break;
4409 case TOK_ALIGNED1:
4410 case TOK_ALIGNED2:
4411 if (tok == '(') {
4412 next();
4413 n = expr_const();
4414 if (n <= 0 || (n & (n - 1)) != 0)
4415 tcc_error("alignment must be a positive power of two");
4416 skip(')');
4417 } else {
/* bare __attribute__((aligned)) means maximum alignment */
4418 n = MAX_ALIGN;
4420 ad->a.aligned = exact_log2p1(n);
4421 if (n != 1 << (ad->a.aligned - 1))
4422 tcc_error("alignment of %d is larger than implemented", n);
4423 break;
4424 case TOK_PACKED1:
4425 case TOK_PACKED2:
4426 ad->a.packed = 1;
4427 break;
4428 case TOK_WEAK1:
4429 case TOK_WEAK2:
4430 ad->a.weak = 1;
4431 break;
4432 case TOK_UNUSED1:
4433 case TOK_UNUSED2:
4434 /* currently, no need to handle it because tcc does not
4435 track unused objects */
4436 break;
4437 case TOK_NORETURN1:
4438 case TOK_NORETURN2:
4439 ad->f.func_noreturn = 1;
4440 break;
4441 case TOK_CDECL1:
4442 case TOK_CDECL2:
4443 case TOK_CDECL3:
4444 ad->f.func_call = FUNC_CDECL;
4445 break;
4446 case TOK_STDCALL1:
4447 case TOK_STDCALL2:
4448 case TOK_STDCALL3:
4449 ad->f.func_call = FUNC_STDCALL;
4450 break;
4451 #ifdef TCC_TARGET_I386
4452 case TOK_REGPARM1:
4453 case TOK_REGPARM2:
4454 skip('(');
4455 n = expr_const();
4456 if (n > 3)
4457 n = 3;
4458 else if (n < 0)
4459 n = 0;
4460 if (n > 0)
4461 ad->f.func_call = FUNC_FASTCALL1 + n - 1;
4462 skip(')');
4463 break;
4464 case TOK_FASTCALL1:
4465 case TOK_FASTCALL2:
4466 case TOK_FASTCALL3:
4467 ad->f.func_call = FUNC_FASTCALLW;
4468 break;
4469 #endif
4470 case TOK_MODE:
/* NOTE(review): attr_mode stores basic type + 1, presumably so 0
   can mean "no mode given" — verify against users of attr_mode */
4471 skip('(');
4472 switch(tok) {
4473 case TOK_MODE_DI:
4474 ad->attr_mode = VT_LLONG + 1;
4475 break;
4476 case TOK_MODE_QI:
4477 ad->attr_mode = VT_BYTE + 1;
4478 break;
4479 case TOK_MODE_HI:
4480 ad->attr_mode = VT_SHORT + 1;
4481 break;
4482 case TOK_MODE_SI:
4483 case TOK_MODE_word:
4484 ad->attr_mode = VT_INT + 1;
4485 break;
4486 default:
4487 tcc_warning("__mode__(%s) not supported\n", get_tok_str(tok, NULL));
4488 break;
4490 next();
4491 skip(')');
4492 break;
4493 case TOK_DLLEXPORT:
4494 ad->a.dllexport = 1;
4495 break;
4496 case TOK_NODECORATE:
4497 ad->a.nodecorate = 1;
4498 break;
4499 case TOK_DLLIMPORT:
4500 ad->a.dllimport = 1;
4501 break;
4502 default:
4503 if (tcc_state->warn_unsupported)
4504 tcc_warning("'%s' attribute ignored", get_tok_str(t, NULL));
4505 /* skip parameters */
4506 if (tok == '(') {
4507 int parenthesis = 0;
4508 do {
4509 if (tok == '(')
4510 parenthesis++;
4511 else if (tok == ')')
4512 parenthesis--;
4513 next();
4514 } while (parenthesis && tok != -1);
4516 break;
4518 if (tok != ',')
4519 break;
4520 next();
4522 skip(')');
4523 skip(')');
/* attribute lists may be chained: __attribute__((a)) __attribute__((b)) */
4524 goto redo;
4527 static Sym * find_field (CType *type, int v, int *cumofs)
4529 Sym *s = type->ref;
4530 v |= SYM_FIELD;
4531 while ((s = s->next) != NULL) {
4532 if ((s->v & SYM_FIELD) &&
4533 (s->type.t & VT_BTYPE) == VT_STRUCT &&
4534 (s->v & ~SYM_FIELD) >= SYM_FIRST_ANOM) {
4535 Sym *ret = find_field (&s->type, v, cumofs);
4536 if (ret) {
4537 *cumofs += s->c;
4538 return ret;
4541 if (s->v == v)
4542 break;
4544 return s;
4547 static void check_fields (CType *type, int check)
4549 Sym *s = type->ref;
4551 while ((s = s->next) != NULL) {
4552 int v = s->v & ~SYM_FIELD;
4553 if (v < SYM_FIRST_ANOM) {
4554 TokenSym *ts = table_ident[v - TOK_IDENT];
4555 if (check && (ts->tok & SYM_FIELD))
4556 tcc_error("duplicate member '%s'", get_tok_str(v, NULL));
4557 ts->tok ^= SYM_FIELD;
4558 } else if ((s->type.t & VT_BTYPE) == VT_STRUCT)
4559 check_fields (&s->type, check);
/* Compute the layout of struct/union TYPE: assign each member its byte
   offset (f->c) and bit position, honoring PCC/GCC vs MS bitfield
   rules, attribute packed/aligned and #pragma pack; finally store the
   total size in type->ref->c and the alignment in type->ref->r. */
4563 static void struct_layout(CType *type, AttributeDef *ad)
4565 int size, align, maxalign, offset, c, bit_pos, bit_size;
4566 int packed, a, bt, prevbt, prev_bit_size;
4567 int pcc = !tcc_state->ms_bitfields;
4568 int pragma_pack = *tcc_state->pack_stack_ptr;
4569 Sym *f;
4571 maxalign = 1;
4572 offset = 0;
4573 c = 0;
4574 bit_pos = 0;
4575 prevbt = VT_STRUCT; /* make it never match */
4576 prev_bit_size = 0;
4578 //#define BF_DEBUG
4580 for (f = type->ref->next; f; f = f->next) {
/* bit_size < 0 marks a non-bitfield member */
4581 if (f->type.t & VT_BITFIELD)
4582 bit_size = BIT_SIZE(f->type.t);
4583 else
4584 bit_size = -1;
4585 size = type_size(&f->type, &align);
4586 a = f->a.aligned ? 1 << (f->a.aligned - 1) : 0;
4587 packed = 0;
4589 if (pcc && bit_size == 0) {
4590 /* in pcc mode, packing does not affect zero-width bitfields */
4592 } else {
4593 /* in pcc mode, attribute packed overrides if set. */
4594 if (pcc && (f->a.packed || ad->a.packed))
4595 align = packed = 1;
4597 /* pragma pack overrides align if lesser and packs bitfields always */
4598 if (pragma_pack) {
4599 packed = 1;
4600 if (pragma_pack < align)
4601 align = pragma_pack;
4602 /* in pcc mode pragma pack also overrides individual align */
4603 if (pcc && pragma_pack < a)
4604 a = 0;
4607 /* some individual align was specified */
4608 if (a)
4609 align = a;
4611 if (type->ref->type.t == VT_UNION) {
4612 if (pcc && bit_size >= 0)
4613 size = (bit_size + 7) >> 3;
4614 offset = 0;
4615 if (size > c)
4616 c = size;
4618 } else if (bit_size < 0) {
/* plain (non-bitfield) struct member */
4619 if (pcc)
4620 c += (bit_pos + 7) >> 3;
4621 c = (c + align - 1) & -align;
4622 offset = c;
4623 if (size > 0)
4624 c += size;
4625 bit_pos = 0;
4626 prevbt = VT_STRUCT;
4627 prev_bit_size = 0;
4629 } else {
4630 /* A bit-field. Layout is more complicated. There are two
4631 options: PCC (GCC) compatible and MS compatible */
4632 if (pcc) {
4633 /* In PCC layout a bit-field is placed adjacent to the
4634 preceding bit-fields, except if:
4635 - it has zero-width
4636 - an individual alignment was given
4637 - it would overflow its base type container and
4638 there is no packing */
4639 if (bit_size == 0) {
4640 new_field:
4641 c = (c + ((bit_pos + 7) >> 3) + align - 1) & -align;
4642 bit_pos = 0;
4643 } else if (f->a.aligned) {
4644 goto new_field;
4645 } else if (!packed) {
4646 int a8 = align * 8;
4647 int ofs = ((c * 8 + bit_pos) % a8 + bit_size + a8 - 1) / a8;
4648 if (ofs > size / align)
4649 goto new_field;
4652 /* in pcc mode, long long bitfields have type int if they fit */
4653 if (size == 8 && bit_size <= 32)
4654 f->type.t = (f->type.t & ~VT_BTYPE) | VT_INT, size = 4;
4656 while (bit_pos >= align * 8)
4657 c += align, bit_pos -= align * 8;
4658 offset = c;
4660 /* In PCC layout named bit-fields influence the alignment
4661 of the containing struct using the base types alignment,
4662 except for packed fields (which here have correct align). */
4663 if (f->v & SYM_FIRST_ANOM
4664 // && bit_size // ??? gcc on ARM/rpi does that
4666 align = 1;
4668 } else {
4669 bt = f->type.t & VT_BTYPE;
4670 if ((bit_pos + bit_size > size * 8)
4671 || (bit_size > 0) == (bt != prevbt)
4673 c = (c + align - 1) & -align;
4674 offset = c;
4675 bit_pos = 0;
4676 /* In MS bitfield mode a bit-field run always uses
4677 at least as many bits as the underlying type.
4678 To start a new run it's also required that this
4679 or the last bit-field had non-zero width. */
4680 if (bit_size || prev_bit_size)
4681 c += size;
4683 /* In MS layout the records alignment is normally
4684 influenced by the field, except for a zero-width
4685 field at the start of a run (but by further zero-width
4686 fields it is again). */
4687 if (bit_size == 0 && prevbt != bt)
4688 align = 1;
4689 prevbt = bt;
4690 prev_bit_size = bit_size;
4693 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4694 | (bit_pos << VT_STRUCT_SHIFT);
4695 bit_pos += bit_size;
4697 if (align > maxalign)
4698 maxalign = align;
4700 #ifdef BF_DEBUG
4701 printf("set field %s offset %-2d size %-2d align %-2d",
4702 get_tok_str(f->v & ~SYM_FIELD, NULL), offset, size, align);
4703 if (f->type.t & VT_BITFIELD) {
4704 printf(" pos %-2d bits %-2d",
4705 BIT_POS(f->type.t),
4706 BIT_SIZE(f->type.t)
4709 printf("\n");
4710 #endif
4712 f->c = offset;
4713 f->r = 0;
4716 if (pcc)
4717 c += (bit_pos + 7) >> 3;
4719 /* store size and alignment */
4720 a = bt = ad->a.aligned ? 1 << (ad->a.aligned - 1) : 1;
4721 if (a < maxalign)
4722 a = maxalign;
4723 type->ref->r = a;
4724 if (pragma_pack && pragma_pack < maxalign && 0 == pcc) {
4725 /* can happen if individual align for some member was given. In
4726 this case MSVC ignores maxalign when aligning the size */
4727 a = pragma_pack;
4728 if (a < bt)
4729 a = bt;
4731 c = (c + a - 1) & -a;
4732 type->ref->c = c;
4734 #ifdef BF_DEBUG
4735 printf("struct size %-2d align %-2d\n\n", c, a), fflush(stdout);
4736 #endif
4738 /* check whether we can access bitfields by their type */
4739 for (f = type->ref->next; f; f = f->next) {
4740 int s, px, cx, c0;
4741 CType t;
4743 if (0 == (f->type.t & VT_BITFIELD))
4744 continue;
4745 f->type.ref = f;
4746 f->auxtype = -1;
4747 bit_size = BIT_SIZE(f->type.t);
4748 if (bit_size == 0)
4749 continue;
4750 bit_pos = BIT_POS(f->type.t);
4751 size = type_size(&f->type, &align);
/* field fully contained in its base type and within the struct:
   it can be accessed with its declared type as-is */
4753 if (bit_pos + bit_size <= size * 8 && f->c + size <= c
4754 #ifdef TCC_TARGET_ARM
4755 && !(f->c & (align - 1))
4756 #endif
4758 continue;
4760 /* try to access the field using a different type */
4761 c0 = -1, s = align = 1;
4762 t.t = VT_BYTE;
4763 for (;;) {
4764 px = f->c * 8 + bit_pos;
4765 cx = (px >> 3) & -align;
4766 px = px - (cx << 3);
4767 if (c0 == cx)
4768 break;
4769 s = (px + bit_size + 7) >> 3;
4770 if (s > 4) {
4771 t.t = VT_LLONG;
4772 } else if (s > 2) {
4773 t.t = VT_INT;
4774 } else if (s > 1) {
4775 t.t = VT_SHORT;
4776 } else {
4777 t.t = VT_BYTE;
4779 s = type_size(&t, &align);
4780 c0 = cx;
4783 if (px + bit_size <= s * 8 && cx + s <= c
4784 #ifdef TCC_TARGET_ARM
4785 && !(cx & (align - 1))
4786 #endif
4788 /* update offset and bit position */
4789 f->c = cx;
4790 bit_pos = px;
4791 f->type.t = (f->type.t & ~(0x3f << VT_STRUCT_SHIFT))
4792 | (bit_pos << VT_STRUCT_SHIFT);
4793 if (s != size)
4794 f->auxtype = t.t;
4795 #ifdef BF_DEBUG
4796 printf("FIX field %s offset %-2d size %-2d align %-2d "
4797 "pos %-2d bits %-2d\n",
4798 get_tok_str(f->v & ~SYM_FIELD, NULL),
4799 cx, s, align, px, bit_size);
4800 #endif
4801 } else {
4802 /* fall back to load/store single-byte wise */
4803 f->auxtype = VT_STRUCT;
4804 #ifdef BF_DEBUG
4805 printf("FIX field %s : load byte-wise\n",
4806 get_tok_str(f->v & ~SYM_FIELD, NULL));
4807 #endif
4812 /* enum/struct/union declaration. u is VT_ENUM/VT_STRUCT/VT_UNION */
4813 static void struct_decl(CType *type, int u)
/* Parses the tag (if any), looks it up or creates an anonymous symbol,
   then parses the optional '{...}' body: enumerator list for VT_ENUM,
   member declarations for VT_STRUCT/VT_UNION.  On return 'type'
   describes the (possibly still incomplete) tagged type. */
4815 int v, c, size, align, flexible;
4816 int bit_size, bsize, bt;
4817 Sym *s, *ss, **ps;
4818 AttributeDef ad, ad1;
4819 CType type1, btype;
4821 memset(&ad, 0, sizeof ad);
4822 next();
4823 parse_attribute(&ad);
4824 if (tok != '{') {
4825 v = tok;
4826 next();
4827 /* struct already defined ? return it */
4828 if (v < TOK_IDENT)
4829 expect("struct/union/enum name");
4830 s = struct_find(v);
/* reuse a previous tag only in the same scope, or when this is a mere
   reference (no '{' follows); enum tags also match via IS_ENUM */
4831 if (s && (s->sym_scope == local_scope || tok != '{')) {
4832 if (u == s->type.t)
4833 goto do_decl;
4834 if (u == VT_ENUM && IS_ENUM(s->type.t))
4835 goto do_decl;
4836 tcc_error("redefinition of '%s'", get_tok_str(v, NULL));
4838 } else {
/* no tag given: generate an anonymous one */
4839 v = anon_sym++;
4841 /* Record the original enum/struct/union token. */
4842 type1.t = u == VT_ENUM ? u | VT_INT | VT_UNSIGNED : u;
4843 type1.ref = NULL;
4844 /* we put an undefined size for struct/union */
4845 s = sym_push(v | SYM_STRUCT, &type1, 0, -1);
4846 s->r = 0; /* default alignment is zero as gcc */
4847 do_decl:
4848 type->t = s->type.t;
4849 type->ref = s;
4851 if (tok == '{') {
4852 next();
4853 if (s->c != -1)
4854 tcc_error("struct/union/enum already defined");
/* s->c == -2 marks "definition in progress" */
4855 s->c = -2;
4856 /* cannot be empty */
4857 /* non empty enums are not allowed */
4858 ps = &s->next;
4859 if (u == VT_ENUM) {
/* ll = current value, pl/nl = max positive / min negative seen,
   used below to pick the enum's integral type */
4860 long long ll = 0, pl = 0, nl = 0;
4861 CType t;
4862 t.ref = s;
4863 /* enum symbols have static storage */
4864 t.t = VT_INT|VT_STATIC|VT_ENUM_VAL;
4865 for(;;) {
4866 v = tok;
4867 if (v < TOK_UIDENT)
4868 expect("identifier");
4869 ss = sym_find(v);
4870 if (ss && !local_stack)
4871 tcc_error("redefinition of enumerator '%s'",
4872 get_tok_str(v, NULL));
4873 next();
4874 if (tok == '=') {
4875 next();
4876 ll = expr_const64();
4878 ss = sym_push(v, &t, VT_CONST, 0);
4879 ss->enum_val = ll;
4880 *ps = ss, ps = &ss->next;
4881 if (ll < nl)
4882 nl = ll;
4883 if (ll > pl)
4884 pl = ll;
4885 if (tok != ',')
4886 break;
4887 next();
4888 ll++;
4889 /* NOTE: we accept a trailing comma */
4890 if (tok == '}')
4891 break;
4893 skip('}');
4894 /* set integral type of the enum */
4895 t.t = VT_INT;
4896 if (nl >= 0) {
/* all values non-negative: unsigned; widen if it overflows 32 bits */
4897 if (pl != (unsigned)pl)
4898 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4899 t.t |= VT_UNSIGNED;
4900 } else if (pl != (int)pl || nl != (int)nl)
4901 t.t = (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4902 s->type.t = type->t = t.t | VT_ENUM;
4903 s->c = 0;
4904 /* set type for enum members */
4905 for (ss = s->next; ss; ss = ss->next) {
4906 ll = ss->enum_val;
4907 if (ll == (int)ll) /* default is int if it fits */
4908 continue;
4909 if (t.t & VT_UNSIGNED) {
4910 ss->type.t |= VT_UNSIGNED;
4911 if (ll == (unsigned)ll)
4912 continue;
4914 ss->type.t = (ss->type.t & ~VT_BTYPE)
4915 | (LONG_SIZE==8 ? VT_LLONG|VT_LONG : VT_LLONG);
4917 } else {
/* struct/union member list; c tracks whether a real (named or
   struct-typed) member was seen, for flexible-array placement checks */
4918 c = 0;
4919 flexible = 0;
4920 while (tok != '}') {
4921 if (!parse_btype(&btype, &ad1)) {
4922 skip(';');
4923 continue;
4925 while (1) {
4926 if (flexible)
4927 tcc_error("flexible array member '%s' not at the end of struct",
4928 get_tok_str(v, NULL));
4929 bit_size = -1;
4930 v = 0;
4931 type1 = btype;
4932 if (tok != ':') {
4933 if (tok != ';')
4934 type_decl(&type1, &ad1, &v, TYPE_DIRECT);
4935 if (v == 0) {
/* unnamed member: only anonymous struct/union allowed
   (or MS extension: named struct type used unnamed) */
4936 if ((type1.t & VT_BTYPE) != VT_STRUCT)
4937 expect("identifier");
4938 else {
4939 int v = btype.ref->v;
4940 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) < SYM_FIRST_ANOM) {
4941 if (tcc_state->ms_extensions == 0)
4942 expect("identifier");
4946 if (type_size(&type1, &align) < 0) {
4947 if ((u == VT_STRUCT) && (type1.t & VT_ARRAY) && c)
4948 flexible = 1;
4949 else
4950 tcc_error("field '%s' has incomplete type",
4951 get_tok_str(v, NULL));
4953 if ((type1.t & VT_BTYPE) == VT_FUNC ||
4954 (type1.t & VT_BTYPE) == VT_VOID ||
4955 (type1.t & VT_STORAGE))
4956 tcc_error("invalid type for '%s'",
4957 get_tok_str(v, NULL));
4959 if (tok == ':') {
4960 next();
4961 bit_size = expr_const();
4962 /* XXX: handle v = 0 case for messages */
4963 if (bit_size < 0)
4964 tcc_error("negative width in bit-field '%s'",
4965 get_tok_str(v, NULL));
4966 if (v && bit_size == 0)
4967 tcc_error("zero width for bit-field '%s'",
4968 get_tok_str(v, NULL));
4969 parse_attribute(&ad1);
4971 size = type_size(&type1, &align);
4972 if (bit_size >= 0) {
4973 bt = type1.t & VT_BTYPE;
4974 if (bt != VT_INT &&
4975 bt != VT_BYTE &&
4976 bt != VT_SHORT &&
4977 bt != VT_BOOL &&
4978 bt != VT_LLONG)
4979 tcc_error("bitfields must have scalar type");
4980 bsize = size * 8;
4981 if (bit_size > bsize) {
4982 tcc_error("width of '%s' exceeds its type",
4983 get_tok_str(v, NULL));
4984 } else if (bit_size == bsize
4985 && !ad.a.packed && !ad1.a.packed) {
4986 /* no need for bit fields */
4988 } else if (bit_size == 64) {
4989 tcc_error("field width 64 not implemented");
4990 } else {
/* encode the bit width in the upper bits of the type word */
4991 type1.t = (type1.t & ~VT_STRUCT_MASK)
4992 | VT_BITFIELD
4993 | (bit_size << (VT_STRUCT_SHIFT + 6));
4996 if (v != 0 || (type1.t & VT_BTYPE) == VT_STRUCT) {
4997 /* Remember we've seen a real field to check
4998 for placement of flexible array member. */
4999 c = 1;
5001 /* If member is a struct or bit-field, enforce
5002 placing into the struct (as anonymous). */
5003 if (v == 0 &&
5004 ((type1.t & VT_BTYPE) == VT_STRUCT ||
5005 bit_size >= 0)) {
5006 v = anon_sym++;
5008 if (v) {
5009 ss = sym_push(v | SYM_FIELD, &type1, 0, 0);
5010 ss->a = ad1.a;
5011 *ps = ss;
5012 ps = &ss->next;
5014 if (tok == ';' || tok == TOK_EOF)
5015 break;
5016 skip(',');
5018 skip(';');
5020 skip('}');
5021 parse_attribute(&ad);
5022 if (ad.cleanup_func) {
5023 tcc_warning("attribute '__cleanup__' ignored on type");
5025 check_fields(type, 1);
5026 check_fields(type, 0);
5027 struct_layout(type, &ad);
/* Merge the attributes stored on symbol 's' (both the generic symbol
   attributes and the function attributes) into 'ad'. */
5032 static void sym_to_attr(AttributeDef *ad, Sym *s)
5034 merge_symattr(&ad->a, &s->a);
5035 merge_funcattr(&ad->f, &s->f);
5038 /* Add type qualifiers to a type. If the type is an array then the qualifiers
5039 are added to the element type, copied because it could be a typedef. */
5040 static void parse_btype_qualify(CType *type, int qualifiers)
/* Walk down through array derivations so the qualifier lands on the
   element type; each level is copied via sym_push so a shared typedef
   is never modified in place. */
5042 while (type->t & VT_ARRAY) {
5043 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
5044 type = &type->ref->type;
5046 type->t |= qualifiers;
5049 /* return 0 if no type declaration. otherwise, return the basic type
5050 and skip it.
5052 static int parse_btype(CType *type, AttributeDef *ad)
/* t accumulates the VT_* type word; bt records the base type token
   seen so far and st the size modifier (short/long): -1 = none yet,
   -2 = taken from a typedef.  Returns nonzero iff any type-specifier
   or storage-class was consumed. */
5054 int t, u, bt, st, type_found, typespec_found, g, n;
5055 Sym *s;
5056 CType type1;
5058 memset(ad, 0, sizeof(AttributeDef));
5059 type_found = 0;
5060 typespec_found = 0;
5061 t = VT_INT;
5062 bt = st = -1;
5063 type->ref = NULL;
5065 while(1) {
5066 switch(tok) {
5067 case TOK_EXTENSION:
5068 /* currently, we really ignore extension */
5069 next();
5070 continue;
5072 /* basic types */
5073 case TOK_CHAR:
5074 u = VT_BYTE;
5075 basic_type:
5076 next();
5077 basic_type1:
/* reject invalid combinations like "short short" or "int char" */
5078 if (u == VT_SHORT || u == VT_LONG) {
5079 if (st != -1 || (bt != -1 && bt != VT_INT))
5080 tmbt: tcc_error("too many basic types");
5081 st = u;
5082 } else {
5083 if (bt != -1 || (st != -1 && u != VT_INT))
5084 goto tmbt;
5085 bt = u;
5087 if (u != VT_INT)
5088 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
5089 typespec_found = 1;
5090 break;
5091 case TOK_VOID:
5092 u = VT_VOID;
5093 goto basic_type;
5094 case TOK_SHORT:
5095 u = VT_SHORT;
5096 goto basic_type;
5097 case TOK_INT:
5098 u = VT_INT;
5099 goto basic_type;
5100 case TOK_ALIGNAS:
/* _Alignas(type) or _Alignas(const-expr); stored as log2+1 */
5101 { int n;
5102 AttributeDef ad1;
5103 next();
5104 skip('(');
5105 memset(&ad1, 0, sizeof(AttributeDef));
5106 if (parse_btype(&type1, &ad1)) {
5107 type_decl(&type1, &ad1, &n, TYPE_ABSTRACT);
5108 if (ad1.a.aligned)
5109 n = 1 << (ad1.a.aligned - 1);
5110 else
5111 type_size(&type1, &n);
5112 } else {
5113 n = expr_const();
5114 if (n <= 0 || (n & (n - 1)) != 0)
5115 tcc_error("alignment must be a positive power of two");
5117 skip(')');
5118 ad->a.aligned = exact_log2p1(n);
5120 continue;
5121 case TOK_LONG:
/* "double long" -> long double, "long long" -> VT_LLONG,
   first "long" -> plain VT_LONG modifier */
5122 if ((t & VT_BTYPE) == VT_DOUBLE) {
5123 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
5124 } else if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
5125 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LLONG;
5126 } else {
5127 u = VT_LONG;
5128 goto basic_type;
5130 next();
5131 break;
5132 #ifdef TCC_TARGET_ARM64
5133 case TOK_UINT128:
5134 /* GCC's __uint128_t appears in some Linux header files. Make it a
5135 synonym for long double to get the size and alignment right. */
5136 u = VT_LDOUBLE;
5137 goto basic_type;
5138 #endif
5139 case TOK_BOOL:
5140 u = VT_BOOL;
5141 goto basic_type;
5142 case TOK_FLOAT:
5143 u = VT_FLOAT;
5144 goto basic_type;
5145 case TOK_DOUBLE:
/* "long double" when a lone "long" was already seen */
5146 if ((t & (VT_BTYPE|VT_LONG)) == VT_LONG) {
5147 t = (t & ~(VT_BTYPE|VT_LONG)) | VT_LDOUBLE;
5148 } else {
5149 u = VT_DOUBLE;
5150 goto basic_type;
5152 next();
5153 break;
5154 case TOK_ENUM:
5155 struct_decl(&type1, VT_ENUM);
5156 basic_type2:
5157 u = type1.t;
5158 type->ref = type1.ref;
5159 goto basic_type1;
5160 case TOK_STRUCT:
5161 struct_decl(&type1, VT_STRUCT);
5162 goto basic_type2;
5163 case TOK_UNION:
5164 struct_decl(&type1, VT_UNION);
5165 goto basic_type2;
5167 /* type modifiers */
5168 case TOK__Atomic:
/* _Atomic as qualifier, or _Atomic(type-name) specifier form */
5169 next();
5170 type->t = t;
5171 parse_btype_qualify(type, VT_ATOMIC);
5172 t = type->t;
5173 if (tok == '(') {
5174 parse_expr_type(&type1);
5175 /* remove all storage modifiers except typedef */
5176 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
5177 if (type1.ref)
5178 sym_to_attr(ad, type1.ref);
5179 goto basic_type2;
5181 break;
5182 case TOK_CONST1:
5183 case TOK_CONST2:
5184 case TOK_CONST3:
5185 type->t = t;
5186 parse_btype_qualify(type, VT_CONSTANT);
5187 t = type->t;
5188 next();
5189 break;
5190 case TOK_VOLATILE1:
5191 case TOK_VOLATILE2:
5192 case TOK_VOLATILE3:
5193 type->t = t;
5194 parse_btype_qualify(type, VT_VOLATILE);
5195 t = type->t;
5196 next();
5197 break;
5198 case TOK_SIGNED1:
5199 case TOK_SIGNED2:
5200 case TOK_SIGNED3:
5201 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == (VT_DEFSIGN|VT_UNSIGNED))
5202 tcc_error("signed and unsigned modifier");
5203 t |= VT_DEFSIGN;
5204 next();
5205 typespec_found = 1;
5206 break;
5207 case TOK_REGISTER:
5208 case TOK_AUTO:
5209 case TOK_RESTRICT1:
5210 case TOK_RESTRICT2:
5211 case TOK_RESTRICT3:
/* register/auto/restrict are accepted and ignored */
5212 next();
5213 break;
5214 case TOK_UNSIGNED:
5215 if ((t & (VT_DEFSIGN|VT_UNSIGNED)) == VT_DEFSIGN)
5216 tcc_error("signed and unsigned modifier");
5217 t |= VT_DEFSIGN | VT_UNSIGNED;
5218 next();
5219 typespec_found = 1;
5220 break;
5222 /* storage */
5223 case TOK_EXTERN:
5224 g = VT_EXTERN;
5225 goto storage;
5226 case TOK_STATIC:
5227 g = VT_STATIC;
5228 goto storage;
5229 case TOK_TYPEDEF:
5230 g = VT_TYPEDEF;
5231 goto storage;
5232 storage:
5233 if (t & (VT_EXTERN|VT_STATIC|VT_TYPEDEF) & ~g)
5234 tcc_error("multiple storage classes");
5235 t |= g;
5236 next();
5237 break;
5238 case TOK_INLINE1:
5239 case TOK_INLINE2:
5240 case TOK_INLINE3:
5241 t |= VT_INLINE;
5242 next();
5243 break;
5244 case TOK_NORETURN3:
5245 next();
5246 ad->f.func_noreturn = 1;
5247 break;
5248 /* GNUC attribute */
5249 case TOK_ATTRIBUTE1:
5250 case TOK_ATTRIBUTE2:
5251 parse_attribute(ad);
5252 if (ad->attr_mode) {
/* attribute((mode(...))) overrides the basic type */
5253 u = ad->attr_mode -1;
5254 t = (t & ~(VT_BTYPE|VT_LONG)) | u;
5256 continue;
5257 /* GNUC typeof */
5258 case TOK_TYPEOF1:
5259 case TOK_TYPEOF2:
5260 case TOK_TYPEOF3:
5261 next();
5262 parse_expr_type(&type1);
5263 /* remove all storage modifiers except typedef */
5264 type1.t &= ~(VT_STORAGE&~VT_TYPEDEF);
5265 if (type1.ref)
5266 sym_to_attr(ad, type1.ref);
5267 goto basic_type2;
5268 default:
/* possibly a typedef name; but not after a type-specifier,
   and not if it turns out to be a label ("ident:") */
5269 if (typespec_found)
5270 goto the_end;
5271 s = sym_find(tok);
5272 if (!s || !(s->type.t & VT_TYPEDEF))
5273 goto the_end;
5275 n = tok, next();
5276 if (tok == ':' && !in_generic) {
5277 /* ignore if it's a label */
5278 unget_tok(n);
5279 goto the_end;
5282 t &= ~(VT_BTYPE|VT_LONG);
/* keep accumulated qualifiers separate, apply them to a copy of
   the typedef'd type */
5283 u = t & ~(VT_CONSTANT | VT_VOLATILE), t ^= u;
5284 type->t = (s->type.t & ~VT_TYPEDEF) | u;
5285 type->ref = s->type.ref;
5286 if (t)
5287 parse_btype_qualify(type, t);
5288 t = type->t;
5289 /* get attributes from typedef */
5290 sym_to_attr(ad, s);
5291 typespec_found = 1;
5292 st = bt = -2;
5293 break;
5295 type_found = 1;
5297 the_end:
5298 if (tcc_state->char_is_unsigned) {
5299 if ((t & (VT_DEFSIGN|VT_BTYPE)) == VT_BYTE)
5300 t |= VT_UNSIGNED;
5302 /* VT_LONG is used just as a modifier for VT_INT / VT_LLONG */
5303 bt = t & (VT_BTYPE|VT_LONG);
5304 if (bt == VT_LONG)
5305 t |= LONG_SIZE == 8 ? VT_LLONG : VT_INT;
5306 #ifdef TCC_USING_DOUBLE_FOR_LDOUBLE
/* targets without real long double represent it as "long double" ==
   VT_DOUBLE|VT_LONG */
5307 if (bt == VT_LDOUBLE)
5308 t = (t & ~(VT_BTYPE|VT_LONG)) | (VT_DOUBLE|VT_LONG);
5309 #endif
5310 type->t = t;
5311 return type_found;
5314 /* convert a function parameter type (array to pointer and function to
5315 function pointer) */
5316 static inline void convert_parameter_type(CType *pt)
5318 /* remove const and volatile qualifiers (XXX: const could be used
5319 to indicate a const function parameter) */
5320 pt->t &= ~(VT_CONSTANT | VT_VOLATILE);
5321 /* array must be transformed to pointer according to ANSI C */
5322 pt->t &= ~VT_ARRAY;
/* a function parameter decays to a pointer-to-function */
5323 if ((pt->t & VT_BTYPE) == VT_FUNC) {
5324 mk_pointer(pt);
/* Parse "( string [string...] )" openings of an asm construct: skips the
   '(' and collects the concatenated string constant into 'astr'.
   The closing ')' is consumed by the caller. */
5328 ST_FUNC void parse_asm_str(CString *astr)
5330 skip('(');
5331 parse_mult_str(astr, "string constant");
5334 /* Parse an asm label and return the token */
5334 /* Parse an asm label and return the token */
5335 static int asm_label_instr(void)
5337 int v;
5338 CString astr;
5340 next();
5341 parse_asm_str(&astr);
5342 skip(')');
5343 #ifdef ASM_DEBUG
5344 printf("asm_alias: \"%s\"\n", (char *)astr.data);
5345 #endif
/* intern the label string as a token; size - 1 drops the terminator */
5346 v = tok_alloc(astr.data, astr.size - 1)->tok;
5347 cstr_free(&astr);
5348 return v;
/* Parse the function-parameter-list or array suffixes of a declarator
   and wrap 'type' accordingly.  'td' is the TYPE_* mask from type_decl.
   Returns 0 when the '(' turns out to start a nested declarator rather
   than a parameter list, 1 otherwise. */
5351 static int post_type(CType *type, AttributeDef *ad, int storage, int td)
5353 int n, l, t1, arg_size, align, unused_align;
5354 Sym **plast, *s, *first;
5355 AttributeDef ad1;
5356 CType pt;
5358 if (tok == '(') {
5359 /* function type, or recursive declarator (return if so) */
5360 next();
5361 if (td && !(td & TYPE_ABSTRACT))
5362 return 0;
5363 if (tok == ')')
5364 l = 0;
5365 else if (parse_btype(&pt, &ad1))
5366 l = FUNC_NEW;
5367 else if (td) {
5368 merge_attr (ad, &ad1);
5369 return 0;
5370 } else
5371 l = FUNC_OLD;
5372 first = NULL;
5373 plast = &first;
5374 arg_size = 0;
/* collect the parameter symbols into the 'first' list */
5375 if (l) {
5376 for(;;) {
5377 /* read param name and compute offset */
5378 if (l != FUNC_OLD) {
5379 if ((pt.t & VT_BTYPE) == VT_VOID && tok == ')')
5380 break;
5381 type_decl(&pt, &ad1, &n, TYPE_DIRECT | TYPE_ABSTRACT);
5382 if ((pt.t & VT_BTYPE) == VT_VOID)
5383 tcc_error("parameter declared as void");
5384 } else {
5385 n = tok;
5386 if (n < TOK_UIDENT)
5387 expect("identifier");
5388 pt.t = VT_VOID; /* invalid type */
5389 pt.ref = NULL;
5390 next();
5392 convert_parameter_type(&pt);
/* arg_size counts stack slots in PTR_SIZE units */
5393 arg_size += (type_size(&pt, &align) + PTR_SIZE - 1) / PTR_SIZE;
5394 s = sym_push(n | SYM_FIELD, &pt, 0, 0);
5395 *plast = s;
5396 plast = &s->next;
5397 if (tok == ')')
5398 break;
5399 skip(',');
5400 if (l == FUNC_NEW && tok == TOK_DOTS) {
5401 l = FUNC_ELLIPSIS;
5402 next();
5403 break;
5405 if (l == FUNC_NEW && !parse_btype(&pt, &ad1))
5406 tcc_error("invalid type");
5408 } else
5409 /* if no parameters, then old type prototype */
5410 l = FUNC_OLD;
5411 skip(')');
5412 /* NOTE: const is ignored in returned type as it has a special
5413 meaning in gcc / C++ */
5414 type->t &= ~VT_CONSTANT;
5415 /* some ancient pre-K&R C allows a function to return an array
5416 and the array brackets to be put after the arguments, such
5417 that "int c()[]" means something like "int[] c()" */
5418 if (tok == '[') {
5419 next();
5420 skip(']'); /* only handle simple "[]" */
5421 mk_pointer(type);
5423 /* we push a anonymous symbol which will contain the function prototype */
5424 ad->f.func_args = arg_size;
5425 ad->f.func_type = l;
5426 s = sym_push(SYM_FIELD, type, 0, 0);
5427 s->a = ad->a;
5428 s->f = ad->f;
5429 s->next = first;
5430 type->t = VT_FUNC;
5431 type->ref = s;
5432 } else if (tok == '[') {
5433 int saved_nocode_wanted = nocode_wanted;
5434 /* array definition */
5435 next();
5436 while (1) {
5437 /* XXX The optional type-quals and static should only be accepted
5438 in parameter decls. The '*' as well, and then even only
5439 in prototypes (not function defs). */
5440 switch (tok) {
5441 case TOK_RESTRICT1: case TOK_RESTRICT2: case TOK_RESTRICT3:
5442 case TOK_CONST1:
5443 case TOK_VOLATILE1:
5444 case TOK_STATIC:
5445 case '*':
5446 next();
5447 continue;
5448 default:
5449 break;
5451 break;
/* n = constant array length (-1 if absent), t1 = VT_VLA flag */
5453 n = -1;
5454 t1 = 0;
5455 if (tok != ']') {
5456 if (!local_stack || (storage & VT_STATIC))
5457 vpushi(expr_const());
5458 else {
5459 /* VLAs (which can only happen with local_stack && !VT_STATIC)
5460 length must always be evaluated, even under nocode_wanted,
5461 so that its size slot is initialized (e.g. under sizeof
5462 or typeof). */
5463 nocode_wanted = 0;
5464 gexpr();
5466 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST) {
5467 n = vtop->c.i;
5468 if (n < 0)
5469 tcc_error("invalid array size");
5470 } else {
5471 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5472 tcc_error("size of variable length array should be an integer");
5473 n = 0;
5474 t1 = VT_VLA;
5477 skip(']');
5478 /* parse next post type */
/* recurse for multidimensional arrays before validating the
   element type */
5479 post_type(type, ad, storage, 0);
5481 if ((type->t & VT_BTYPE) == VT_FUNC)
5482 tcc_error("declaration of an array of functions");
5483 if ((type->t & VT_BTYPE) == VT_VOID
5484 || type_size(type, &unused_align) < 0)
5485 tcc_error("declaration of an array of incomplete type elements");
5487 t1 |= type->t & VT_VLA;
5489 if (t1 & VT_VLA) {
/* reserve a local slot holding the runtime size and emit the
   code that computes and stores it */
5490 if (n < 0)
5491 tcc_error("need explicit inner array size in VLAs");
5492 loc -= type_size(&int_type, &align);
5493 loc &= -align;
5494 n = loc;
5496 vla_runtime_type_size(type, &align);
5497 gen_op('*');
5498 vset(&int_type, VT_LOCAL|VT_LVAL, n);
5499 vswap();
5500 vstore();
5502 if (n != -1)
5503 vpop();
5504 nocode_wanted = saved_nocode_wanted;
5506 /* we push an anonymous symbol which will contain the array
5507 element type */
5508 s = sym_push(SYM_FIELD, type, 0, n);
5509 type->t = (t1 ? VT_VLA : VT_ARRAY) | VT_PTR;
5510 type->ref = s;
5512 return 1;
5515 /* Parse a type declarator (except basic type), and return the type
5516 in 'type'. 'td' is a bitmask indicating which kind of type decl is
5517 expected. 'type' should contain the basic type. 'ad' is the
5518 attribute definition of the basic type. It can be modified by
5519 type_decl(). If this (possibly abstract) declarator is a pointer chain
5520 it returns the innermost pointed to type (equals *type, but is a different
5521 pointer), otherwise returns type itself, that's used for recursive calls. */
5522 static CType *type_decl(CType *type, AttributeDef *ad, int *v, int td)
5524 CType *post, *ret;
5525 int qualifiers, storage;
5527 /* recursive type, remove storage bits first, apply them later again */
5528 storage = type->t & VT_STORAGE;
5529 type->t &= ~VT_STORAGE;
5530 post = ret = type;
/* consume any leading '*' derivations with their qualifiers */
5532 while (tok == '*') {
5533 qualifiers = 0;
5534 redo:
5535 next();
5536 switch(tok) {
5537 case TOK__Atomic:
5538 qualifiers |= VT_ATOMIC;
5539 goto redo;
5540 case TOK_CONST1:
5541 case TOK_CONST2:
5542 case TOK_CONST3:
5543 qualifiers |= VT_CONSTANT;
5544 goto redo;
5545 case TOK_VOLATILE1:
5546 case TOK_VOLATILE2:
5547 case TOK_VOLATILE3:
5548 qualifiers |= VT_VOLATILE;
5549 goto redo;
5550 case TOK_RESTRICT1:
5551 case TOK_RESTRICT2:
5552 case TOK_RESTRICT3:
/* restrict is accepted and ignored */
5553 goto redo;
5554 /* XXX: clarify attribute handling */
5555 case TOK_ATTRIBUTE1:
5556 case TOK_ATTRIBUTE2:
5557 parse_attribute(ad);
5558 break;
5560 mk_pointer(type);
5561 type->t |= qualifiers;
5562 if (ret == type)
5563 /* innermost pointed to type is the one for the first derivation */
5564 ret = pointed_type(type);
5567 if (tok == '(') {
5568 /* This is possibly a parameter type list for abstract declarators
5569 ('int ()'), use post_type for testing this. */
5570 if (!post_type(type, ad, 0, td)) {
5571 /* It's not, so it's a nested declarator, and the post operations
5572 apply to the innermost pointed to type (if any). */
5573 /* XXX: this is not correct to modify 'ad' at this point, but
5574 the syntax is not clear */
5575 parse_attribute(ad);
5576 post = type_decl(type, ad, v, td);
5577 skip(')');
5578 } else
5579 goto abstract;
5580 } else if (tok >= TOK_IDENT && (td & TYPE_DIRECT)) {
5581 /* type identifier */
5582 *v = tok;
5583 next();
5584 } else {
5585 abstract:
5586 if (!(td & TYPE_ABSTRACT))
5587 expect("identifier");
/* abstract declarator: no name */
5588 *v = 0;
5590 post_type(post, ad, storage, 0);
5591 parse_attribute(ad);
5592 type->t |= storage;
5593 return ret;
5596 /* indirection with full error checking and bound check */
5596 /* indirection with full error checking and bound check */
5597 ST_FUNC void indir(void)
5599 if ((vtop->type.t & VT_BTYPE) != VT_PTR) {
/* dereferencing a function is a no-op (functions decay back) */
5600 if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
5601 return;
5602 expect("pointer");
/* load the pointer value itself if it is currently an lvalue */
5604 if (vtop->r & VT_LVAL)
5605 gv(RC_INT);
5606 vtop->type = *pointed_type(&vtop->type);
5607 /* Arrays and functions are never lvalues */
5608 if (!(vtop->type.t & (VT_ARRAY | VT_VLA))
5609 && (vtop->type.t & VT_BTYPE) != VT_FUNC) {
5610 vtop->r |= VT_LVAL;
5611 /* if bound checking, the referenced pointer must be checked */
5612 #ifdef CONFIG_TCC_BCHECK
5613 if (tcc_state->do_bounds_check)
5614 vtop->r |= VT_MUSTBOUND;
5615 #endif
5619 /* pass a parameter to a function and do type checking and casting */
5619 /* pass a parameter to a function and do type checking and casting */
/* 'func' is the called function's prototype symbol, 'arg' the current
   formal parameter symbol (NULL when past the declared parameters). */
5620 static void gfunc_param_typed(Sym *func, Sym *arg)
5622 int func_type;
5623 CType type;
5625 func_type = func->f.func_type;
/* unprototyped call or variadic part: apply default promotions */
5626 if (func_type == FUNC_OLD ||
5627 (func_type == FUNC_ELLIPSIS && arg == NULL)) {
5628 /* default casting : only need to convert float to double */
5629 if ((vtop->type.t & VT_BTYPE) == VT_FLOAT) {
5630 gen_cast_s(VT_DOUBLE);
5631 } else if (vtop->type.t & VT_BITFIELD) {
/* widen a bit-field value to its underlying base type */
5632 type.t = vtop->type.t & (VT_BTYPE | VT_UNSIGNED);
5633 type.ref = vtop->type.ref;
5634 gen_cast(&type);
5635 } else if (vtop->r & VT_MUSTCAST) {
5636 force_charshort_cast();
5638 } else if (arg == NULL) {
5639 tcc_error("too many arguments to function");
5640 } else {
/* prototyped parameter: cast with assignment-compatibility checks */
5641 type = arg->type;
5642 type.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
5643 gen_assign_cast(&type);
5647 /* parse an expression and return its type without any side effect. */
5647 /* parse an expression and return its type without any side effect. */
/* nocode_wanted suppresses code generation while 'expr_fn' runs. */
5648 static void expr_type(CType *type, void (*expr_fn)(void))
5650 nocode_wanted++;
5651 expr_fn();
5652 *type = vtop->type;
5653 vpop();
5654 nocode_wanted--;
5657 /* parse an expression of the form '(type)' or '(expr)' and return its
5658 type */
5659 static void parse_expr_type(CType *type)
5661 int n;
5662 AttributeDef ad;
5664 skip('(');
/* either "(type-name)" or "(expression)" */
5665 if (parse_btype(type, &ad)) {
5666 type_decl(type, &ad, &n, TYPE_ABSTRACT);
5667 } else {
5668 expr_type(type, gexpr);
5670 skip(')');
/* Parse a mandatory (abstract) type-name; errors out if none present. */
5673 static void parse_type(CType *type)
5675 AttributeDef ad;
5676 int n;
5678 if (!parse_btype(type, &ad)) {
5679 expect("type");
5681 type_decl(type, &ad, &n, TYPE_ABSTRACT);
/* Parse the parenthesized argument list of a builtin, according to the
   format string 'args' (one char per argument):
     't' = type-name (pushed as a value),  'e' = expression as-is,
     'v'/'V' = cast to (const) void*,      's'/'S' = cast to (const) char*,
     'i' = cast to int,                    'l' = cast to size_t.
   If 'nc' is nonzero, no code is generated while parsing. */
5684 static void parse_builtin_params(int nc, const char *args)
5686 char c, sep = '(';
5687 CType type;
5688 if (nc)
5689 nocode_wanted++;
5690 next();
/* with an empty format, still consume the opening '(' */
5691 if (*args == 0)
5692 skip(sep);
5693 while ((c = *args++)) {
5694 skip(sep);
5695 sep = ',';
5696 if (c == 't') {
5697 parse_type(&type);
5698 vpush(&type);
5699 continue;
5701 expr_eq();
5702 type.ref = NULL;
5703 type.t = 0;
5704 switch (c) {
5705 case 'e':
5706 continue;
5707 case 'V':
5708 type.t = VT_CONSTANT;
/* fall through */
5709 case 'v':
5710 type.t |= VT_VOID;
5711 mk_pointer (&type);
5712 break;
5713 case 'S':
5714 type.t = VT_CONSTANT;
/* fall through */
5715 case 's':
5716 type.t |= char_type.t;
5717 mk_pointer (&type);
5718 break;
5719 case 'i':
5720 type.t = VT_INT;
5721 break;
5722 case 'l':
5723 type.t = VT_SIZE_T;
5724 break;
5725 default:
5726 break;
5728 gen_assign_cast(&type);
5730 skip(')');
5731 if (nc)
5732 nocode_wanted--;
/* Heuristic check that the SValue looks like a C11 memory-model
   constant (an int below 6, i.e. the range of memory_order values). */
5735 static inline int is_memory_model(const SValue *sv)
5738 * FIXME
5739 * The memory models should better be backed by an enumeration.
5741 * const int t = sv->type.t;
5743 * if (!IS_ENUM_VAL(t))
5744 * return 0;
5746 * if (!(t & VT_STATIC))
5747 * return 0;
5749 * Ideally we should check whether the model matches 1:1.
5750 * If it is possible, we should check by the name of the value.
5752 return (((sv->type.t & VT_BTYPE) == VT_INT) && (sv->c.i < 6));
/* Parse and lower a __c11_atomic_* builtin call ('atok' is its token)
   into a call to the matching size-specific runtime helper. */
5755 static void parse_atomic(int atok)
5757 size_t op;
5758 size_t arg;
5759 size_t argc;
5760 CType *atom = NULL;
5761 char const *params = NULL;
5762 static struct {
5763 int const tok;
5764 char const *const params;
5765 } const ops[] = {
5767 * a -- atomic
5768 * A -- read-only atomic
5769 * p -- pointer to memory
5770 * P -- pointer to read-only memory
5771 * v -- value
5772 * m -- memory model
5774 {TOK___c11_atomic_init, "-av"},
5775 {TOK___c11_atomic_store, "-avm"},
5776 {TOK___c11_atomic_load, "am"},
5777 {TOK___c11_atomic_exchange, "avm"},
5778 {TOK___c11_atomic_compare_exchange_strong, "apvmm"},
5779 {TOK___c11_atomic_compare_exchange_weak, "apvmm"},
5780 {TOK___c11_atomic_fetch_add, "avm"},
5781 {TOK___c11_atomic_fetch_sub, "avm"},
5782 {TOK___c11_atomic_fetch_or, "avm"},
5783 {TOK___c11_atomic_fetch_xor, "avm"},
5784 {TOK___c11_atomic_fetch_and, "avm"},
5787 next();
/* look up the parameter signature for this builtin */
5789 for (op = 0; op < (sizeof(ops) / sizeof(*ops)); ++op) {
5790 if (ops[op].tok == atok) {
5791 params = ops[op].params;
5792 break;
5795 if (!params)
5796 tcc_error("unknown atomic operation");
5798 argc = strlen(params);
/* leading '-' marks builtins whose helper returns void */
5799 if (params[0] == '-') {
5800 ++params;
5801 --argc;
5804 vpushi(0);
5805 vpushi(0); /* function address */
5807 skip('(');
5808 for (arg = 0; arg < argc; ++arg) {
5809 expr_eq();
5811 switch (params[arg]) {
5812 case 'a':
5813 case 'A':
5814 if (atom)
5815 expect_arg("exactly one pointer to atomic", arg);
5816 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
5817 expect_arg("pointer to atomic expected", arg);
5818 atom = pointed_type(&vtop->type);
5819 if (!(atom->t & VT_ATOMIC))
5820 expect_arg("qualified pointer to atomic", arg);
5821 if ((params[arg] == 'a') && (atom->t & VT_CONSTANT))
5822 expect_arg("pointer to writable atomic", arg);
5823 atom->t &= ~VT_ATOMIC;
/* pick the size-specific helper token; relies on the 1/2/4/8-byte
   variants immediately following the generic token (assumed token
   layout — TODO confirm against the token definitions) */
5824 switch (btype_size(atom->t & VT_BTYPE)) {
5825 case 1: atok += 1; break;
5826 case 2: atok += 2; break;
5827 case 4: atok += 3; break;
5828 case 8: atok += 4; break;
5829 default: tcc_error("only integer-sized types are supported");
/* patch the placeholder function address pushed above */
5831 vswap();
5832 vpop();
5833 vpush_helper_func(atok);
5834 vswap();
5835 break;
5837 case 'p':
5838 if (((vtop->type.t & VT_BTYPE) != VT_PTR)
5839 || !is_compatible_unqualified_types(atom, pointed_type(&vtop->type)))
5840 expect_arg("pointer to compatible type", arg);
5841 break;
5843 case 'v':
5844 if (!is_integer_btype(vtop->type.t & VT_BTYPE))
5845 expect_arg("integer type", arg);
5846 break;
5848 case 'm':
5849 if (!is_memory_model(vtop))
5850 expect_arg("memory model", arg);
5851 vtop->type.t &= ~VT_MEMMODEL;
5852 break;
5854 default:
5855 tcc_error("unknown parameter type");
5857 if (tok == ')')
5858 break;
5859 skip(',');
5861 if (arg < (argc - 1))
5862 expect("more parameters");
5863 if (arg > (argc - 1))
5864 expect("less parameters");
5865 skip(')');
5867 gfunc_call(argc);
5870 ST_FUNC void unary(void)
5872 int n, t, align, size, r, sizeof_caller;
5873 CType type;
5874 Sym *s;
5875 AttributeDef ad;
5877 /* generate line number info */
5878 if (debug_modes)
5879 tcc_debug_line(tcc_state), tcc_tcov_check_line (1);
5881 sizeof_caller = in_sizeof;
5882 in_sizeof = 0;
5883 type.ref = NULL;
5884 /* XXX: GCC 2.95.3 does not generate a table although it should be
5885 better here */
5886 tok_next:
5887 switch(tok) {
5888 case TOK_EXTENSION:
5889 next();
5890 goto tok_next;
5891 case TOK_LCHAR:
5892 #ifdef TCC_TARGET_PE
5893 t = VT_SHORT|VT_UNSIGNED;
5894 goto push_tokc;
5895 #endif
5896 case TOK_CINT:
5897 case TOK_CCHAR:
5898 t = VT_INT;
5899 push_tokc:
5900 type.t = t;
5901 vsetc(&type, VT_CONST, &tokc);
5902 next();
5903 break;
5904 case TOK_CUINT:
5905 t = VT_INT | VT_UNSIGNED;
5906 goto push_tokc;
5907 case TOK_CLLONG:
5908 t = VT_LLONG;
5909 goto push_tokc;
5910 case TOK_CULLONG:
5911 t = VT_LLONG | VT_UNSIGNED;
5912 goto push_tokc;
5913 case TOK_CFLOAT:
5914 t = VT_FLOAT;
5915 goto push_tokc;
5916 case TOK_CDOUBLE:
5917 t = VT_DOUBLE;
5918 goto push_tokc;
5919 case TOK_CLDOUBLE:
5920 t = VT_LDOUBLE;
5921 goto push_tokc;
5922 case TOK_CLONG:
5923 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG;
5924 goto push_tokc;
5925 case TOK_CULONG:
5926 t = (LONG_SIZE == 8 ? VT_LLONG : VT_INT) | VT_LONG | VT_UNSIGNED;
5927 goto push_tokc;
5928 case TOK___FUNCTION__:
5929 if (!gnu_ext)
5930 goto tok_identifier;
5931 /* fall thru */
5932 case TOK___FUNC__:
5934 void *ptr;
5935 int len;
5936 /* special function name identifier */
5937 len = strlen(funcname) + 1;
5938 /* generate char[len] type */
5939 type.t = VT_BYTE;
5940 mk_pointer(&type);
5941 type.t |= VT_ARRAY;
5942 type.ref->c = len;
5943 vpush_ref(&type, data_section, data_section->data_offset, len);
5944 if (!NODATA_WANTED) {
5945 ptr = section_ptr_add(data_section, len);
5946 memcpy(ptr, funcname, len);
5948 next();
5950 break;
5951 case TOK_LSTR:
5952 #ifdef TCC_TARGET_PE
5953 t = VT_SHORT | VT_UNSIGNED;
5954 #else
5955 t = VT_INT;
5956 #endif
5957 goto str_init;
5958 case TOK_STR:
5959 /* string parsing */
5960 t = VT_BYTE;
5961 if (tcc_state->char_is_unsigned)
5962 t = VT_BYTE | VT_UNSIGNED;
5963 str_init:
5964 if (tcc_state->warn_write_strings)
5965 t |= VT_CONSTANT;
5966 type.t = t;
5967 mk_pointer(&type);
5968 type.t |= VT_ARRAY;
5969 memset(&ad, 0, sizeof(AttributeDef));
5970 decl_initializer_alloc(&type, &ad, VT_CONST, 2, 0, 0);
5971 break;
5972 case '(':
5973 next();
5974 /* cast ? */
5975 if (parse_btype(&type, &ad)) {
5976 type_decl(&type, &ad, &n, TYPE_ABSTRACT);
5977 skip(')');
5978 /* check ISOC99 compound literal */
5979 if (tok == '{') {
5980 /* data is allocated locally by default */
5981 if (global_expr)
5982 r = VT_CONST;
5983 else
5984 r = VT_LOCAL;
5985 /* all except arrays are lvalues */
5986 if (!(type.t & VT_ARRAY))
5987 r |= VT_LVAL;
5988 memset(&ad, 0, sizeof(AttributeDef));
5989 decl_initializer_alloc(&type, &ad, r, 1, 0, 0);
5990 } else {
5991 if (sizeof_caller) {
5992 vpush(&type);
5993 return;
5995 unary();
5996 gen_cast(&type);
5998 } else if (tok == '{') {
5999 int saved_nocode_wanted = nocode_wanted;
6000 if (const_wanted && !(nocode_wanted & unevalmask))
6001 expect("constant");
6002 if (0 == local_scope)
6003 tcc_error("statement expression outside of function");
6004 /* save all registers */
6005 save_regs(0);
6006 /* statement expression : we do not accept break/continue
6007 inside as GCC does. We do retain the nocode_wanted state,
6008 as statement expressions can't ever be entered from the
6009 outside, so any reactivation of code emission (from labels
6010 or loop heads) can be disabled again after the end of it. */
6011 block(1);
6012 nocode_wanted = saved_nocode_wanted;
6013 skip(')');
6014 } else {
6015 gexpr();
6016 skip(')');
6018 break;
6019 case '*':
6020 next();
6021 unary();
6022 indir();
6023 break;
6024 case '&':
6025 next();
6026 unary();
6027 /* functions names must be treated as function pointers,
6028 except for unary '&' and sizeof. Since we consider that
6029 functions are not lvalues, we only have to handle it
6030 there and in function calls. */
6031 /* arrays can also be used although they are not lvalues */
6032 if ((vtop->type.t & VT_BTYPE) != VT_FUNC &&
6033 !(vtop->type.t & VT_ARRAY))
6034 test_lvalue();
6035 if (vtop->sym)
6036 vtop->sym->a.addrtaken = 1;
6037 mk_pointer(&vtop->type);
6038 gaddrof();
6039 break;
6040 case '!':
6041 next();
6042 unary();
6043 gen_test_zero(TOK_EQ);
6044 break;
6045 case '~':
6046 next();
6047 unary();
6048 vpushi(-1);
6049 gen_op('^');
6050 break;
6051 case '+':
6052 next();
6053 unary();
6054 if ((vtop->type.t & VT_BTYPE) == VT_PTR)
6055 tcc_error("pointer not accepted for unary plus");
6056 /* In order to force cast, we add zero, except for floating point
6057 where we really need an noop (otherwise -0.0 will be transformed
6058 into +0.0). */
6059 if (!is_float(vtop->type.t)) {
6060 vpushi(0);
6061 gen_op('+');
6063 break;
6064 case TOK_SIZEOF:
6065 case TOK_ALIGNOF1:
6066 case TOK_ALIGNOF2:
6067 case TOK_ALIGNOF3:
6068 t = tok;
6069 next();
6070 in_sizeof++;
6071 expr_type(&type, unary); /* Perform a in_sizeof = 0; */
6072 s = NULL;
6073 if (vtop[1].r & VT_SYM)
6074 s = vtop[1].sym; /* hack: accessing previous vtop */
6075 size = type_size(&type, &align);
6076 if (s && s->a.aligned)
6077 align = 1 << (s->a.aligned - 1);
6078 if (t == TOK_SIZEOF) {
6079 if (!(type.t & VT_VLA)) {
6080 if (size < 0)
6081 tcc_error("sizeof applied to an incomplete type");
6082 vpushs(size);
6083 } else {
6084 vla_runtime_type_size(&type, &align);
6086 } else {
6087 vpushs(align);
6089 vtop->type.t |= VT_UNSIGNED;
6090 break;
6092 case TOK_builtin_expect:
6093 /* __builtin_expect is a no-op for now */
6094 parse_builtin_params(0, "ee");
6095 vpop();
6096 break;
6097 case TOK_builtin_types_compatible_p:
6098 parse_builtin_params(0, "tt");
6099 vtop[-1].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
6100 vtop[0].type.t &= ~(VT_CONSTANT | VT_VOLATILE);
6101 n = is_compatible_types(&vtop[-1].type, &vtop[0].type);
6102 vtop -= 2;
6103 vpushi(n);
6104 break;
6105 case TOK_builtin_choose_expr:
6107 int64_t c;
6108 next();
6109 skip('(');
6110 c = expr_const64();
6111 skip(',');
6112 if (!c) {
6113 nocode_wanted++;
6115 expr_eq();
6116 if (!c) {
6117 vpop();
6118 nocode_wanted--;
6120 skip(',');
6121 if (c) {
6122 nocode_wanted++;
6124 expr_eq();
6125 if (c) {
6126 vpop();
6127 nocode_wanted--;
6129 skip(')');
6131 break;
6132 case TOK_builtin_constant_p:
6133 parse_builtin_params(1, "e");
6134 n = (vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6135 !((vtop->r & VT_SYM) && vtop->sym->a.addrtaken);
6136 vtop--;
6137 vpushi(n);
6138 break;
6139 case TOK_builtin_frame_address:
6140 case TOK_builtin_return_address:
6142 int tok1 = tok;
6143 int level;
6144 next();
6145 skip('(');
6146 if (tok != TOK_CINT) {
6147 tcc_error("%s only takes positive integers",
6148 tok1 == TOK_builtin_return_address ?
6149 "__builtin_return_address" :
6150 "__builtin_frame_address");
6152 level = (uint32_t)tokc.i;
6153 next();
6154 skip(')');
6155 type.t = VT_VOID;
6156 mk_pointer(&type);
6157 vset(&type, VT_LOCAL, 0); /* local frame */
6158 while (level--) {
6159 #ifdef TCC_TARGET_RISCV64
6160 vpushi(2*PTR_SIZE);
6161 gen_op('-');
6162 #endif
6163 mk_pointer(&vtop->type);
6164 indir(); /* -> parent frame */
6166 if (tok1 == TOK_builtin_return_address) {
6167 // assume return address is just above frame pointer on stack
6168 #ifdef TCC_TARGET_ARM
6169 vpushi(2*PTR_SIZE);
6170 gen_op('+');
6171 #elif defined TCC_TARGET_RISCV64
6172 vpushi(PTR_SIZE);
6173 gen_op('-');
6174 #else
6175 vpushi(PTR_SIZE);
6176 gen_op('+');
6177 #endif
6178 mk_pointer(&vtop->type);
6179 indir();
6182 break;
6183 #ifdef TCC_TARGET_RISCV64
6184 case TOK_builtin_va_start:
6185 parse_builtin_params(0, "ee");
6186 r = vtop->r & VT_VALMASK;
6187 if (r == VT_LLOCAL)
6188 r = VT_LOCAL;
6189 if (r != VT_LOCAL)
6190 tcc_error("__builtin_va_start expects a local variable");
6191 gen_va_start();
6192 vstore();
6193 break;
6194 #endif
6195 #ifdef TCC_TARGET_X86_64
6196 #ifdef TCC_TARGET_PE
6197 case TOK_builtin_va_start:
6198 parse_builtin_params(0, "ee");
6199 r = vtop->r & VT_VALMASK;
6200 if (r == VT_LLOCAL)
6201 r = VT_LOCAL;
6202 if (r != VT_LOCAL)
6203 tcc_error("__builtin_va_start expects a local variable");
6204 vtop->r = r;
6205 vtop->type = char_pointer_type;
6206 vtop->c.i += 8;
6207 vstore();
6208 break;
6209 #else
6210 case TOK_builtin_va_arg_types:
6211 parse_builtin_params(0, "t");
6212 vpushi(classify_x86_64_va_arg(&vtop->type));
6213 vswap();
6214 vpop();
6215 break;
6216 #endif
6217 #endif
6219 #ifdef TCC_TARGET_ARM64
6220 case TOK_builtin_va_start: {
6221 parse_builtin_params(0, "ee");
6222 //xx check types
6223 gen_va_start();
6224 vpushi(0);
6225 vtop->type.t = VT_VOID;
6226 break;
6228 case TOK_builtin_va_arg: {
6229 parse_builtin_params(0, "et");
6230 type = vtop->type;
6231 vpop();
6232 //xx check types
6233 gen_va_arg(&type);
6234 vtop->type = type;
6235 break;
6237 case TOK___arm64_clear_cache: {
6238 parse_builtin_params(0, "ee");
6239 gen_clear_cache();
6240 vpushi(0);
6241 vtop->type.t = VT_VOID;
6242 break;
6244 #endif
6246 /* atomic operations */
6247 case TOK___c11_atomic_init:
6248 case TOK___c11_atomic_store:
6249 case TOK___c11_atomic_load:
6250 case TOK___c11_atomic_exchange:
6251 case TOK___c11_atomic_compare_exchange_strong:
6252 case TOK___c11_atomic_compare_exchange_weak:
6253 case TOK___c11_atomic_fetch_add:
6254 case TOK___c11_atomic_fetch_sub:
6255 case TOK___c11_atomic_fetch_or:
6256 case TOK___c11_atomic_fetch_xor:
6257 case TOK___c11_atomic_fetch_and:
6258 parse_atomic(tok);
6259 break;
6261 /* pre operations */
6262 case TOK_INC:
6263 case TOK_DEC:
6264 t = tok;
6265 next();
6266 unary();
6267 inc(0, t);
6268 break;
6269 case '-':
6270 next();
6271 unary();
6272 if (is_float(vtop->type.t)) {
6273 gen_opif(TOK_NEG);
6274 } else {
6275 vpushi(0);
6276 vswap();
6277 gen_op('-');
6279 break;
6280 case TOK_LAND:
6281 if (!gnu_ext)
6282 goto tok_identifier;
6283 next();
6284 /* allow to take the address of a label */
6285 if (tok < TOK_UIDENT)
6286 expect("label identifier");
6287 s = label_find(tok);
6288 if (!s) {
6289 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
6290 } else {
6291 if (s->r == LABEL_DECLARED)
6292 s->r = LABEL_FORWARD;
6294 if (!s->type.t) {
6295 s->type.t = VT_VOID;
6296 mk_pointer(&s->type);
6297 s->type.t |= VT_STATIC;
6299 vpushsym(&s->type, s);
6300 next();
6301 break;
6303 case TOK_GENERIC:
6305 CType controlling_type;
6306 int has_default = 0;
6307 int has_match = 0;
6308 int learn = 0;
6309 TokenString *str = NULL;
6310 int saved_const_wanted = const_wanted;
6312 next();
6313 skip('(');
6314 const_wanted = 0;
6315 expr_type(&controlling_type, expr_eq);
6316 controlling_type.t &= ~(VT_CONSTANT | VT_VOLATILE | VT_ARRAY);
6317 if ((controlling_type.t & VT_BTYPE) == VT_FUNC)
6318 mk_pointer(&controlling_type);
6319 const_wanted = saved_const_wanted;
6320 for (;;) {
6321 learn = 0;
6322 skip(',');
6323 if (tok == TOK_DEFAULT) {
6324 if (has_default)
6325 tcc_error("too many 'default'");
6326 has_default = 1;
6327 if (!has_match)
6328 learn = 1;
6329 next();
6330 } else {
6331 AttributeDef ad_tmp;
6332 int itmp;
6333 CType cur_type;
6335 in_generic++;
6336 parse_btype(&cur_type, &ad_tmp);
6337 in_generic--;
6339 type_decl(&cur_type, &ad_tmp, &itmp, TYPE_ABSTRACT);
6340 if (compare_types(&controlling_type, &cur_type, 0)) {
6341 if (has_match) {
6342 tcc_error("type match twice");
6344 has_match = 1;
6345 learn = 1;
6348 skip(':');
6349 if (learn) {
6350 if (str)
6351 tok_str_free(str);
6352 skip_or_save_block(&str);
6353 } else {
6354 skip_or_save_block(NULL);
6356 if (tok == ')')
6357 break;
6359 if (!str) {
6360 char buf[60];
6361 type_to_str(buf, sizeof buf, &controlling_type, NULL);
6362 tcc_error("type '%s' does not match any association", buf);
6364 begin_macro(str, 1);
6365 next();
6366 expr_eq();
6367 if (tok != TOK_EOF)
6368 expect(",");
6369 end_macro();
6370 next();
6371 break;
6373 // special qnan , snan and infinity values
6374 case TOK___NAN__:
6375 n = 0x7fc00000;
6376 special_math_val:
6377 vpushi(n);
6378 vtop->type.t = VT_FLOAT;
6379 next();
6380 break;
6381 case TOK___SNAN__:
6382 n = 0x7f800001;
6383 goto special_math_val;
6384 case TOK___INF__:
6385 n = 0x7f800000;
6386 goto special_math_val;
6388 default:
6389 tok_identifier:
6390 t = tok;
6391 next();
6392 if (t < TOK_UIDENT)
6393 expect("identifier");
6394 s = sym_find(t);
6395 if (!s || IS_ASM_SYM(s)) {
6396 const char *name = get_tok_str(t, NULL);
6397 if (tok != '(')
6398 tcc_error("'%s' undeclared", name);
6399 /* for simple function calls, we tolerate undeclared
6400 external reference to int() function */
6401 if (tcc_state->warn_implicit_function_declaration
6402 #ifdef TCC_TARGET_PE
6403 /* people must be warned about using undeclared WINAPI functions
6404 (which usually start with uppercase letter) */
6405 || (name[0] >= 'A' && name[0] <= 'Z')
6406 #endif
6408 tcc_warning("implicit declaration of function '%s'", name);
6409 s = external_global_sym(t, &func_old_type);
6412 r = s->r;
6413 /* A symbol that has a register is a local register variable,
6414 which starts out as VT_LOCAL value. */
6415 if ((r & VT_VALMASK) < VT_CONST)
6416 r = (r & ~VT_VALMASK) | VT_LOCAL;
6418 vset(&s->type, r, s->c);
6419 /* Point to s as backpointer (even without r&VT_SYM).
6420 Will be used by at least the x86 inline asm parser for
6421 regvars. */
6422 vtop->sym = s;
6424 if (r & VT_SYM) {
6425 vtop->c.i = 0;
6426 } else if (r == VT_CONST && IS_ENUM_VAL(s->type.t)) {
6427 vtop->c.i = s->enum_val;
6429 break;
6432 /* post operations */
6433 while (1) {
6434 if (tok == TOK_INC || tok == TOK_DEC) {
6435 inc(1, tok);
6436 next();
6437 } else if (tok == '.' || tok == TOK_ARROW || tok == TOK_CDOUBLE) {
6438 int qualifiers, cumofs = 0;
6439 /* field */
6440 if (tok == TOK_ARROW)
6441 indir();
6442 qualifiers = vtop->type.t & (VT_CONSTANT | VT_VOLATILE);
6443 test_lvalue();
6444 gaddrof();
6445 /* expect pointer on structure */
6446 if ((vtop->type.t & VT_BTYPE) != VT_STRUCT)
6447 expect("struct or union");
6448 if (tok == TOK_CDOUBLE)
6449 expect("field name");
6450 next();
6451 if (tok == TOK_CINT || tok == TOK_CUINT)
6452 expect("field name");
6453 s = find_field(&vtop->type, tok, &cumofs);
6454 if (!s)
6455 tcc_error("field not found: %s", get_tok_str(tok & ~SYM_FIELD, &tokc));
6456 /* add field offset to pointer */
6457 vtop->type = char_pointer_type; /* change type to 'char *' */
6458 vpushi(cumofs + s->c);
6459 gen_op('+');
6460 /* change type to field type, and set to lvalue */
6461 vtop->type = s->type;
6462 vtop->type.t |= qualifiers;
6463 /* an array is never an lvalue */
6464 if (!(vtop->type.t & VT_ARRAY)) {
6465 vtop->r |= VT_LVAL;
6466 #ifdef CONFIG_TCC_BCHECK
6467 /* if bound checking, the referenced pointer must be checked */
6468 if (tcc_state->do_bounds_check)
6469 vtop->r |= VT_MUSTBOUND;
6470 #endif
6472 next();
6473 } else if (tok == '[') {
6474 next();
6475 gexpr();
6476 gen_op('+');
6477 indir();
6478 skip(']');
6479 } else if (tok == '(') {
6480 SValue ret;
6481 Sym *sa;
6482 int nb_args, ret_nregs, ret_align, regsize, variadic;
6484 /* function call */
6485 if ((vtop->type.t & VT_BTYPE) != VT_FUNC) {
6486 /* pointer test (no array accepted) */
6487 if ((vtop->type.t & (VT_BTYPE | VT_ARRAY)) == VT_PTR) {
6488 vtop->type = *pointed_type(&vtop->type);
6489 if ((vtop->type.t & VT_BTYPE) != VT_FUNC)
6490 goto error_func;
6491 } else {
6492 error_func:
6493 expect("function pointer");
6495 } else {
6496 vtop->r &= ~VT_LVAL; /* no lvalue */
6498 /* get return type */
6499 s = vtop->type.ref;
6500 next();
6501 sa = s->next; /* first parameter */
6502 nb_args = regsize = 0;
6503 ret.r2 = VT_CONST;
6504 /* compute first implicit argument if a structure is returned */
6505 if ((s->type.t & VT_BTYPE) == VT_STRUCT) {
6506 variadic = (s->f.func_type == FUNC_ELLIPSIS);
6507 ret_nregs = gfunc_sret(&s->type, variadic, &ret.type,
6508 &ret_align, &regsize);
6509 if (ret_nregs <= 0) {
6510 /* get some space for the returned structure */
6511 size = type_size(&s->type, &align);
6512 #ifdef TCC_TARGET_ARM64
6513 /* On arm64, a small struct is return in registers.
6514 It is much easier to write it to memory if we know
6515 that we are allowed to write some extra bytes, so
6516 round the allocated space up to a power of 2: */
6517 if (size < 16)
6518 while (size & (size - 1))
6519 size = (size | (size - 1)) + 1;
6520 #endif
6521 loc = (loc - size) & -align;
6522 ret.type = s->type;
6523 ret.r = VT_LOCAL | VT_LVAL;
6524 /* pass it as 'int' to avoid structure arg passing
6525 problems */
6526 vseti(VT_LOCAL, loc);
6527 #ifdef CONFIG_TCC_BCHECK
6528 if (tcc_state->do_bounds_check)
6529 --loc;
6530 #endif
6531 ret.c = vtop->c;
6532 if (ret_nregs < 0)
6533 vtop--;
6534 else
6535 nb_args++;
6537 } else {
6538 ret_nregs = 1;
6539 ret.type = s->type;
6542 if (ret_nregs > 0) {
6543 /* return in register */
6544 ret.c.i = 0;
6545 PUT_R_RET(&ret, ret.type.t);
6547 if (tok != ')') {
6548 for(;;) {
6549 expr_eq();
6550 gfunc_param_typed(s, sa);
6551 nb_args++;
6552 if (sa)
6553 sa = sa->next;
6554 if (tok == ')')
6555 break;
6556 skip(',');
6559 if (sa)
6560 tcc_error("too few arguments to function");
6561 skip(')');
6562 gfunc_call(nb_args);
6564 if (ret_nregs < 0) {
6565 vsetc(&ret.type, ret.r, &ret.c);
6566 #ifdef TCC_TARGET_RISCV64
6567 arch_transfer_ret_regs(1);
6568 #endif
6569 } else {
6570 /* return value */
6571 for (r = ret.r + ret_nregs + !ret_nregs; r-- > ret.r;) {
6572 vsetc(&ret.type, r, &ret.c);
6573 vtop->r2 = ret.r2; /* Loop only happens when r2 is VT_CONST */
6576 /* handle packed struct return */
6577 if (((s->type.t & VT_BTYPE) == VT_STRUCT) && ret_nregs) {
6578 int addr, offset;
6580 size = type_size(&s->type, &align);
6581 /* We're writing whole regs often, make sure there's enough
6582 space. Assume register size is power of 2. */
6583 if (regsize > align)
6584 align = regsize;
6585 loc = (loc - size) & -align;
6586 addr = loc;
6587 offset = 0;
6588 for (;;) {
6589 vset(&ret.type, VT_LOCAL | VT_LVAL, addr + offset);
6590 vswap();
6591 vstore();
6592 vtop--;
6593 if (--ret_nregs == 0)
6594 break;
6595 offset += regsize;
6597 vset(&s->type, VT_LOCAL | VT_LVAL, addr);
6600 /* Promote char/short return values. This is matters only
6601 for calling function that were not compiled by TCC and
6602 only on some architectures. For those where it doesn't
6603 matter we expect things to be already promoted to int,
6604 but not larger. */
6605 t = s->type.t & VT_BTYPE;
6606 if (t == VT_BYTE || t == VT_SHORT || t == VT_BOOL) {
6607 #ifdef PROMOTE_RET
6608 vtop->r |= BFVAL(VT_MUSTCAST, 1);
6609 #else
6610 vtop->type.t = VT_INT;
6611 #endif
6614 if (s->f.func_noreturn) {
6615 if (debug_modes)
6616 tcc_tcov_block_end (tcov_data.line);
6617 CODE_OFF();
6619 } else {
6620 break;
6625 #ifndef precedence_parser /* original top-down parser */
6627 static void expr_prod(void)
6629 int t;
6631 unary();
6632 while ((t = tok) == '*' || t == '/' || t == '%') {
6633 next();
6634 unary();
6635 gen_op(t);
6639 static void expr_sum(void)
6641 int t;
6643 expr_prod();
6644 while ((t = tok) == '+' || t == '-') {
6645 next();
6646 expr_prod();
6647 gen_op(t);
6651 static void expr_shift(void)
6653 int t;
6655 expr_sum();
6656 while ((t = tok) == TOK_SHL || t == TOK_SAR) {
6657 next();
6658 expr_sum();
6659 gen_op(t);
6663 static void expr_cmp(void)
6665 int t;
6667 expr_shift();
6668 while (((t = tok) >= TOK_ULE && t <= TOK_GT) ||
6669 t == TOK_ULT || t == TOK_UGE) {
6670 next();
6671 expr_shift();
6672 gen_op(t);
6676 static void expr_cmpeq(void)
6678 int t;
6680 expr_cmp();
6681 while ((t = tok) == TOK_EQ || t == TOK_NE) {
6682 next();
6683 expr_cmp();
6684 gen_op(t);
6688 static void expr_and(void)
6690 expr_cmpeq();
6691 while (tok == '&') {
6692 next();
6693 expr_cmpeq();
6694 gen_op('&');
6698 static void expr_xor(void)
6700 expr_and();
6701 while (tok == '^') {
6702 next();
6703 expr_and();
6704 gen_op('^');
6708 static void expr_or(void)
6710 expr_xor();
6711 while (tok == '|') {
6712 next();
6713 expr_xor();
6714 gen_op('|');
6718 static void expr_landor(int op);
6720 static void expr_land(void)
6722 expr_or();
6723 if (tok == TOK_LAND)
6724 expr_landor(tok);
6727 static void expr_lor(void)
6729 expr_land();
6730 if (tok == TOK_LOR)
6731 expr_landor(tok);
/* parse the next operand of a '&&'/'||' chain (top-down parser variant) */
# define expr_landor_next(op) op == TOK_LAND ? expr_or() : expr_land()
#else /* defined precedence_parser */
/* precedence-parser equivalents of the helpers above */
# define expr_landor_next(op) unary(), expr_infix(precedence(op) + 1)
# define expr_lor() unary(), expr_infix(1)
6739 static int precedence(int tok)
6741 switch (tok) {
6742 case TOK_LOR: return 1;
6743 case TOK_LAND: return 2;
6744 case '|': return 3;
6745 case '^': return 4;
6746 case '&': return 5;
6747 case TOK_EQ: case TOK_NE: return 6;
6748 relat: case TOK_ULT: case TOK_UGE: return 7;
6749 case TOK_SHL: case TOK_SAR: return 8;
6750 case '+': case '-': return 9;
6751 case '*': case '/': case '%': return 10;
6752 default:
6753 if (tok >= TOK_ULE && tok <= TOK_GT)
6754 goto relat;
6755 return 0;
static unsigned char prec[256];

/* cache the precedence of every single-byte token so later lookups
   are a table access; must run before the precedence() macro below
   is used (it calls the precedence() function defined above) */
static void init_prec(void)
{
    int i;
    for (i = 0; i < 256; i++)
        prec[i] = precedence(i);
}
/* fast precedence lookup; the argument is parenthesized so any
   expression may be passed safely (macro-hygiene, CERT PRE01-C).
   Note the argument is still evaluated twice — pass only
   side-effect-free expressions. */
#define precedence(i) ((unsigned)(i) < 256 ? prec[(i)] : 0)
6767 static void expr_landor(int op);
/* precedence-climbing parser for binary operators: consume operators
   whose precedence is at least 'p'.  After parsing a right operand,
   recurse for any tighter-binding operator before emitting the
   current one.  '&&'/'||' are delegated to expr_landor() for
   short-circuit handling.  Assumes the left operand is already
   parsed (vtop). */
static void expr_infix(int p)
{
    int t = tok, p2;
    while ((p2 = precedence(t)) >= p) {
        if (t == TOK_LOR || t == TOK_LAND) {
            expr_landor(t);
        } else {
            next();
            unary();
            /* right-hand operator binds tighter: reduce it first */
            if (precedence(tok) > p2)
                expr_infix(p2 + 1);
            gen_op(t);
        }
        t = tok;
    }
}
6785 #endif
6787 /* Assuming vtop is a value used in a conditional context
6788 (i.e. compared with zero) return 0 if it's false, 1 if
6789 true and -1 if it can't be statically determined. */
6790 static int condition_3way(void)
6792 int c = -1;
6793 if ((vtop->r & (VT_VALMASK | VT_LVAL)) == VT_CONST &&
6794 (!(vtop->r & VT_SYM) || !vtop->sym->a.weak)) {
6795 vdup();
6796 gen_cast_s(VT_BOOL);
6797 c = vtop->c.i;
6798 vpop();
6800 return c;
/* parse a chain of '&&' or '||' (selected by 'op') with short-circuit
   evaluation.  i is the short-circuit sense (1 for '&&', 0 for '||').
   cc remains set while every operand so far was a compile-time
   constant; f becomes 1 once a constant operand has decided the
   result, after which the remaining operands are parsed with
   nocode_wanted raised.  t accumulates the short-circuit jump chain
   for the runtime case. */
static void expr_landor(int op)
{
    int t = 0, cc = 1, f = 0, i = op == TOK_LAND, c;
    for(;;) {
        c = f ? i : condition_3way();
        if (c < 0)
            save_regs(1), cc = 0; /* runtime operand: must emit tests */
        else if (c != i)
            nocode_wanted++, f = 1; /* result decided: discard the rest */
        if (tok != op)
            break;
        if (c < 0)
            t = gvtst(i, t); /* jump out of the chain on short-circuit */
        else
            vpop();
        next();
        expr_landor_next(op);
    }
    if (cc || f) {
        /* result known at compile time: push the decided constant */
        vpop();
        vpushi(i ^ f);
        gsym(t);
        nocode_wanted -= f;
    } else {
        /* runtime result: leave it as VT_CMP with its jump chains */
        gvtst_set(i, t);
    }
}
6831 static int is_cond_bool(SValue *sv)
6833 if ((sv->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
6834 && (sv->type.t & VT_BTYPE) == VT_INT)
6835 return (unsigned)sv->c.i < 2;
6836 if (sv->r == VT_CMP)
6837 return 1;
6838 return 0;
/* parse and generate a conditional expression  cond ? e1 : e2
   (including the GNU  cond ?: e2  extension).  When the condition is
   a compile-time constant only the selected branch generates code;
   otherwise both branches are evaluated into a common register with
   jumps around each other. */
static void expr_cond(void)
{
    int tt, u, r1, r2, rc, t1, t2, islv, c, g;
    SValue sv;
    CType type;
    int ncw_prev;

    expr_lor();
    if (tok == '?') {
        next();
        c = condition_3way(); /* -1: runtime, 0/1: constant result */
        g = (tok == ':' && gnu_ext); /* GNU "?:": condition is also e1 */
        tt = 0;
        if (!g) {
            if (c < 0) {
                save_regs(1);
                tt = gvtst(1, 0); /* jump to ':' branch if false */
            } else {
                vpop();
            }
        } else if (c < 0) {
            /* needed to avoid having different registers saved in
               each branch */
            save_regs(1);
            gv_dup(); /* keep a copy: the condition doubles as e1 */
            tt = gvtst(0, 0);
        }

        ncw_prev = nocode_wanted;
        if (c == 0)
            nocode_wanted++; /* then-branch is statically dead */
        if (!g)
            gexpr();

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);
        sv = *vtop; /* save value to handle it later */
        vtop--; /* no vpop so that FP stack is not flushed */

        if (g) {
            u = tt;
        } else if (c < 0) {
            u = gjmp(0); /* skip over the ':' branch */
            gsym(tt);
        } else
            u = 0;

        nocode_wanted = ncw_prev;
        if (c == 1)
            nocode_wanted++; /* else-branch is statically dead */
        skip(':');
        expr_cond();

        if (c < 0 && is_cond_bool(vtop) && is_cond_bool(&sv)) {
            /* optimize "if (f ? a > b : c || d) ..." for example, where normally
               "a < b" and "c || d" would be forced to "(int)0/1" first, whereas
               this code jumps directly to the if's then/else branches. */
            t1 = gvtst(0, 0);
            t2 = gjmp(0);
            gsym(u);
            vpushv(&sv);
            /* combine jump targets of 2nd op with VT_CMP of 1st op */
            gvtst_set(0, t1);
            gvtst_set(1, t2);
            nocode_wanted = ncw_prev;
            //  tcc_warning("two conditions expr_cond");
            return;
        }

        if ((vtop->type.t & VT_BTYPE) == VT_FUNC)
            mk_pointer(&vtop->type);

        /* cast operands to correct type according to ISOC rules */
        if (!combine_types(&type, &sv, vtop, '?'))
            type_incompatibility_error(&sv.type, &vtop->type,
                "type mismatch in conditional expression (have '%s' and '%s')");
        /* keep structs lvalue by transforming `(expr ? a : b)` to `*(expr ? &a : &b)` so
           that `(expr ? a : b).mem` does not error with "lvalue expected" */
        islv = (vtop->r & VT_LVAL) && (sv.r & VT_LVAL) && VT_STRUCT == (type.t & VT_BTYPE);

        /* now we convert second operand */
        if (c != 1) {
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        rc = RC_TYPE(type.t);
        /* for long longs, we use fixed registers to avoid having
           to handle a complicated move */
        if (USING_TWO_WORDS(type.t))
            rc = RC_RET(type.t);

        tt = r2 = 0;
        if (c < 0) {
            r2 = gv(rc);
            tt = gjmp(0);
        }
        gsym(u);
        nocode_wanted = ncw_prev;

        /* this is horrible, but we must also convert first
           operand */
        if (c != 0) {
            *vtop = sv;
            gen_cast(&type);
            if (islv) {
                mk_pointer(&vtop->type);
                gaddrof();
            } else if (VT_STRUCT == (vtop->type.t & VT_BTYPE))
                gaddrof();
        }

        if (c < 0) {
            /* force both branch results into the same register */
            r1 = gv(rc);
            move_reg(r2, r1, islv ? VT_PTR : type.t);
            vtop->r = r2;
            gsym(tt);
        }

        if (islv)
            indir();
    }
}
6969 static void expr_eq(void)
6971 int t;
6973 expr_cond();
6974 if ((t = tok) == '=' || TOK_ASSIGN(t)) {
6975 test_lvalue();
6976 next();
6977 if (t == '=') {
6978 expr_eq();
6979 } else {
6980 vdup();
6981 expr_eq();
6982 gen_op(TOK_ASSIGN_OP(t));
6984 vstore();
6988 ST_FUNC void gexpr(void)
6990 while (1) {
6991 expr_eq();
6992 if (tok != ',')
6993 break;
6994 vpop();
6995 next();
/* parse a constant expression and return value in vtop. */
static void expr_const1(void)
{
    const_wanted++;
    /* raise nocode_wanted past unevalmask so no code or static data is
       emitted while the constant expression is parsed */
    nocode_wanted += unevalmask + 1;
    expr_cond();
    nocode_wanted -= unevalmask + 1;
    const_wanted--;
}
7009 /* parse an integer constant and return its value. */
7010 static inline int64_t expr_const64(void)
7012 int64_t c;
7013 expr_const1();
7014 if ((vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) != VT_CONST)
7015 expect("constant expression");
7016 c = vtop->c.i;
7017 vpop();
7018 return c;
7021 /* parse an integer constant and return its value.
7022 Complain if it doesn't fit 32bit (signed or unsigned). */
7023 ST_FUNC int expr_const(void)
7025 int c;
7026 int64_t wc = expr_const64();
7027 c = wc;
7028 if (c != wc && (unsigned)c != wc)
7029 tcc_error("constant exceeds 32 bit");
7030 return c;
7033 /* ------------------------------------------------------------------------- */
7034 /* return from function */
7036 #ifndef TCC_TARGET_ARM64
/* generate the return of a value: move vtop into the function's return
   location (registers, or the caller-provided buffer for structs) and
   drop it from the value stack */
static void gfunc_return(CType *func_type)
{
    if ((func_type->t & VT_BTYPE) == VT_STRUCT) {
        CType type, ret_type;
        int ret_align, ret_nregs, regsize;
        /* ask the target ABI how this struct is returned:
           < 0: handled by the back end (arch_transfer_ret_regs)
             0: copied through an implicit pointer argument
           > 0: packed into ret_nregs registers */
        ret_nregs = gfunc_sret(func_type, func_var, &ret_type,
                               &ret_align, &regsize);
        if (ret_nregs < 0) {
#ifdef TCC_TARGET_RISCV64
            arch_transfer_ret_regs(0);
#endif
        } else if (0 == ret_nregs) {
            /* if returning structure, must copy it to implicit
               first pointer arg location */
            type = *func_type;
            mk_pointer(&type);
            vset(&type, VT_LOCAL | VT_LVAL, func_vc);
            indir();
            vswap();
            /* copy structure value to pointer */
            vstore();
        } else {
            /* returning structure packed into registers */
            int size, addr, align, rc;
            size = type_size(func_type,&align);
            /* if the value is not suitably aligned for whole-register
               loads, spill it to an aligned temporary first */
            if ((vtop->r != (VT_LOCAL | VT_LVAL) ||
                 (vtop->c.i & (ret_align-1)))
                && (align & (ret_align-1))) {
                loc = (loc - size) & -ret_align;
                addr = loc;
                type = *func_type;
                vset(&type, VT_LOCAL | VT_LVAL, addr);
                vswap();
                vstore();
                vpop();
                vset(&ret_type, VT_LOCAL | VT_LVAL, addr);
            }
            vtop->type = ret_type;
            rc = RC_RET(ret_type.t);
            if (ret_nregs == 1)
                gv(rc);
            else {
                /* load each register-sized piece into the next
                   return-register class */
                for (;;) {
                    vdup();
                    gv(rc);
                    vpop();
                    if (--ret_nregs == 0)
                        break;
                    /* We assume that when a structure is returned in multiple
                       registers, their classes are consecutive values of the
                       suite s(n) = 2^n */
                    rc <<= 1;
                    vtop->c.i += regsize;
                }
            }
        }
    } else {
        gv(RC_RET(func_type->t));
    }
    vtop--; /* NOT vpop() because on x86 it would flush the fp stack */
}
7098 #endif
7100 static void check_func_return(void)
7102 if ((func_vt.t & VT_BTYPE) == VT_VOID)
7103 return;
7104 if (!strcmp (funcname, "main")
7105 && (func_vt.t & VT_BTYPE) == VT_INT) {
7106 /* main returns 0 by default */
7107 vpushi(0);
7108 gen_assign_cast(&func_vt);
7109 gfunc_return(&func_vt);
7110 } else {
7111 tcc_warning("function might return no value: '%s'", funcname);
7115 /* ------------------------------------------------------------------------- */
7116 /* switch/case */
7118 static int case_cmpi(const void *pa, const void *pb)
7120 int64_t a = (*(struct case_t**) pa)->v1;
7121 int64_t b = (*(struct case_t**) pb)->v1;
7122 return a < b ? -1 : a > b;
7125 static int case_cmpu(const void *pa, const void *pb)
7127 uint64_t a = (uint64_t)(*(struct case_t**) pa)->v1;
7128 uint64_t b = (uint64_t)(*(struct case_t**) pb)->v1;
7129 return a < b ? -1 : a > b;
/* generate a test of vtop and resolve the resulting true-jump to
   address 'a'; 't' is an existing jump chain forwarded to gvtst() */
static void gtst_addr(int t, int a)
{
    gsym_addr(gvtst(0, t), a);
}
/* generate dispatch code for a sorted array of case ranges: the switch
   value is on vtop and stays there.  Binary search while more than 8
   entries remain, then a linear scan; ends by appending the no-match
   jump to *bsym. */
static void gcase(struct case_t **base, int len, int *bsym)
{
    struct case_t *p;
    int e;
    /* 64-bit comparisons when the switch value is long long */
    int ll = (vtop->type.t & VT_BTYPE) == VT_LLONG;
    while (len > 8) {
        /* binary search */
        p = base[len/2];
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        gen_op(TOK_LE);
        e = gvtst(1, 0);
        vdup();
        if (ll)
            vpushll(p->v1);
        else
            vpushi(p->v1);
        gen_op(TOK_GE);
        gtst_addr(0, p->sym); /* v1 <= x <= v2 */
        /* x < v1 */
        gcase(base, len/2, bsym);
        /* x > v2 */
        gsym(e);
        e = len/2 + 1;
        base += e; len -= e;
    }
    /* linear scan */
    while (len--) {
        p = *base++;
        vdup();
        if (ll)
            vpushll(p->v2);
        else
            vpushi(p->v2);
        if (p->v1 == p->v2) {
            /* single-value case */
            gen_op(TOK_EQ);
            gtst_addr(0, p->sym);
        } else {
            /* GNU range case: v1 ... v2 */
            gen_op(TOK_LE);
            e = gvtst(1, 0);
            vdup();
            if (ll)
                vpushll(p->v1);
            else
                vpushi(p->v1);
            gen_op(TOK_GE);
            gtst_addr(0, p->sym);
            gsym(e);
        }
    }
    *bsym = gjmp(*bsym);
}
7193 /* ------------------------------------------------------------------------- */
7194 /* __attribute__((cleanup(fn))) */
/* emit calls to the __attribute__((cleanup)) handlers registered on
   the current scope's cleanup list, innermost first, stopping when
   'stop' (an enclosing scope's list head, or NULL for all) is
   reached */
static void try_call_scope_cleanup(Sym *stop)
{
    Sym *cls = cur_scope->cl.s;

    for (; cls != stop; cls = cls->ncl) {
        Sym *fs = cls->next;      /* the cleanup function */
        Sym *vs = cls->prev_tok;  /* the variable it guards */

        vpushsym(&fs->type, fs);
        vset(&vs->type, vs->r, vs->c);
        vtop->sym = vs;
        /* the handler receives a pointer to the variable */
        mk_pointer(&vtop->type);
        gaddrof();
        gfunc_call(1);
    }
}
/* emit the cleanups required by a goto whose target label was
   recorded with cleanup state 'cleanupstate': walk both cleanup
   chains up to their nearest common ancestor and run every handler
   registered below it */
static void try_call_cleanup_goto(Sym *cleanupstate)
{
    Sym *oc, *cc;
    int ocd, ccd;

    if (!cur_scope->cl.s)
        return;

    /* search NCA of both cleanup chains given parents and initial depth */
    ocd = cleanupstate ? cleanupstate->v & ~SYM_FIELD : 0;
    /* equalize depths, then advance both chains in lockstep */
    for (ccd = cur_scope->cl.n, oc = cleanupstate; ocd > ccd; --ocd, oc = oc->ncl)
        ;
    for (cc = cur_scope->cl.s; ccd > ocd; --ccd, cc = cc->ncl)
        ;
    for (; cc != oc; cc = cc->ncl, oc = oc->ncl, --ccd)
        ;

    try_call_scope_cleanup(cc);
}
/* call 'func' for each __attribute__((cleanup(func))) */
/* resolve pending forward gotos that jump out of scope 'o': for each
   such goto, run the cleanups of the scope being left and re-emit the
   goto's jump; straight-line flow skips around that cleanup code */
static void block_cleanup(struct scope *o)
{
    int jmp = 0;
    Sym *g, **pg;
    /* only gotos whose recorded cleanup depth is deeper than o's need
       handling here */
    for (pg = &pending_gotos; (g = *pg) && g->c > o->cl.n;) {
        if (g->prev_tok->r & LABEL_FORWARD) {
            Sym *pcl = g->next;
            /* one shared jump around the cleanup code for fall-through */
            if (!jmp)
                jmp = gjmp(0);
            gsym(pcl->jnext);
            try_call_scope_cleanup(o->cl.s);
            pcl->jnext = gjmp(0);
            if (!o->cl.n)
                goto remove_pending;
            g->c = o->cl.n;
            pg = &g->prev;
        } else {
    remove_pending:
            *pg = g->prev;
            sym_free(g);
        }
    }
    gsym(jmp);
    /* normal (fall-through) exit from the scope */
    try_call_scope_cleanup(o->cl.s);
}
7260 /* ------------------------------------------------------------------------- */
7261 /* VLA */
/* restore the stack pointer from the saved location 'loc'
   (0 means nothing to restore) */
static void vla_restore(int loc)
{
    if (loc)
        gen_vla_sp_restore(loc);
}
7269 static void vla_leave(struct scope *o)
7271 struct scope *c = cur_scope, *v = NULL;
7272 for (; c != o && c; c = c->prev)
7273 if (c->vla.num)
7274 v = c;
7275 if (v)
7276 vla_restore(v->vla.locorig);
7279 /* ------------------------------------------------------------------------- */
7280 /* local scopes */
/* enter a new local scope: 'o' becomes the current scope, inheriting
   everything from its parent except the VLA counter */
void new_scope(struct scope *o)
{
    /* copy and link previous scope */
    *o = *cur_scope;
    o->prev = cur_scope;
    cur_scope = o;
    cur_scope->vla.num = 0;

    /* record local declaration stack position */
    o->lstk = local_stack;
    o->llstk = local_label_stack;
    ++local_scope;

    /* emit a "block begin" stab when generating debug info */
    if (debug_modes)
        tcc_debug_stabn(tcc_state, N_LBRAC, ind - func_ind);
}
/* leave scope 'o' at the end of a compound statement and restore its
   parent as the current scope; is_expr is set when the block is a
   GNU statement expression whose value must survive */
void prev_scope(struct scope *o, int is_expr)
{
    vla_leave(o->prev);

    /* run cleanups registered since the scope was entered */
    if (o->cl.s != o->prev->cl.s)
        block_cleanup(o->prev);

    /* pop locally defined labels */
    label_pop(&local_label_stack, o->llstk, is_expr);

    /* In the is_expr case (a statement expression is finished here),
       vtop might refer to symbols on the local_stack.  Either via the
       type or via vtop->sym.  We can't pop those nor any that in turn
       might be referred to.  To make it easier we don't roll back
       any symbols in that case; some upper level call to block() will
       do that.  We do have to remove such symbols from the lookup
       tables, though.  sym_pop will do that. */

    /* pop locally defined symbols */
    pop_local_syms(o->lstk, is_expr);
    cur_scope = o->prev;
    --local_scope;

    /* emit a "block end" stab when generating debug info */
    if (debug_modes)
        tcc_debug_stabn(tcc_state, N_RBRAC, ind - func_ind);
}
7326 /* leave a scope via break/continue(/goto) */
7327 void leave_scope(struct scope *o)
7329 if (!o)
7330 return;
7331 try_call_scope_cleanup(o->cl.s);
7332 vla_leave(o);
7335 /* ------------------------------------------------------------------------- */
7336 /* call block from 'for do while' loops */
/* parse a loop body: temporarily install 'bsym'/'csym' as the
   break/continue jump chains of the current scope (and make it the
   innermost loop scope when a continue target is given), then restore
   the previous targets */
static void lblock(int *bsym, int *csym)
{
    struct scope *lo = loop_scope, *co = cur_scope;
    int *b = co->bsym, *c = co->csym;
    if (csym) {
        co->csym = csym;
        loop_scope = co;
    }
    co->bsym = bsym;
    block(0);
    /* restore the previous break/continue state */
    co->bsym = b;
    if (csym) {
        co->csym = c;
        loop_scope = lo;
    }
}
7355 static void block(int is_expr)
7357 int a, b, c, d, e, t;
7358 struct scope o;
7359 Sym *s;
7361 if (is_expr) {
7362 /* default return value is (void) */
7363 vpushi(0);
7364 vtop->type.t = VT_VOID;
7367 again:
7368 t = tok;
7369 /* If the token carries a value, next() might destroy it. Only with
7370 invalid code such as f(){"123"4;} */
7371 if (TOK_HAS_VALUE(t))
7372 goto expr;
7373 next();
7375 if (debug_modes)
7376 tcc_tcov_check_line (0), tcc_tcov_block_begin ();
7378 if (t == TOK_IF) {
7379 skip('(');
7380 gexpr();
7381 skip(')');
7382 a = gvtst(1, 0);
7383 block(0);
7384 if (tok == TOK_ELSE) {
7385 d = gjmp(0);
7386 gsym(a);
7387 next();
7388 block(0);
7389 gsym(d); /* patch else jmp */
7390 } else {
7391 gsym(a);
7394 } else if (t == TOK_WHILE) {
7395 d = gind();
7396 skip('(');
7397 gexpr();
7398 skip(')');
7399 a = gvtst(1, 0);
7400 b = 0;
7401 lblock(&a, &b);
7402 gjmp_addr(d);
7403 gsym_addr(b, d);
7404 gsym(a);
7406 } else if (t == '{') {
7407 new_scope(&o);
7409 /* handle local labels declarations */
7410 while (tok == TOK_LABEL) {
7411 do {
7412 next();
7413 if (tok < TOK_UIDENT)
7414 expect("label identifier");
7415 label_push(&local_label_stack, tok, LABEL_DECLARED);
7416 next();
7417 } while (tok == ',');
7418 skip(';');
7421 while (tok != '}') {
7422 decl(VT_LOCAL);
7423 if (tok != '}') {
7424 if (is_expr)
7425 vpop();
7426 block(is_expr);
7430 prev_scope(&o, is_expr);
7431 if (local_scope)
7432 next();
7433 else if (!nocode_wanted)
7434 check_func_return();
7436 } else if (t == TOK_RETURN) {
7437 b = (func_vt.t & VT_BTYPE) != VT_VOID;
7438 if (tok != ';') {
7439 gexpr();
7440 if (b) {
7441 gen_assign_cast(&func_vt);
7442 } else {
7443 if (vtop->type.t != VT_VOID)
7444 tcc_warning("void function returns a value");
7445 vtop--;
7447 } else if (b) {
7448 tcc_warning("'return' with no value");
7449 b = 0;
7451 leave_scope(root_scope);
7452 if (b)
7453 gfunc_return(&func_vt);
7454 skip(';');
7455 /* jump unless last stmt in top-level block */
7456 if (tok != '}' || local_scope != 1)
7457 rsym = gjmp(rsym);
7458 if (debug_modes)
7459 tcc_tcov_block_end (tcov_data.line);
7460 CODE_OFF();
7462 } else if (t == TOK_BREAK) {
7463 /* compute jump */
7464 if (!cur_scope->bsym)
7465 tcc_error("cannot break");
7466 if (cur_switch && cur_scope->bsym == cur_switch->bsym)
7467 leave_scope(cur_switch->scope);
7468 else
7469 leave_scope(loop_scope);
7470 *cur_scope->bsym = gjmp(*cur_scope->bsym);
7471 skip(';');
7473 } else if (t == TOK_CONTINUE) {
7474 /* compute jump */
7475 if (!cur_scope->csym)
7476 tcc_error("cannot continue");
7477 leave_scope(loop_scope);
7478 *cur_scope->csym = gjmp(*cur_scope->csym);
7479 skip(';');
7481 } else if (t == TOK_FOR) {
7482 new_scope(&o);
7484 skip('(');
7485 if (tok != ';') {
7486 /* c99 for-loop init decl? */
7487 if (!decl0(VT_LOCAL, 1, NULL)) {
7488 /* no, regular for-loop init expr */
7489 gexpr();
7490 vpop();
7493 skip(';');
7494 a = b = 0;
7495 c = d = gind();
7496 if (tok != ';') {
7497 gexpr();
7498 a = gvtst(1, 0);
7500 skip(';');
7501 if (tok != ')') {
7502 e = gjmp(0);
7503 d = gind();
7504 gexpr();
7505 vpop();
7506 gjmp_addr(c);
7507 gsym(e);
7509 skip(')');
7510 lblock(&a, &b);
7511 gjmp_addr(d);
7512 gsym_addr(b, d);
7513 gsym(a);
7514 prev_scope(&o, 0);
7516 } else if (t == TOK_DO) {
7517 a = b = 0;
7518 d = gind();
7519 lblock(&a, &b);
7520 gsym(b);
7521 skip(TOK_WHILE);
7522 skip('(');
7523 gexpr();
7524 skip(')');
7525 skip(';');
7526 c = gvtst(0, 0);
7527 gsym_addr(c, d);
7528 gsym(a);
7530 } else if (t == TOK_SWITCH) {
7531 struct switch_t *sw;
7533 sw = tcc_mallocz(sizeof *sw);
7534 sw->bsym = &a;
7535 sw->scope = cur_scope;
7536 sw->prev = cur_switch;
7537 cur_switch = sw;
7539 skip('(');
7540 gexpr();
7541 skip(')');
7542 sw->sv = *vtop--; /* save switch value */
7544 a = 0;
7545 b = gjmp(0); /* jump to first case */
7546 lblock(&a, NULL);
7547 a = gjmp(a); /* add implicit break */
7548 /* case lookup */
7549 gsym(b);
7551 if (sw->sv.type.t & VT_UNSIGNED)
7552 qsort(sw->p, sw->n, sizeof(void*), case_cmpu);
7553 else
7554 qsort(sw->p, sw->n, sizeof(void*), case_cmpi);
7556 for (b = 1; b < sw->n; b++)
7557 if (sw->sv.type.t & VT_UNSIGNED
7558 ? (uint64_t)sw->p[b - 1]->v2 >= (uint64_t)sw->p[b]->v1
7559 : sw->p[b - 1]->v2 >= sw->p[b]->v1)
7560 tcc_error("duplicate case value");
7562 vpushv(&sw->sv);
7563 gv(RC_INT);
7564 d = 0, gcase(sw->p, sw->n, &d);
7565 vpop();
7566 if (sw->def_sym)
7567 gsym_addr(d, sw->def_sym);
7568 else
7569 gsym(d);
7570 /* break label */
7571 gsym(a);
7573 dynarray_reset(&sw->p, &sw->n);
7574 cur_switch = sw->prev;
7575 tcc_free(sw);
7577 } else if (t == TOK_CASE) {
7578 struct case_t *cr = tcc_malloc(sizeof(struct case_t));
7579 if (!cur_switch)
7580 expect("switch");
7581 cr->v1 = cr->v2 = expr_const64();
7582 if (gnu_ext && tok == TOK_DOTS) {
7583 next();
7584 cr->v2 = expr_const64();
7585 if ((!(cur_switch->sv.type.t & VT_UNSIGNED) && cr->v2 < cr->v1)
7586 || (cur_switch->sv.type.t & VT_UNSIGNED && (uint64_t)cr->v2 < (uint64_t)cr->v1))
7587 tcc_warning("empty case range");
7589 tcov_data.ind = 0;
7590 cr->sym = gind();
7591 dynarray_add(&cur_switch->p, &cur_switch->n, cr);
7592 skip(':');
7593 is_expr = 0;
7594 goto block_after_label;
7596 } else if (t == TOK_DEFAULT) {
7597 if (!cur_switch)
7598 expect("switch");
7599 if (cur_switch->def_sym)
7600 tcc_error("too many 'default'");
7601 tcov_data.ind = 0;
7602 cur_switch->def_sym = gind();
7603 skip(':');
7604 is_expr = 0;
7605 goto block_after_label;
7607 } else if (t == TOK_GOTO) {
7608 if (cur_scope->vla.num)
7609 vla_restore(cur_scope->vla.locorig);
7610 if (tok == '*' && gnu_ext) {
7611 /* computed goto */
7612 next();
7613 gexpr();
7614 if ((vtop->type.t & VT_BTYPE) != VT_PTR)
7615 expect("pointer");
7616 ggoto();
7618 } else if (tok >= TOK_UIDENT) {
7619 s = label_find(tok);
7620 /* put forward definition if needed */
7621 if (!s)
7622 s = label_push(&global_label_stack, tok, LABEL_FORWARD);
7623 else if (s->r == LABEL_DECLARED)
7624 s->r = LABEL_FORWARD;
7626 if (s->r & LABEL_FORWARD) {
7627 /* start new goto chain for cleanups, linked via label->next */
7628 if (cur_scope->cl.s && !nocode_wanted) {
7629 sym_push2(&pending_gotos, SYM_FIELD, 0, cur_scope->cl.n);
7630 pending_gotos->prev_tok = s;
7631 s = sym_push2(&s->next, SYM_FIELD, 0, 0);
7632 pending_gotos->next = s;
7634 s->jnext = gjmp(s->jnext);
7635 } else {
7636 try_call_cleanup_goto(s->cleanupstate);
7637 gjmp_addr(s->jnext);
7639 next();
7641 } else {
7642 expect("label identifier");
7644 skip(';');
7646 } else if (t == TOK_ASM1 || t == TOK_ASM2 || t == TOK_ASM3) {
7647 asm_instr();
7649 } else {
7650 if (tok == ':' && t >= TOK_UIDENT) {
7651 /* label case */
7652 next();
7653 s = label_find(t);
7654 if (s) {
7655 if (s->r == LABEL_DEFINED)
7656 tcc_error("duplicate label '%s'", get_tok_str(s->v, NULL));
7657 s->r = LABEL_DEFINED;
7658 if (s->next) {
7659 Sym *pcl; /* pending cleanup goto */
7660 for (pcl = s->next; pcl; pcl = pcl->prev)
7661 gsym(pcl->jnext);
7662 sym_pop(&s->next, NULL, 0);
7663 } else
7664 gsym(s->jnext);
7665 } else {
7666 s = label_push(&global_label_stack, t, LABEL_DEFINED);
7668 s->jnext = gind();
7669 s->cleanupstate = cur_scope->cl.s;
7671 block_after_label:
7672 vla_restore(cur_scope->vla.loc);
7673 /* we accept this, but it is a mistake */
7674 if (tok == '}') {
7675 tcc_warning("deprecated use of label at end of compound statement");
7676 } else {
7677 goto again;
7680 } else {
7681 /* expression case */
7682 if (t != ';') {
7683 unget_tok(t);
7684 expr:
7685 if (is_expr) {
7686 vpop();
7687 gexpr();
7688 } else {
7689 gexpr();
7690 vpop();
7692 skip(';');
7697 if (debug_modes)
7698 tcc_tcov_check_line (0), tcc_tcov_block_end (0);
7701 /* This skips over a stream of tokens containing balanced {} and ()
7702 pairs, stopping at outer ',' ';' and '}' (or matching '}' if we started
7703 with a '{'). If STR then allocates and stores the skipped tokens
7704 in *STR. This doesn't check if () and {} are nested correctly,
7705 i.e. "({)}" is accepted. */
7706 static void skip_or_save_block(TokenString **str)
7708 int braces = tok == '{';
7709 int level = 0;
7710 if (str)
7711 *str = tok_str_alloc();
7713 while ((level > 0 || (tok != '}' && tok != ',' && tok != ';' && tok != ')'))) {
7714 int t;
7715 if (tok == TOK_EOF) {
7716 if (str || level > 0)
7717 tcc_error("unexpected end of file");
7718 else
7719 break;
7721 if (str)
7722 tok_str_add_tok(*str);
7723 t = tok;
7724 next();
7725 if (t == '{' || t == '(') {
7726 level++;
7727 } else if (t == '}' || t == ')') {
7728 level--;
7729 if (level == 0 && braces && t == '}')
7730 break;
7733 if (str) {
7734 tok_str_add(*str, -1);
7735 tok_str_add(*str, 0);
7739 #define EXPR_CONST 1
7740 #define EXPR_ANY 2
7742 static void parse_init_elem(int expr_type)
7744 int saved_global_expr;
7745 switch(expr_type) {
7746 case EXPR_CONST:
7747 /* compound literals must be allocated globally in this case */
7748 saved_global_expr = global_expr;
7749 global_expr = 1;
7750 expr_const1();
7751 global_expr = saved_global_expr;
7752 /* NOTE: symbols are accepted, as well as lvalue for anon symbols
7753 (compound literals). */
7754 if (((vtop->r & (VT_VALMASK | VT_LVAL)) != VT_CONST
7755 && ((vtop->r & (VT_SYM|VT_LVAL)) != (VT_SYM|VT_LVAL)
7756 || vtop->sym->v < SYM_FIRST_ANOM))
7757 #ifdef TCC_TARGET_PE
7758 || ((vtop->r & VT_SYM) && vtop->sym->a.dllimport)
7759 #endif
7761 tcc_error("initializer element is not constant");
7762 break;
7763 case EXPR_ANY:
7764 expr_eq();
7765 break;
7769 #if 1
7770 static void init_assert(init_params *p, int offset)
7772 if (p->sec ? !NODATA_WANTED && offset > p->sec->data_offset
7773 : !nocode_wanted && offset > p->local_offset)
7774 tcc_internal_error("initializer overflow");
7776 #else
7777 #define init_assert(sec, offset)
7778 #endif
7780 /* put zeros for variable based init */
7781 static void init_putz(init_params *p, unsigned long c, int size)
7783 init_assert(p, c + size);
7784 if (p->sec) {
7785 /* nothing to do because globals are already set to zero */
7786 } else {
7787 vpush_helper_func(TOK_memset);
7788 vseti(VT_LOCAL, c);
7789 #ifdef TCC_TARGET_ARM
7790 vpushs(size);
7791 vpushi(0);
7792 #else
7793 vpushi(0);
7794 vpushs(size);
7795 #endif
7796 gfunc_call(3);
7800 #define DIF_FIRST 1
7801 #define DIF_SIZE_ONLY 2
7802 #define DIF_HAVE_ELEM 4
7803 #define DIF_CLEAR 8
7805 /* delete relocations for specified range c ... c + size. Unfortunatly
7806 in very special cases, relocations may occur unordered */
7807 static void decl_design_delrels(Section *sec, int c, int size)
7809 ElfW_Rel *rel, *rel2, *rel_end;
7810 if (!sec || !sec->reloc)
7811 return;
7812 rel = rel2 = (ElfW_Rel*)sec->reloc->data;
7813 rel_end = (ElfW_Rel*)(sec->reloc->data + sec->reloc->data_offset);
7814 while (rel < rel_end) {
7815 if (rel->r_offset >= c && rel->r_offset < c + size) {
7816 sec->reloc->data_offset -= sizeof *rel;
7817 } else {
7818 if (rel2 != rel)
7819 memcpy(rel2, rel, sizeof *rel);
7820 ++rel2;
7822 ++rel;
7826 static void decl_design_flex(init_params *p, Sym *ref, int index)
7828 if (ref == p->flex_array_ref) {
7829 if (index >= ref->c)
7830 ref->c = index + 1;
7831 } else if (ref->c < 0)
7832 tcc_error("flexible array has zero size in this context");
7835 /* t is the array or struct type. c is the array or struct
7836 address. cur_field is the pointer to the current
7837 field, for arrays the 'c' member contains the current start
7838 index. 'flags' is as in decl_initializer.
7839 'al' contains the already initialized length of the
7840 current container (starting at c). This returns the new length of that. */
7841 static int decl_designator(init_params *p, CType *type, unsigned long c,
7842 Sym **cur_field, int flags, int al)
7844 Sym *s, *f;
7845 int index, index_last, align, l, nb_elems, elem_size;
7846 unsigned long corig = c;
7848 elem_size = 0;
7849 nb_elems = 1;
7851 if (flags & DIF_HAVE_ELEM)
7852 goto no_designator;
7854 if (gnu_ext && tok >= TOK_UIDENT) {
7855 l = tok, next();
7856 if (tok == ':')
7857 goto struct_field;
7858 unget_tok(l);
7861 /* NOTE: we only support ranges for last designator */
7862 while (nb_elems == 1 && (tok == '[' || tok == '.')) {
7863 if (tok == '[') {
7864 if (!(type->t & VT_ARRAY))
7865 expect("array type");
7866 next();
7867 index = index_last = expr_const();
7868 if (tok == TOK_DOTS && gnu_ext) {
7869 next();
7870 index_last = expr_const();
7872 skip(']');
7873 s = type->ref;
7874 decl_design_flex(p, s, index_last);
7875 if (index < 0 || index_last >= s->c || index_last < index)
7876 tcc_error("index exceeds array bounds or range is empty");
7877 if (cur_field)
7878 (*cur_field)->c = index_last;
7879 type = pointed_type(type);
7880 elem_size = type_size(type, &align);
7881 c += index * elem_size;
7882 nb_elems = index_last - index + 1;
7883 } else {
7884 int cumofs;
7885 next();
7886 l = tok;
7887 struct_field:
7888 next();
7889 if ((type->t & VT_BTYPE) != VT_STRUCT)
7890 expect("struct/union type");
7891 cumofs = 0;
7892 f = find_field(type, l, &cumofs);
7893 if (!f)
7894 expect("field");
7895 if (cur_field)
7896 *cur_field = f;
7897 type = &f->type;
7898 c += cumofs + f->c;
7900 cur_field = NULL;
7902 if (!cur_field) {
7903 if (tok == '=') {
7904 next();
7905 } else if (!gnu_ext) {
7906 expect("=");
7908 } else {
7909 no_designator:
7910 if (type->t & VT_ARRAY) {
7911 index = (*cur_field)->c;
7912 s = type->ref;
7913 decl_design_flex(p, s, index);
7914 if (index >= s->c)
7915 tcc_error("too many initializers");
7916 type = pointed_type(type);
7917 elem_size = type_size(type, &align);
7918 c += index * elem_size;
7919 } else {
7920 f = *cur_field;
7921 while (f && (f->v & SYM_FIRST_ANOM) && (f->type.t & VT_BITFIELD))
7922 *cur_field = f = f->next;
7923 if (!f)
7924 tcc_error("too many initializers");
7925 type = &f->type;
7926 c += f->c;
7930 if (!elem_size) /* for structs */
7931 elem_size = type_size(type, &align);
7933 /* Using designators the same element can be initialized more
7934 than once. In that case we need to delete possibly already
7935 existing relocations. */
7936 if (!(flags & DIF_SIZE_ONLY) && c - corig < al) {
7937 decl_design_delrels(p->sec, c, elem_size * nb_elems);
7938 flags &= ~DIF_CLEAR; /* mark stack dirty too */
7941 decl_initializer(p, type, c, flags & ~DIF_FIRST);
7943 if (!(flags & DIF_SIZE_ONLY) && nb_elems > 1) {
7944 Sym aref = {0};
7945 CType t1;
7946 int i;
7947 if (p->sec || (type->t & VT_ARRAY)) {
7948 /* make init_putv/vstore believe it were a struct */
7949 aref.c = elem_size;
7950 t1.t = VT_STRUCT, t1.ref = &aref;
7951 type = &t1;
7953 if (p->sec)
7954 vpush_ref(type, p->sec, c, elem_size);
7955 else
7956 vset(type, VT_LOCAL|VT_LVAL, c);
7957 for (i = 1; i < nb_elems; i++) {
7958 vdup();
7959 init_putv(p, type, c + elem_size * i);
7961 vpop();
7964 c += nb_elems * elem_size;
7965 if (c - corig > al)
7966 al = c - corig;
7967 return al;
7970 /* store a value or an expression directly in global data or in local array */
7971 static void init_putv(init_params *p, CType *type, unsigned long c)
7973 int bt;
7974 void *ptr;
7975 CType dtype;
7976 int size, align;
7977 Section *sec = p->sec;
7978 uint64_t val;
7980 dtype = *type;
7981 dtype.t &= ~VT_CONSTANT; /* need to do that to avoid false warning */
7983 size = type_size(type, &align);
7984 if (type->t & VT_BITFIELD)
7985 size = (BIT_POS(type->t) + BIT_SIZE(type->t) + 7) / 8;
7986 init_assert(p, c + size);
7988 if (sec) {
7989 /* XXX: not portable */
7990 /* XXX: generate error if incorrect relocation */
7991 gen_assign_cast(&dtype);
7992 bt = type->t & VT_BTYPE;
7994 if ((vtop->r & VT_SYM)
7995 && bt != VT_PTR
7996 && (bt != (PTR_SIZE == 8 ? VT_LLONG : VT_INT)
7997 || (type->t & VT_BITFIELD))
7998 && !((vtop->r & VT_CONST) && vtop->sym->v >= SYM_FIRST_ANOM)
8000 tcc_error("initializer element is not computable at load time");
8002 if (NODATA_WANTED) {
8003 vtop--;
8004 return;
8007 ptr = sec->data + c;
8008 val = vtop->c.i;
8010 /* XXX: make code faster ? */
8011 if ((vtop->r & (VT_SYM|VT_CONST)) == (VT_SYM|VT_CONST) &&
8012 vtop->sym->v >= SYM_FIRST_ANOM &&
8013 /* XXX This rejects compound literals like
8014 '(void *){ptr}'. The problem is that '&sym' is
8015 represented the same way, which would be ruled out
8016 by the SYM_FIRST_ANOM check above, but also '"string"'
8017 in 'char *p = "string"' is represented the same
8018 with the type being VT_PTR and the symbol being an
8019 anonymous one. That is, there's no difference in vtop
8020 between '(void *){x}' and '&(void *){x}'. Ignore
8021 pointer typed entities here. Hopefully no real code
8022 will ever use compound literals with scalar type. */
8023 (vtop->type.t & VT_BTYPE) != VT_PTR) {
8024 /* These come from compound literals, memcpy stuff over. */
8025 Section *ssec;
8026 ElfSym *esym;
8027 ElfW_Rel *rel;
8028 esym = elfsym(vtop->sym);
8029 ssec = tcc_state->sections[esym->st_shndx];
8030 memmove (ptr, ssec->data + esym->st_value + (int)vtop->c.i, size);
8031 if (ssec->reloc) {
8032 /* We need to copy over all memory contents, and that
8033 includes relocations. Use the fact that relocs are
8034 created it order, so look from the end of relocs
8035 until we hit one before the copied region. */
8036 int num_relocs = ssec->reloc->data_offset / sizeof(*rel);
8037 rel = (ElfW_Rel*)(ssec->reloc->data + ssec->reloc->data_offset);
8038 while (num_relocs--) {
8039 rel--;
8040 if (rel->r_offset >= esym->st_value + size)
8041 continue;
8042 if (rel->r_offset < esym->st_value)
8043 break;
8044 put_elf_reloca(symtab_section, sec,
8045 c + rel->r_offset - esym->st_value,
8046 ELFW(R_TYPE)(rel->r_info),
8047 ELFW(R_SYM)(rel->r_info),
8048 #if PTR_SIZE == 8
8049 rel->r_addend
8050 #else
8052 #endif
8056 } else {
8057 if (type->t & VT_BITFIELD) {
8058 int bit_pos, bit_size, bits, n;
8059 unsigned char *p, v, m;
8060 bit_pos = BIT_POS(vtop->type.t);
8061 bit_size = BIT_SIZE(vtop->type.t);
8062 p = (unsigned char*)ptr + (bit_pos >> 3);
8063 bit_pos &= 7, bits = 0;
8064 while (bit_size) {
8065 n = 8 - bit_pos;
8066 if (n > bit_size)
8067 n = bit_size;
8068 v = val >> bits << bit_pos;
8069 m = ((1 << n) - 1) << bit_pos;
8070 *p = (*p & ~m) | (v & m);
8071 bits += n, bit_size -= n, bit_pos = 0, ++p;
8073 } else
8074 switch(bt) {
8075 case VT_BOOL:
8076 *(char *)ptr = val != 0;
8077 break;
8078 case VT_BYTE:
8079 *(char *)ptr = val;
8080 break;
8081 case VT_SHORT:
8082 write16le(ptr, val);
8083 break;
8084 case VT_FLOAT:
8085 write32le(ptr, val);
8086 break;
8087 case VT_DOUBLE:
8088 write64le(ptr, val);
8089 break;
8090 case VT_LDOUBLE:
8091 #if defined TCC_IS_NATIVE_387
8092 /* Host and target platform may be different but both have x87.
8093 On windows, tcc does not use VT_LDOUBLE, except when it is a
8094 cross compiler. In this case a mingw gcc as host compiler
8095 comes here with 10-byte long doubles, while msvc or tcc won't.
8096 tcc itself can still translate by asm.
8097 In any case we avoid possibly random bytes 11 and 12.
8099 if (sizeof (long double) >= 10)
8100 memcpy(ptr, &vtop->c.ld, 10);
8101 #ifdef __TINYC__
8102 else if (sizeof (long double) == sizeof (double))
8103 __asm__("fldl %1\nfstpt %0\n" : "=m" (*ptr) : "m" (vtop->c.ld));
8104 #endif
8105 else if (vtop->c.ld == (f1-f1) )
8107 else
8108 #endif
8109 /* For other platforms it should work natively, but may not work
8110 for cross compilers */
8111 if (sizeof(long double) == LDOUBLE_SIZE)
8112 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
8113 else if (sizeof(double) == LDOUBLE_SIZE)
8114 memcpy(ptr, &vtop->c.ld, LDOUBLE_SIZE);
8115 #ifndef TCC_CROSS_TEST
8116 else
8117 tcc_error("can't cross compile long double constants");
8118 #endif
8119 break;
8121 #if PTR_SIZE == 8
8122 /* intptr_t may need a reloc too, see tcctest.c:relocation_test() */
8123 case VT_LLONG:
8124 case VT_PTR:
8125 if (vtop->r & VT_SYM)
8126 greloca(sec, vtop->sym, c, R_DATA_PTR, val);
8127 else
8128 write64le(ptr, val);
8129 break;
8130 case VT_INT:
8131 write32le(ptr, val);
8132 break;
8133 #else
8134 case VT_LLONG:
8135 write64le(ptr, val);
8136 break;
8137 case VT_PTR:
8138 case VT_INT:
8139 if (vtop->r & VT_SYM)
8140 greloc(sec, vtop->sym, c, R_DATA_PTR);
8141 write32le(ptr, val);
8142 break;
8143 #endif
8144 default:
8145 //tcc_internal_error("unexpected type");
8146 break;
8149 vtop--;
8150 } else {
8151 vset(&dtype, VT_LOCAL|VT_LVAL, c);
8152 vswap();
8153 vstore();
8154 vpop();
8158 /* 't' contains the type and storage info. 'c' is the offset of the
8159 object in section 'sec'. If 'sec' is NULL, it means stack based
8160 allocation. 'flags & DIF_FIRST' is true if array '{' must be read (multi
8161 dimension implicit array init handling). 'flags & DIF_SIZE_ONLY' is true if
8162 size only evaluation is wanted (only for arrays). */
8163 static void decl_initializer(init_params *p, CType *type, unsigned long c, int flags)
8165 int len, n, no_oblock, i;
8166 int size1, align1;
8167 Sym *s, *f;
8168 Sym indexsym;
8169 CType *t1;
8171 /* generate line number info */
8172 if (debug_modes && !p->sec)
8173 tcc_debug_line(tcc_state), tcc_tcov_check_line (1);
8175 if (!(flags & DIF_HAVE_ELEM) && tok != '{' &&
8176 /* In case of strings we have special handling for arrays, so
8177 don't consume them as initializer value (which would commit them
8178 to some anonymous symbol). */
8179 tok != TOK_LSTR && tok != TOK_STR &&
8180 !(flags & DIF_SIZE_ONLY)) {
8181 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
8182 flags |= DIF_HAVE_ELEM;
8185 if ((flags & DIF_HAVE_ELEM) &&
8186 !(type->t & VT_ARRAY) &&
8187 /* Use i_c_parameter_t, to strip toplevel qualifiers.
8188 The source type might have VT_CONSTANT set, which is
8189 of course assignable to non-const elements. */
8190 is_compatible_unqualified_types(type, &vtop->type)) {
8191 goto init_putv;
8193 } else if (type->t & VT_ARRAY) {
8194 no_oblock = 1;
8195 if (((flags & DIF_FIRST) && tok != TOK_LSTR && tok != TOK_STR) ||
8196 tok == '{') {
8197 skip('{');
8198 no_oblock = 0;
8201 s = type->ref;
8202 n = s->c;
8203 t1 = pointed_type(type);
8204 size1 = type_size(t1, &align1);
8206 /* only parse strings here if correct type (otherwise: handle
8207 them as ((w)char *) expressions */
8208 if ((tok == TOK_LSTR &&
8209 #ifdef TCC_TARGET_PE
8210 (t1->t & VT_BTYPE) == VT_SHORT && (t1->t & VT_UNSIGNED)
8211 #else
8212 (t1->t & VT_BTYPE) == VT_INT
8213 #endif
8214 ) || (tok == TOK_STR && (t1->t & VT_BTYPE) == VT_BYTE)) {
8215 len = 0;
8216 cstr_reset(&initstr);
8217 if (size1 != (tok == TOK_STR ? 1 : sizeof(nwchar_t)))
8218 tcc_error("unhandled string literal merging");
8219 while (tok == TOK_STR || tok == TOK_LSTR) {
8220 if (initstr.size)
8221 initstr.size -= size1;
8222 if (tok == TOK_STR)
8223 len += tokc.str.size;
8224 else
8225 len += tokc.str.size / sizeof(nwchar_t);
8226 len--;
8227 cstr_cat(&initstr, tokc.str.data, tokc.str.size);
8228 next();
8230 if (tok != ')' && tok != '}' && tok != ',' && tok != ';'
8231 && tok != TOK_EOF) {
8232 /* Not a lone literal but part of a bigger expression. */
8233 unget_tok(size1 == 1 ? TOK_STR : TOK_LSTR);
8234 tokc.str.size = initstr.size;
8235 tokc.str.data = initstr.data;
8236 goto do_init_array;
8239 if (!(flags & DIF_SIZE_ONLY)) {
8240 int nb = n;
8241 if (len < nb)
8242 nb = len;
8243 if (len > nb)
8244 tcc_warning("initializer-string for array is too long");
8245 /* in order to go faster for common case (char
8246 string in global variable, we handle it
8247 specifically */
8248 if (p->sec && size1 == 1) {
8249 init_assert(p, c + nb);
8250 if (!NODATA_WANTED)
8251 memcpy(p->sec->data + c, initstr.data, nb);
8252 } else {
8253 for(i=0;i<n;i++) {
8254 if (i >= nb) {
8255 /* only add trailing zero if enough storage (no
8256 warning in this case since it is standard) */
8257 if (flags & DIF_CLEAR)
8258 break;
8259 if (n - i >= 4) {
8260 init_putz(p, c + i * size1, (n - i) * size1);
8261 break;
8263 ch = 0;
8264 } else if (size1 == 1)
8265 ch = ((unsigned char *)initstr.data)[i];
8266 else
8267 ch = ((nwchar_t *)initstr.data)[i];
8268 vpushi(ch);
8269 init_putv(p, t1, c + i * size1);
8272 } else {
8273 decl_design_flex(p, s, len);
8275 } else {
8277 do_init_array:
8278 indexsym.c = 0;
8279 f = &indexsym;
8281 do_init_list:
8282 /* zero memory once in advance */
8283 if (!(flags & (DIF_CLEAR | DIF_SIZE_ONLY))) {
8284 init_putz(p, c, n*size1);
8285 flags |= DIF_CLEAR;
8288 len = 0;
8289 while (tok != '}' || (flags & DIF_HAVE_ELEM)) {
8290 len = decl_designator(p, type, c, &f, flags, len);
8291 flags &= ~DIF_HAVE_ELEM;
8292 if (type->t & VT_ARRAY) {
8293 ++indexsym.c;
8294 /* special test for multi dimensional arrays (may not
8295 be strictly correct if designators are used at the
8296 same time) */
8297 if (no_oblock && len >= n*size1)
8298 break;
8299 } else {
8300 if (s->type.t == VT_UNION)
8301 f = NULL;
8302 else
8303 f = f->next;
8304 if (no_oblock && f == NULL)
8305 break;
8308 if (tok == '}')
8309 break;
8310 skip(',');
8313 if (!no_oblock)
8314 skip('}');
8315 } else if ((type->t & VT_BTYPE) == VT_STRUCT) {
8316 no_oblock = 1;
8317 if ((flags & DIF_FIRST) || tok == '{') {
8318 skip('{');
8319 no_oblock = 0;
8321 s = type->ref;
8322 f = s->next;
8323 n = s->c;
8324 size1 = 1;
8325 goto do_init_list;
8326 } else if (tok == '{') {
8327 if (flags & DIF_HAVE_ELEM)
8328 skip(';');
8329 next();
8330 decl_initializer(p, type, c, flags & ~DIF_HAVE_ELEM);
8331 skip('}');
8332 } else if ((flags & DIF_SIZE_ONLY)) {
8333 /* If we supported only ISO C we wouldn't have to accept calling
8334 this on anything than an array if DIF_SIZE_ONLY (and even then
8335 only on the outermost level, so no recursion would be needed),
8336 because initializing a flex array member isn't supported.
8337 But GNU C supports it, so we need to recurse even into
8338 subfields of structs and arrays when DIF_SIZE_ONLY is set. */
8339 /* just skip expression */
8340 skip_or_save_block(NULL);
8341 } else {
8342 if (!(flags & DIF_HAVE_ELEM)) {
8343 /* This should happen only when we haven't parsed
8344 the init element above for fear of committing a
8345 string constant to memory too early. */
8346 if (tok != TOK_STR && tok != TOK_LSTR)
8347 expect("string constant");
8348 parse_init_elem(!p->sec ? EXPR_ANY : EXPR_CONST);
8350 init_putv:
8351 if (!p->sec && (flags & DIF_CLEAR) /* container was already zero'd */
8352 && (vtop->r & (VT_VALMASK | VT_LVAL | VT_SYM)) == VT_CONST
8353 && vtop->c.i == 0
8354 && btype_size(type->t & VT_BTYPE) /* not for fp constants */
8356 vpop();
8357 else
8358 init_putv(p, type, c);
8362 /* parse an initializer for type 't' if 'has_init' is non zero, and
8363 allocate space in local or global data space ('r' is either
8364 VT_LOCAL or VT_CONST). If 'v' is non zero, then an associated
8365 variable 'v' of scope 'scope' is declared before initializers
8366 are parsed. If 'v' is zero, then a reference to the new object
8367 is put in the value stack. If 'has_init' is 2, a special parsing
8368 is done to handle string constants. */
8369 static void decl_initializer_alloc(CType *type, AttributeDef *ad, int r,
8370 int has_init, int v, int scope)
8372 int size, align, addr;
8373 TokenString *init_str = NULL;
8375 Section *sec;
8376 Sym *flexible_array;
8377 Sym *sym = NULL;
8378 int saved_nocode_wanted = nocode_wanted;
8379 #ifdef CONFIG_TCC_BCHECK
8380 int bcheck = tcc_state->do_bounds_check && !NODATA_WANTED;
8381 #endif
8382 init_params p = {0};
8384 /* Always allocate static or global variables */
8385 if (v && (r & VT_VALMASK) == VT_CONST)
8386 nocode_wanted |= 0x80000000;
8388 flexible_array = NULL;
8389 size = type_size(type, &align);
8391 /* exactly one flexible array may be initialized, either the
8392 toplevel array or the last member of the toplevel struct */
8394 if (size < 0) {
8395 /* If the base type itself was an array type of unspecified size
8396 (like in 'typedef int arr[]; arr x = {1};') then we will
8397 overwrite the unknown size by the real one for this decl.
8398 We need to unshare the ref symbol holding that size. */
8399 type->ref = sym_push(SYM_FIELD, &type->ref->type, 0, type->ref->c);
8400 p.flex_array_ref = type->ref;
8402 } else if (has_init && (type->t & VT_BTYPE) == VT_STRUCT) {
8403 Sym *field = type->ref->next;
8404 if (field) {
8405 while (field->next)
8406 field = field->next;
8407 if (field->type.t & VT_ARRAY && field->type.ref->c < 0) {
8408 flexible_array = field;
8409 p.flex_array_ref = field->type.ref;
8410 size = -1;
8415 if (size < 0) {
8416 /* If unknown size, do a dry-run 1st pass */
8417 if (!has_init)
8418 tcc_error("unknown type size");
8419 if (has_init == 2) {
8420 /* only get strings */
8421 init_str = tok_str_alloc();
8422 while (tok == TOK_STR || tok == TOK_LSTR) {
8423 tok_str_add_tok(init_str);
8424 next();
8426 tok_str_add(init_str, -1);
8427 tok_str_add(init_str, 0);
8428 } else
8429 skip_or_save_block(&init_str);
8430 unget_tok(0);
8432 /* compute size */
8433 begin_macro(init_str, 1);
8434 next();
8435 decl_initializer(&p, type, 0, DIF_FIRST | DIF_SIZE_ONLY);
8436 /* prepare second initializer parsing */
8437 macro_ptr = init_str->str;
8438 next();
8440 /* if still unknown size, error */
8441 size = type_size(type, &align);
8442 if (size < 0)
8443 tcc_error("unknown type size");
8445 /* If there's a flex member and it was used in the initializer
8446 adjust size. */
8447 if (flexible_array && flexible_array->type.ref->c > 0)
8448 size += flexible_array->type.ref->c
8449 * pointed_size(&flexible_array->type);
8452 /* take into account specified alignment if bigger */
8453 if (ad->a.aligned) {
8454 int speca = 1 << (ad->a.aligned - 1);
8455 if (speca > align)
8456 align = speca;
8457 } else if (ad->a.packed) {
8458 align = 1;
8461 if (!v && NODATA_WANTED)
8462 size = 0, align = 1;
8464 if ((r & VT_VALMASK) == VT_LOCAL) {
8465 sec = NULL;
8466 #ifdef CONFIG_TCC_BCHECK
8467 if (bcheck && v) {
8468 /* add padding between stack variables for bound checking */
8469 loc -= align;
8471 #endif
8472 loc = (loc - size) & -align;
8473 addr = loc;
8474 p.local_offset = addr + size;
8475 #ifdef CONFIG_TCC_BCHECK
8476 if (bcheck && v) {
8477 /* add padding between stack variables for bound checking */
8478 loc -= align;
8480 #endif
8481 if (v) {
8482 /* local variable */
8483 #ifdef CONFIG_TCC_ASM
8484 if (ad->asm_label) {
8485 int reg = asm_parse_regvar(ad->asm_label);
8486 if (reg >= 0)
8487 r = (r & ~VT_VALMASK) | reg;
8489 #endif
8490 sym = sym_push(v, type, r, addr);
8491 if (ad->cleanup_func) {
8492 Sym *cls = sym_push2(&all_cleanups,
8493 SYM_FIELD | ++cur_scope->cl.n, 0, 0);
8494 cls->prev_tok = sym;
8495 cls->next = ad->cleanup_func;
8496 cls->ncl = cur_scope->cl.s;
8497 cur_scope->cl.s = cls;
8500 sym->a = ad->a;
8501 } else {
8502 /* push local reference */
8503 vset(type, r, addr);
8505 } else {
8506 if (v && scope == VT_CONST) {
8507 /* see if the symbol was already defined */
8508 sym = sym_find(v);
8509 if (sym) {
8510 patch_storage(sym, ad, type);
8511 /* we accept several definitions of the same global variable. */
8512 if (!has_init && sym->c && elfsym(sym)->st_shndx != SHN_UNDEF)
8513 goto no_alloc;
8517 /* allocate symbol in corresponding section */
8518 sec = ad->section;
8519 if (!sec) {
8520 if (type->t & VT_CONSTANT)
8521 sec = data_ro_section;
8522 else if (has_init)
8523 sec = data_section;
8524 else if (tcc_state->nocommon)
8525 sec = bss_section;
8528 if (sec) {
8529 addr = section_add(sec, size, align);
8530 #ifdef CONFIG_TCC_BCHECK
8531 /* add padding if bound check */
8532 if (bcheck)
8533 section_add(sec, 1, 1);
8534 #endif
8535 } else {
8536 addr = align; /* SHN_COMMON is special, symbol value is align */
8537 sec = common_section;
8540 if (v) {
8541 if (!sym) {
8542 sym = sym_push(v, type, r | VT_SYM, 0);
8543 patch_storage(sym, ad, NULL);
8545 /* update symbol definition */
8546 put_extern_sym(sym, sec, addr, size);
8547 } else {
8548 /* push global reference */
8549 vpush_ref(type, sec, addr, size);
8550 sym = vtop->sym;
8551 vtop->r |= r;
8554 #ifdef CONFIG_TCC_BCHECK
8555 /* handles bounds now because the symbol must be defined
8556 before for the relocation */
8557 if (bcheck) {
8558 addr_t *bounds_ptr;
8560 greloca(bounds_section, sym, bounds_section->data_offset, R_DATA_PTR, 0);
8561 /* then add global bound info */
8562 bounds_ptr = section_ptr_add(bounds_section, 2 * sizeof(addr_t));
8563 bounds_ptr[0] = 0; /* relocated */
8564 bounds_ptr[1] = size;
8566 #endif
8569 if (type->t & VT_VLA) {
8570 int a;
8572 if (NODATA_WANTED)
8573 goto no_alloc;
8575 /* save before-VLA stack pointer if needed */
8576 if (cur_scope->vla.num == 0) {
8577 if (cur_scope->prev && cur_scope->prev->vla.num) {
8578 cur_scope->vla.locorig = cur_scope->prev->vla.loc;
8579 } else {
8580 gen_vla_sp_save(loc -= PTR_SIZE);
8581 cur_scope->vla.locorig = loc;
8585 vla_runtime_type_size(type, &a);
8586 gen_vla_alloc(type, a);
8587 #if defined TCC_TARGET_PE && defined TCC_TARGET_X86_64
8588 /* on _WIN64, because of the function args scratch area, the
8589 result of alloca differs from RSP and is returned in RAX. */
8590 gen_vla_result(addr), addr = (loc -= PTR_SIZE);
8591 #endif
8592 gen_vla_sp_save(addr);
8593 cur_scope->vla.loc = addr;
8594 cur_scope->vla.num++;
8595 } else if (has_init) {
8596 p.sec = sec;
8597 decl_initializer(&p, type, addr, DIF_FIRST);
8598 /* patch flexible array member size back to -1, */
8599 /* for possible subsequent similar declarations */
8600 if (flexible_array)
8601 flexible_array->type.ref->c = -1;
8604 no_alloc:
8605 /* restore parse state if needed */
8606 if (init_str) {
8607 end_macro();
8608 next();
8611 nocode_wanted = saved_nocode_wanted;
8614 /* parse a function defined by symbol 'sym' and generate its code in
8615 'cur_text_section' */
8616 static void gen_function(Sym *sym)
8618 struct scope f = { 0 };
8619 cur_scope = root_scope = &f;
8620 nocode_wanted = 0;
8621 ind = cur_text_section->data_offset;
8622 if (sym->a.aligned) {
8623 size_t newoff = section_add(cur_text_section, 0,
8624 1 << (sym->a.aligned - 1));
8625 gen_fill_nops(newoff - ind);
8627 /* NOTE: we patch the symbol size later */
8628 put_extern_sym(sym, cur_text_section, ind, 0);
8629 if (sym->type.ref->f.func_ctor)
8630 add_array (tcc_state, ".init_array", sym->c);
8631 if (sym->type.ref->f.func_dtor)
8632 add_array (tcc_state, ".fini_array", sym->c);
8634 funcname = get_tok_str(sym->v, NULL);
8635 func_ind = ind;
8636 func_vt = sym->type.ref->type;
8637 func_var = sym->type.ref->f.func_type == FUNC_ELLIPSIS;
8639 /* put debug symbol */
8640 tcc_debug_funcstart(tcc_state, sym);
8641 /* push a dummy symbol to enable local sym storage */
8642 sym_push2(&local_stack, SYM_FIELD, 0, 0);
8643 local_scope = 1; /* for function parameters */
8644 gfunc_prolog(sym);
8645 local_scope = 0;
8646 rsym = 0;
8647 clear_temp_local_var_list();
8648 block(0);
8649 gsym(rsym);
8650 nocode_wanted = 0;
8651 /* reset local stack */
8652 pop_local_syms(NULL, 0);
8653 gfunc_epilog();
8654 cur_text_section->data_offset = ind;
8655 local_scope = 0;
8656 label_pop(&global_label_stack, NULL, 0);
8657 sym_pop(&all_cleanups, NULL, 0);
8658 /* patch symbol size */
8659 elfsym(sym)->st_size = ind - func_ind;
8660 /* end of function */
8661 tcc_debug_funcend(tcc_state, ind - func_ind);
8662 /* It's better to crash than to generate wrong code */
8663 cur_text_section = NULL;
8664 funcname = ""; /* for safety */
8665 func_vt.t = VT_VOID; /* for safety */
8666 func_var = 0; /* for safety */
8667 ind = 0; /* for safety */
8668 nocode_wanted = 0x80000000;
8669 check_vstack();
8670 /* do this after funcend debug info */
8671 next();
8674 static void gen_inline_functions(TCCState *s)
8676 Sym *sym;
8677 int inline_generated, i;
8678 struct InlineFunc *fn;
8680 tcc_open_bf(s, ":inline:", 0);
8681 /* iterate while inline function are referenced */
8682 do {
8683 inline_generated = 0;
8684 for (i = 0; i < s->nb_inline_fns; ++i) {
8685 fn = s->inline_fns[i];
8686 sym = fn->sym;
8687 if (sym && (sym->c || !(sym->type.t & VT_INLINE))) {
8688 /* the function was used or forced (and then not internal):
8689 generate its code and convert it to a normal function */
8690 fn->sym = NULL;
8691 tcc_debug_putfile(s, fn->filename);
8692 begin_macro(fn->func_str, 1);
8693 next();
8694 cur_text_section = text_section;
8695 gen_function(sym);
8696 end_macro();
8698 inline_generated = 1;
8701 } while (inline_generated);
8702 tcc_close();
8705 static void free_inline_functions(TCCState *s)
8707 int i;
8708 /* free tokens of unused inline functions */
8709 for (i = 0; i < s->nb_inline_fns; ++i) {
8710 struct InlineFunc *fn = s->inline_fns[i];
8711 if (fn->sym)
8712 tok_str_free(fn->func_str);
8714 dynarray_reset(&s->inline_fns, &s->nb_inline_fns);
8717 /* 'l' is VT_LOCAL or VT_CONST to define default storage type, or VT_CMP
8718 if parsing old style parameter decl list (and FUNC_SYM is set then) */
8719 static int decl0(int l, int is_for_loop_init, Sym *func_sym)
8721 int v, has_init, r, oldint;
8722 CType type, btype;
8723 Sym *sym;
8724 AttributeDef ad, adbase;
8726 while (1) {
8727 if (tok == TOK_STATIC_ASSERT) {
8728 CString error_str;
8729 int c;
8731 next();
8732 skip('(');
8733 c = expr_const();
8735 if (tok == ')') {
8736 if (!c)
8737 tcc_error("_Static_assert fail");
8738 next();
8739 goto static_assert_out;
8742 skip(',');
8743 parse_mult_str(&error_str, "string constant");
8744 if (c == 0)
8745 tcc_error("%s", (char *)error_str.data);
8746 cstr_free(&error_str);
8747 skip(')');
8748 static_assert_out:
8749 skip(';');
8750 continue;
8753 oldint = 0;
8754 if (!parse_btype(&btype, &adbase)) {
8755 if (is_for_loop_init)
8756 return 0;
8757 /* skip redundant ';' if not in old parameter decl scope */
8758 if (tok == ';' && l != VT_CMP) {
8759 next();
8760 continue;
8762 if (l != VT_CONST)
8763 break;
8764 if (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3) {
8765 /* global asm block */
8766 asm_global_instr();
8767 continue;
8769 if (tok >= TOK_UIDENT) {
8770 /* special test for old K&R protos without explicit int
8771 type. Only accepted when defining global data */
8772 btype.t = VT_INT;
8773 oldint = 1;
8774 } else {
8775 if (tok != TOK_EOF)
8776 expect("declaration");
8777 break;
8781 if (tok == ';') {
8782 if ((btype.t & VT_BTYPE) == VT_STRUCT) {
8783 v = btype.ref->v;
8784 if (!(v & SYM_FIELD) && (v & ~SYM_STRUCT) >= SYM_FIRST_ANOM)
8785 tcc_warning("unnamed struct/union that defines no instances");
8786 next();
8787 continue;
8789 if (IS_ENUM(btype.t)) {
8790 next();
8791 continue;
8795 while (1) { /* iterate thru each declaration */
8796 type = btype;
8797 ad = adbase;
8798 type_decl(&type, &ad, &v, TYPE_DIRECT);
8799 #if 0
8801 char buf[500];
8802 type_to_str(buf, sizeof(buf), &type, get_tok_str(v, NULL));
8803 printf("type = '%s'\n", buf);
8805 #endif
8806 if ((type.t & VT_BTYPE) == VT_FUNC) {
8807 if ((type.t & VT_STATIC) && (l == VT_LOCAL))
8808 tcc_error("function without file scope cannot be static");
8809 /* if old style function prototype, we accept a
8810 declaration list */
8811 sym = type.ref;
8812 if (sym->f.func_type == FUNC_OLD && l == VT_CONST)
8813 decl0(VT_CMP, 0, sym);
8814 #ifdef TCC_TARGET_MACHO
8815 if (sym->f.func_alwinl
8816 && ((type.t & (VT_EXTERN | VT_INLINE))
8817 == (VT_EXTERN | VT_INLINE))) {
8818 /* always_inline functions must be handled as if they
8819 don't generate multiple global defs, even if extern
8820 inline, i.e. GNU inline semantics for those. Rewrite
8821 them into static inline. */
8822 type.t &= ~VT_EXTERN;
8823 type.t |= VT_STATIC;
8825 #endif
8826 /* always compile 'extern inline' */
8827 if (type.t & VT_EXTERN)
8828 type.t &= ~VT_INLINE;
8830 } else if (oldint) {
8831 tcc_warning("type defaults to int");
8834 if (gnu_ext && (tok == TOK_ASM1 || tok == TOK_ASM2 || tok == TOK_ASM3)) {
8835 ad.asm_label = asm_label_instr();
8836 /* parse one last attribute list, after asm label */
8837 parse_attribute(&ad);
8838 #if 0
8839 /* gcc does not allow __asm__("label") with function definition,
8840 but why not ... */
8841 if (tok == '{')
8842 expect(";");
8843 #endif
8846 #ifdef TCC_TARGET_PE
8847 if (ad.a.dllimport || ad.a.dllexport) {
8848 if (type.t & VT_STATIC)
8849 tcc_error("cannot have dll linkage with static");
8850 if (type.t & VT_TYPEDEF) {
8851 tcc_warning("'%s' attribute ignored for typedef",
8852 ad.a.dllimport ? (ad.a.dllimport = 0, "dllimport") :
8853 (ad.a.dllexport = 0, "dllexport"));
8854 } else if (ad.a.dllimport) {
8855 if ((type.t & VT_BTYPE) == VT_FUNC)
8856 ad.a.dllimport = 0;
8857 else
8858 type.t |= VT_EXTERN;
8861 #endif
8862 if (tok == '{') {
8863 if (l != VT_CONST)
8864 tcc_error("cannot use local functions");
8865 if ((type.t & VT_BTYPE) != VT_FUNC)
8866 expect("function definition");
8868 /* reject abstract declarators in function definition
8869 make old style params without decl have int type */
8870 sym = type.ref;
8871 while ((sym = sym->next) != NULL) {
8872 if (!(sym->v & ~SYM_FIELD))
8873 expect("identifier");
8874 if (sym->type.t == VT_VOID)
8875 sym->type = int_type;
8878 /* apply post-declaraton attributes */
8879 merge_funcattr(&type.ref->f, &ad.f);
8881 /* put function symbol */
8882 type.t &= ~VT_EXTERN;
8883 sym = external_sym(v, &type, 0, &ad);
8885 /* static inline functions are just recorded as a kind
8886 of macro. Their code will be emitted at the end of
8887 the compilation unit only if they are used */
8888 if (sym->type.t & VT_INLINE) {
8889 struct InlineFunc *fn;
8890 fn = tcc_malloc(sizeof *fn + strlen(file->filename));
8891 strcpy(fn->filename, file->filename);
8892 fn->sym = sym;
8893 skip_or_save_block(&fn->func_str);
8894 dynarray_add(&tcc_state->inline_fns,
8895 &tcc_state->nb_inline_fns, fn);
8896 } else {
8897 /* compute text section */
8898 cur_text_section = ad.section;
8899 if (!cur_text_section)
8900 cur_text_section = text_section;
8901 gen_function(sym);
8903 break;
8904 } else {
8905 if (l == VT_CMP) {
8906 /* find parameter in function parameter list */
8907 for (sym = func_sym->next; sym; sym = sym->next)
8908 if ((sym->v & ~SYM_FIELD) == v)
8909 goto found;
8910 tcc_error("declaration for parameter '%s' but no such parameter",
8911 get_tok_str(v, NULL));
8912 found:
8913 if (type.t & VT_STORAGE) /* 'register' is okay */
8914 tcc_error("storage class specified for '%s'",
8915 get_tok_str(v, NULL));
8916 if (sym->type.t != VT_VOID)
8917 tcc_error("redefinition of parameter '%s'",
8918 get_tok_str(v, NULL));
8919 convert_parameter_type(&type);
8920 sym->type = type;
8921 } else if (type.t & VT_TYPEDEF) {
8922 /* save typedefed type */
8923 /* XXX: test storage specifiers ? */
8924 sym = sym_find(v);
8925 if (sym && sym->sym_scope == local_scope) {
8926 if (!is_compatible_types(&sym->type, &type)
8927 || !(sym->type.t & VT_TYPEDEF))
8928 tcc_error("incompatible redefinition of '%s'",
8929 get_tok_str(v, NULL));
8930 sym->type = type;
8931 } else {
8932 sym = sym_push(v, &type, 0, 0);
8934 sym->a = ad.a;
8935 sym->f = ad.f;
8936 if (debug_modes)
8937 tcc_debug_typedef (tcc_state, sym);
8938 } else if ((type.t & VT_BTYPE) == VT_VOID
8939 && !(type.t & VT_EXTERN)) {
8940 tcc_error("declaration of void object");
8941 } else {
8942 r = 0;
8943 if ((type.t & VT_BTYPE) == VT_FUNC) {
8944 /* external function definition */
8945 /* specific case for func_call attribute */
8946 type.ref->f = ad.f;
8947 } else if (!(type.t & VT_ARRAY)) {
8948 /* not lvalue if array */
8949 r |= VT_LVAL;
8951 has_init = (tok == '=');
8952 if (has_init && (type.t & VT_VLA))
8953 tcc_error("variable length array cannot be initialized");
8954 if (((type.t & VT_EXTERN) && (!has_init || l != VT_CONST))
8955 || (type.t & VT_BTYPE) == VT_FUNC
8956 /* as with GCC, uninitialized global arrays with no size
8957 are considered extern: */
8958 || ((type.t & VT_ARRAY) && !has_init
8959 && l == VT_CONST && type.ref->c < 0)
8961 /* external variable or function */
8962 type.t |= VT_EXTERN;
8963 sym = external_sym(v, &type, r, &ad);
8964 if (ad.alias_target) {
8965 /* Aliases need to be emitted when their target
8966 symbol is emitted, even if perhaps unreferenced.
8967 We only support the case where the base is
8968 already defined, otherwise we would need
8969 deferring to emit the aliases until the end of
8970 the compile unit. */
8971 Sym *alias_target = sym_find(ad.alias_target);
8972 ElfSym *esym = elfsym(alias_target);
8973 if (!esym)
8974 tcc_error("unsupported forward __alias__ attribute");
8975 put_extern_sym2(sym, esym->st_shndx,
8976 esym->st_value, esym->st_size, 1);
8978 } else {
8979 if (type.t & VT_STATIC)
8980 r |= VT_CONST;
8981 else
8982 r |= l;
8983 if (has_init)
8984 next();
8985 else if (l == VT_CONST)
8986 /* uninitialized global variables may be overridden */
8987 type.t |= VT_EXTERN;
8988 decl_initializer_alloc(&type, &ad, r, has_init, v, l);
8991 if (tok != ',') {
8992 if (is_for_loop_init)
8993 return 1;
8994 skip(';');
8995 break;
8997 next();
9001 return 0;
9004 static void decl(int l)
9006 decl0(l, 0, NULL);
9009 /* ------------------------------------------------------------------------- */
9010 #undef gjmp_addr
9011 #undef gjmp
9012 /* ------------------------------------------------------------------------- */